##
# A TokenStream is a list of tokens gathered while parsing some entity (a
# method, for example).  Entities populate these streams by being registered
# with the lexer.  Any class gains token collection by including TokenStream:
# from the outside, call start_collecting_tokens first, then add_token /
# add_tokens to append, and pop_token to discard the most recent entry.

module RDoc

  module TokenStream

    ##
    # Appends +tokens+ (individual tokens or nested arrays of tokens, which
    # are flattened) to the collected tokens

    def add_tokens(*tokens)
      tokens.flatten.each do |token|
        @token_stream << token
      end
    end

    alias add_token add_tokens

    ##
    # Resets the collected tokens to an empty list

    def collect_tokens
      @token_stream = []
    end

    alias start_collecting_tokens collect_tokens

    ##
    # Removes and returns the most recently collected token

    def pop_token
      @token_stream.pop
    end

    ##
    # The tokens collected so far

    def token_stream
      @token_stream
    end

    ##
    # Concatenates the text of every collected token into a single String

    def tokens_to_s
      token_stream.map(&:text).join('')
    end

  end

end