Diffstat (limited to 'lib/rdoc/tokenstream.rb')
-rw-r--r--  lib/rdoc/tokenstream.rb | 52
1 file changed, 0 insertions(+), 52 deletions(-)
diff --git a/lib/rdoc/tokenstream.rb b/lib/rdoc/tokenstream.rb
deleted file mode 100644
index b1e86543f7..0000000000
--- a/lib/rdoc/tokenstream.rb
+++ /dev/null
@@ -1,52 +0,0 @@
-module RDoc; end
-
-##
-# A TokenStream is a list of tokens, gathered during the parse of some entity
-# (say a method). Entities populate these streams by being registered with the
-# lexer. Any class can collect tokens by including TokenStream. From the
-# outside, you use such an object by calling the start_collecting_tokens
-# method, followed by calls to add_token and pop_token.
-
-module RDoc::TokenStream
-
- ##
- # Adds +tokens+ to the collected tokens
-
- def add_tokens(*tokens)
- tokens.flatten.each { |token| @token_stream << token }
- end
-
- alias add_token add_tokens
-
- ##
- # Starts collecting tokens
-
- def collect_tokens
- @token_stream = []
- end
-
- alias start_collecting_tokens collect_tokens
-
- ##
- # Remove the last token from the collected tokens
-
- def pop_token
- @token_stream.pop
- end
-
- ##
- # Current token stream
-
- def token_stream
- @token_stream
- end
-
- ##
- # Returns a string representation of the token stream
-
- def tokens_to_s
- token_stream.map { |token| token.text }.join ''
- end
-
-end
-
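
For reference, a minimal usage sketch of the module removed by this commit (not part of the diff itself): a class mixes in RDoc::TokenStream and collects tokens as the lexer produces them. FakeToken below is an illustrative stand-in, not RDoc's real lexer token class; anything responding to #text works with tokens_to_s.

    # Hypothetical example against the pre-removal lib/rdoc/tokenstream.rb
    require 'rdoc/tokenstream'

    # Illustrative token type; RDoc's real lexer tokens also respond to #text.
    FakeToken = Struct.new(:text)

    class MethodEntity
      include RDoc::TokenStream  # gains start_collecting_tokens, add_token, pop_token
    end

    m = MethodEntity.new
    m.start_collecting_tokens                 # initializes the internal @token_stream array
    m.add_token FakeToken.new('def '), FakeToken.new('example')
    m.pop_token                               # drops the last collected token ('example')
    puts m.tokens_to_s                        # => "def "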