author     dave <dave@b2dd03c8-39d4-4d8f-98ff-823fe69b080e>  2003-12-01 07:12:49 +0000
committer  dave <dave@b2dd03c8-39d4-4d8f-98ff-823fe69b080e>  2003-12-01 07:12:49 +0000
commit     87762adcb0d38d6c575448f67c2906964215f3a1 (patch)
tree       c74f6e5b2b51f641cdc7895a37eb4c161b2c6d72 /lib/rdoc/tokenstream.rb
parent     c1c55573bdaecbd972f57b46dd22dfbd0e1a02dd (diff)

Add RDoc

git-svn-id: svn+ssh://ci.ruby-lang.org/ruby/trunk@5073 b2dd03c8-39d4-4d8f-98ff-823fe69b080e

Diffstat (limited to 'lib/rdoc/tokenstream.rb')
 lib/rdoc/tokenstream.rb | 25 +++++++++++++++++++++++++
 1 file changed, 25 insertions(+), 0 deletions(-)
diff --git a/lib/rdoc/tokenstream.rb b/lib/rdoc/tokenstream.rb
new file mode 100644
index 0000000000..0a0720d8a9
--- /dev/null
+++ b/lib/rdoc/tokenstream.rb
@@ -0,0 +1,25 @@
+# A TokenStream is a list of tokens, gathered during the parse
+# of some entity (say a method). Entities populate these streams
+# by being registered with the lexer. Any class can collect tokens
+# by including TokenStream. From the outside, you use such an object
+# by calling the start_collecting_tokens method, followed by calls
+# to add_token, add_tokens, and pop_token.
+
+module TokenStream
+ def token_stream
+ @token_stream
+ end
+
+ def start_collecting_tokens
+ @token_stream = []
+ end
+ def add_token(tk)
+ @token_stream << tk
+ end
+ def add_tokens(tks)
+ tks.each {|tk| add_token(tk)}
+ end
+ def pop_token
+ @token_stream.pop
+ end
+end
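
For context, a minimal usage sketch of the module introduced above. The CollectingEntity class and the string tokens are hypothetical, invented for illustration; only the TokenStream module and its methods (start_collecting_tokens, add_token, add_tokens, pop_token, token_stream) come from the committed code, and the require path assumes lib/ from this tree is on the load path.

  require 'rdoc/tokenstream'

  # Hypothetical example class: any class can mix in TokenStream
  # to accumulate tokens handed to it during parsing.
  class CollectingEntity
    include TokenStream
  end

  entity = CollectingEntity.new
  entity.start_collecting_tokens         # initializes the internal token array
  entity.add_token("def")                # tokens are plain strings here, purely for illustration
  entity.add_tokens(["my_method", "("])  # appends each element in turn
  entity.pop_token                       # removes and returns the last token ("(")
  entity.token_stream                    # => ["def", "my_method"]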