PATH:
opt
/
alt
/
ruby19
/
lib64
/
ruby
/
1.9.1
/
rdoc
##
# A TokenStream is a list of tokens, gathered during the parse of some entity
# (say a method). Entities populate these streams by being registered with the
# lexer. Any class can collect tokens by including TokenStream. From the
# outside, you use such an object by calling the start_collecting_tokens
# method, followed by calls to add_token and pop_token.

module RDoc::TokenStream

  ##
  # Appends every token in +tokens+ (flattened) to the collected stream

  def add_tokens(*tokens)
    tokens.flatten.each do |tok|
      @token_stream << tok
    end
  end

  alias add_token add_tokens

  ##
  # Resets the stream and begins collecting tokens

  def collect_tokens
    @token_stream = []
  end

  alias start_collecting_tokens collect_tokens

  ##
  # Removes and returns the most recently collected token

  def pop_token
    @token_stream.pop
  end

  ##
  # The tokens collected so far

  attr_reader :token_stream

  ##
  # Concatenates the text of every collected token into one string

  def tokens_to_s
    token_stream.map(&:text).join ''
  end

end
[-] ghost_method.rb
[edit]
[-] cross_reference.rb
[edit]
[-] ruby_lex.rb
[edit]
[+]
parser
[-] erbio.rb
[edit]
[-] rdoc.rb
[edit]
[-] parser.rb
[edit]
[-] meta_method.rb
[edit]
[-] method_attr.rb
[edit]
[-] markup.rb
[edit]
[-] rubygems_hook.rb
[edit]
[-] anon_class.rb
[edit]
[-] code_object.rb
[edit]
[+]
ri
[+]
stats
[-] any_method.rb
[edit]
[-] single_class.rb
[edit]
[-] attr.rb
[edit]
[-] context.rb
[edit]
[-] normal_class.rb
[edit]
[-] require.rb
[edit]
[-] alias.rb
[edit]
[-] ruby_token.rb
[edit]
[-] class_module.rb
[edit]
[+]
generator
[-] top_level.rb
[edit]
[+]
..
[-] ri.rb
[edit]
[-] include.rb
[edit]
[-] encoding.rb
[edit]
[+]
markup
[-] token_stream.rb
[edit]
[-] code_objects.rb
[edit]
[-] task.rb
[edit]
[-] generator.rb
[edit]
[-] constant.rb
[edit]
[-] stats.rb
[edit]
[-] normal_module.rb
[edit]
[-] text.rb
[edit]
[-] known_classes.rb
[edit]
[-] options.rb
[edit]