Diffstat (limited to 'lib/coderay/tokens.rb')
-rw-r--r--  lib/coderay/tokens.rb | 91
1 file changed, 0 insertions(+), 91 deletions(-)
diff --git a/lib/coderay/tokens.rb b/lib/coderay/tokens.rb
index ee28a4e..045cf4a 100644
--- a/lib/coderay/tokens.rb
+++ b/lib/coderay/tokens.rb
@@ -83,97 +83,6 @@ module CodeRay
super
end
- # Returns the tokens compressed by joining consecutive
- # tokens of the same kind.
- #
- # This cannot be undone, but should yield the same output
- # in most Encoders. It basically makes the output smaller.
- #
- # Combined with dump, it saves space at the cost of time.
- #
- # If the scanner is written carefully, this is not required -
- # for example, consecutive //-comment lines could already be
- # joined into one comment token by the Scanner.
- def optimize
- raise NotImplementedError, 'Tokens#optimize needs to be rewritten.'
- # last_kind = last_text = nil
- # new = self.class.new
- # for text, kind in self
- # if text.is_a? String
- # if kind == last_kind
- # last_text << text
- # else
- # new << [last_text, last_kind] if last_kind
- # last_text = text
- # last_kind = kind
- # end
- # else
- # new << [last_text, last_kind] if last_kind
- # last_kind = last_text = nil
- # new << [text, kind]
- # end
- # end
- # new << [last_text, last_kind] if last_kind
- # new
- end
-
- # Compact the object itself; see optimize.
- def optimize!
- replace optimize
- end
-
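For reference, a minimal runnable sketch of what the commented-out optimize was meant to do, assuming tokens are stored as [text, kind] pairs as in this version of CodeRay. Unlike the draft, it dups the first string of each run so the original tokens are not mutated:

    def optimize
      optimized = self.class.new
      last_text = last_kind = nil
      each do |text, kind|
        if text.is_a?(String) && kind == last_kind
          last_text << text                    # extend the current run
        else
          optimized << [last_text, last_kind] if last_kind
          if text.is_a? String
            last_text = text.dup               # dup, so the original token stays untouched
            last_kind = kind
          else
            optimized << [text, kind]          # control token, e.g. :begin_group
            last_text = last_kind = nil
          end
        end
      end
      optimized << [last_text, last_kind] if last_kind
      optimized
    end

A scanner that already merges consecutive same-kind tokens makes this pass a no-op, which is why the comment calls it optional.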
- # Ensure that all begin_group tokens have a corresponding end_group.
- #
- # TODO: Test this!
- def fix
- raise NotImplementedError, 'Tokens#fix needs to be rewritten.'
- # tokens = self.class.new
- # # Check token nesting using a stack of kinds.
- # opened = []
- # for type, kind in self
- # case type
- # when :begin_group
- # opened.push [:begin_group, kind]
- # when :begin_line
- # opened.push [:end_line, kind]
- # when :end_group, :end_line
- # expected = opened.pop
- # if [type, kind] != expected
- # # Unexpected end; decide what to do based on the kind:
- # # - token was never opened: delete the end (just skip it)
- # next unless opened.rindex expected
- # # - token was opened earlier: also close tokens in between
- # tokens << token until (token = opened.pop) == expected
- # end
- # end
- # tokens << [type, kind]
- # end
- # # Close remaining opened tokens
- # tokens << token while token = opened.pop
- # tokens
- end
-
- def fix!
- replace fix
- end
-
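The commented-out draft of fix pushes [:begin_group, kind] onto the stack but later compares it against [:end_group, kind], so a correctly matching end_group is never recognized; it also discards the popped entry when skipping a stray end token. A hedged sketch of a corrected version, under the same [type, kind] pair assumption:

    def fix
      fixed = self.class.new
      opened = []  # stack of the end tokens we still expect
      each do |type, kind|
        case type
        when :begin_group
          opened.push [:end_group, kind]
        when :begin_line
          opened.push [:end_line, kind]
        when :end_group, :end_line
          closer = [type, kind]
          unless opened.last == closer
            next unless opened.rindex closer                 # never opened: drop the stray end
            fixed << opened.pop until opened.last == closer  # close the tokens opened in between
          end
          opened.pop
        end
        fixed << [type, kind]
      end
      fixed << opened.pop until opened.empty?  # close whatever is still open
      fixed
    end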
- # TODO: Scanner#split_into_lines
- #
- # Makes sure that:
- # - newlines are single tokens
- # (which means all other tokens are single-line)
- # - there are no open tokens at the end of the line
- #
- # This makes it simple for line-oriented encoders,
- # like HTML with list-style line numbering.
- def split_into_lines
- raise NotImplementedError
- end
-
- def split_into_lines!
- replace split_into_lines
- end
-
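split_into_lines never had even a draft body. A hedged sketch of the behavior described above, assuming [text, kind] pairs and that only begin_group/end_group tokens need re-balancing: every newline becomes its own token, with open groups closed before and reopened after each line break:

    def split_into_lines
      split = self.class.new
      open = []  # kinds of the currently open groups
      each do |text, kind|
        open.push kind if text == :begin_group
        open.pop       if text == :end_group
        unless text.is_a?(String) && text.include?("\n")
          split << [text, kind]
          next
        end
        text.split(/(\n)/).each do |part|  # keep each newline as its own part
          if part == "\n"
            open.reverse_each { |k| split << [:end_group, k] }
            split << [part, kind]
            open.each { |k| split << [:begin_group, k] }
          elsif !part.empty?
            split << [part, kind]
          end
        end
      end
      split
    end

With this invariant, a line-oriented encoder can treat every "\n" token as a line boundary without tracking group state across lines.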
# Split the tokens into parts of the given +sizes+.
#
# The result will be an Array of Tokens objects. The parts have