summaryrefslogtreecommitdiff
diff options
context:
space:
mode:
authorMatthäus G. Chajdas <dev@anteru.net>2020-09-08 19:45:20 +0200
committerMatthäus G. Chajdas <dev@anteru.net>2020-09-08 19:45:20 +0200
commit98f816ae5ca7d98f388ace349a29b154fa9dc9e1 (patch)
treed0494ebb40613432ef5c157c69c722393164710e
parentb6d1d68de705e1cfc28a4188f792b29c545bf7ed (diff)
parent945ed5ef268e2f3c7bbea42dfae1f8f844096f61 (diff)
downloadpygments-git-98f816ae5ca7d98f388ace349a29b154fa9dc9e1.tar.gz
Merge branch 'master' into bug/angular-html
-rw-r--r--.github/actions/pyodide-package/action.yml5
-rw-r--r--.github/workflows/build.yaml13
-rw-r--r--.github/workflows/docs.yaml8
-rw-r--r--AUTHORS6
-rw-r--r--CHANGES29
-rw-r--r--LICENSE2
-rw-r--r--Makefile5
-rw-r--r--doc/_templates/demo.html2
-rw-r--r--doc/_templates/indexsidebar.html2
-rw-r--r--doc/_themes/pygments14/layout.html2
-rw-r--r--doc/_themes/pygments14/static/pygments14.css_t2
-rw-r--r--doc/conf.py2
-rw-r--r--doc/docs/lexerdevelopment.rst17
-rw-r--r--doc/languages.rst51
-rw-r--r--external/markdown-processor.py2
-rw-r--r--external/moin-parser.py2
-rw-r--r--external/rst-directive.py2
-rw-r--r--pygments/__init__.py2
-rw-r--r--pygments/__main__.py2
-rw-r--r--pygments/cmdline.py13
-rw-r--r--pygments/console.py2
-rw-r--r--pygments/filter.py8
-rw-r--r--pygments/filters/__init__.py14
-rw-r--r--pygments/formatter.py2
-rw-r--r--pygments/formatters/__init__.py2
-rwxr-xr-xpygments/formatters/_mapping.py2
-rw-r--r--pygments/formatters/bbcode.py2
-rw-r--r--pygments/formatters/html.py30
-rw-r--r--pygments/formatters/img.py2
-rw-r--r--pygments/formatters/irc.py2
-rw-r--r--pygments/formatters/latex.py8
-rw-r--r--pygments/formatters/other.py2
-rw-r--r--pygments/formatters/rtf.py2
-rw-r--r--pygments/formatters/svg.py2
-rw-r--r--pygments/formatters/terminal.py2
-rw-r--r--pygments/formatters/terminal256.py2
-rw-r--r--pygments/lexer.py14
-rw-r--r--pygments/lexers/__init__.py5
-rw-r--r--pygments/lexers/_asy_builtins.py2
-rw-r--r--pygments/lexers/_cl_builtins.py2
-rw-r--r--pygments/lexers/_cocoa_builtins.py2
-rw-r--r--pygments/lexers/_csound_builtins.py39
-rw-r--r--pygments/lexers/_lasso_builtins.py2
-rw-r--r--pygments/lexers/_lua_builtins.py2
-rw-r--r--pygments/lexers/_mapping.py6
-rw-r--r--pygments/lexers/_mql_builtins.py2
-rw-r--r--pygments/lexers/_mysql_builtins.py1282
-rw-r--r--pygments/lexers/_openedge_builtins.py2
-rw-r--r--pygments/lexers/_php_builtins.py5
-rw-r--r--pygments/lexers/_postgres_builtins.py89
-rw-r--r--pygments/lexers/_scilab_builtins.py2
-rw-r--r--pygments/lexers/_sourcemod_builtins.py2
-rw-r--r--pygments/lexers/_stan_builtins.py2
-rw-r--r--pygments/lexers/_stata_builtins.py2
-rw-r--r--pygments/lexers/_tsql_builtins.py2
-rw-r--r--pygments/lexers/_usd_builtins.py10
-rw-r--r--pygments/lexers/_vbscript_builtins.py2
-rw-r--r--pygments/lexers/_vim_builtins.py2
-rw-r--r--pygments/lexers/actionscript.py2
-rw-r--r--pygments/lexers/agile.py2
-rw-r--r--pygments/lexers/algebra.py2
-rw-r--r--pygments/lexers/ambient.py2
-rw-r--r--pygments/lexers/ampl.py2
-rw-r--r--pygments/lexers/apl.py6
-rw-r--r--pygments/lexers/archetype.py2
-rw-r--r--pygments/lexers/arrow.py20
-rw-r--r--pygments/lexers/asm.py53
-rw-r--r--pygments/lexers/automation.py2
-rw-r--r--pygments/lexers/bare.py104
-rw-r--r--pygments/lexers/basic.py19
-rw-r--r--pygments/lexers/bibtex.py2
-rw-r--r--pygments/lexers/boa.py2
-rw-r--r--pygments/lexers/business.py2
-rw-r--r--pygments/lexers/c_cpp.py2
-rw-r--r--pygments/lexers/c_like.py2
-rw-r--r--pygments/lexers/capnproto.py2
-rw-r--r--pygments/lexers/chapel.py2
-rw-r--r--pygments/lexers/clean.py15
-rw-r--r--pygments/lexers/compiled.py2
-rw-r--r--pygments/lexers/configs.py6
-rw-r--r--pygments/lexers/console.py2
-rw-r--r--pygments/lexers/crystal.py7
-rw-r--r--pygments/lexers/csound.py18
-rw-r--r--pygments/lexers/css.py6
-rw-r--r--pygments/lexers/d.py2
-rw-r--r--pygments/lexers/dalvik.py2
-rw-r--r--pygments/lexers/data.py10
-rw-r--r--pygments/lexers/devicetree.py83
-rw-r--r--pygments/lexers/diff.py2
-rw-r--r--pygments/lexers/dotnet.py8
-rw-r--r--pygments/lexers/dsls.py8
-rw-r--r--pygments/lexers/dylan.py12
-rw-r--r--pygments/lexers/ecl.py2
-rw-r--r--pygments/lexers/eiffel.py2
-rw-r--r--pygments/lexers/elm.py6
-rw-r--r--pygments/lexers/email.py11
-rw-r--r--pygments/lexers/erlang.py51
-rw-r--r--pygments/lexers/esoteric.py2
-rw-r--r--pygments/lexers/ezhil.py4
-rw-r--r--pygments/lexers/factor.py2
-rw-r--r--pygments/lexers/fantom.py2
-rw-r--r--pygments/lexers/felix.py2
-rw-r--r--pygments/lexers/floscript.py2
-rw-r--r--pygments/lexers/forth.py73
-rw-r--r--pygments/lexers/fortran.py2
-rw-r--r--pygments/lexers/foxpro.py4
-rw-r--r--pygments/lexers/freefem.py2
-rw-r--r--pygments/lexers/functional.py2
-rw-r--r--pygments/lexers/gdscript.py31
-rw-r--r--pygments/lexers/go.py2
-rw-r--r--pygments/lexers/grammar_notation.py2
-rw-r--r--pygments/lexers/graph.py2
-rw-r--r--pygments/lexers/graphics.py2
-rw-r--r--pygments/lexers/haskell.py5
-rw-r--r--pygments/lexers/haxe.py2
-rw-r--r--pygments/lexers/hdl.py8
-rw-r--r--pygments/lexers/hexdump.py2
-rw-r--r--pygments/lexers/html.py2
-rw-r--r--pygments/lexers/idl.py2
-rw-r--r--pygments/lexers/igor.py15
-rw-r--r--pygments/lexers/inferno.py2
-rw-r--r--pygments/lexers/installers.py2
-rw-r--r--pygments/lexers/int_fiction.py2
-rw-r--r--pygments/lexers/iolang.py2
-rw-r--r--pygments/lexers/j.py2
-rw-r--r--pygments/lexers/javascript.py24
-rw-r--r--pygments/lexers/julia.py18
-rw-r--r--pygments/lexers/jvm.py9
-rw-r--r--pygments/lexers/lisp.py51
-rw-r--r--pygments/lexers/make.py12
-rw-r--r--pygments/lexers/markup.py75
-rw-r--r--pygments/lexers/math.py2
-rw-r--r--pygments/lexers/matlab.py22
-rw-r--r--pygments/lexers/mime.py4
-rw-r--r--pygments/lexers/ml.py33
-rw-r--r--pygments/lexers/modeling.py2
-rw-r--r--pygments/lexers/modula2.py2
-rw-r--r--pygments/lexers/monte.py2
-rw-r--r--pygments/lexers/mosel.py2
-rw-r--r--pygments/lexers/ncl.py2
-rw-r--r--pygments/lexers/nimrod.py2
-rw-r--r--pygments/lexers/nit.py2
-rw-r--r--pygments/lexers/nix.py2
-rw-r--r--pygments/lexers/oberon.py2
-rw-r--r--pygments/lexers/objective.py2
-rw-r--r--pygments/lexers/ooc.py2
-rw-r--r--pygments/lexers/other.py2
-rw-r--r--pygments/lexers/parasail.py2
-rw-r--r--pygments/lexers/parsers.py69
-rw-r--r--pygments/lexers/pascal.py2
-rw-r--r--pygments/lexers/pawn.py2
-rw-r--r--pygments/lexers/perl.py4
-rw-r--r--pygments/lexers/php.py61
-rw-r--r--pygments/lexers/pointless.py71
-rw-r--r--pygments/lexers/pony.py4
-rw-r--r--pygments/lexers/praat.py16
-rw-r--r--pygments/lexers/prolog.py2
-rw-r--r--pygments/lexers/promql.py183
-rw-r--r--pygments/lexers/python.py24
-rw-r--r--pygments/lexers/qvt.py2
-rw-r--r--pygments/lexers/r.py14
-rw-r--r--pygments/lexers/rdf.py2
-rw-r--r--pygments/lexers/rebol.py2
-rw-r--r--pygments/lexers/resource.py2
-rw-r--r--pygments/lexers/ride.py2
-rw-r--r--pygments/lexers/rnc.py2
-rw-r--r--pygments/lexers/roboconf.py2
-rw-r--r--pygments/lexers/robotframework.py35
-rw-r--r--pygments/lexers/ruby.py24
-rw-r--r--pygments/lexers/rust.py4
-rw-r--r--pygments/lexers/sas.py2
-rw-r--r--pygments/lexers/scdoc.py4
-rw-r--r--pygments/lexers/scripting.py16
-rw-r--r--pygments/lexers/sgf.py2
-rw-r--r--pygments/lexers/shell.py43
-rw-r--r--pygments/lexers/sieve.py2
-rw-r--r--pygments/lexers/slash.py4
-rw-r--r--pygments/lexers/smalltalk.py2
-rw-r--r--pygments/lexers/smv.py2
-rw-r--r--pygments/lexers/snobol.py2
-rw-r--r--pygments/lexers/solidity.py27
-rw-r--r--pygments/lexers/special.py2
-rw-r--r--pygments/lexers/sql.py237
-rw-r--r--pygments/lexers/stata.py14
-rw-r--r--pygments/lexers/supercollider.py2
-rw-r--r--pygments/lexers/tcl.py2
-rw-r--r--pygments/lexers/templates.py130
-rw-r--r--pygments/lexers/teraterm.py18
-rw-r--r--pygments/lexers/testing.py2
-rw-r--r--pygments/lexers/text.py2
-rw-r--r--pygments/lexers/textedit.py2
-rw-r--r--pygments/lexers/textfmts.py35
-rw-r--r--pygments/lexers/theorem.py2
-rw-r--r--pygments/lexers/tnt.py35
-rw-r--r--pygments/lexers/trafficscript.py2
-rw-r--r--pygments/lexers/typoscript.py2
-rw-r--r--pygments/lexers/unicon.py21
-rw-r--r--pygments/lexers/urbi.py2
-rw-r--r--pygments/lexers/usd.py10
-rw-r--r--pygments/lexers/varnish.py2
-rw-r--r--pygments/lexers/verification.py2
-rw-r--r--pygments/lexers/web.py2
-rw-r--r--pygments/lexers/webidl.py10
-rw-r--r--pygments/lexers/webmisc.py2
-rw-r--r--pygments/lexers/whiley.py2
-rw-r--r--pygments/lexers/x10.py2
-rw-r--r--pygments/lexers/xorg.py4
-rw-r--r--pygments/lexers/yang.py12
-rw-r--r--pygments/lexers/zig.py9
-rw-r--r--pygments/modeline.py2
-rw-r--r--pygments/plugin.py2
-rw-r--r--pygments/regexopt.py2
-rw-r--r--pygments/scanner.py2
-rw-r--r--pygments/sphinxext.py2
-rw-r--r--pygments/style.py2
-rw-r--r--pygments/styles/__init__.py5
-rw-r--r--pygments/styles/abap.py2
-rw-r--r--pygments/styles/algol.py2
-rw-r--r--pygments/styles/algol_nu.py2
-rw-r--r--pygments/styles/arduino.py2
-rw-r--r--pygments/styles/autumn.py2
-rw-r--r--pygments/styles/borland.py2
-rw-r--r--pygments/styles/bw.py2
-rw-r--r--pygments/styles/colorful.py2
-rw-r--r--pygments/styles/default.py2
-rw-r--r--pygments/styles/emacs.py2
-rw-r--r--pygments/styles/friendly.py2
-rw-r--r--pygments/styles/fruity.py2
-rw-r--r--pygments/styles/igor.py2
-rw-r--r--pygments/styles/inkpot.py2
-rw-r--r--pygments/styles/lovelace.py2
-rw-r--r--pygments/styles/manni.py2
-rw-r--r--pygments/styles/monokai.py2
-rw-r--r--pygments/styles/murphy.py2
-rw-r--r--pygments/styles/native.py2
-rw-r--r--pygments/styles/paraiso_dark.py2
-rw-r--r--pygments/styles/paraiso_light.py2
-rw-r--r--pygments/styles/pastie.py2
-rw-r--r--pygments/styles/perldoc.py2
-rw-r--r--pygments/styles/rainbow_dash.py2
-rw-r--r--pygments/styles/rrt.py2
-rw-r--r--pygments/styles/sas.py2
-rw-r--r--pygments/styles/solarized.py2
-rw-r--r--pygments/styles/stata_dark.py2
-rw-r--r--pygments/styles/stata_light.py2
-rw-r--r--pygments/styles/tango.py2
-rw-r--r--pygments/styles/trac.py2
-rw-r--r--pygments/styles/vim.py2
-rw-r--r--pygments/styles/vs.py2
-rw-r--r--pygments/styles/xcode.py2
-rw-r--r--pygments/token.py2
-rw-r--r--pygments/unistring.py2
-rw-r--r--pygments/util.py2
-rwxr-xr-xscripts/check_sources.py4
-rwxr-xr-xscripts/debug_lexer.py8
-rw-r--r--scripts/get_vimkw.py2
-rwxr-xr-xsetup.py2
-rw-r--r--tests/__init__.py2
-rw-r--r--tests/examplefiles/bare.bare43
-rw-r--r--tests/examplefiles/example.promql8
-rw-r--r--tests/examplefiles/example.ptls30
-rw-r--r--tests/examplefiles/fennelview.fnl134
-rw-r--r--tests/examplefiles/mysql.txt132
-rw-r--r--tests/examplefiles/psysh_test.psysh47
-rw-r--r--tests/examplefiles/test.sco2
-rw-r--r--tests/html_linenos_expected_output/inline_cls_step_1_start_1_special_0_anchor.html10
-rw-r--r--tests/html_linenos_expected_output/inline_cls_step_1_start_1_special_0_noanchor.html10
-rw-r--r--tests/html_linenos_expected_output/inline_cls_step_1_start_1_special_3_anchor.html10
-rw-r--r--tests/html_linenos_expected_output/inline_cls_step_1_start_1_special_3_noanchor.html10
-rw-r--r--tests/html_linenos_expected_output/inline_cls_step_1_start_8_special_0_anchor.html10
-rw-r--r--tests/html_linenos_expected_output/inline_cls_step_1_start_8_special_0_noanchor.html10
-rw-r--r--tests/html_linenos_expected_output/inline_cls_step_1_start_8_special_3_anchor.html10
-rw-r--r--tests/html_linenos_expected_output/inline_cls_step_1_start_8_special_3_noanchor.html10
-rw-r--r--tests/html_linenos_expected_output/inline_cls_step_2_start_1_special_0_anchor.html10
-rw-r--r--tests/html_linenos_expected_output/inline_cls_step_2_start_1_special_0_noanchor.html10
-rw-r--r--tests/html_linenos_expected_output/inline_cls_step_2_start_1_special_3_anchor.html10
-rw-r--r--tests/html_linenos_expected_output/inline_cls_step_2_start_1_special_3_noanchor.html10
-rw-r--r--tests/html_linenos_expected_output/inline_cls_step_2_start_8_special_0_anchor.html10
-rw-r--r--tests/html_linenos_expected_output/inline_cls_step_2_start_8_special_0_noanchor.html10
-rw-r--r--tests/html_linenos_expected_output/inline_cls_step_2_start_8_special_3_anchor.html10
-rw-r--r--tests/html_linenos_expected_output/inline_cls_step_2_start_8_special_3_noanchor.html10
-rw-r--r--tests/html_linenos_expected_output/inline_nocls_step_1_start_1_special_0_anchor.html10
-rw-r--r--tests/html_linenos_expected_output/inline_nocls_step_1_start_1_special_0_noanchor.html10
-rw-r--r--tests/html_linenos_expected_output/inline_nocls_step_1_start_1_special_3_anchor.html10
-rw-r--r--tests/html_linenos_expected_output/inline_nocls_step_1_start_1_special_3_noanchor.html10
-rw-r--r--tests/html_linenos_expected_output/inline_nocls_step_1_start_8_special_0_anchor.html10
-rw-r--r--tests/html_linenos_expected_output/inline_nocls_step_1_start_8_special_0_noanchor.html10
-rw-r--r--tests/html_linenos_expected_output/inline_nocls_step_1_start_8_special_3_anchor.html10
-rw-r--r--tests/html_linenos_expected_output/inline_nocls_step_1_start_8_special_3_noanchor.html10
-rw-r--r--tests/html_linenos_expected_output/inline_nocls_step_2_start_1_special_0_anchor.html10
-rw-r--r--tests/html_linenos_expected_output/inline_nocls_step_2_start_1_special_0_noanchor.html10
-rw-r--r--tests/html_linenos_expected_output/inline_nocls_step_2_start_1_special_3_anchor.html10
-rw-r--r--tests/html_linenos_expected_output/inline_nocls_step_2_start_1_special_3_noanchor.html10
-rw-r--r--tests/html_linenos_expected_output/inline_nocls_step_2_start_8_special_0_anchor.html10
-rw-r--r--tests/html_linenos_expected_output/inline_nocls_step_2_start_8_special_0_noanchor.html10
-rw-r--r--tests/html_linenos_expected_output/inline_nocls_step_2_start_8_special_3_anchor.html10
-rw-r--r--tests/html_linenos_expected_output/inline_nocls_step_2_start_8_special_3_noanchor.html10
-rw-r--r--tests/html_linenos_expected_output/table_cls_step_1_start_1_special_0_anchor.html38
-rw-r--r--tests/html_linenos_expected_output/table_cls_step_1_start_1_special_0_noanchor.html38
-rw-r--r--tests/html_linenos_expected_output/table_cls_step_1_start_1_special_3_anchor.html38
-rw-r--r--tests/html_linenos_expected_output/table_cls_step_1_start_1_special_3_noanchor.html38
-rw-r--r--tests/html_linenos_expected_output/table_cls_step_1_start_8_special_0_anchor.html38
-rw-r--r--tests/html_linenos_expected_output/table_cls_step_1_start_8_special_0_noanchor.html38
-rw-r--r--tests/html_linenos_expected_output/table_cls_step_1_start_8_special_3_anchor.html38
-rw-r--r--tests/html_linenos_expected_output/table_cls_step_1_start_8_special_3_noanchor.html38
-rw-r--r--tests/html_linenos_expected_output/table_cls_step_2_start_1_special_0_anchor.html38
-rw-r--r--tests/html_linenos_expected_output/table_cls_step_2_start_1_special_0_noanchor.html38
-rw-r--r--tests/html_linenos_expected_output/table_cls_step_2_start_1_special_3_anchor.html38
-rw-r--r--tests/html_linenos_expected_output/table_cls_step_2_start_1_special_3_noanchor.html38
-rw-r--r--tests/html_linenos_expected_output/table_cls_step_2_start_8_special_0_anchor.html38
-rw-r--r--tests/html_linenos_expected_output/table_cls_step_2_start_8_special_0_noanchor.html38
-rw-r--r--tests/html_linenos_expected_output/table_cls_step_2_start_8_special_3_anchor.html38
-rw-r--r--tests/html_linenos_expected_output/table_cls_step_2_start_8_special_3_noanchor.html38
-rw-r--r--tests/html_linenos_expected_output/table_nocls_step_1_start_1_special_0_anchor.html38
-rw-r--r--tests/html_linenos_expected_output/table_nocls_step_1_start_1_special_0_noanchor.html38
-rw-r--r--tests/html_linenos_expected_output/table_nocls_step_1_start_1_special_3_anchor.html38
-rw-r--r--tests/html_linenos_expected_output/table_nocls_step_1_start_1_special_3_noanchor.html38
-rw-r--r--tests/html_linenos_expected_output/table_nocls_step_1_start_8_special_0_anchor.html38
-rw-r--r--tests/html_linenos_expected_output/table_nocls_step_1_start_8_special_0_noanchor.html38
-rw-r--r--tests/html_linenos_expected_output/table_nocls_step_1_start_8_special_3_anchor.html38
-rw-r--r--tests/html_linenos_expected_output/table_nocls_step_1_start_8_special_3_noanchor.html38
-rw-r--r--tests/html_linenos_expected_output/table_nocls_step_2_start_1_special_0_anchor.html38
-rw-r--r--tests/html_linenos_expected_output/table_nocls_step_2_start_1_special_0_noanchor.html38
-rw-r--r--tests/html_linenos_expected_output/table_nocls_step_2_start_1_special_3_anchor.html38
-rw-r--r--tests/html_linenos_expected_output/table_nocls_step_2_start_1_special_3_noanchor.html38
-rw-r--r--tests/html_linenos_expected_output/table_nocls_step_2_start_8_special_0_anchor.html38
-rw-r--r--tests/html_linenos_expected_output/table_nocls_step_2_start_8_special_0_noanchor.html38
-rw-r--r--tests/html_linenos_expected_output/table_nocls_step_2_start_8_special_3_anchor.html38
-rw-r--r--tests/html_linenos_expected_output/table_nocls_step_2_start_8_special_3_noanchor.html38
-rw-r--r--tests/test_apache_conf.py2
-rw-r--r--tests/test_asm.py2
-rw-r--r--tests/test_basic.py2
-rw-r--r--tests/test_basic_api.py2
-rw-r--r--tests/test_bibtex.py2
-rw-r--r--tests/test_cfm.py2
-rw-r--r--tests/test_clexer.py2
-rw-r--r--tests/test_cmdline.py2
-rw-r--r--tests/test_coffeescript.py85
-rw-r--r--tests/test_crystal.py2
-rw-r--r--tests/test_csound.py2
-rw-r--r--tests/test_data.py28
-rw-r--r--tests/test_examplefiles.py2
-rw-r--r--tests/test_grammar_notation.py2
-rw-r--r--tests/test_haskell.py2
-rw-r--r--tests/test_hdl.py2
-rw-r--r--tests/test_html_formatter.py2
-rw-r--r--tests/test_inherit.py2
-rw-r--r--tests/test_irc_formatter.py2
-rw-r--r--tests/test_java.py2
-rw-r--r--tests/test_javascript.py141
-rw-r--r--tests/test_julia.py2
-rw-r--r--tests/test_kotlin.py2
-rw-r--r--tests/test_latex_formatter.py2
-rw-r--r--tests/test_lexers_other.py2
-rw-r--r--tests/test_make.py29
-rw-r--r--tests/test_markdown_lexer.py2
-rw-r--r--tests/test_matlab.py1
-rw-r--r--tests/test_modeline.py2
-rw-r--r--tests/test_mysql.py249
-rw-r--r--tests/test_objectiveclexer.py2
-rw-r--r--tests/test_perllexer.py2
-rw-r--r--tests/test_php.py2
-rw-r--r--tests/test_praat.py2
-rw-r--r--tests/test_promql.py310
-rw-r--r--tests/test_properties.py2
-rw-r--r--tests/test_python.py2
-rw-r--r--tests/test_qbasiclexer.py2
-rw-r--r--tests/test_r.py2
-rw-r--r--tests/test_regexlexer.py2
-rw-r--r--tests/test_regexopt.py2
-rw-r--r--tests/test_rtf_formatter.py2
-rw-r--r--tests/test_ruby.py2
-rw-r--r--tests/test_shell.py20
-rw-r--r--tests/test_smarty.py2
-rw-r--r--tests/test_sql.py2
-rw-r--r--tests/test_terminal_formatter.py2
-rw-r--r--tests/test_textfmts.py2
-rw-r--r--tests/test_token.py2
-rw-r--r--tests/test_unistring.py2
-rw-r--r--tests/test_using_api.py2
-rw-r--r--tests/test_util.py2
-rw-r--r--tests/test_whiley.py2
-rw-r--r--tox.ini2
383 files changed, 4823 insertions, 2049 deletions
diff --git a/.github/actions/pyodide-package/action.yml b/.github/actions/pyodide-package/action.yml
new file mode 100644
index 00000000..2187cd9f
--- /dev/null
+++ b/.github/actions/pyodide-package/action.yml
@@ -0,0 +1,5 @@
+name: 'Update Pyodide package'
+description: 'Update the WASM compiled Pygments with Pyodide'
+runs:
+ using: 'docker'
+ image: 'birkenfeld/pyodide-pygments-builder'
diff --git a/.github/workflows/build.yaml b/.github/workflows/build.yaml
index 1eba408a..ff131aa9 100644
--- a/.github/workflows/build.yaml
+++ b/.github/workflows/build.yaml
@@ -4,7 +4,6 @@ on: [push, pull_request]
jobs:
build:
-
runs-on: ${{ matrix.os }}
strategy:
matrix:
@@ -29,3 +28,15 @@ jobs:
- name: Test package
run: py.test
if: runner.os == 'Windows' && ! contains(matrix['python-version'], 'pypy')
+
+ lint:
+ runs-on: ubuntu-latest
+ steps:
+ - uses: actions/checkout@v2
+ - uses: actions/setup-python@v2
+ with:
+ python-version: 3.8
+ - name: Check out regexlint
+ run: git clone https://github.com/pygments/regexlint
+ - name: Run regexlint
+ run: make regexlint REGEXLINT=`pwd`/regexlint
diff --git a/.github/workflows/docs.yaml b/.github/workflows/docs.yaml
index 16d1f58f..6a4bb730 100644
--- a/.github/workflows/docs.yaml
+++ b/.github/workflows/docs.yaml
@@ -15,15 +15,15 @@ jobs:
python-version: 3.7
- name: Checkout Pygments
uses: actions/checkout@v1
- - name: Checkout Pyodide
- run: git clone https://github.com/pygments/pyodide-artifacts
- - name: Sphinx install
+ - name: Install Sphinx
run: pip install Sphinx
+ - name: Create Pyodide WASM package
+ uses: ./.github/actions/pyodide-package
- name: Sphinx build
run: |
cd doc
WEBSITE_BUILD=1 make dirhtml
- cp -a ../pyodide-artifacts/pyodide _build/dirhtml/_static
+ cp -a ../pyodide _build/dirhtml/_static
touch _build/dirhtml/.nojekyll
echo -e 'pygments.org\nwww.pygments.org' > _build/dirhtml/CNAME
echo 'Automated deployment of docs for GitHub pages.' > _build/dirhtml/README
diff --git a/AUTHORS b/AUTHORS
index 5f234f66..5058c612 100644
--- a/AUTHORS
+++ b/AUTHORS
@@ -35,7 +35,7 @@ Other contributors, listed alphabetically, are:
* Stéphane Blondon -- SGF and Sieve lexers
* Frits van Bommel -- assembler lexers
* Pierre Bourdon -- bugfixes
-* Martijn Braam -- Kernel log lexer
+* Martijn Braam -- Kernel log lexer, BARE lexer
* Matthias Bussonnier -- ANSI style handling for terminal-256 formatter
* chebee7i -- Python traceback lexer improvements
* Hiram Chirino -- Scaml and Jade lexers
@@ -138,7 +138,7 @@ Other contributors, listed alphabetically, are:
* Stephen McKamey -- Duel/JBST lexer
* Brian McKenna -- F# lexer
* Charles McLaughlin -- Puppet lexer
-* Kurt McKee -- Tera Term macro lexer
+* Kurt McKee -- Tera Term macro lexer, PostgreSQL updates, MySQL overhaul
* Lukas Meuser -- BBCode formatter, Lua lexer
* Cat Miller -- Pig lexer
* Paul Miller -- LiveScript lexer
@@ -155,6 +155,7 @@ Other contributors, listed alphabetically, are:
* Nam T. Nguyen -- Monokai style
* Jesper Noehr -- HTML formatter "anchorlinenos"
* Mike Nolta -- Julia lexer
+* Avery Nortonsmith -- Pointless lexer
* Jonas Obrist -- BBCode lexer
* Edward O'Callaghan -- Cryptol lexer
* David Oliva -- Rebol lexer
@@ -191,6 +192,7 @@ Other contributors, listed alphabetically, are:
* René Schwaiger -- Rainbow Dash style
* Sebastian Schweizer -- Whiley lexer
* Tassilo Schweyer -- Io, MOOCode lexers
+* Pablo Seminario -- PromQL lexer
* Ted Shaw -- AutoIt lexer
* Joerg Sieker -- ABAP lexer
* Robert Simmons -- Standard ML lexer
diff --git a/CHANGES b/CHANGES
index 659bdd86..ab7ae23f 100644
--- a/CHANGES
+++ b/CHANGES
@@ -13,9 +13,13 @@ Version 2.7.0
- Added lexers:
+ * Arrow (PR#1481, PR#1499)
+ * BARE (PR#1488)
* Devicetree (PR#1434)
* F* (PR#1409)
* GDScript (PR#1457)
+ * Pointless (PR#1494)
+ * PromQL (PR#1506)
* Singularity (PR#1285)
* TiddlyWiki5 (PR#1390)
* TNT (PR#1414)
@@ -23,23 +27,40 @@ Version 2.7.0
- Updated lexers:
+ * APL (PR#1503)
* C++ (PR#1350, which also fixes: #1222, #996, #906, #828, #1162, #1166,
#1396)
* Chapel (PR#1423)
+ * CMake (#1491)
+ * CSound (#1509)
+ * Cython (PR#1507)
* Dart (PR#1449)
* Fortran (PR#1442)
- * HTTP (PR#1432)
+ * GAS (PR#1530)
+ * HTTP (PR#1432, #1520, PR#1521)
* Inform 6 (PR#1461)
+ * JSON (#1065, PR#1528)
* Lean (PR#1415)
* Matlab (PR#1399)
+ * Markdown (#1492, PR#1495)
* NASM (PR#1465)
* Nim (PR#1426)
+ * PostgreSQL (PR#1513)
+ * PowerShell (PR#1497, PR#1398)
+ * Protobuf (PR#1505)
+ * Robot (PR#1480)
* SQL (PR#1402)
- * SystemVerilog (PR#1436, PR#1452, PR#1454, PR#1460, PR#1462, PR#1463, PR#1464)
+ * SystemVerilog (PR#1436, PR#1452, PR#1454, PR#1460, PR#1462, PR#1463, PR#1464, #1496, PR#1504)
* TeraTerm (PR#1337)
+ * XML (#1502)
- Added a new filter for math symbols (PR#1406)
- The Kconfig lexer will match Kconfig derivative names now (PR#1458)
+- Improved HTML formatter output (PR#1500)
+- ``.markdown`` is now recognized as an extension for Markdown files (PR#1476)
+- Fixed line number colors for Solarized (PR#1477, #1356)
+- Improvements to exception handling (PR#1478)
+
Version 2.6.1
-------------
@@ -47,6 +68,7 @@ Version 2.6.1
- This release fixes a packaging issue. No functional changes.
+
Version 2.6
-----------
(released March 8, 2020)
@@ -77,7 +99,7 @@ Version 2.6
* Perl6/Raku lexer (PR#1344)
* Python3 (PR#1382, PR#1385)
* Rust: Updated lexer to cover more builtins (mostly macros) and miscellaneous
- new syntax (PR#1320)
+ new syntax (PR#1320)
* SQL: Add temporal support keywords (PR#1402)
- The 256-color/true-color terminal formatters now support the italic attribute
@@ -96,6 +118,7 @@ Version 2.6
- Improve font search on Windows (#1247)
- Remove unused script block (#1401)
+
Version 2.5.2
-------------
(released November 29, 2019)
diff --git a/LICENSE b/LICENSE
index 13d1c74b..085810ec 100644
--- a/LICENSE
+++ b/LICENSE
@@ -1,4 +1,4 @@
-Copyright (c) 2006-2019 by the respective authors (see AUTHORS file).
+Copyright (c) 2006-2020 by the respective authors (see AUTHORS file).
All rights reserved.
Redistribution and use in source and binary forms, with or without
diff --git a/Makefile b/Makefile
index 7b0ef585..7de5f87a 100644
--- a/Makefile
+++ b/Makefile
@@ -4,7 +4,7 @@
#
# Combines scripts for common tasks.
#
-# :copyright: Copyright 2006-2019 by the Pygments team, see AUTHORS.
+# :copyright: Copyright 2006-2020 by the Pygments team, see AUTHORS.
# :license: BSD, see LICENSE for details.
#
@@ -13,7 +13,8 @@ PYTHON ?= python3
export PYTHONPATH = $(shell echo "$$PYTHONPATH"):$(shell python -c 'import os; print ":".join(os.path.abspath(line.strip()) for line in file("PYTHONPATH"))' 2>/dev/null)
.PHONY: all check clean clean-pyc codetags docs mapfiles \
- pylint reindent test test-coverage
+ pylint reindent test test-coverage test-examplefiles \
+ tox-test tox-test-coverage regexlint
all: clean-pyc check test
diff --git a/doc/_templates/demo.html b/doc/_templates/demo.html
index bc788d1a..6a60df7d 100644
--- a/doc/_templates/demo.html
+++ b/doc/_templates/demo.html
@@ -16,7 +16,7 @@
<h1>Demo - Try it out!</h1>
<p>The highlighting here is performed in-browser using
- a WebAssembly translation of Pygments, courtesy of
+ a WebAssembly translation of the latest Pygments master branch, courtesy of
<a href="https://github.com/iodide-project/pyodide">Pyodide</a>.</p>
<p>Your content is neither sent over the web nor stored anywhere.</p>
diff --git a/doc/_templates/indexsidebar.html b/doc/_templates/indexsidebar.html
index 5f7ecf92..96d359d5 100644
--- a/doc/_templates/indexsidebar.html
+++ b/doc/_templates/indexsidebar.html
@@ -20,5 +20,5 @@
<p>You can also open an issue at the
<a href="https://github.com/pygments/pygments/issues">tracker</a>.</p>
-<p class="logo">A <a href="https://pocoo.org/">
+<p class="logo">A <a href="https://www.pocoo.org/">
<img src="{{ pathto("_static/pocoo.png", 1) }}" /></a> project</a></p>
diff --git a/doc/_themes/pygments14/layout.html b/doc/_themes/pygments14/layout.html
index 3e04665d..909fbf9e 100644
--- a/doc/_themes/pygments14/layout.html
+++ b/doc/_themes/pygments14/layout.html
@@ -82,7 +82,7 @@
{% block footer %}
<div class="footer" role="contentinfo">
- &copy; Copyright 2006-2019, Georg Brandl and Pygments contributors.
+ &copy; Copyright 2006-2020, Georg Brandl and Pygments contributors.
Created using <a href="https://sphinx-doc.org/">Sphinx</a> {{
sphinx_version }}. <br/>
Pygments logo created by <a href="https://joelunger.com">Joel Unger</a>.
diff --git a/doc/_themes/pygments14/static/pygments14.css_t b/doc/_themes/pygments14/static/pygments14.css_t
index 72ca942e..8f6e60c7 100644
--- a/doc/_themes/pygments14/static/pygments14.css_t
+++ b/doc/_themes/pygments14/static/pygments14.css_t
@@ -4,7 +4,7 @@
*
* Sphinx stylesheet -- pygments14 theme. Heavily copied from sphinx13.
*
- * :copyright: Copyright 2006-2019 by the Pygments team, see AUTHORS.
+ * :copyright: Copyright 2006-2020 by the Pygments team, see AUTHORS.
* :license: BSD, see LICENSE for details.
*
*/
diff --git a/doc/conf.py b/doc/conf.py
index 3ab5c2e2..9e896890 100644
--- a/doc/conf.py
+++ b/doc/conf.py
@@ -35,7 +35,7 @@ master_doc = 'index'
# General information about the project.
project = u'Pygments'
-copyright = u'2006-2019, Georg Brandl and Pygments contributors'
+copyright = u'2006-2020, Georg Brandl and Pygments contributors'
# The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the
diff --git a/doc/docs/lexerdevelopment.rst b/doc/docs/lexerdevelopment.rst
index c776457b..824e0c59 100644
--- a/doc/docs/lexerdevelopment.rst
+++ b/doc/docs/lexerdevelopment.rst
@@ -20,7 +20,6 @@ containing tuples in the form ``(index, token, value)``. Normally you don't
need to do this since there are base lexers that do most of the work and that
you can subclass.
-
RegexLexer
==========
@@ -101,18 +100,21 @@ First, change the name of your lexer class to CustomLexer:
class CustomLexer(RegexLexer):
"""All your lexer code goes here!"""
-Then you can load the lexer from the command line with the additional
+Then you can load and test the lexer from the command line with the additional
flag ``-x``:
.. code-block:: console
- $ python -m pygments -l your_lexer_file.py -x
+ $ python -m pygments -x -l your_lexer_file.py <inputfile>
To specify a class name other than CustomLexer, append it with a colon:
.. code-block:: console
- $ python -m pygments -l your_lexer.py:SomeLexer -x
+ $ python -m pygments -x -l your_lexer.py:SomeLexer <inputfile>
+
+Use the ``-f`` flag to select a different output format than terminal
+escape sequences.
Or, using the Python API:
@@ -145,6 +147,11 @@ cloned from GitHub.
Select a matching module under ``pygments/lexers``, or create a new module for
your lexer class.
+.. note::
+
+ We encourage you to put your lexer class into its own module, unless it's a
+ very small derivative of an already existing lexer.
+
Next, make sure the lexer is known from outside of the module. All modules in
the ``pygments.lexers`` package specify ``__all__``. For example,
``esoteric.py`` sets::
@@ -556,7 +563,7 @@ appropriate positions. ::
class HtmlPhpLexer(DelegatingLexer):
def __init__(self, **options):
- super(HtmlPhpLexer, self).__init__(HtmlLexer, PhpLexer, **options)
+ super().__init__(HtmlLexer, PhpLexer, **options)
This procedure ensures that e.g. HTML with template tags in it is highlighted
correctly even if the template tags are put into HTML tags or attributes.
diff --git a/doc/languages.rst b/doc/languages.rst
index ecb9d460..34cb95e2 100644
--- a/doc/languages.rst
+++ b/doc/languages.rst
@@ -21,6 +21,7 @@ Programming languages
* `Augeas <https://augeas.net/>`_
* `AutoIt <https://www.autoitscript.com/site/autoit/>`_
* `Awk <https://en.wikipedia.org/wiki/AWK>`_
+* `BARE <https://baremessages.org/>`_
* `BBC Basic <http://www.bbcbasic.co.uk/bbcbasic.html>`_
* `Befunge <https://github.com/catseye/Befunge-93>`_
* `BlitzBasic <https://en.wikipedia.org/wiki/Blitz_BASIC>`_
@@ -127,6 +128,7 @@ Programming languages
* `PHP <https://www.php.net/>`_
* `Perl 5 <https://perl.org>`_
* `Pike <https://pike.lysator.liu.se/>`_
+* `Pointless <https://ptls.dev/>`_
* `Pony <https://www.ponylang.io/>`_
* `PovRay <http://www.povray.org/>`_
* `PostScript <https://en.wikipedia.org/wiki/PostScript>`_
@@ -257,6 +259,7 @@ Other markup
* Notmuch
* `PEG <https://bford.info/packrat/>`_
* POV-Ray scenes
+* `PromQL <https://prometheus.io/docs/prometheus/latest/querying/basics/>`_
* `Puppet <https://puppet.com/>`_
* QML
* Ragel
@@ -293,6 +296,54 @@ Other markup
* YANG
* Windows Registry files
+
+Interactive terminal/shell sessions
+-----------------------------------
+
+To highlight an interactive terminal or shell session, prefix your code snippet
+with a specially formatted prompt.
+
+Supported shells with examples are shown below. In each example, prompt parts in
+brackets ``[any]`` represent optional parts of the prompt, and prompt parts
+without brackets or in parenthesis ``(any)`` represent required parts of the
+prompt.
+
+* **Bash Session** (console, shell-session):
+
+ .. code-block:: console
+
+ [any@any]$ ls -lh
+ [any@any]# ls -lh
+ [any@any]% ls -lh
+ $ ls -lh
+ # ls -lh
+ % ls -lh
+ > ls -lh
+
+* **MSDOS Session** (doscon):
+
+ .. code-block:: doscon
+
+ [any]> dir
+ > dir
+ More? dir
+
+* **Tcsh Session** (tcshcon):
+
+ .. code-block:: tcshcon
+
+ (any)> ls -lh
+ ? ls -lh
+
+* **PowerShell Session** (ps1con):
+
+ .. code-block:: ps1con
+
+ PS[any]> Get-ChildItem
+ PS> Get-ChildItem
+ >> Get-ChildItem
+
+
... that's all?
---------------
diff --git a/external/markdown-processor.py b/external/markdown-processor.py
index 3a7bca4c..ffd538df 100644
--- a/external/markdown-processor.py
+++ b/external/markdown-processor.py
@@ -22,7 +22,7 @@
.. _Markdown: https://pypi.python.org/pypi/Markdown
- :copyright: Copyright 2006-2019 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2020 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
diff --git a/external/moin-parser.py b/external/moin-parser.py
index 4e74447f..5600fea4 100644
--- a/external/moin-parser.py
+++ b/external/moin-parser.py
@@ -31,7 +31,7 @@
If you do not want to do that and are willing to accept larger HTML
output, you can set the INLINESTYLES option below to True.
- :copyright: Copyright 2006-2019 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2020 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
diff --git a/external/rst-directive.py b/external/rst-directive.py
index 6f3173c4..a381ce43 100644
--- a/external/rst-directive.py
+++ b/external/rst-directive.py
@@ -31,7 +31,7 @@
.. _directive documentation:
https://docutils.sourceforge.io/docs/howto/rst-directives.html
- :copyright: Copyright 2006-2019 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2020 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
diff --git a/pygments/__init__.py b/pygments/__init__.py
index 8525a0e8..8620c733 100644
--- a/pygments/__init__.py
+++ b/pygments/__init__.py
@@ -22,7 +22,7 @@
.. _Pygments master branch:
https://github.com/pygments/pygments/archive/master.zip#egg=Pygments-dev
- :copyright: Copyright 2006-2019 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2020 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
import sys
diff --git a/pygments/__main__.py b/pygments/__main__.py
index af231b3d..8d2ea9e3 100644
--- a/pygments/__main__.py
+++ b/pygments/__main__.py
@@ -5,7 +5,7 @@
Main entry point for ``python -m pygments``.
- :copyright: Copyright 2006-2019 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2020 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
diff --git a/pygments/cmdline.py b/pygments/cmdline.py
index 4df35230..457af34d 100644
--- a/pygments/cmdline.py
+++ b/pygments/cmdline.py
@@ -5,7 +5,7 @@
Command line interface.
- :copyright: Copyright 2006-2019 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2020 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
@@ -232,7 +232,7 @@ def main_inner(popts, args, usage):
return 0
if opts.pop('-V', None) is not None:
- print('Pygments version %s, (c) 2006-2019 by Georg Brandl.' % __version__)
+ print('Pygments version %s, (c) 2006-2020 by Georg Brandl.' % __version__)
return 0
# handle ``pygmentize -L``
@@ -515,7 +515,11 @@ def main_inner(popts, args, usage):
# ... and do it!
if '-s' not in opts:
# process whole input as per normal...
- highlight(code, lexer, fmter, outfile)
+ try:
+ highlight(code, lexer, fmter, outfile)
+ finally:
+ if outfn:
+ outfile.close()
return 0
else:
# line by line processing of stdin (eg: for 'tail -f')...
@@ -532,6 +536,9 @@ def main_inner(popts, args, usage):
return 0
except KeyboardInterrupt: # pragma: no cover
return 0
+ finally:
+ if outfn:
+ outfile.close()
def main(args=sys.argv):
diff --git a/pygments/console.py b/pygments/console.py
index a05b256e..5fdc2cf7 100644
--- a/pygments/console.py
+++ b/pygments/console.py
@@ -5,7 +5,7 @@
Format colored console output.
- :copyright: Copyright 2006-2019 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2020 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
diff --git a/pygments/filter.py b/pygments/filter.py
index faa18bfc..d6a452db 100644
--- a/pygments/filter.py
+++ b/pygments/filter.py
@@ -5,7 +5,7 @@
Module that implements the default filter.
- :copyright: Copyright 2006-2019 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2020 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
@@ -17,8 +17,7 @@ def apply_filters(stream, filters, lexer=None):
filter, otherwise the filter receives `None`.
"""
def _apply(filter_, stream):
- for token in filter_.filter(lexer, stream):
- yield token
+ yield from filter_.filter(lexer, stream)
for filter_ in filters:
stream = _apply(filter_, stream)
return stream
@@ -70,5 +69,4 @@ class FunctionFilter(Filter):
def filter(self, lexer, stream):
# pylint: disable=not-callable
- for ttype, value in self.function(lexer, stream, self.options):
- yield ttype, value
+ yield from self.function(lexer, stream, self.options)
diff --git a/pygments/filters/__init__.py b/pygments/filters/__init__.py
index d8b62b4e..b57c64d3 100644
--- a/pygments/filters/__init__.py
+++ b/pygments/filters/__init__.py
@@ -6,7 +6,7 @@
Module containing filter lookup functions and default
filters.
- :copyright: Copyright 2006-2019 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2020 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
@@ -45,8 +45,7 @@ def get_filter_by_name(filtername, **options):
def get_all_filters():
"""Return a generator of all filter names."""
- for name in FILTERS:
- yield name
+ yield from FILTERS
for name, _ in find_plugin_filters():
yield name
@@ -88,9 +87,7 @@ class CodeTagFilter(Filter):
if ttype in String.Doc or \
ttype in Comment and \
ttype not in Comment.Preproc:
- for sttype, svalue in _replace_special(ttype, value, regex,
- Comment.Special):
- yield sttype, svalue
+ yield from _replace_special(ttype, value, regex, Comment.Special)
else:
yield ttype, value
@@ -851,9 +848,8 @@ class VisibleWhitespaceFilter(Filter):
return wschar
for ttype, value in stream:
- for sttype, svalue in _replace_special(ttype, value, regex,
- Whitespace, replacefunc):
- yield sttype, svalue
+ yield from _replace_special(ttype, value, regex, Whitespace,
+ replacefunc)
else:
spaces, tabs, newlines = self.spaces, self.tabs, self.newlines
# simpler processing
diff --git a/pygments/formatter.py b/pygments/formatter.py
index 60531523..d5da54f1 100644
--- a/pygments/formatter.py
+++ b/pygments/formatter.py
@@ -5,7 +5,7 @@
Base formatter class.
- :copyright: Copyright 2006-2019 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2020 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
diff --git a/pygments/formatters/__init__.py b/pygments/formatters/__init__.py
index 9ff30643..ce11cf5b 100644
--- a/pygments/formatters/__init__.py
+++ b/pygments/formatters/__init__.py
@@ -5,7 +5,7 @@
Pygments formatters.
- :copyright: Copyright 2006-2019 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2020 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
diff --git a/pygments/formatters/_mapping.py b/pygments/formatters/_mapping.py
index 48703694..984c1ec8 100755
--- a/pygments/formatters/_mapping.py
+++ b/pygments/formatters/_mapping.py
@@ -9,7 +9,7 @@
Do not alter the FORMATTERS dictionary by hand.
- :copyright: Copyright 2006-2019 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2020 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
diff --git a/pygments/formatters/bbcode.py b/pygments/formatters/bbcode.py
index 784aee3a..dd949666 100644
--- a/pygments/formatters/bbcode.py
+++ b/pygments/formatters/bbcode.py
@@ -5,7 +5,7 @@
BBcode formatter.
- :copyright: Copyright 2006-2019 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2020 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
diff --git a/pygments/formatters/html.py b/pygments/formatters/html.py
index c3100967..421710f7 100644
--- a/pygments/formatters/html.py
+++ b/pygments/formatters/html.py
@@ -5,7 +5,7 @@
Formatter for HTML output.
- :copyright: Copyright 2006-2019 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2020 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
@@ -62,7 +62,7 @@ def _get_ttype_class(ttype):
CSSFILE_TEMPLATE = '''\
/*
generated by Pygments <https://pygments.org/>
-Copyright 2006-2019 by the Pygments team.
+Copyright 2006-2020 by the Pygments team.
Licensed under the BSD license, see LICENSE for details.
*/
%(styledefs)s
@@ -73,7 +73,7 @@ DOC_HEADER = '''\
"http://www.w3.org/TR/html4/strict.dtd">
<!--
generated by Pygments <https://pygments.org/>
-Copyright 2006-2019 by the Pygments team.
+Copyright 2006-2020 by the Pygments team.
Licensed under the BSD license, see LICENSE for details.
-->
<html>
@@ -633,8 +633,7 @@ class HtmlFormatter(Formatter):
styledefs=self.get_style_defs('body'),
encoding=self.encoding))
- for t, line in inner:
- yield t, line
+ yield from inner
yield 0, DOC_FOOTER
def _wrap_tablelinenos(self, inner):
@@ -677,7 +676,8 @@ class HtmlFormatter(Formatter):
else:
style = ''
- line = '<pre%s>%s</pre>' % (style, line)
+ if style:
+ line = '<span%s>%s</span>' % (style, line)
lines.append(line)
@@ -688,8 +688,8 @@ class HtmlFormatter(Formatter):
# some configurations seem to mess up the formatting...
yield 0, (
'<table class="%stable">' % self.cssclass +
- '<tr><td class="linenos"><div class="linenodiv">' +
- ls + '</div></td><td class="code">'
+ '<tr><td class="linenos"><div class="linenodiv"><pre>' +
+ ls + '</pre></div></td><td class="code">'
)
yield 0, dummyoutfile.getvalue()
yield 0, '</td></tr></table>'
@@ -723,7 +723,10 @@ class HtmlFormatter(Formatter):
else:
style = ' class="linenos"'
- yield 1, '<span%s>%s</span>' % (style, line) + inner_line
+ if style:
+ yield 1, '<span%s>%s</span>' % (style, line) + inner_line
+ else:
+ yield 1, line + inner_line
num += 1
def _wrap_lineanchors(self, inner):
@@ -758,8 +761,7 @@ class HtmlFormatter(Formatter):
yield 0, ('<div' + (self.cssclass and ' class="%s"' % self.cssclass) +
(style and (' style="%s"' % style)) + '>')
- for tup in inner:
- yield tup
+ yield from inner
yield 0, '</div>\n'
def _wrap_pre(self, inner):
@@ -776,14 +778,12 @@ class HtmlFormatter(Formatter):
# the empty span here is to keep leading empty lines from being
# ignored by HTML parsers
yield 0, ('<pre' + (style and ' style="%s"' % style) + '><span></span>')
- for tup in inner:
- yield tup
+ yield from inner
yield 0, '</pre>'
def _wrap_code(self, inner):
yield 0, '<code>'
- for tup in inner:
- yield tup
+ yield from inner
yield 0, '</code>'
def _format_lines(self, tokensource):
diff --git a/pygments/formatters/img.py b/pygments/formatters/img.py
index 14207a3d..aa4cf35e 100644
--- a/pygments/formatters/img.py
+++ b/pygments/formatters/img.py
@@ -5,7 +5,7 @@
Formatter for Pixmap output.
- :copyright: Copyright 2006-2019 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2020 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
diff --git a/pygments/formatters/irc.py b/pygments/formatters/irc.py
index 0650492a..c54df867 100644
--- a/pygments/formatters/irc.py
+++ b/pygments/formatters/irc.py
@@ -5,7 +5,7 @@
Formatter for IRC output
- :copyright: Copyright 2006-2019 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2020 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
diff --git a/pygments/formatters/latex.py b/pygments/formatters/latex.py
index d5f80ef8..9e23740d 100644
--- a/pygments/formatters/latex.py
+++ b/pygments/formatters/latex.py
@@ -5,7 +5,7 @@
Formatter for LaTeX fancyvrb output.
- :copyright: Copyright 2006-2019 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2020 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
@@ -451,8 +451,7 @@ class LatexEmbeddedLexer(Lexer):
for i, t, v in self.lang.get_tokens_unprocessed(text):
if t in Token.Comment or t in Token.String:
if buf:
- for x in self.get_tokens_aux(idx, buf):
- yield x
+ yield from self.get_tokens_aux(idx, buf)
buf = ''
yield i, t, v
else:
@@ -460,8 +459,7 @@ class LatexEmbeddedLexer(Lexer):
idx = i
buf += v
if buf:
- for x in self.get_tokens_aux(idx, buf):
- yield x
+ yield from self.get_tokens_aux(idx, buf)
def get_tokens_aux(self, index, text):
while text:
diff --git a/pygments/formatters/other.py b/pygments/formatters/other.py
index c09eff0c..9f8bab4b 100644
--- a/pygments/formatters/other.py
+++ b/pygments/formatters/other.py
@@ -5,7 +5,7 @@
Other formatters: NullFormatter, RawTokenFormatter.
- :copyright: Copyright 2006-2019 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2020 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
diff --git a/pygments/formatters/rtf.py b/pygments/formatters/rtf.py
index 1246db2a..7f51bacd 100644
--- a/pygments/formatters/rtf.py
+++ b/pygments/formatters/rtf.py
@@ -5,7 +5,7 @@
A formatter that generates RTF files.
- :copyright: Copyright 2006-2019 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2020 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
diff --git a/pygments/formatters/svg.py b/pygments/formatters/svg.py
index fb75e494..d7f1d570 100644
--- a/pygments/formatters/svg.py
+++ b/pygments/formatters/svg.py
@@ -5,7 +5,7 @@
Formatter for SVG output.
- :copyright: Copyright 2006-2019 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2020 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
diff --git a/pygments/formatters/terminal.py b/pygments/formatters/terminal.py
index f0f3d7ae..747dfc38 100644
--- a/pygments/formatters/terminal.py
+++ b/pygments/formatters/terminal.py
@@ -5,7 +5,7 @@
Formatter for terminal output with ANSI sequences.
- :copyright: Copyright 2006-2019 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2020 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
diff --git a/pygments/formatters/terminal256.py b/pygments/formatters/terminal256.py
index 30e6e1e2..356d1f5b 100644
--- a/pygments/formatters/terminal256.py
+++ b/pygments/formatters/terminal256.py
@@ -11,7 +11,7 @@
Formatter version 1.
- :copyright: Copyright 2006-2019 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2020 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
diff --git a/pygments/lexer.py b/pygments/lexer.py
index 41d74b19..3f0df88e 100644
--- a/pygments/lexer.py
+++ b/pygments/lexer.py
@@ -5,7 +5,7 @@
Base lexer classes.
- :copyright: Copyright 2006-2019 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2020 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
@@ -628,8 +628,7 @@ class RegexLexer(Lexer, metaclass=RegexLexerMeta):
if type(action) is _TokenType:
yield pos, action, m.group()
else:
- for item in action(self, m):
- yield item
+ yield from action(self, m)
pos = m.end()
if new_state is not None:
# state transition
@@ -716,8 +715,7 @@ class ExtendedRegexLexer(RegexLexer):
yield ctx.pos, action, m.group()
ctx.pos = m.end()
else:
- for item in action(self, m, ctx):
- yield item
+ yield from action(self, m, ctx)
if not new_state:
# altered the state stack?
statetokens = tokendefs[ctx.stack[-1]]
@@ -781,8 +779,7 @@ def do_insertions(insertions, tokens):
index, itokens = next(insertions)
except StopIteration:
# no insertions
- for item in tokens:
- yield item
+ yield from tokens
return
realpos = None
@@ -856,8 +853,7 @@ class ProfilingRegexLexer(RegexLexer, metaclass=ProfilingRegexLexerMeta):
def get_tokens_unprocessed(self, text, stack=('root',)):
# this needs to be a stack, since using(this) will produce nested calls
self.__class__._prof_data.append({})
- for tok in RegexLexer.get_tokens_unprocessed(self, text, stack):
- yield tok
+ yield from RegexLexer.get_tokens_unprocessed(self, text, stack)
rawdata = self.__class__._prof_data.pop()
data = sorted(((s, repr(r).strip('u\'').replace('\\\\', '\\')[:65],
n, 1000 * t, 1000 * t / n)
diff --git a/pygments/lexers/__init__.py b/pygments/lexers/__init__.py
index a5691171..12e6ab19 100644
--- a/pygments/lexers/__init__.py
+++ b/pygments/lexers/__init__.py
@@ -5,7 +5,7 @@
Pygments lexers.
- :copyright: Copyright 2006-2019 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2020 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
@@ -235,8 +235,7 @@ def _iter_lexerclasses(plugins=True):
_load_lexers(module_name)
yield _lexer_cache[name]
if plugins:
- for lexer in find_plugin_lexers():
- yield lexer
+ yield from find_plugin_lexers()
def guess_lexer_for_filename(_fn, _text, **options):
diff --git a/pygments/lexers/_asy_builtins.py b/pygments/lexers/_asy_builtins.py
index b76c22ab..51ca5068 100644
--- a/pygments/lexers/_asy_builtins.py
+++ b/pygments/lexers/_asy_builtins.py
@@ -10,7 +10,7 @@
TODO: perl/python script in Asymptote SVN similar to asy-list.pl but only
for function and variable names.
- :copyright: Copyright 2006-2019 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2020 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
diff --git a/pygments/lexers/_cl_builtins.py b/pygments/lexers/_cl_builtins.py
index 7722e81f..d09ae0fe 100644
--- a/pygments/lexers/_cl_builtins.py
+++ b/pygments/lexers/_cl_builtins.py
@@ -5,7 +5,7 @@
ANSI Common Lisp builtins.
- :copyright: Copyright 2006-2019 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2020 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
diff --git a/pygments/lexers/_cocoa_builtins.py b/pygments/lexers/_cocoa_builtins.py
index d5e7680c..cbd26d9d 100644
--- a/pygments/lexers/_cocoa_builtins.py
+++ b/pygments/lexers/_cocoa_builtins.py
@@ -8,7 +8,7 @@
File may be also used as standalone generator for aboves.
- :copyright: Copyright 2006-2019 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2020 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
diff --git a/pygments/lexers/_csound_builtins.py b/pygments/lexers/_csound_builtins.py
index e4f8fc7e..98c5a3a9 100644
--- a/pygments/lexers/_csound_builtins.py
+++ b/pygments/lexers/_csound_builtins.py
@@ -3,7 +3,7 @@
pygments.lexers._csound_builtins
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
- :copyright: Copyright 2006-2019 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2020 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
@@ -200,6 +200,7 @@ adsyn
adsynt
adsynt2
aftouch
+allpole
alpass
alwayson
ampdb
@@ -207,6 +208,10 @@ ampdbfs
ampmidi
ampmidicurve
ampmidid
+apoleparams
+arduinoRead
+arduinoStart
+arduinoStop
areson
aresonk
atone
@@ -229,6 +234,7 @@ binit
biquad
biquada
birnd
+bob
bpf
bpfcos
bqrez
@@ -286,6 +292,11 @@ clockoff
clockon
cmp
cmplxprod
+cntCreate
+cntCycles
+cntRead
+cntReset
+cntState
comb
combinv
compilecsd
@@ -305,6 +316,8 @@ cosinv
cosseg
cossegb
cossegr
+count
+count_i
cps2pch
cpsmidi
cpsmidib
@@ -492,7 +505,9 @@ ftresizei
ftsamplebank
ftsave
ftsavek
+ftset
ftslice
+ftslicei
ftsr
gain
gainslider
@@ -757,6 +772,8 @@ la_k_upper_solve_mc
la_k_upper_solve_mr
la_k_vc_set
la_k_vr_set
+lag
+lagud
lastcycle
lenarray
lfo
@@ -803,6 +820,8 @@ loscilx
lowpass2
lowres
lowresx
+lpcanal
+lpcfilter
lpf18
lpform
lpfreson
@@ -826,6 +845,7 @@ lua_ikopcall_off
lua_iopcall
lua_iopcall_off
lua_opdef
+lufs
mac
maca
madsr
@@ -1053,12 +1073,11 @@ printk
printk2
printks
printks2
+println
prints
+printsk
product
pset
-ptable
-ptable3
-ptablei
ptablew
ptrack
puts
@@ -1075,6 +1094,7 @@ pvsanal
pvsarp
pvsbandp
pvsbandr
+pvsbandwidth
pvsbin
pvsblur
pvsbuffer
@@ -1083,6 +1103,7 @@ pvsbufread2
pvscale
pvscent
pvsceps
+pvscfs
pvscross
pvsdemix
pvsdiskin
@@ -1102,6 +1123,7 @@ pvsin
pvsinfo
pvsinit
pvslock
+pvslpc
pvsmaska
pvsmix
pvsmooth
@@ -1227,6 +1249,7 @@ remove
repluck
reshapearray
reson
+resonbnk
resonk
resonr
resonx
@@ -1244,6 +1267,7 @@ rifft
rms
rnd
rnd31
+rndseed
round
rspline
rtclock
@@ -1352,6 +1376,7 @@ spsend
sqrt
squinewave
statevar
+sterrain
stix
strcat
strcatk
@@ -1463,6 +1488,8 @@ trcross
trfilter
trhighest
trigger
+trighold
+trigphasor
trigseq
trim
trim_i
@@ -1545,6 +1572,7 @@ vpow
vpow_i
vpowv
vpowv_i
+vps
vpvoc
vrandh
vrandi
@@ -1630,6 +1658,9 @@ maxtab
mintab
pop
pop_f
+ptable
+ptable3
+ptablei
ptableiw
push
push_f
diff --git a/pygments/lexers/_lasso_builtins.py b/pygments/lexers/_lasso_builtins.py
index 1d2719da..06261279 100644
--- a/pygments/lexers/_lasso_builtins.py
+++ b/pygments/lexers/_lasso_builtins.py
@@ -5,7 +5,7 @@
Built-in Lasso types, traits, methods, and members.
- :copyright: Copyright 2006-2019 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2020 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
diff --git a/pygments/lexers/_lua_builtins.py b/pygments/lexers/_lua_builtins.py
index 9186d081..c1bc0d71 100644
--- a/pygments/lexers/_lua_builtins.py
+++ b/pygments/lexers/_lua_builtins.py
@@ -9,7 +9,7 @@
Do not edit the MODULES dict by hand.
- :copyright: Copyright 2006-2019 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2020 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
diff --git a/pygments/lexers/_mapping.py b/pygments/lexers/_mapping.py
index dad78679..5474daf9 100644
--- a/pygments/lexers/_mapping.py
+++ b/pygments/lexers/_mapping.py
@@ -51,6 +51,7 @@ LEXERS = {
'BBCodeLexer': ('pygments.lexers.markup', 'BBCode', ('bbcode',), (), ('text/x-bbcode',)),
'BCLexer': ('pygments.lexers.algebra', 'BC', ('bc',), ('*.bc',), ()),
'BSTLexer': ('pygments.lexers.bibtex', 'BST', ('bst', 'bst-pybtex'), ('*.bst',), ()),
+ 'BareLexer': ('pygments.lexers.bare', 'BARE', ('bare',), ('*.bare',), ()),
'BaseMakefileLexer': ('pygments.lexers.make', 'Base Makefile', ('basemake',), (), ()),
'BashLexer': ('pygments.lexers.shell', 'Bash', ('bash', 'sh', 'ksh', 'zsh', 'shell'), ('*.sh', '*.ksh', '*.bash', '*.ebuild', '*.eclass', '*.exheres-0', '*.exlib', '*.zsh', '.bashrc', 'bashrc', '.bash_*', 'bash_*', 'zshrc', '.zshrc', 'PKGBUILD'), ('application/x-sh', 'application/x-shellscript', 'text/x-shellscript')),
'BashSessionLexer': ('pygments.lexers.shell', 'Bash Session', ('console', 'shell-session'), ('*.sh-session', '*.shell-session'), ('application/x-shell-session', 'application/x-sh-session')),
@@ -271,7 +272,7 @@ LEXERS = {
'MakoLexer': ('pygments.lexers.templates', 'Mako', ('mako',), ('*.mao',), ('application/x-mako',)),
'MakoXmlLexer': ('pygments.lexers.templates', 'XML+Mako', ('xml+mako',), (), ('application/xml+mako',)),
'MaqlLexer': ('pygments.lexers.business', 'MAQL', ('maql',), ('*.maql',), ('text/x-gooddata-maql', 'application/x-gooddata-maql')),
- 'MarkdownLexer': ('pygments.lexers.markup', 'markdown', ('md',), ('*.md',), ('text/x-markdown',)),
+ 'MarkdownLexer': ('pygments.lexers.markup', 'markdown', ('md',), ('*.md', '*.markdown'), ('text/x-markdown',)),
'MaskLexer': ('pygments.lexers.javascript', 'Mask', ('mask',), ('*.mask',), ('text/x-mask',)),
'MasonLexer': ('pygments.lexers.templates', 'Mason', ('mason',), ('*.m', '*.mhtml', '*.mc', '*.mi', 'autohandler', 'dhandler'), ('application/x-mason',)),
'MathematicaLexer': ('pygments.lexers.algebra', 'Mathematica', ('mathematica', 'mma', 'nb'), ('*.nb', '*.cdf', '*.nbp', '*.ma'), ('application/mathematica', 'application/vnd.wolfram.mathematica', 'application/vnd.wolfram.mathematica.package', 'application/vnd.wolfram.cdf')),
@@ -338,6 +339,7 @@ LEXERS = {
'PikeLexer': ('pygments.lexers.c_like', 'Pike', ('pike',), ('*.pike', '*.pmod'), ('text/x-pike',)),
'PkgConfigLexer': ('pygments.lexers.configs', 'PkgConfig', ('pkgconfig',), ('*.pc',), ()),
'PlPgsqlLexer': ('pygments.lexers.sql', 'PL/pgSQL', ('plpgsql',), (), ('text/x-plpgsql',)),
+ 'PointlessLexer': ('pygments.lexers.pointless', 'Pointless', ('pointless',), ('*.ptls',), ()),
'PonyLexer': ('pygments.lexers.pony', 'Pony', ('pony',), ('*.pony',), ()),
'PostScriptLexer': ('pygments.lexers.graphics', 'PostScript', ('postscript', 'postscr'), ('*.ps', '*.eps'), ('application/postscript',)),
'PostgresConsoleLexer': ('pygments.lexers.sql', 'PostgreSQL console (psql)', ('psql', 'postgresql-console', 'postgres-console'), (), ('text/x-postgresql-psql',)),
@@ -347,8 +349,10 @@ LEXERS = {
'PowerShellSessionLexer': ('pygments.lexers.shell', 'PowerShell Session', ('ps1con',), (), ()),
'PraatLexer': ('pygments.lexers.praat', 'Praat', ('praat',), ('*.praat', '*.proc', '*.psc'), ()),
'PrologLexer': ('pygments.lexers.prolog', 'Prolog', ('prolog',), ('*.ecl', '*.prolog', '*.pro', '*.pl'), ('text/x-prolog',)),
+ 'PromQLLexer': ('pygments.lexers.promql', 'PromQL', ('promql',), ('*.promql',), ()),
'PropertiesLexer': ('pygments.lexers.configs', 'Properties', ('properties', 'jproperties'), ('*.properties',), ('text/x-java-properties',)),
'ProtoBufLexer': ('pygments.lexers.dsls', 'Protocol Buffer', ('protobuf', 'proto'), ('*.proto',), ()),
+ 'PsyshConsoleLexer': ('pygments.lexers.php', 'PsySH console session for PHP', ('psysh',), (), ()),
'PugLexer': ('pygments.lexers.html', 'Pug', ('pug', 'jade'), ('*.pug', '*.jade'), ('text/x-pug', 'text/x-jade')),
'PuppetLexer': ('pygments.lexers.dsls', 'Puppet', ('puppet',), ('*.pp',), ()),
'PyPyLogLexer': ('pygments.lexers.console', 'PyPy Log', ('pypylog', 'pypy'), ('*.pypylog',), ('application/x-pypylog',)),
diff --git a/pygments/lexers/_mql_builtins.py b/pygments/lexers/_mql_builtins.py
index e59fd910..f5c90f70 100644
--- a/pygments/lexers/_mql_builtins.py
+++ b/pygments/lexers/_mql_builtins.py
@@ -5,7 +5,7 @@
Builtins for the MqlLexer.
- :copyright: Copyright 2006-2019 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2020 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
types = (
diff --git a/pygments/lexers/_mysql_builtins.py b/pygments/lexers/_mysql_builtins.py
new file mode 100644
index 00000000..121054c3
--- /dev/null
+++ b/pygments/lexers/_mysql_builtins.py
@@ -0,0 +1,1282 @@
+# -*- coding: utf-8 -*-
+"""
+ pygments.lexers._mysql_builtins
+ ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+ Self-updating data files for the MySQL lexer.
+
+ :copyright: Copyright 2020 by the Pygments team, see AUTHORS.
+ :license: BSD, see LICENSE for details.
+"""
+
+
+MYSQL_CONSTANTS = (
+ 'false',
+ 'null',
+ 'true',
+ 'unknown',
+)
+
+
+# At this time, no easily-parsed, definitive list of data types
+# has been found in the MySQL source code or documentation. (The
+# `sql/sql_yacc.yy` file is definitive but is difficult to parse.)
+# Therefore these types are currently maintained manually.
+#
+# Some words in this list -- like "long", "national", "precision",
+# and "varying" -- appear to only occur in combination with other
+# data type keywords. Therefore they are included as separate words
+# even though they do not naturally occur in syntax separately.
+#
+# This list is also used to strip data types out of the list of
+# MySQL keywords, which is automatically updated later in the file.
+#
+MYSQL_DATATYPES = (
+ # Numeric data types
+ 'bigint',
+ 'bit',
+ 'bool',
+ 'boolean',
+ 'dec',
+ 'decimal',
+ 'double',
+ 'fixed',
+ 'float',
+ 'float4',
+ 'float8',
+ 'int',
+ 'int1',
+ 'int2',
+ 'int3',
+ 'int4',
+ 'int8',
+ 'integer',
+ 'mediumint',
+ 'middleint',
+ 'numeric',
+ 'precision',
+ 'real',
+ 'serial',
+ 'smallint',
+ 'tinyint',
+
+ # Date and time data types
+ 'date',
+ 'datetime',
+ 'time',
+ 'timestamp',
+ 'year',
+
+ # String data types
+ 'binary',
+ 'blob',
+ 'char',
+ 'enum',
+ 'long',
+ 'longblob',
+ 'longtext',
+ 'mediumblob',
+ 'mediumtext',
+ 'national',
+ 'nchar',
+ 'nvarchar',
+ 'set',
+ 'text',
+ 'tinyblob',
+ 'tinytext',
+ 'varbinary',
+ 'varchar',
+ 'varcharacter',
+ 'varying',
+
+ # Spatial data types
+ 'geometry',
+ 'geometrycollection',
+ 'linestring',
+ 'multilinestring',
+ 'multipoint',
+ 'multipolygon',
+ 'point',
+ 'polygon',
+
+ # JSON data types
+ 'json',
+)
+
+# Everything below this line is auto-generated from the MySQL source code.
+# Run this file in Python and it will update itself.
+# -----------------------------------------------------------------------------
+
+MYSQL_FUNCTIONS = (
+ 'abs',
+ 'acos',
+ 'adddate',
+ 'addtime',
+ 'aes_decrypt',
+ 'aes_encrypt',
+ 'any_value',
+ 'asin',
+ 'atan',
+ 'atan2',
+ 'benchmark',
+ 'bin',
+ 'bin_to_uuid',
+ 'bit_and',
+ 'bit_count',
+ 'bit_length',
+ 'bit_or',
+ 'bit_xor',
+ 'can_access_column',
+ 'can_access_database',
+ 'can_access_event',
+ 'can_access_resource_group',
+ 'can_access_routine',
+ 'can_access_table',
+ 'can_access_trigger',
+ 'can_access_view',
+ 'cast',
+ 'ceil',
+ 'ceiling',
+ 'char_length',
+ 'character_length',
+ 'coercibility',
+ 'compress',
+ 'concat',
+ 'concat_ws',
+ 'connection_id',
+ 'conv',
+ 'convert_cpu_id_mask',
+ 'convert_interval_to_user_interval',
+ 'convert_tz',
+ 'cos',
+ 'cot',
+ 'count',
+ 'crc32',
+ 'curdate',
+ 'current_role',
+ 'curtime',
+ 'date_add',
+ 'date_format',
+ 'date_sub',
+ 'datediff',
+ 'dayname',
+ 'dayofmonth',
+ 'dayofweek',
+ 'dayofyear',
+ 'degrees',
+ 'elt',
+ 'exp',
+ 'export_set',
+ 'extract',
+ 'extractvalue',
+ 'field',
+ 'find_in_set',
+ 'floor',
+ 'format_bytes',
+ 'format_pico_time',
+ 'found_rows',
+ 'from_base64',
+ 'from_days',
+ 'from_unixtime',
+ 'get_dd_column_privileges',
+ 'get_dd_create_options',
+ 'get_dd_index_private_data',
+ 'get_dd_index_sub_part_length',
+ 'get_dd_property_key_value',
+ 'get_dd_tablespace_private_data',
+ 'get_lock',
+ 'greatest',
+ 'group_concat',
+ 'gtid_subset',
+ 'gtid_subtract',
+ 'hex',
+ 'icu_version',
+ 'ifnull',
+ 'inet6_aton',
+ 'inet6_ntoa',
+ 'inet_aton',
+ 'inet_ntoa',
+ 'instr',
+ 'internal_auto_increment',
+ 'internal_avg_row_length',
+ 'internal_check_time',
+ 'internal_checksum',
+ 'internal_data_free',
+ 'internal_data_length',
+ 'internal_dd_char_length',
+ 'internal_get_comment_or_error',
+ 'internal_get_dd_column_extra',
+ 'internal_get_enabled_role_json',
+ 'internal_get_hostname',
+ 'internal_get_mandatory_roles_json',
+ 'internal_get_partition_nodegroup',
+ 'internal_get_username',
+ 'internal_get_view_warning_or_error',
+ 'internal_index_column_cardinality',
+ 'internal_index_length',
+ 'internal_is_enabled_role',
+ 'internal_is_mandatory_role',
+ 'internal_keys_disabled',
+ 'internal_max_data_length',
+ 'internal_table_rows',
+ 'internal_tablespace_autoextend_size',
+ 'internal_tablespace_data_free',
+ 'internal_tablespace_extent_size',
+ 'internal_tablespace_extra',
+ 'internal_tablespace_free_extents',
+ 'internal_tablespace_id',
+ 'internal_tablespace_initial_size',
+ 'internal_tablespace_logfile_group_name',
+ 'internal_tablespace_logfile_group_number',
+ 'internal_tablespace_maximum_size',
+ 'internal_tablespace_row_format',
+ 'internal_tablespace_status',
+ 'internal_tablespace_total_extents',
+ 'internal_tablespace_type',
+ 'internal_tablespace_version',
+ 'internal_update_time',
+ 'is_free_lock',
+ 'is_ipv4',
+ 'is_ipv4_compat',
+ 'is_ipv4_mapped',
+ 'is_ipv6',
+ 'is_used_lock',
+ 'is_uuid',
+ 'is_visible_dd_object',
+ 'isnull',
+ 'json_array',
+ 'json_array_append',
+ 'json_array_insert',
+ 'json_arrayagg',
+ 'json_contains',
+ 'json_contains_path',
+ 'json_depth',
+ 'json_extract',
+ 'json_insert',
+ 'json_keys',
+ 'json_length',
+ 'json_merge',
+ 'json_merge_patch',
+ 'json_merge_preserve',
+ 'json_object',
+ 'json_objectagg',
+ 'json_overlaps',
+ 'json_pretty',
+ 'json_quote',
+ 'json_remove',
+ 'json_replace',
+ 'json_schema_valid',
+ 'json_schema_validation_report',
+ 'json_search',
+ 'json_set',
+ 'json_storage_free',
+ 'json_storage_size',
+ 'json_type',
+ 'json_unquote',
+ 'json_valid',
+ 'last_day',
+ 'last_insert_id',
+ 'lcase',
+ 'least',
+ 'length',
+ 'like_range_max',
+ 'like_range_min',
+ 'ln',
+ 'load_file',
+ 'locate',
+ 'log',
+ 'log10',
+ 'log2',
+ 'lower',
+ 'lpad',
+ 'ltrim',
+ 'make_set',
+ 'makedate',
+ 'maketime',
+ 'master_pos_wait',
+ 'max',
+ 'mbrcontains',
+ 'mbrcoveredby',
+ 'mbrcovers',
+ 'mbrdisjoint',
+ 'mbrequals',
+ 'mbrintersects',
+ 'mbroverlaps',
+ 'mbrtouches',
+ 'mbrwithin',
+ 'md5',
+ 'mid',
+ 'min',
+ 'monthname',
+ 'name_const',
+ 'now',
+ 'nullif',
+ 'oct',
+ 'octet_length',
+ 'ord',
+ 'period_add',
+ 'period_diff',
+ 'pi',
+ 'position',
+ 'pow',
+ 'power',
+ 'ps_current_thread_id',
+ 'ps_thread_id',
+ 'quote',
+ 'radians',
+ 'rand',
+ 'random_bytes',
+ 'regexp_instr',
+ 'regexp_like',
+ 'regexp_replace',
+ 'regexp_substr',
+ 'release_all_locks',
+ 'release_lock',
+ 'remove_dd_property_key',
+ 'reverse',
+ 'roles_graphml',
+ 'round',
+ 'rpad',
+ 'rtrim',
+ 'sec_to_time',
+ 'session_user',
+ 'sha',
+ 'sha1',
+ 'sha2',
+ 'sign',
+ 'sin',
+ 'sleep',
+ 'soundex',
+ 'space',
+ 'sqrt',
+ 'st_area',
+ 'st_asbinary',
+ 'st_asgeojson',
+ 'st_astext',
+ 'st_aswkb',
+ 'st_aswkt',
+ 'st_buffer',
+ 'st_buffer_strategy',
+ 'st_centroid',
+ 'st_contains',
+ 'st_convexhull',
+ 'st_crosses',
+ 'st_difference',
+ 'st_dimension',
+ 'st_disjoint',
+ 'st_distance',
+ 'st_distance_sphere',
+ 'st_endpoint',
+ 'st_envelope',
+ 'st_equals',
+ 'st_exteriorring',
+ 'st_geohash',
+ 'st_geomcollfromtext',
+ 'st_geomcollfromtxt',
+ 'st_geomcollfromwkb',
+ 'st_geometrycollectionfromtext',
+ 'st_geometrycollectionfromwkb',
+ 'st_geometryfromtext',
+ 'st_geometryfromwkb',
+ 'st_geometryn',
+ 'st_geometrytype',
+ 'st_geomfromgeojson',
+ 'st_geomfromtext',
+ 'st_geomfromwkb',
+ 'st_interiorringn',
+ 'st_intersection',
+ 'st_intersects',
+ 'st_isclosed',
+ 'st_isempty',
+ 'st_issimple',
+ 'st_isvalid',
+ 'st_latfromgeohash',
+ 'st_latitude',
+ 'st_length',
+ 'st_linefromtext',
+ 'st_linefromwkb',
+ 'st_linestringfromtext',
+ 'st_linestringfromwkb',
+ 'st_longfromgeohash',
+ 'st_longitude',
+ 'st_makeenvelope',
+ 'st_mlinefromtext',
+ 'st_mlinefromwkb',
+ 'st_mpointfromtext',
+ 'st_mpointfromwkb',
+ 'st_mpolyfromtext',
+ 'st_mpolyfromwkb',
+ 'st_multilinestringfromtext',
+ 'st_multilinestringfromwkb',
+ 'st_multipointfromtext',
+ 'st_multipointfromwkb',
+ 'st_multipolygonfromtext',
+ 'st_multipolygonfromwkb',
+ 'st_numgeometries',
+ 'st_numinteriorring',
+ 'st_numinteriorrings',
+ 'st_numpoints',
+ 'st_overlaps',
+ 'st_pointfromgeohash',
+ 'st_pointfromtext',
+ 'st_pointfromwkb',
+ 'st_pointn',
+ 'st_polyfromtext',
+ 'st_polyfromwkb',
+ 'st_polygonfromtext',
+ 'st_polygonfromwkb',
+ 'st_simplify',
+ 'st_srid',
+ 'st_startpoint',
+ 'st_swapxy',
+ 'st_symdifference',
+ 'st_touches',
+ 'st_transform',
+ 'st_union',
+ 'st_validate',
+ 'st_within',
+ 'st_x',
+ 'st_y',
+ 'statement_digest',
+ 'statement_digest_text',
+ 'std',
+ 'stddev',
+ 'stddev_pop',
+ 'stddev_samp',
+ 'str_to_date',
+ 'strcmp',
+ 'subdate',
+ 'substr',
+ 'substring',
+ 'substring_index',
+ 'subtime',
+ 'sum',
+ 'sysdate',
+ 'system_user',
+ 'tan',
+ 'time_format',
+ 'time_to_sec',
+ 'timediff',
+ 'to_base64',
+ 'to_days',
+ 'to_seconds',
+ 'trim',
+ 'ucase',
+ 'uncompress',
+ 'uncompressed_length',
+ 'unhex',
+ 'unix_timestamp',
+ 'updatexml',
+ 'upper',
+ 'uuid',
+ 'uuid_short',
+ 'uuid_to_bin',
+ 'validate_password_strength',
+ 'var_pop',
+ 'var_samp',
+ 'variance',
+ 'version',
+ 'wait_for_executed_gtid_set',
+ 'wait_until_sql_thread_after_gtids',
+ 'weekday',
+ 'weekofyear',
+ 'yearweek',
+)
+
+
+MYSQL_OPTIMIZER_HINTS = (
+ 'bka',
+ 'bnl',
+ 'dupsweedout',
+ 'firstmatch',
+ 'group_index',
+ 'hash_join',
+ 'index',
+ 'index_merge',
+ 'intoexists',
+ 'join_fixed_order',
+ 'join_index',
+ 'join_order',
+ 'join_prefix',
+ 'join_suffix',
+ 'loosescan',
+ 'materialization',
+ 'max_execution_time',
+ 'merge',
+ 'mrr',
+ 'no_bka',
+ 'no_bnl',
+ 'no_group_index',
+ 'no_hash_join',
+ 'no_icp',
+ 'no_index',
+ 'no_index_merge',
+ 'no_join_index',
+ 'no_merge',
+ 'no_mrr',
+ 'no_order_index',
+ 'no_range_optimization',
+ 'no_semijoin',
+ 'no_skip_scan',
+ 'order_index',
+ 'qb_name',
+ 'resource_group',
+ 'semijoin',
+ 'set_var',
+ 'skip_scan',
+ 'subquery',
+)
+
+
+MYSQL_KEYWORDS = (
+ 'accessible',
+ 'account',
+ 'action',
+ 'active',
+ 'add',
+ 'admin',
+ 'after',
+ 'against',
+ 'aggregate',
+ 'algorithm',
+ 'all',
+ 'alter',
+ 'always',
+ 'analyze',
+ 'and',
+ 'any',
+ 'array',
+ 'as',
+ 'asc',
+ 'ascii',
+ 'asensitive',
+ 'at',
+ 'attribute',
+ 'auto_increment',
+ 'autoextend_size',
+ 'avg',
+ 'avg_row_length',
+ 'backup',
+ 'before',
+ 'begin',
+ 'between',
+ 'binlog',
+ 'block',
+ 'both',
+ 'btree',
+ 'buckets',
+ 'by',
+ 'byte',
+ 'cache',
+ 'call',
+ 'cascade',
+ 'cascaded',
+ 'case',
+ 'catalog_name',
+ 'chain',
+ 'change',
+ 'changed',
+ 'channel',
+ 'character',
+ 'charset',
+ 'check',
+ 'checksum',
+ 'cipher',
+ 'class_origin',
+ 'client',
+ 'clone',
+ 'close',
+ 'coalesce',
+ 'code',
+ 'collate',
+ 'collation',
+ 'column',
+ 'column_format',
+ 'column_name',
+ 'columns',
+ 'comment',
+ 'commit',
+ 'committed',
+ 'compact',
+ 'completion',
+ 'component',
+ 'compressed',
+ 'compression',
+ 'concurrent',
+ 'condition',
+ 'connection',
+ 'consistent',
+ 'constraint',
+ 'constraint_catalog',
+ 'constraint_name',
+ 'constraint_schema',
+ 'contains',
+ 'context',
+ 'continue',
+ 'convert',
+ 'cpu',
+ 'create',
+ 'cross',
+ 'cube',
+ 'cume_dist',
+ 'current',
+ 'current_date',
+ 'current_time',
+ 'current_timestamp',
+ 'current_user',
+ 'cursor',
+ 'cursor_name',
+ 'data',
+ 'database',
+ 'databases',
+ 'datafile',
+ 'day',
+ 'day_hour',
+ 'day_microsecond',
+ 'day_minute',
+ 'day_second',
+ 'deallocate',
+ 'declare',
+ 'default',
+ 'default_auth',
+ 'definer',
+ 'definition',
+ 'delay_key_write',
+ 'delayed',
+ 'delete',
+ 'dense_rank',
+ 'desc',
+ 'describe',
+ 'description',
+ 'deterministic',
+ 'diagnostics',
+ 'directory',
+ 'disable',
+ 'discard',
+ 'disk',
+ 'distinct',
+ 'distinctrow',
+ 'div',
+ 'do',
+ 'drop',
+ 'dual',
+ 'dumpfile',
+ 'duplicate',
+ 'dynamic',
+ 'each',
+ 'else',
+ 'elseif',
+ 'empty',
+ 'enable',
+ 'enclosed',
+ 'encryption',
+ 'end',
+ 'ends',
+ 'enforced',
+ 'engine',
+ 'engine_attribute',
+ 'engines',
+ 'error',
+ 'errors',
+ 'escape',
+ 'escaped',
+ 'event',
+ 'events',
+ 'every',
+ 'except',
+ 'exchange',
+ 'exclude',
+ 'execute',
+ 'exists',
+ 'exit',
+ 'expansion',
+ 'expire',
+ 'explain',
+ 'export',
+ 'extended',
+ 'extent_size',
+ 'failed_login_attempts',
+ 'false',
+ 'fast',
+ 'faults',
+ 'fetch',
+ 'fields',
+ 'file',
+ 'file_block_size',
+ 'filter',
+ 'first',
+ 'first_value',
+ 'flush',
+ 'following',
+ 'follows',
+ 'for',
+ 'force',
+ 'foreign',
+ 'format',
+ 'found',
+ 'from',
+ 'full',
+ 'fulltext',
+ 'function',
+ 'general',
+ 'generated',
+ 'geomcollection',
+ 'get',
+ 'get_format',
+ 'get_master_public_key',
+ 'global',
+ 'grant',
+ 'grants',
+ 'group',
+ 'group_replication',
+ 'grouping',
+ 'groups',
+ 'handler',
+ 'hash',
+ 'having',
+ 'help',
+ 'high_priority',
+ 'histogram',
+ 'history',
+ 'host',
+ 'hosts',
+ 'hour',
+ 'hour_microsecond',
+ 'hour_minute',
+ 'hour_second',
+ 'identified',
+ 'if',
+ 'ignore',
+ 'ignore_server_ids',
+ 'import',
+ 'in',
+ 'inactive',
+ 'index',
+ 'indexes',
+ 'infile',
+ 'initial_size',
+ 'inner',
+ 'inout',
+ 'insensitive',
+ 'insert',
+ 'insert_method',
+ 'install',
+ 'instance',
+ 'interval',
+ 'into',
+ 'invisible',
+ 'invoker',
+ 'io',
+ 'io_after_gtids',
+ 'io_before_gtids',
+ 'io_thread',
+ 'ipc',
+ 'is',
+ 'isolation',
+ 'issuer',
+ 'iterate',
+ 'join',
+ 'json_table',
+ 'json_value',
+ 'key',
+ 'key_block_size',
+ 'keys',
+ 'kill',
+ 'lag',
+ 'language',
+ 'last',
+ 'last_value',
+ 'lateral',
+ 'lead',
+ 'leading',
+ 'leave',
+ 'leaves',
+ 'left',
+ 'less',
+ 'level',
+ 'like',
+ 'limit',
+ 'linear',
+ 'lines',
+ 'list',
+ 'load',
+ 'local',
+ 'localtime',
+ 'localtimestamp',
+ 'lock',
+ 'locked',
+ 'locks',
+ 'logfile',
+ 'logs',
+ 'loop',
+ 'low_priority',
+ 'master',
+ 'master_auto_position',
+ 'master_bind',
+ 'master_compression_algorithms',
+ 'master_connect_retry',
+ 'master_delay',
+ 'master_heartbeat_period',
+ 'master_host',
+ 'master_log_file',
+ 'master_log_pos',
+ 'master_password',
+ 'master_port',
+ 'master_public_key_path',
+ 'master_retry_count',
+ 'master_server_id',
+ 'master_ssl',
+ 'master_ssl_ca',
+ 'master_ssl_capath',
+ 'master_ssl_cert',
+ 'master_ssl_cipher',
+ 'master_ssl_crl',
+ 'master_ssl_crlpath',
+ 'master_ssl_key',
+ 'master_ssl_verify_server_cert',
+ 'master_tls_ciphersuites',
+ 'master_tls_version',
+ 'master_user',
+ 'master_zstd_compression_level',
+ 'match',
+ 'max_connections_per_hour',
+ 'max_queries_per_hour',
+ 'max_rows',
+ 'max_size',
+ 'max_updates_per_hour',
+ 'max_user_connections',
+ 'maxvalue',
+ 'medium',
+ 'member',
+ 'memory',
+ 'merge',
+ 'message_text',
+ 'microsecond',
+ 'migrate',
+ 'min_rows',
+ 'minute',
+ 'minute_microsecond',
+ 'minute_second',
+ 'mod',
+ 'mode',
+ 'modifies',
+ 'modify',
+ 'month',
+ 'mutex',
+ 'mysql_errno',
+ 'name',
+ 'names',
+ 'natural',
+ 'ndb',
+ 'ndbcluster',
+ 'nested',
+ 'network_namespace',
+ 'never',
+ 'new',
+ 'next',
+ 'no',
+ 'no_wait',
+ 'no_write_to_binlog',
+ 'nodegroup',
+ 'none',
+ 'not',
+ 'nowait',
+ 'nth_value',
+ 'ntile',
+ 'null',
+ 'nulls',
+ 'number',
+ 'of',
+ 'off',
+ 'offset',
+ 'oj',
+ 'old',
+ 'on',
+ 'one',
+ 'only',
+ 'open',
+ 'optimize',
+ 'optimizer_costs',
+ 'option',
+ 'optional',
+ 'optionally',
+ 'options',
+ 'or',
+ 'order',
+ 'ordinality',
+ 'organization',
+ 'others',
+ 'out',
+ 'outer',
+ 'outfile',
+ 'over',
+ 'owner',
+ 'pack_keys',
+ 'page',
+ 'parser',
+ 'partial',
+ 'partition',
+ 'partitioning',
+ 'partitions',
+ 'password',
+ 'password_lock_time',
+ 'path',
+ 'percent_rank',
+ 'persist',
+ 'persist_only',
+ 'phase',
+ 'plugin',
+ 'plugin_dir',
+ 'plugins',
+ 'port',
+ 'precedes',
+ 'preceding',
+ 'prepare',
+ 'preserve',
+ 'prev',
+ 'primary',
+ 'privilege_checks_user',
+ 'privileges',
+ 'procedure',
+ 'process',
+ 'processlist',
+ 'profile',
+ 'profiles',
+ 'proxy',
+ 'purge',
+ 'quarter',
+ 'query',
+ 'quick',
+ 'random',
+ 'range',
+ 'rank',
+ 'read',
+ 'read_only',
+ 'read_write',
+ 'reads',
+ 'rebuild',
+ 'recover',
+ 'recursive',
+ 'redo_buffer_size',
+ 'redundant',
+ 'reference',
+ 'references',
+ 'regexp',
+ 'relay',
+ 'relay_log_file',
+ 'relay_log_pos',
+ 'relay_thread',
+ 'relaylog',
+ 'release',
+ 'reload',
+ 'remove',
+ 'rename',
+ 'reorganize',
+ 'repair',
+ 'repeat',
+ 'repeatable',
+ 'replace',
+ 'replicate_do_db',
+ 'replicate_do_table',
+ 'replicate_ignore_db',
+ 'replicate_ignore_table',
+ 'replicate_rewrite_db',
+ 'replicate_wild_do_table',
+ 'replicate_wild_ignore_table',
+ 'replication',
+ 'require',
+ 'require_row_format',
+ 'require_table_primary_key_check',
+ 'reset',
+ 'resignal',
+ 'resource',
+ 'respect',
+ 'restart',
+ 'restore',
+ 'restrict',
+ 'resume',
+ 'retain',
+ 'return',
+ 'returned_sqlstate',
+ 'returning',
+ 'returns',
+ 'reuse',
+ 'reverse',
+ 'revoke',
+ 'right',
+ 'rlike',
+ 'role',
+ 'rollback',
+ 'rollup',
+ 'rotate',
+ 'routine',
+ 'row',
+ 'row_count',
+ 'row_format',
+ 'row_number',
+ 'rows',
+ 'rtree',
+ 'savepoint',
+ 'schedule',
+ 'schema',
+ 'schema_name',
+ 'schemas',
+ 'second',
+ 'second_microsecond',
+ 'secondary',
+ 'secondary_engine',
+ 'secondary_engine_attribute',
+ 'secondary_load',
+ 'secondary_unload',
+ 'security',
+ 'select',
+ 'sensitive',
+ 'separator',
+ 'serializable',
+ 'server',
+ 'session',
+ 'share',
+ 'show',
+ 'shutdown',
+ 'signal',
+ 'signed',
+ 'simple',
+ 'skip',
+ 'slave',
+ 'slow',
+ 'snapshot',
+ 'socket',
+ 'some',
+ 'soname',
+ 'sounds',
+ 'source',
+ 'spatial',
+ 'specific',
+ 'sql',
+ 'sql_after_gtids',
+ 'sql_after_mts_gaps',
+ 'sql_before_gtids',
+ 'sql_big_result',
+ 'sql_buffer_result',
+ 'sql_calc_found_rows',
+ 'sql_no_cache',
+ 'sql_small_result',
+ 'sql_thread',
+ 'sql_tsi_day',
+ 'sql_tsi_hour',
+ 'sql_tsi_minute',
+ 'sql_tsi_month',
+ 'sql_tsi_quarter',
+ 'sql_tsi_second',
+ 'sql_tsi_week',
+ 'sql_tsi_year',
+ 'sqlexception',
+ 'sqlstate',
+ 'sqlwarning',
+ 'srid',
+ 'ssl',
+ 'stacked',
+ 'start',
+ 'starting',
+ 'starts',
+ 'stats_auto_recalc',
+ 'stats_persistent',
+ 'stats_sample_pages',
+ 'status',
+ 'stop',
+ 'storage',
+ 'stored',
+ 'straight_join',
+ 'stream',
+ 'string',
+ 'subclass_origin',
+ 'subject',
+ 'subpartition',
+ 'subpartitions',
+ 'super',
+ 'suspend',
+ 'swaps',
+ 'switches',
+ 'system',
+ 'table',
+ 'table_checksum',
+ 'table_name',
+ 'tables',
+ 'tablespace',
+ 'temporary',
+ 'temptable',
+ 'terminated',
+ 'than',
+ 'then',
+ 'thread_priority',
+ 'ties',
+ 'timestampadd',
+ 'timestampdiff',
+ 'tls',
+ 'to',
+ 'trailing',
+ 'transaction',
+ 'trigger',
+ 'triggers',
+ 'true',
+ 'truncate',
+ 'type',
+ 'types',
+ 'unbounded',
+ 'uncommitted',
+ 'undefined',
+ 'undo',
+ 'undo_buffer_size',
+ 'undofile',
+ 'unicode',
+ 'uninstall',
+ 'union',
+ 'unique',
+ 'unknown',
+ 'unlock',
+ 'unsigned',
+ 'until',
+ 'update',
+ 'upgrade',
+ 'usage',
+ 'use',
+ 'use_frm',
+ 'user',
+ 'user_resources',
+ 'using',
+ 'utc_date',
+ 'utc_time',
+ 'utc_timestamp',
+ 'validation',
+ 'value',
+ 'values',
+ 'variables',
+ 'vcpu',
+ 'view',
+ 'virtual',
+ 'visible',
+ 'wait',
+ 'warnings',
+ 'week',
+ 'weight_string',
+ 'when',
+ 'where',
+ 'while',
+ 'window',
+ 'with',
+ 'without',
+ 'work',
+ 'wrapper',
+ 'write',
+ 'x509',
+ 'xa',
+ 'xid',
+ 'xml',
+ 'xor',
+ 'year_month',
+ 'zerofill',
+)
+
+
+if __name__ == '__main__': # pragma: no cover
+ import re
+ from urllib.request import urlopen
+
+ from pygments.util import format_lines
+
+ # MySQL source code
+ SOURCE_URL = 'https://github.com/mysql/mysql-server/raw/8.0'
+ LEX_URL = SOURCE_URL + '/sql/lex.h'
+ ITEM_CREATE_URL = SOURCE_URL + '/sql/item_create.cc'
+
+
+ def update_myself():
+ # Pull content from lex.h.
+ lex_file = urlopen(LEX_URL).read().decode('utf8', errors='ignore')
+ keywords = parse_lex_keywords(lex_file)
+ functions = parse_lex_functions(lex_file)
+ optimizer_hints = parse_lex_optimizer_hints(lex_file)
+
+ # Parse content in item_create.cc.
+ item_create_file = urlopen(ITEM_CREATE_URL).read().decode('utf8', errors='ignore')
+ functions.update(parse_item_create_functions(item_create_file))
+
+ # Remove data types from the set of keywords.
+ keywords -= set(MYSQL_DATATYPES)
+
+ update_content('MYSQL_FUNCTIONS', tuple(sorted(functions)))
+ update_content('MYSQL_KEYWORDS', tuple(sorted(keywords)))
+ update_content('MYSQL_OPTIMIZER_HINTS', tuple(sorted(optimizer_hints)))
+
+
+ def parse_lex_keywords(f):
+ """Parse keywords in lex.h."""
+
+ results = set()
+ for m in re.finditer(r'{SYM(?:_HK)?\("(?P<keyword>[a-z0-9_]+)",', f, flags=re.I):
+ results.add(m.group('keyword').lower())
+
+ if not results:
+ raise ValueError('No keywords found')
+
+ return results
+
+
+ def parse_lex_optimizer_hints(f):
+ """Parse optimizer hints in lex.h."""
+
+ results = set()
+ for m in re.finditer(r'{SYM_H\("(?P<keyword>[a-z0-9_]+)",', f, flags=re.I):
+ results.add(m.group('keyword').lower())
+
+ if not results:
+ raise ValueError('No optimizer hints found')
+
+ return results
+
+
+ def parse_lex_functions(f):
+ """Parse MySQL function names from lex.h."""
+
+ results = set()
+ for m in re.finditer(r'{SYM_FN?\("(?P<function>[a-z0-9_]+)",', f, flags=re.I):
+ results.add(m.group('function').lower())
+
+ if not results:
+ raise ValueError('No lex functions found')
+
+ return results
+
+
+ def parse_item_create_functions(f):
+ """Parse MySQL function names from item_create.cc."""
+
+ results = set()
+ for m in re.finditer(r'{"(?P<function>[^"]+?)",\s*SQL_F[^(]+?\(', f, flags=re.I):
+ results.add(m.group('function').lower())
+
+ if not results:
+ raise ValueError('No item_create functions found')
+
+ return results
+
+
+ def update_content(field_name, content):
+ """Overwrite this file with content parsed from MySQL's source code."""
+
+ with open(__file__) as f:
+ data = f.read()
+
+ # Line to start/end inserting
+ re_match = re.compile(r'^%s\s*=\s*\($.*?^\s*\)$' % field_name, re.M | re.S)
+ m = re_match.search(data)
+ if not m:
+ raise ValueError('Could not find an existing definition for %s' % field_name)
+
+ new_block = format_lines(field_name, content)
+ data = data[:m.start()] + new_block + data[m.end():]
+
+ with open(__file__, 'w', newline='\n') as f:
+ f.write(data)
+
+ update_myself()
diff --git a/pygments/lexers/_openedge_builtins.py b/pygments/lexers/_openedge_builtins.py
index 09587e00..1a8da5c7 100644
--- a/pygments/lexers/_openedge_builtins.py
+++ b/pygments/lexers/_openedge_builtins.py
@@ -5,7 +5,7 @@
Builtin list for the OpenEdgeLexer.
- :copyright: Copyright 2006-2019 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2020 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
diff --git a/pygments/lexers/_php_builtins.py b/pygments/lexers/_php_builtins.py
index 39a39057..ad54492f 100644
--- a/pygments/lexers/_php_builtins.py
+++ b/pygments/lexers/_php_builtins.py
@@ -12,7 +12,7 @@
internet connection. don't run that at home, use
a server ;-)
- :copyright: Copyright 2006-2019 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2020 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
@@ -4727,8 +4727,7 @@ if __name__ == '__main__': # pragma: no cover
download = urlretrieve(PHP_MANUAL_URL)
with tarfile.open(download[0]) as tar:
tar.extractall()
- for file in glob.glob("%s%s" % (PHP_MANUAL_DIR, PHP_REFERENCE_GLOB)):
- yield file
+ yield from glob.glob("%s%s" % (PHP_MANUAL_DIR, PHP_REFERENCE_GLOB))
os.remove(download[0])
def regenerate(filename, modules):
diff --git a/pygments/lexers/_postgres_builtins.py b/pygments/lexers/_postgres_builtins.py
index 21086722..3fe40a6b 100644
--- a/pygments/lexers/_postgres_builtins.py
+++ b/pygments/lexers/_postgres_builtins.py
@@ -5,7 +5,7 @@
Self-updating data files for PostgreSQL lexer.
- :copyright: Copyright 2006-2019 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2020 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
@@ -36,6 +36,7 @@ KEYWORDS = (
'ASSIGNMENT',
'ASYMMETRIC',
'AT',
+ 'ATTACH',
'ATTRIBUTE',
'AUTHORIZATION',
'BACKWARD',
@@ -49,6 +50,7 @@ KEYWORDS = (
'BOTH',
'BY',
'CACHE',
+ 'CALL',
'CALLED',
'CASCADE',
'CASCADED',
@@ -68,12 +70,14 @@ KEYWORDS = (
'COLLATE',
'COLLATION',
'COLUMN',
+ 'COLUMNS',
'COMMENT',
'COMMENTS',
'COMMIT',
'COMMITTED',
'CONCURRENTLY',
'CONFIGURATION',
+ 'CONFLICT',
'CONNECTION',
'CONSTRAINT',
'CONSTRAINTS',
@@ -85,6 +89,7 @@ KEYWORDS = (
'CREATE',
'CROSS',
'CSV',
+ 'CUBE',
'CURRENT',
'CURRENT_CATALOG',
'CURRENT_DATE',
@@ -110,7 +115,9 @@ KEYWORDS = (
'DELETE',
'DELIMITER',
'DELIMITERS',
+ 'DEPENDS',
'DESC',
+ 'DETACH',
'DICTIONARY',
'DISABLE',
'DISCARD',
@@ -136,6 +143,7 @@ KEYWORDS = (
'EXECUTE',
'EXISTS',
'EXPLAIN',
+ 'EXPRESSION',
'EXTENSION',
'EXTERNAL',
'EXTRACT',
@@ -155,11 +163,14 @@ KEYWORDS = (
'FULL',
'FUNCTION',
'FUNCTIONS',
+ 'GENERATED',
'GLOBAL',
'GRANT',
'GRANTED',
'GREATEST',
'GROUP',
+ 'GROUPING',
+ 'GROUPS',
'HANDLER',
'HAVING',
'HEADER',
@@ -171,7 +182,9 @@ KEYWORDS = (
'IMMEDIATE',
'IMMUTABLE',
'IMPLICIT',
+ 'IMPORT',
'IN',
+ 'INCLUDE',
'INCLUDING',
'INCREMENT',
'INDEX',
@@ -202,8 +215,6 @@ KEYWORDS = (
'LARGE',
'LAST',
'LATERAL',
- 'LC_COLLATE',
- 'LC_CTYPE',
'LEADING',
'LEAKPROOF',
'LEAST',
@@ -218,10 +229,13 @@ KEYWORDS = (
'LOCALTIMESTAMP',
'LOCATION',
'LOCK',
+ 'LOCKED',
+ 'LOGGED',
'MAPPING',
'MATCH',
'MATERIALIZED',
'MAXVALUE',
+ 'METHOD',
'MINUTE',
'MINVALUE',
'MODE',
@@ -232,9 +246,16 @@ KEYWORDS = (
'NATIONAL',
'NATURAL',
'NCHAR',
+ 'NEW',
'NEXT',
+ 'NFC',
+ 'NFD',
+ 'NFKC',
+ 'NFKD',
'NO',
'NONE',
+ 'NORMALIZE',
+ 'NORMALIZED',
'NOT',
'NOTHING',
'NOTIFY',
@@ -249,6 +270,7 @@ KEYWORDS = (
'OFF',
'OFFSET',
'OIDS',
+ 'OLD',
'ON',
'ONLY',
'OPERATOR',
@@ -257,13 +279,16 @@ KEYWORDS = (
'OR',
'ORDER',
'ORDINALITY',
+ 'OTHERS',
'OUT',
'OUTER',
'OVER',
'OVERLAPS',
'OVERLAY',
+ 'OVERRIDING',
'OWNED',
'OWNER',
+ 'PARALLEL',
'PARSER',
'PARTIAL',
'PARTITION',
@@ -283,7 +308,9 @@ KEYWORDS = (
'PRIVILEGES',
'PROCEDURAL',
'PROCEDURE',
+ 'PROCEDURES',
'PROGRAM',
+ 'PUBLICATION',
'QUOTE',
'RANGE',
'READ',
@@ -293,6 +320,7 @@ KEYWORDS = (
'RECURSIVE',
'REF',
'REFERENCES',
+ 'REFERENCING',
'REFRESH',
'REINDEX',
'RELATIVE',
@@ -310,11 +338,15 @@ KEYWORDS = (
'RIGHT',
'ROLE',
'ROLLBACK',
+ 'ROLLUP',
+ 'ROUTINE',
+ 'ROUTINES',
'ROW',
'ROWS',
'RULE',
'SAVEPOINT',
'SCHEMA',
+ 'SCHEMAS',
'SCROLL',
'SEARCH',
'SECOND',
@@ -328,13 +360,16 @@ KEYWORDS = (
'SESSION_USER',
'SET',
'SETOF',
+ 'SETS',
'SHARE',
'SHOW',
'SIMILAR',
'SIMPLE',
+ 'SKIP',
'SMALLINT',
'SNAPSHOT',
'SOME',
+ 'SQL',
'STABLE',
'STANDALONE',
'START',
@@ -343,25 +378,31 @@ KEYWORDS = (
'STDIN',
'STDOUT',
'STORAGE',
+ 'STORED',
'STRICT',
'STRIP',
+ 'SUBSCRIPTION',
'SUBSTRING',
+ 'SUPPORT',
'SYMMETRIC',
'SYSID',
'SYSTEM',
'TABLE',
'TABLES',
+ 'TABLESAMPLE',
'TABLESPACE',
'TEMP',
'TEMPLATE',
'TEMPORARY',
'TEXT',
'THEN',
+ 'TIES',
'TIME',
'TIMESTAMP',
'TO',
'TRAILING',
'TRANSACTION',
+ 'TRANSFORM',
'TREAT',
'TRIGGER',
'TRIM',
@@ -370,6 +411,7 @@ KEYWORDS = (
'TRUSTED',
'TYPE',
'TYPES',
+ 'UESCAPE',
'UNBOUNDED',
'UNCOMMITTED',
'UNENCRYPTED',
@@ -412,10 +454,12 @@ KEYWORDS = (
'XMLELEMENT',
'XMLEXISTS',
'XMLFOREST',
+ 'XMLNAMESPACES',
'XMLPARSE',
'XMLPI',
'XMLROOT',
'XMLSERIALIZE',
+ 'XMLTABLE',
'YEAR',
'YES',
'ZONE',
@@ -452,10 +496,12 @@ DATATYPES = (
'line',
'lseg',
'macaddr',
+ 'macaddr8',
'money',
'numeric',
'path',
'pg_lsn',
+ 'pg_snapshot',
'point',
'polygon',
'real',
@@ -483,19 +529,28 @@ DATATYPES = (
PSEUDO_TYPES = (
'any',
- 'anyelement',
'anyarray',
- 'anynonarray',
+ 'anycompatible',
+ 'anycompatiblearray',
+ 'anycompatiblenonarray',
+ 'anycompatiblerange',
+ 'anyelement',
'anyenum',
+ 'anynonarray',
'anyrange',
'cstring',
+ 'event_trigger',
+ 'fdw_handler',
+ 'index_am_handler',
'internal',
'language_handler',
- 'fdw_handler',
+ 'pg_ddl_command',
'record',
+ 'table_am_handler',
'trigger',
+ 'tsm_handler',
+ 'unknown',
'void',
- 'opaque',
)
# Remove 'trigger' from types
@@ -519,25 +574,26 @@ if __name__ == '__main__': # pragma: no cover
# One man's constant is another man's variable.
SOURCE_URL = 'https://github.com/postgres/postgres/raw/master'
- KEYWORDS_URL = SOURCE_URL + '/doc/src/sgml/keywords.sgml'
+ KEYWORDS_URL = SOURCE_URL + '/src/include/parser/kwlist.h'
DATATYPES_URL = SOURCE_URL + '/doc/src/sgml/datatype.sgml'
def update_myself():
- data_file = list(urlopen(DATATYPES_URL))
+ content = urlopen(DATATYPES_URL).read().decode('utf-8', errors='ignore')
+ data_file = list(content.splitlines())
datatypes = parse_datatypes(data_file)
pseudos = parse_pseudos(data_file)
- keywords = parse_keywords(urlopen(KEYWORDS_URL))
+ content = urlopen(KEYWORDS_URL).read().decode('utf-8', errors='ignore')
+ keywords = parse_keywords(content)
+
update_consts(__file__, 'DATATYPES', datatypes)
update_consts(__file__, 'PSEUDO_TYPES', pseudos)
update_consts(__file__, 'KEYWORDS', keywords)
def parse_keywords(f):
kw = []
- for m in re.finditer(
- r'\s*<entry><token>([^<]+)</token></entry>\s*'
- r'<entry>([^<]+)</entry>', f.read()):
- kw.append(m.group(1))
+ for m in re.finditer(r'PG_KEYWORD\("(.+?)"', f):
+ kw.append(m.group(1).upper())
if not kw:
raise ValueError('no keyword found')
@@ -576,7 +632,7 @@ if __name__ == '__main__': # pragma: no cover
def parse_pseudos(f):
dt = []
re_start = re.compile(r'\s*<table id="datatype-pseudotypes-table">')
- re_entry = re.compile(r'\s*<entry><type>([^<]+)</></entry>')
+ re_entry = re.compile(r'\s*<entry><type>(.+?)</type></entry>')
re_end = re.compile(r'\s*</table>')
f = iter(f)
@@ -599,6 +655,7 @@ if __name__ == '__main__': # pragma: no cover
if not dt:
raise ValueError('pseudo datatypes not found')
+ dt.sort()
return dt
def update_consts(filename, constname, content):
@@ -615,7 +672,7 @@ if __name__ == '__main__': # pragma: no cover
new_block = format_lines(constname, content)
data = data[:m.start()] + new_block + data[m.end():]
- with open(filename, 'w') as f:
+ with open(filename, 'w', newline='\n') as f:
f.write(data)
update_myself()
diff --git a/pygments/lexers/_scilab_builtins.py b/pygments/lexers/_scilab_builtins.py
index 4b6886f9..3f2edc1d 100644
--- a/pygments/lexers/_scilab_builtins.py
+++ b/pygments/lexers/_scilab_builtins.py
@@ -5,7 +5,7 @@
Builtin list for the ScilabLexer.
- :copyright: Copyright 2006-2019 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2020 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
diff --git a/pygments/lexers/_sourcemod_builtins.py b/pygments/lexers/_sourcemod_builtins.py
index 8eb4597c..cd6264e2 100644
--- a/pygments/lexers/_sourcemod_builtins.py
+++ b/pygments/lexers/_sourcemod_builtins.py
@@ -8,7 +8,7 @@
Do not edit the FUNCTIONS list by hand.
- :copyright: Copyright 2006-2019 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2020 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
diff --git a/pygments/lexers/_stan_builtins.py b/pygments/lexers/_stan_builtins.py
index e95f5b1e..eaaec9cc 100644
--- a/pygments/lexers/_stan_builtins.py
+++ b/pygments/lexers/_stan_builtins.py
@@ -6,7 +6,7 @@
This file contains the names of functions for Stan used by
``pygments.lexers.math.StanLexer. This is for Stan language version 2.17.0.
- :copyright: Copyright 2006-2019 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2020 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
diff --git a/pygments/lexers/_stata_builtins.py b/pygments/lexers/_stata_builtins.py
index 3f4abdcb..575249ef 100644
--- a/pygments/lexers/_stata_builtins.py
+++ b/pygments/lexers/_stata_builtins.py
@@ -5,7 +5,7 @@
Builtins for Stata
- :copyright: Copyright 2006-2019 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2020 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
diff --git a/pygments/lexers/_tsql_builtins.py b/pygments/lexers/_tsql_builtins.py
index dfc5f618..da184181 100644
--- a/pygments/lexers/_tsql_builtins.py
+++ b/pygments/lexers/_tsql_builtins.py
@@ -5,7 +5,7 @@
These are manually translated lists from https://msdn.microsoft.com.
- :copyright: Copyright 2006-2019 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2020 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
diff --git a/pygments/lexers/_usd_builtins.py b/pygments/lexers/_usd_builtins.py
index 0c7316a6..edcbde75 100644
--- a/pygments/lexers/_usd_builtins.py
+++ b/pygments/lexers/_usd_builtins.py
@@ -1,7 +1,13 @@
-#!/usr/bin/env python
# -*- coding: utf-8 -*-
+"""
+ pygments.lexers._usd_builtins
+ ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
-"""A collection of known USD-related keywords, attributes, and types."""
+ A collection of known USD-related keywords, attributes, and types.
+
+ :copyright: Copyright 2006-2020 by the Pygments team, see AUTHORS.
+ :license: BSD, see LICENSE for details.
+"""
COMMON_ATTRIBUTES = [
"extent",
diff --git a/pygments/lexers/_vbscript_builtins.py b/pygments/lexers/_vbscript_builtins.py
index e7520072..0fe55b1d 100644
--- a/pygments/lexers/_vbscript_builtins.py
+++ b/pygments/lexers/_vbscript_builtins.py
@@ -6,7 +6,7 @@
These are manually translated lists from
http://www.indusoft.com/pdf/VBScript%20Reference.pdf.
- :copyright: Copyright 2006-2019 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2020 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
diff --git a/pygments/lexers/_vim_builtins.py b/pygments/lexers/_vim_builtins.py
index 39c9ed19..3ee1e854 100644
--- a/pygments/lexers/_vim_builtins.py
+++ b/pygments/lexers/_vim_builtins.py
@@ -5,7 +5,7 @@
This file is autogenerated by scripts/get_vimkw.py
- :copyright: Copyright 2006-2019 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2020 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
diff --git a/pygments/lexers/actionscript.py b/pygments/lexers/actionscript.py
index f4b4964e..e0ef3516 100644
--- a/pygments/lexers/actionscript.py
+++ b/pygments/lexers/actionscript.py
@@ -5,7 +5,7 @@
Lexers for ActionScript and MXML.
- :copyright: Copyright 2006-2019 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2020 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
diff --git a/pygments/lexers/agile.py b/pygments/lexers/agile.py
index 0e726339..e1b8a11b 100644
--- a/pygments/lexers/agile.py
+++ b/pygments/lexers/agile.py
@@ -5,7 +5,7 @@
Just export lexer classes previously contained in this module.
- :copyright: Copyright 2006-2019 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2020 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
diff --git a/pygments/lexers/algebra.py b/pygments/lexers/algebra.py
index aa1dea77..eb363a08 100644
--- a/pygments/lexers/algebra.py
+++ b/pygments/lexers/algebra.py
@@ -5,7 +5,7 @@
Lexers for computer algebra systems.
- :copyright: Copyright 2006-2019 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2020 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
diff --git a/pygments/lexers/ambient.py b/pygments/lexers/ambient.py
index 7d42d12a..82454829 100644
--- a/pygments/lexers/ambient.py
+++ b/pygments/lexers/ambient.py
@@ -5,7 +5,7 @@
Lexers for AmbientTalk language.
- :copyright: Copyright 2006-2019 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2020 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
diff --git a/pygments/lexers/ampl.py b/pygments/lexers/ampl.py
index 21e7847f..652a9419 100644
--- a/pygments/lexers/ampl.py
+++ b/pygments/lexers/ampl.py
@@ -5,7 +5,7 @@
Lexers for the AMPL language.
- :copyright: Copyright 2006-2019 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2020 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
diff --git a/pygments/lexers/apl.py b/pygments/lexers/apl.py
index 280b6324..68c4ffe1 100644
--- a/pygments/lexers/apl.py
+++ b/pygments/lexers/apl.py
@@ -5,7 +5,7 @@
Lexers for APL.
- :copyright: Copyright 2006-2019 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2020 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
@@ -77,8 +77,8 @@ class APLLexer(RegexLexer):
#
# Operators
# ==========
- (u'[\\.\\\\\\/⌿⍀¨⍣⍨⍠⍤∘]', Name.Attribute), # closest token type
- (u'[+\\-×÷⌈⌊∣|⍳?*⍟○!⌹<≤=>≥≠≡≢∊⍷∪∩~∨∧⍱⍲⍴,⍪⌽⊖⍉↑↓⊂⊃⌷⍋⍒⊤⊥⍕⍎⊣⊢⍁⍂≈⌸⍯↗]',
+ (u'[\\.\\\\\\/⌿⍀¨⍣⍨⍠⍤∘⌸&⌶@⌺⍥⍛⍢]', Name.Attribute), # closest token type
+ (u'[+\\-×÷⌈⌊∣|⍳?*⍟○!⌹<≤=>≥≠≡≢∊⍷∪∩~∨∧⍱⍲⍴,⍪⌽⊖⍉↑↓⊂⊃⌷⍋⍒⊤⊥⍕⍎⊣⊢⍁⍂≈⌸⍯↗⊆⊇⍸√⌾…⍮]',
Operator),
#
# Constant
diff --git a/pygments/lexers/archetype.py b/pygments/lexers/archetype.py
index 68ec5c04..bca9cbbb 100644
--- a/pygments/lexers/archetype.py
+++ b/pygments/lexers/archetype.py
@@ -14,7 +14,7 @@
Contributed by Thomas Beale <https://github.com/wolandscat>,
<https://bitbucket.org/thomas_beale>.
- :copyright: Copyright 2006-2019 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2020 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
diff --git a/pygments/lexers/arrow.py b/pygments/lexers/arrow.py
index b1491c62..452a4164 100644
--- a/pygments/lexers/arrow.py
+++ b/pygments/lexers/arrow.py
@@ -1,16 +1,17 @@
# -*- coding: utf-8 -*-
"""
pygments.lexers.arrow
- ~~~~~~~~~~~~~~~~~~~
+ ~~~~~~~~~~~~~~~~~~~~~
Lexer for Arrow.
- :copyright: Copyright 2020 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2020 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
-from pygments.lexer import RegexLexer, bygroups, include
+
+from pygments.lexer import RegexLexer, bygroups, default, include
from pygments.token import Text, Operator, Keyword, Punctuation, Name, \
- Whitespace, String, Number
+ String, Number
__all__ = ['ArrowLexer']
@@ -18,6 +19,7 @@ TYPES = r'\b(int|bool|char)((?:\[\])*)(?=\s+)'
IDENT = r'([a-zA-Z_][a-zA-Z0-9_]*)'
DECL = TYPES + r'(\s+)' + IDENT
+
class ArrowLexer(RegexLexer):
"""
Lexer for Arrow: https://pypi.org/project/py-arrow-lang/
@@ -38,9 +40,9 @@ class ArrowLexer(RegexLexer):
include('expressions'),
],
'blocks': [
- (r'(function)(\n+)(/-->)(\s*)'
- + DECL # 4 groups
- + r'(\()', bygroups(
+ (r'(function)(\n+)(/-->)(\s*)' +
+ DECL + # 4 groups
+ r'(\()', bygroups(
Keyword.Reserved, Text, Punctuation,
Text, Keyword.Type, Punctuation, Text,
Name.Function, Punctuation
@@ -60,7 +62,7 @@ class ArrowLexer(RegexLexer):
(r'true|false', Keyword.Constant),
(r"'", String.Char, 'char'),
(r'"', String.Double, 'string'),
- (r'{', Punctuation, 'array'),
+ (r'\{', Punctuation, 'array'),
(r'==|!=|<|>|\+|-|\*|/|%', Operator),
(r'and|or|not|length', Operator.Word),
(r'(input)(\s+)(int|char\[\])', bygroups(
@@ -77,7 +79,7 @@ class ArrowLexer(RegexLexer):
'print': [
include('expressions'),
(r',', Punctuation),
- (r'', Text, '#pop'),
+ default('#pop'),
],
'fparams': [
(DECL, bygroups(Keyword.Type, Punctuation, Text, Name.Variable)),
diff --git a/pygments/lexers/asm.py b/pygments/lexers/asm.py
index 89c758c8..354c80c8 100644
--- a/pygments/lexers/asm.py
+++ b/pygments/lexers/asm.py
@@ -5,7 +5,7 @@
Lexers for assembly languages.
- :copyright: Copyright 2006-2019 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2020 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
@@ -37,7 +37,7 @@ class GasLexer(RegexLexer):
string = r'"(\\"|[^"])*"'
char = r'[\w$.@-]'
identifier = r'(?:[a-zA-Z$_]' + char + r'*|\.' + char + '+)'
- number = r'(?:0[xX][a-zA-Z0-9]+|\d+)'
+ number = r'(?:0[xX][a-fA-F0-9]+|#?-?\d+)'
register = '%' + identifier
tokens = {
@@ -178,7 +178,7 @@ class DObjdumpLexer(DelegatingLexer):
mimetypes = ['text/x-d-objdump']
def __init__(self, **options):
- super(DObjdumpLexer, self).__init__(DLexer, ObjdumpLexer, **options)
+ super().__init__(DLexer, ObjdumpLexer, **options)
class CppObjdumpLexer(DelegatingLexer):
@@ -191,7 +191,7 @@ class CppObjdumpLexer(DelegatingLexer):
mimetypes = ['text/x-cpp-objdump']
def __init__(self, **options):
- super(CppObjdumpLexer, self).__init__(CppLexer, ObjdumpLexer, **options)
+ super().__init__(CppLexer, ObjdumpLexer, **options)
class CObjdumpLexer(DelegatingLexer):
@@ -204,7 +204,7 @@ class CObjdumpLexer(DelegatingLexer):
mimetypes = ['text/x-c-objdump']
def __init__(self, **options):
- super(CObjdumpLexer, self).__init__(CLexer, ObjdumpLexer, **options)
+ super().__init__(CLexer, ObjdumpLexer, **options)
class HsailLexer(RegexLexer):
@@ -453,6 +453,7 @@ class LlvmLexer(RegexLexer):
]
}
+
class LlvmMirBodyLexer(RegexLexer):
"""
For LLVM MIR examples without the YAML wrapper.
@@ -471,19 +472,19 @@ class LlvmMirBodyLexer(RegexLexer):
# Attributes on basic blocks
(words(('liveins', 'successors'), suffix=':'), Keyword),
# Basic Block Labels
- (r'bb\.[0-9]+(\.[0-9a-zA-Z_.-]+)?( \(address-taken\))?:', Name.Label),
- (r'bb\.[0-9]+ \(%[0-9a-zA-Z_.-]+\)( \(address-taken\))?:', Name.Label),
+ (r'bb\.[0-9]+(\.[a-zA-Z0-9_.-]+)?( \(address-taken\))?:', Name.Label),
+ (r'bb\.[0-9]+ \(%[a-zA-Z0-9_.-]+\)( \(address-taken\))?:', Name.Label),
(r'%bb\.[0-9]+(\.\w+)?', Name.Label),
# Stack references
(r'%stack\.[0-9]+(\.\w+\.addr)?', Name),
# Subreg indices
(r'%subreg\.\w+', Name),
# Virtual registers
- (r'%[0-9a-zA-Z_]+ *', Name.Variable, 'vreg'),
+ (r'%[a-zA-Z0-9_]+ *', Name.Variable, 'vreg'),
# Reference to LLVM-IR global
include('global'),
# Reference to Intrinsic
- (r'intrinsic\(\@[0-9a-zA-Z_.]+\)', Name.Variable.Global),
+ (r'intrinsic\(\@[a-zA-Z0-9_.]+\)', Name.Variable.Global),
# Comparison predicates
(words(('eq', 'ne', 'sgt', 'sge', 'slt', 'sle', 'ugt', 'uge', 'ult',
'ule'), prefix=r'intpred\(', suffix=r'\)'), Name.Builtin),
@@ -493,7 +494,7 @@ class LlvmMirBodyLexer(RegexLexer):
# Physical registers
(r'\$\w+', String.Single),
# Assignment operator
- (r'[=]', Operator),
+ (r'=', Operator),
# gMIR Opcodes
(r'(G_ANYEXT|G_[SZ]EXT|G_SEXT_INREG|G_TRUNC|G_IMPLICIT_DEF|G_PHI|'
r'G_FRAME_INDEX|G_GLOBAL_VALUE|G_INTTOPTR|G_PTRTOINT|G_BITCAST|'
@@ -526,7 +527,7 @@ class LlvmMirBodyLexer(RegexLexer):
# Flags
(words(('killed', 'implicit')), Keyword),
# ConstantInt values
- (r'[i][0-9]+ +', Keyword.Type, 'constantint'),
+ (r'i[0-9]+ +', Keyword.Type, 'constantint'),
# ConstantFloat values
(r'(half|float|double) +', Keyword.Type, 'constantfloat'),
# Bare immediates
@@ -536,7 +537,7 @@ class LlvmMirBodyLexer(RegexLexer):
# MIR Comments
(r';.*', Comment),
# If we get here, assume it's a target instruction
- (r'[0-9a-zA-Z_]+', Name),
+ (r'[a-zA-Z0-9_]+', Name),
# Everything else that isn't highlighted
(r'[(), \n]+', Text),
],
@@ -560,7 +561,7 @@ class LlvmMirBodyLexer(RegexLexer):
'vreg_bank_or_class': [
# The unassigned bank/class
(r' *_', Name.Variable.Magic),
- (r' *[0-9a-zA-Z_]+', Name.Variable),
+ (r' *[a-zA-Z0-9_]+', Name.Variable),
# The LLT if there is one
(r' *\(', Text, 'vreg_type'),
(r'(?=.)', Text, '#pop'),
@@ -579,8 +580,8 @@ class LlvmMirBodyLexer(RegexLexer):
'acquire', 'release', 'acq_rel', 'seq_cst')),
Keyword),
# IR references
- (r'%ir\.[0-9a-zA-Z_.-]+', Name),
- (r'%ir-block\.[0-9a-zA-Z_.-]+', Name),
+ (r'%ir\.[a-zA-Z0-9_.-]+', Name),
+ (r'%ir-block\.[a-zA-Z0-9_.-]+', Name),
(r'[-+]', Operator),
include('integer'),
include('global'),
@@ -590,9 +591,10 @@ class LlvmMirBodyLexer(RegexLexer):
],
'integer': [(r'-?[0-9]+', Number.Integer),],
'float': [(r'-?[0-9]+\.[0-9]+(e[+-][0-9]+)?', Number.Float)],
- 'global': [(r'\@[0-9a-zA-Z_.]+', Name.Variable.Global)],
+ 'global': [(r'\@[a-zA-Z0-9_.]+', Name.Variable.Global)],
}
+
class LlvmMirLexer(RegexLexer):
"""
Lexer for the overall LLVM MIR document format.
@@ -649,9 +651,18 @@ class LlvmMirLexer(RegexLexer):
(r'.+', Text),
(r'\n', Text),
],
- 'name': [ (r'[^\n]+', Name), default('#pop') ],
- 'boolean': [ (r' *(true|false)', Name.Builtin), default('#pop') ],
- 'number': [ (r' *[0-9]+', Number), default('#pop') ],
+ 'name': [
+ (r'[^\n]+', Name),
+ default('#pop'),
+ ],
+ 'boolean': [
+ (r' *(true|false)', Name.Builtin),
+ default('#pop'),
+ ],
+ 'number': [
+ (r' *[0-9]+', Number),
+ default('#pop'),
+ ],
'llvm_mir_body': [
# Documents end with '...' or '---'.
# We have to pop llvm_mir_body and llvm_mir
@@ -660,7 +671,7 @@ class LlvmMirLexer(RegexLexer):
(r'((?:.|\n)+?)(?=\.\.\.|---)', bygroups(using(LlvmMirBodyLexer))),
# The '...' is optional. If we didn't already find it then it isn't
# there. There might be a '---' instead though.
- (r'(?!\.\.\.|---)((.|\n)+)', bygroups(using(LlvmMirBodyLexer), Keyword)),
+ (r'(?!\.\.\.|---)((?:.|\n)+)', bygroups(using(LlvmMirBodyLexer))),
],
}
@@ -924,7 +935,7 @@ class Dasm16Lexer(RegexLexer):
]
# Regexes yo
- char = r'[a-zA-Z$._0-9@]'
+ char = r'[a-zA-Z0-9_$@.]'
identifier = r'(?:[a-zA-Z$_]' + char + r'*|\.' + char + '+)'
number = r'[+-]?(?:0[xX][a-zA-Z0-9]+|\d+)'
binary_number = r'0b[01_]+'
diff --git a/pygments/lexers/automation.py b/pygments/lexers/automation.py
index 5f27b6c6..786f63fb 100644
--- a/pygments/lexers/automation.py
+++ b/pygments/lexers/automation.py
@@ -5,7 +5,7 @@
Lexers for automation scripting languages.
- :copyright: Copyright 2006-2019 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2020 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
diff --git a/pygments/lexers/bare.py b/pygments/lexers/bare.py
new file mode 100644
index 00000000..d63a13e8
--- /dev/null
+++ b/pygments/lexers/bare.py
@@ -0,0 +1,104 @@
+# -*- coding: utf-8 -*-
+"""
+ pygments.lexers.bare
+ ~~~~~~~~~~~~~~~~~~~~
+
+ Lexer for the BARE schema.
+
+ :copyright: Copyright 2006-2020 by the Pygments team, see AUTHORS.
+ :license: BSD, see LICENSE for details.
+"""
+
+import re
+
+from pygments.lexer import RegexLexer, words, bygroups
+from pygments.token import Text, Comment, Keyword, Name, Literal
+
+__all__ = ['BareLexer']
+
+
+class BareLexer(RegexLexer):
+ """
+ For `BARE schema <https://baremessages.org>`_ schema source.
+
+ .. versionadded:: 2.7
+ """
+ name = 'BARE'
+ filenames = ['*.bare']
+ aliases = ['bare']
+
+ flags = re.MULTILINE | re.UNICODE
+
+ keywords = [
+ 'type',
+ 'enum',
+ 'u8',
+ 'u16',
+ 'u32',
+ 'u64',
+ 'uint',
+ 'i8',
+ 'i16',
+ 'i32',
+ 'i64',
+ 'int',
+ 'f32',
+ 'f64',
+ 'bool',
+ 'void',
+ 'data',
+ 'string',
+ 'optional',
+ 'map',
+ ]
+
+ tokens = {
+ 'root': [
+ (r'(type)(\s+)([A-Z][a-zA-Z0-9]+)(\s+\{)',
+ bygroups(Keyword, Text, Name.Class, Text), 'struct'),
+ (r'(type)(\s+)([A-Z][a-zA-Z0-9]+)(\s+\()',
+ bygroups(Keyword, Text, Name.Class, Text), 'union'),
+ (r'(type)(\s+)([A-Z][a-zA-Z0-9]+)(\s+)',
+ bygroups(Keyword, Text, Name, Text), 'typedef'),
+ (r'(enum)(\s+)([A-Z][a-zA-Z0-9]+)(\s+\{)',
+ bygroups(Keyword, Text, Name.Class, Text), 'enum'),
+ (r'#.*?$', Comment),
+ (r'\s+', Text),
+ ],
+ 'struct': [
+ (r'\{', Text, '#push'),
+ (r'\}', Text, '#pop'),
+ (r'([a-zA-Z0-9]+)(:\s*)', bygroups(Name.Attribute, Text), 'typedef'),
+ (r'\s+', Text),
+ ],
+ 'union': [
+ (r'\)', Text, '#pop'),
+ (r'\s*\|\s*', Text),
+ (r'[A-Z][a-zA-Z0-9]+', Name.Class),
+ (words(keywords), Keyword),
+ (r'\s+', Text),
+ ],
+ 'typedef': [
+ (r'\[\]', Text),
+ (r'#.*?$', Comment, '#pop'),
+ (r'(\[)(\d+)(\])', bygroups(Text, Literal, Text)),
+ (r'<|>', Text),
+ (r'\(', Text, 'union'),
+            (r'(\[)([a-z][a-zA-Z0-9]+)(\])', bygroups(Text, Keyword, Text)),
+            (r'(\[)([A-Z][a-zA-Z0-9]+)(\])', bygroups(Text, Name.Class, Text)),
+            (r'([A-Z][a-zA-Z0-9]+)', Name.Class),
+ (words(keywords), Keyword),
+ (r'\n', Text, '#pop'),
+ (r'\{', Text, 'struct'),
+ (r'\s+', Text),
+ (r'\d+', Literal),
+ ],
+ 'enum': [
+ (r'\{', Text, '#push'),
+ (r'\}', Text, '#pop'),
+ (r'([A-Z][A-Z0-9_]*)(\s*=\s*)(\d+)', bygroups(Name.Attribute, Text, Literal)),
+ (r'([A-Z][A-Z0-9_]*)', bygroups(Name.Attribute)),
+ (r'#.*?$', Comment),
+ (r'\s+', Text),
+ ],
+ }
diff --git a/pygments/lexers/basic.py b/pygments/lexers/basic.py
index 372c8229..0e46f23b 100644
--- a/pygments/lexers/basic.py
+++ b/pygments/lexers/basic.py
@@ -5,7 +5,7 @@
Lexers for BASIC like languages (other than VB.net).
- :copyright: Copyright 2006-2019 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2020 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
@@ -21,7 +21,6 @@ __all__ = ['BlitzBasicLexer', 'BlitzMaxLexer', 'MonkeyLexer', 'CbmBasicV2Lexer',
'QBasicLexer', 'VBScriptLexer', 'BBCBasicLexer']
-
class BlitzMaxLexer(RegexLexer):
"""
For `BlitzMax <http://blitzbasic.com>`_ source code.
@@ -524,15 +523,18 @@ class VBScriptLexer(RegexLexer):
(r'[0-9]+\.[0-9]*(e[+-]?[0-9]+)?', Number.Float),
(r'\.[0-9]+(e[+-]?[0-9]+)?', Number.Float), # Float variant 2, for example: .1, .1e2
(r'[0-9]+e[+-]?[0-9]+', Number.Float), # Float variant 3, for example: 123e45
- (r'\d+', Number.Integer),
+ (r'[0-9]+', Number.Integer),
('#.+#', String), # date or time value
(r'(dim)(\s+)([a-z_][a-z0-9_]*)',
bygroups(Keyword.Declaration, Whitespace, Name.Variable), 'dim_more'),
(r'(function|sub)(\s+)([a-z_][a-z0-9_]*)',
bygroups(Keyword.Declaration, Whitespace, Name.Function)),
- (r'(class)(\s+)([a-z_][a-z0-9_]*)', bygroups(Keyword.Declaration, Whitespace, Name.Class)),
- (r'(const)(\s+)([a-z_][a-z0-9_]*)', bygroups(Keyword.Declaration, Whitespace, Name.Constant)),
- (r'(end)(\s+)(class|function|if|property|sub|with)', bygroups(Keyword, Whitespace, Keyword)),
+ (r'(class)(\s+)([a-z_][a-z0-9_]*)',
+ bygroups(Keyword.Declaration, Whitespace, Name.Class)),
+ (r'(const)(\s+)([a-z_][a-z0-9_]*)',
+ bygroups(Keyword.Declaration, Whitespace, Name.Constant)),
+ (r'(end)(\s+)(class|function|if|property|sub|with)',
+ bygroups(Keyword, Whitespace, Keyword)),
(r'(on)(\s+)(error)(\s+)(goto)(\s+)(0)',
bygroups(Keyword, Whitespace, Keyword, Whitespace, Keyword, Whitespace, Number.Integer)),
(r'(on)(\s+)(error)(\s+)(resume)(\s+)(next)',
@@ -553,7 +555,8 @@ class VBScriptLexer(RegexLexer):
(r'.+(\n)?', Error)
],
'dim_more': [
- (r'(\s*)(,)(\s*)([a-z_][a-z0-9]*)', bygroups(Whitespace, Punctuation, Whitespace, Name.Variable)),
+ (r'(\s*)(,)(\s*)([a-z_][a-z0-9]*)',
+ bygroups(Whitespace, Punctuation, Whitespace, Name.Variable)),
default('#pop'),
],
'string': [
@@ -609,7 +612,7 @@ class BBCBasicLexer(RegexLexer):
(r"[0-9]+", Name.Label),
(r"(\*)([^\n]*)",
bygroups(Keyword.Pseudo, Comment.Special)),
- (r"", Whitespace, 'code'),
+ default('code'),
],
'code': [
diff --git a/pygments/lexers/bibtex.py b/pygments/lexers/bibtex.py
index 6d15c230..e48db1ab 100644
--- a/pygments/lexers/bibtex.py
+++ b/pygments/lexers/bibtex.py
@@ -5,7 +5,7 @@
Lexers for BibTeX bibliography data and styles
- :copyright: Copyright 2006-2019 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2020 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
diff --git a/pygments/lexers/boa.py b/pygments/lexers/boa.py
index a57c0e4a..bbe9dffa 100644
--- a/pygments/lexers/boa.py
+++ b/pygments/lexers/boa.py
@@ -5,7 +5,7 @@
Lexers for the Boa language.
- :copyright: Copyright 2006-2019 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2020 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
diff --git a/pygments/lexers/business.py b/pygments/lexers/business.py
index 5eeaba56..f212fe58 100644
--- a/pygments/lexers/business.py
+++ b/pygments/lexers/business.py
@@ -5,7 +5,7 @@
Lexers for "business-oriented" languages.
- :copyright: Copyright 2006-2019 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2020 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
diff --git a/pygments/lexers/c_cpp.py b/pygments/lexers/c_cpp.py
index d15c4a54..10809189 100644
--- a/pygments/lexers/c_cpp.py
+++ b/pygments/lexers/c_cpp.py
@@ -5,7 +5,7 @@
Lexers for C/C++ languages.
- :copyright: Copyright 2006-2019 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2020 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
diff --git a/pygments/lexers/c_like.py b/pygments/lexers/c_like.py
index ff4e15cc..ab12c733 100644
--- a/pygments/lexers/c_like.py
+++ b/pygments/lexers/c_like.py
@@ -5,7 +5,7 @@
Lexers for other C-like languages.
- :copyright: Copyright 2006-2019 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2020 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
diff --git a/pygments/lexers/capnproto.py b/pygments/lexers/capnproto.py
index 01524046..0fa21b38 100644
--- a/pygments/lexers/capnproto.py
+++ b/pygments/lexers/capnproto.py
@@ -5,7 +5,7 @@
Lexers for the Cap'n Proto schema language.
- :copyright: Copyright 2006-2019 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2020 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
diff --git a/pygments/lexers/chapel.py b/pygments/lexers/chapel.py
index 75f8b159..48842744 100644
--- a/pygments/lexers/chapel.py
+++ b/pygments/lexers/chapel.py
@@ -5,7 +5,7 @@
Lexer for the Chapel language.
- :copyright: Copyright 2006-2019 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2020 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
diff --git a/pygments/lexers/clean.py b/pygments/lexers/clean.py
index 59b183b8..b5dba09b 100644
--- a/pygments/lexers/clean.py
+++ b/pygments/lexers/clean.py
@@ -5,11 +5,11 @@
Lexer for the Clean language.
- :copyright: Copyright 2006-2019 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2020 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
-from pygments.lexer import ExtendedRegexLexer, words, include, bygroups
+from pygments.lexer import ExtendedRegexLexer, words, default, include, bygroups
from pygments.token import Comment, Error, Keyword, Literal, Name, Number, \
Operator, Punctuation, String, Whitespace
@@ -35,9 +35,9 @@ class CleanLexer(ExtendedRegexLexer):
modulewords = ('implementation', 'definition', 'system')
- lowerId = r'[a-z`][\w\d`]*'
- upperId = r'[A-Z`][\w\d`]*'
- funnyId = r'[~@#\$%\^?!+\-*<>\\/|&=:]+'
+ lowerId = r'[a-z`][\w`]*'
+ upperId = r'[A-Z`][\w`]*'
+ funnyId = r'[~@#$%\^?!+\-*<>\\/|&=:]+'
scoreUpperId = r'_' + upperId
scoreLowerId = r'_' + lowerId
moduleId = r'[a-zA-Z_][a-zA-Z0-9_.`]+'
@@ -92,7 +92,8 @@ class CleanLexer(ExtendedRegexLexer):
(r'(\s*)\b(as)\b', bygroups(Whitespace, Keyword), ('#pop', 'import.module.as')),
(moduleId, Name.Class),
(r'(\s*)(,)(\s*)', bygroups(Whitespace, Punctuation, Whitespace)),
- (r'\s*', Whitespace, '#pop'),
+ (r'\s+', Whitespace),
+ default('#pop'),
],
'import.module.as': [
include('whitespace'),
@@ -160,7 +161,7 @@ class CleanLexer(ExtendedRegexLexer):
(r'[$\n]', Error, '#pop'),
],
'operators': [
- (r'[-~@#\$%\^?!+*<>\\/|&=:\.]+', Operator),
+ (r'[-~@#$%\^?!+*<>\\/|&=:.]+', Operator),
(r'\b_+\b', Operator),
],
'delimiters': [
diff --git a/pygments/lexers/compiled.py b/pygments/lexers/compiled.py
index 0dab602e..21f45e57 100644
--- a/pygments/lexers/compiled.py
+++ b/pygments/lexers/compiled.py
@@ -5,7 +5,7 @@
Just export lexer classes previously contained in this module.
- :copyright: Copyright 2006-2019 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2020 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
diff --git a/pygments/lexers/configs.py b/pygments/lexers/configs.py
index c956e478..3d291d5c 100644
--- a/pygments/lexers/configs.py
+++ b/pygments/lexers/configs.py
@@ -5,7 +5,7 @@
Lexers for configuration file formats.
- :copyright: Copyright 2006-2019 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2020 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
@@ -319,7 +319,7 @@ class ApacheConfLexer(RegexLexer):
r'os|productonly|full|emerg|alert|crit|error|warn|'
r'notice|info|debug|registry|script|inetd|standalone|'
r'user|group)\b', Keyword),
- (r'"([^"\\]*(?:\\(.|[\n])[^"\\]*)*)"', String.Double),
+ (r'"([^"\\]*(?:\\(.|\n)[^"\\]*)*)"', String.Double),
(r'[^\s"\\]+', Text)
],
}
@@ -953,7 +953,7 @@ class SingularityLexer(RegexLexer):
filenames = ['*.def', 'Singularity']
flags = re.IGNORECASE | re.MULTILINE | re.DOTALL
- _headers = r'^(\s)*(bootstrap|from|osversion|mirrorurl|include|registry|namespace|includecmd)(:)'
+ _headers = r'^(\s*)(bootstrap|from|osversion|mirrorurl|include|registry|namespace|includecmd)(:)'
_section = r'^%(?:pre|post|setup|environment|help|labels|test|runscript|files|startscript)\b'
_appsect = r'^%app(?:install|help|run|labels|env|test|files)\b'
diff --git a/pygments/lexers/console.py b/pygments/lexers/console.py
index ab93b7b8..07639ee4 100644
--- a/pygments/lexers/console.py
+++ b/pygments/lexers/console.py
@@ -5,7 +5,7 @@
Lexers for misc console output.
- :copyright: Copyright 2006-2019 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2020 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
diff --git a/pygments/lexers/crystal.py b/pygments/lexers/crystal.py
index 1fa7270c..cf051536 100644
--- a/pygments/lexers/crystal.py
+++ b/pygments/lexers/crystal.py
@@ -5,7 +5,7 @@
Lexer for Crystal.
- :copyright: Copyright 2006-2019 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2020 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
@@ -58,8 +58,7 @@ class CrystalLexer(ExtendedRegexLexer):
ctx.pos = match.start(5)
ctx.end = match.end(5)
# this may find other heredocs
- for i, t, v in self.get_tokens_unprocessed(context=ctx):
- yield i, t, v
+ yield from self.get_tokens_unprocessed(context=ctx)
ctx.pos = match.end()
if outermost:
@@ -135,7 +134,7 @@ class CrystalLexer(ExtendedRegexLexer):
('\\(', '\\)', '()', 'pa'), \
('<', '>', '<>', 'ab'):
states[name+'-intp-string'] = [
- (r'\\[' + lbrace + ']', String.Other),
+ (r'\\' + lbrace, String.Other),
(lbrace, String.Other, '#push'),
(rbrace, String.Other, '#pop'),
include('string-intp-escaped'),
diff --git a/pygments/lexers/csound.py b/pygments/lexers/csound.py
index e0d9ea9f..831f0f71 100644
--- a/pygments/lexers/csound.py
+++ b/pygments/lexers/csound.py
@@ -5,7 +5,7 @@
Lexers for Csound languages.
- :copyright: Copyright 2006-2019 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2020 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
@@ -149,7 +149,7 @@ class CsoundScoreLexer(CsoundLexer):
include('whitespace and macro uses'),
include('preprocessor directives'),
- (r'[abCdefiqstvxy]', Keyword),
+ (r'[aBbCdefiqstvxy]', Keyword),
# There is also a w statement that is generated internally and should not be
# used; see https://github.com/csound/csound/issues/750.
@@ -241,7 +241,7 @@ class CsoundOrchestraLexer(CsoundLexer):
'root': [
(r'\n', Text),
- (r'^([ \t]*)(\w+)(:)(?:[ \t]+|$)', bygroups(Text, Name.Label, Punctuation)),
+ (r'^([ \t]*)(\w+)(:)([ \t]+|$)', bygroups(Text, Name.Label, Punctuation, Text)),
include('whitespace and macro uses'),
include('preprocessor directives'),
@@ -339,13 +339,15 @@ class CsoundOrchestraLexer(CsoundLexer):
# sprintfk https://csound.com/docs/manual/sprintfk.html
# work with strings that contain format specifiers. In addition, these opcodes’
# handling of format specifiers is inconsistent:
- # - fprintks, fprints, printks, and prints do accept %a and %A
- # specifiers, but can’t accept %s specifiers.
- # - printf, printf_i, sprintf, and sprintfk don’t accept %a and %A
- # specifiers, but can accept %s specifiers.
+ # - fprintks and fprints accept %a and %A specifiers, and accept %s specifiers
+ # starting in Csound 6.15.0.
+ # - printks and prints accept %a and %A specifiers, but don’t accept %s
+ # specifiers.
+ # - printf, printf_i, sprintf, and sprintfk don’t accept %a and %A specifiers,
+ # but accept %s specifiers.
# See https://github.com/csound/csound/issues/747 for more information.
'format specifiers': [
- (r'%[#0\- +]*\d*(?:\.\d+)?[diuoxXfFeEgGaAcs]', String.Interpol),
+ (r'%[#0\- +]*\d*(?:\.\d+)?[AE-GXac-giosux]', String.Interpol),
(r'%%', String.Escape)
],
diff --git a/pygments/lexers/css.py b/pygments/lexers/css.py
index 6209be0c..0bc7e159 100644
--- a/pygments/lexers/css.py
+++ b/pygments/lexers/css.py
@@ -5,7 +5,7 @@
Lexers for CSS and related stylesheet formats.
- :copyright: Copyright 2006-2019 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2020 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
@@ -358,7 +358,7 @@ class CssLexer(RegexLexer):
(r'/\*(?:.|\n)*?\*/', Comment),
include('numeric-values'),
(r'[*+/-]', Operator),
- (r'[,]', Punctuation),
+ (r',', Punctuation),
(r'"(\\\\|\\"|[^"])*"', String.Double),
(r"'(\\\\|\\'|[^'])*'", String.Single),
(r'[a-zA-Z_-]\w*', Name),
@@ -396,7 +396,7 @@ common_sass_tokens = {
'behind', 'below', 'bidi-override', 'blink', 'block', 'bold', 'bolder', 'both',
'capitalize', 'center-left', 'center-right', 'center', 'circle',
'cjk-ideographic', 'close-quote', 'collapse', 'condensed', 'continuous',
- 'crop', 'crosshair', 'cross', 'cursive', 'dashed', 'decimal-leading-zero',
+ 'crosshair', 'cross', 'cursive', 'dashed', 'decimal-leading-zero',
'decimal', 'default', 'digits', 'disc', 'dotted', 'double', 'e-resize', 'embed',
'extra-condensed', 'extra-expanded', 'expanded', 'fantasy', 'far-left',
'far-right', 'faster', 'fast', 'fixed', 'georgian', 'groove', 'hebrew', 'help',
diff --git a/pygments/lexers/d.py b/pygments/lexers/d.py
index d76205da..1fd60d0f 100644
--- a/pygments/lexers/d.py
+++ b/pygments/lexers/d.py
@@ -5,7 +5,7 @@
Lexers for D languages.
- :copyright: Copyright 2006-2019 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2020 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
diff --git a/pygments/lexers/dalvik.py b/pygments/lexers/dalvik.py
index 0e39bb96..26d2ae3a 100644
--- a/pygments/lexers/dalvik.py
+++ b/pygments/lexers/dalvik.py
@@ -5,7 +5,7 @@
Pygments lexers for Dalvik VM-related languages.
- :copyright: Copyright 2006-2019 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2020 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
diff --git a/pygments/lexers/data.py b/pygments/lexers/data.py
index f8f01162..937dfda1 100644
--- a/pygments/lexers/data.py
+++ b/pygments/lexers/data.py
@@ -5,7 +5,7 @@
Lexers for data file format.
- :copyright: Copyright 2006-2019 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2020 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
@@ -23,7 +23,7 @@ class YamlLexerContext(LexerContext):
"""Indentation context for the YAML lexer."""
def __init__(self, *args, **kwds):
- super(YamlLexerContext, self).__init__(*args, **kwds)
+ super().__init__(*args, **kwds)
self.indent_stack = []
self.indent = -1
self.next_indent = 0
@@ -433,7 +433,7 @@ class YamlLexer(ExtendedRegexLexer):
def get_tokens_unprocessed(self, text=None, context=None):
if context is None:
context = YamlLexerContext(text, 0)
- return super(YamlLexer, self).get_tokens_unprocessed(text, context)
+ return super().get_tokens_unprocessed(text, context)
class JsonLexer(RegexLexer):
@@ -471,7 +471,7 @@ class JsonLexer(RegexLexer):
'%(exp_part)s|%(frac_part)s)') % vars(),
Number.Float),
(int_part, Number.Integer),
- (r'"(\\\\|\\"|[^"])*"', String.Double),
+            (r'"(\\(["\\/bfnrt]|u[a-fA-F0-9]{4})|[^\\"])*"', String.Double),
],
@@ -488,7 +488,7 @@ class JsonLexer(RegexLexer):
# a json object - { attr, attr, ... }
'objectvalue': [
include('whitespace'),
- (r'"(\\\\|\\"|[^"])*"', Name.Tag, 'objectattribute'),
+            (r'"(\\(["\\/bfnrt]|u[a-fA-F0-9]{4})|[^\\"])*"', Name.Tag, 'objectattribute'),
(r'\}', Punctuation, '#pop'),
],
diff --git a/pygments/lexers/devicetree.py b/pygments/lexers/devicetree.py
index a5b915c6..50fa79c2 100644
--- a/pygments/lexers/devicetree.py
+++ b/pygments/lexers/devicetree.py
@@ -1,21 +1,21 @@
# -*- coding: utf-8 -*-
"""
pygments.lexers.devicetree
- ~~~~~~~~~~~~~~~~~~~
-
+ ~~~~~~~~~~~~~~~~~~~~~~~~~~
+
Lexers for Devicetree language.
- :copyright: Copyright 2019-2020 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2020 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
-import re
-
from pygments.lexer import RegexLexer, bygroups, include, default, words
-from pygments.token import *
+from pygments.token import Comment, Keyword, Name, Number, Operator, \
+ Punctuation, String, Text
__all__ = ['DevicetreeLexer']
+
class DevicetreeLexer(RegexLexer):
"""
Lexer for `Devicetree <https://www.devicetree.org/>`_ files.
@@ -32,21 +32,21 @@ class DevicetreeLexer(RegexLexer):
_ws = r'\s*(?:/[*][^*/]*?[*]/\s*)*'
tokens = {
- 'macro': [
- # Include preprocessor directives (C style):
- (r'(#include)(' + _ws + r')([^\n]+)',
- bygroups(Comment.Preproc, Comment.Multiline, Comment.PreprocFile)),
- # Define preprocessor directives (C style):
- (r'(#define)(' + _ws + r')([^\n]+)',
- bygroups(Comment.Preproc, Comment.Multiline, Comment.Preproc)),
- #devicetree style with file:
- (r'(/[^*/\{]+/)(' + _ws + r')("[^\n\{]+")',
- bygroups(Comment.Preproc, Comment.Multiline, Comment.PreprocFile)),
- #devicetree style with property:
- (r'(/[^*/\{]+/)(' + _ws + r')([^\n;\{]*)([;]?)',
- bygroups(Comment.Preproc, Comment.Multiline, Comment.Preproc, Punctuation)),
+ 'macro': [
+ # Include preprocessor directives (C style):
+ (r'(#include)(' + _ws + r')([^\n]+)',
+ bygroups(Comment.Preproc, Comment.Multiline, Comment.PreprocFile)),
+ # Define preprocessor directives (C style):
+ (r'(#define)(' + _ws + r')([^\n]+)',
+ bygroups(Comment.Preproc, Comment.Multiline, Comment.Preproc)),
+ # devicetree style with file:
+ (r'(/[^*/{]+/)(' + _ws + r')("[^\n{]+")',
+ bygroups(Comment.Preproc, Comment.Multiline, Comment.PreprocFile)),
+ # devicetree style with property:
+ (r'(/[^*/{]+/)(' + _ws + r')([^\n;{]*)([;]?)',
+ bygroups(Comment.Preproc, Comment.Multiline, Comment.Preproc, Punctuation)),
],
- 'whitespace': [
+ 'whitespace': [
(r'\n', Text),
(r'\s+', Text),
(r'\\\n', Text), # line continuation
@@ -59,28 +59,28 @@ class DevicetreeLexer(RegexLexer):
(r'(L?)(")', bygroups(String.Affix, String), 'string'),
(r'0x[0-9a-fA-F]+', Number.Hex),
(r'\d+', Number.Integer),
- (r'([^\n\s{}/*]*)(\s*)(:)', bygroups(Name.Label, Text, Punctuation)),
- (words(('compatible', 'model', 'phandle', 'status', '#address-cells',
- '#size-cells', 'reg', 'virtual-reg', 'ranges', 'dma-ranges',
- 'device_type', 'name'), suffix=r'\b'), Keyword.Reserved),
- (r'([~!%^&*+=|?:<>/#-])', Operator),
+ (r'([^\s{}/*]*)(\s*)(:)', bygroups(Name.Label, Text, Punctuation)),
+ (words(('compatible', 'model', 'phandle', 'status', '#address-cells',
+ '#size-cells', 'reg', 'virtual-reg', 'ranges', 'dma-ranges',
+ 'device_type', 'name'), suffix=r'\b'), Keyword.Reserved),
+ (r'([~!%^&*+=|?:<>/#-])', Operator),
(r'[()\[\]{},.]', Punctuation),
- (r'[a-zA-Z_][\w-]*(?=(?:\s*,\s*[a-zA-Z_][\w-]*|(?:' + _ws + r'))*\s*[=;])', Name),
+ (r'[a-zA-Z_][\w-]*(?=(?:\s*,\s*[a-zA-Z_][\w-]*|(?:' + _ws + r'))*\s*[=;])',
+ Name),
(r'[a-zA-Z_]\w*', Name.Attribute),
],
- 'root': [
-
- include('whitespace'),
- include('macro'),
-
- # Nodes
- (r'([^/*@\n\s&]+|/)(@?)([0-9a-fA-F,]*)(' + _ws + r')(\{)',
- bygroups( Name.Function, Operator, Number.Integer,
+ 'root': [
+ include('whitespace'),
+ include('macro'),
+
+ # Nodes
+ (r'([^/*@\s&]+|/)(@?)([0-9a-fA-F,]*)(' + _ws + r')(\{)',
+ bygroups(Name.Function, Operator, Number.Integer,
Comment.Multiline, Punctuation), 'node'),
- default('statement'),
- ],
- 'statement': [
+ default('statement'),
+ ],
+ 'statement': [
include('whitespace'),
include('statements'),
(';', Punctuation, '#pop'),
@@ -88,10 +88,10 @@ class DevicetreeLexer(RegexLexer):
'node': [
include('whitespace'),
include('macro'),
-
- (r'([^/*@\n\s&]+|/)(@?)([0-9a-fA-F,]*)(' + _ws + r')(\{)',
- bygroups(Name.Function, Operator, Number.Integer,
- Comment.Multiline, Punctuation), '#push'),
+
+ (r'([^/*@\s&]+|/)(@?)([0-9a-fA-F,]*)(' + _ws + r')(\{)',
+ bygroups(Name.Function, Operator, Number.Integer,
+ Comment.Multiline, Punctuation), '#push'),
include('statements'),
@@ -106,5 +106,4 @@ class DevicetreeLexer(RegexLexer):
(r'\\\n', String), # line continuation
(r'\\', String), # stray backslash
],
-
}
diff --git a/pygments/lexers/diff.py b/pygments/lexers/diff.py
index bdb6d77b..3a775ca0 100644
--- a/pygments/lexers/diff.py
+++ b/pygments/lexers/diff.py
@@ -5,7 +5,7 @@
Lexers for diff/patch formats.
- :copyright: Copyright 2006-2019 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2020 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
diff --git a/pygments/lexers/dotnet.py b/pygments/lexers/dotnet.py
index 83946054..c8635524 100644
--- a/pygments/lexers/dotnet.py
+++ b/pygments/lexers/dotnet.py
@@ -5,7 +5,7 @@
Lexers for .net languages.
- :copyright: Copyright 2006-2019 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2020 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
import re
@@ -507,8 +507,7 @@ class CSharpAspxLexer(DelegatingLexer):
mimetypes = []
def __init__(self, **options):
- super(CSharpAspxLexer, self).__init__(CSharpLexer, GenericAspxLexer,
- **options)
+ super().__init__(CSharpLexer, GenericAspxLexer, **options)
def analyse_text(text):
if re.search(r'Page\s*Language="C#"', text, re.I) is not None:
@@ -528,8 +527,7 @@ class VbNetAspxLexer(DelegatingLexer):
mimetypes = []
def __init__(self, **options):
- super(VbNetAspxLexer, self).__init__(VbNetLexer, GenericAspxLexer,
- **options)
+ super().__init__(VbNetLexer, GenericAspxLexer, **options)
def analyse_text(text):
if re.search(r'Page\s*Language="Vb"', text, re.I) is not None:
diff --git a/pygments/lexers/dsls.py b/pygments/lexers/dsls.py
index 7174c16e..a6a5e3b4 100644
--- a/pygments/lexers/dsls.py
+++ b/pygments/lexers/dsls.py
@@ -5,7 +5,7 @@
Lexers for various domain-specific languages.
- :copyright: Copyright 2006-2019 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2020 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
@@ -14,7 +14,7 @@ import re
from pygments.lexer import ExtendedRegexLexer, RegexLexer, bygroups, words, \
include, default, this, using, combined
from pygments.token import Text, Comment, Operator, Keyword, Name, String, \
- Number, Punctuation, Literal, Whitespace
+ Number, Punctuation, Whitespace
__all__ = ['ProtoBufLexer', 'ZeekLexer', 'PuppetLexer', 'RslLexer',
'MscgenLexer', 'VGLLexer', 'AlloyLexer', 'PanLexer',
@@ -42,7 +42,7 @@ class ProtoBufLexer(RegexLexer):
(words((
'import', 'option', 'optional', 'required', 'repeated',
'reserved', 'default', 'packed', 'ctype', 'extensions', 'to',
- 'max', 'rpc', 'returns', 'oneof'), prefix=r'\b', suffix=r'\b'),
+ 'max', 'rpc', 'returns', 'oneof', 'syntax'), prefix=r'\b', suffix=r'\b'),
Keyword),
(words((
'int32', 'int64', 'uint32', 'uint64', 'sint32', 'sint64',
@@ -332,7 +332,7 @@ class ZeekLexer(RegexLexer):
'string': [
(r'\\.', String.Escape),
- (r'%-?[0-9]*(\.[0-9]+)?[DTdxsefg]', String.Escape),
+ (r'%-?[0-9]*(\.[0-9]+)?[DTd-gsx]', String.Escape),
(r'"', String, '#pop'),
(r'.', String),
],
diff --git a/pygments/lexers/dylan.py b/pygments/lexers/dylan.py
index dd972bf4..4c8874f7 100644
--- a/pygments/lexers/dylan.py
+++ b/pygments/lexers/dylan.py
@@ -5,7 +5,7 @@
Lexers for the Dylan language.
- :copyright: Copyright 2006-2019 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2020 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
@@ -277,13 +277,11 @@ class DylanConsoleLexer(Lexer):
curcode += line[end:]
else:
if curcode:
- for item in do_insertions(insertions,
- dylexer.get_tokens_unprocessed(curcode)):
- yield item
+ yield from do_insertions(insertions,
+ dylexer.get_tokens_unprocessed(curcode))
curcode = ''
insertions = []
yield match.start(), Generic.Output, line
if curcode:
- for item in do_insertions(insertions,
- dylexer.get_tokens_unprocessed(curcode)):
- yield item
+ yield from do_insertions(insertions,
+ dylexer.get_tokens_unprocessed(curcode))
diff --git a/pygments/lexers/ecl.py b/pygments/lexers/ecl.py
index c695c18c..63ae91e8 100644
--- a/pygments/lexers/ecl.py
+++ b/pygments/lexers/ecl.py
@@ -5,7 +5,7 @@
Lexers for the ECL language.
- :copyright: Copyright 2006-2019 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2020 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
diff --git a/pygments/lexers/eiffel.py b/pygments/lexers/eiffel.py
index 3f7ce55d..2c75cc46 100644
--- a/pygments/lexers/eiffel.py
+++ b/pygments/lexers/eiffel.py
@@ -5,7 +5,7 @@
Lexer for the Eiffel language.
- :copyright: Copyright 2006-2019 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2020 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
diff --git a/pygments/lexers/elm.py b/pygments/lexers/elm.py
index ee941d7d..46c12eda 100644
--- a/pygments/lexers/elm.py
+++ b/pygments/lexers/elm.py
@@ -5,7 +5,7 @@
Lexer for the Elm programming language.
- :copyright: Copyright 2006-2019 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2020 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
@@ -40,7 +40,7 @@ class ElmLexer(RegexLexer):
reservedWords = words((
'alias', 'as', 'case', 'else', 'if', 'import', 'in',
'let', 'module', 'of', 'port', 'then', 'type', 'where',
- ), suffix=r'\b')
+ ), suffix=r'\b')
tokens = {
'root': [
@@ -68,7 +68,7 @@ class ElmLexer(RegexLexer):
(reservedWords, Keyword.Reserved),
# Types
- (r'[A-Z]\w*', Keyword.Type),
+ (r'[A-Z][a-zA-Z0-9_]*', Keyword.Type),
# Main
(specialName, Keyword.Reserved),
diff --git a/pygments/lexers/email.py b/pygments/lexers/email.py
index 5ad225bb..776db88c 100644
--- a/pygments/lexers/email.py
+++ b/pygments/lexers/email.py
@@ -5,7 +5,7 @@
Lexer for the raw E-mail.
- :copyright: Copyright 2006-2019 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2020 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
@@ -25,7 +25,7 @@ class EmailHeaderLexer(RegexLexer):
"""
def __init__(self, **options):
- super(EmailHeaderLexer, self).__init__(**options)
+ super().__init__(**options)
self.highlight_x = get_bool_opt(options, "highlight-X-header", False)
def get_x_header_tokens(self, match):
@@ -36,8 +36,7 @@ class EmailHeaderLexer(RegexLexer):
# content
default_actions = self.get_tokens_unprocessed(
match.group(2), stack=("root", "header"))
- for item in default_actions:
- yield item
+ yield from default_actions
else:
# lowlight
yield match.start(1), Comment.Special, match.group(1)
@@ -149,6 +148,4 @@ class EmailLexer(DelegatingLexer):
mimetypes = ["message/rfc822"]
def __init__(self, **options):
- super(EmailLexer, self).__init__(
- EmailHeaderLexer, MIMELexer, Comment, **options
- )
+ super().__init__(EmailHeaderLexer, MIMELexer, Comment, **options)
diff --git a/pygments/lexers/erlang.py b/pygments/lexers/erlang.py
index c90bc4fd..fdc83451 100644
--- a/pygments/lexers/erlang.py
+++ b/pygments/lexers/erlang.py
@@ -5,7 +5,7 @@
Lexers for Erlang.
- :copyright: Copyright 2006-2019 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2020 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
@@ -180,9 +180,8 @@ class ErlangShellLexer(Lexer):
curcode += line[end:]
else:
if curcode:
- for item in do_insertions(insertions,
- erlexer.get_tokens_unprocessed(curcode)):
- yield item
+ yield from do_insertions(insertions,
+ erlexer.get_tokens_unprocessed(curcode))
curcode = ''
insertions = []
if line.startswith('*'):
@@ -190,9 +189,8 @@ class ErlangShellLexer(Lexer):
else:
yield match.start(), Generic.Output, line
if curcode:
- for item in do_insertions(insertions,
- erlexer.get_tokens_unprocessed(curcode)):
- yield item
+ yield from do_insertions(insertions,
+ erlexer.get_tokens_unprocessed(curcode))
def gen_elixir_string_rules(name, symbol, token):
@@ -207,10 +205,10 @@ def gen_elixir_string_rules(name, symbol, token):
return states
-def gen_elixir_sigstr_rules(term, token, interpol=True):
+def gen_elixir_sigstr_rules(term, term_class, token, interpol=True):
if interpol:
return [
- (r'[^#%s\\]+' % (term,), token),
+ (r'[^#%s\\]+' % (term_class,), token),
include('escapes'),
(r'\\.', token),
(r'%s[a-zA-Z]*' % (term,), token, '#pop'),
@@ -218,7 +216,7 @@ def gen_elixir_sigstr_rules(term, token, interpol=True):
]
else:
return [
- (r'[^%s\\]+' % (term,), token),
+ (r'[^%s\\]+' % (term_class,), token),
(r'\\.', token),
(r'%s[a-zA-Z]*' % (term,), token, '#pop'),
]
@@ -291,14 +289,14 @@ class ElixirLexer(RegexLexer):
def gen_elixir_sigil_rules():
# all valid sigil terminators (excluding heredocs)
terminators = [
- (r'\{', r'\}', 'cb'),
- (r'\[', r'\]', 'sb'),
- (r'\(', r'\)', 'pa'),
- (r'<', r'>', 'ab'),
- (r'/', r'/', 'slas'),
- (r'\|', r'\|', 'pipe'),
- ('"', '"', 'quot'),
- ("'", "'", 'apos'),
+ (r'\{', r'\}', '}', 'cb'),
+ (r'\[', r'\]', r'\]', 'sb'),
+ (r'\(', r'\)', ')', 'pa'),
+ ('<', '>', '>', 'ab'),
+ ('/', '/', '/', 'slas'),
+ (r'\|', r'\|', '|', 'pipe'),
+ ('"', '"', '"', 'quot'),
+ ("'", "'", "'", 'apos'),
]
# heredocs have slightly different rules
@@ -328,14 +326,15 @@ class ElixirLexer(RegexLexer):
include('heredoc_no_interpol'),
]
- for lterm, rterm, name in terminators:
+ for lterm, rterm, rterm_class, name in terminators:
states['sigils'] += [
(r'~[a-z]' + lterm, token, name + '-intp'),
(r'~[A-Z]' + lterm, token, name + '-no-intp'),
]
- states[name + '-intp'] = gen_elixir_sigstr_rules(rterm, token)
+ states[name + '-intp'] = \
+ gen_elixir_sigstr_rules(rterm, rterm_class, token)
states[name + '-no-intp'] = \
- gen_elixir_sigstr_rules(rterm, token, interpol=False)
+ gen_elixir_sigstr_rules(rterm, rterm_class, token, interpol=False)
return states
@@ -520,14 +519,12 @@ class ElixirConsoleLexer(Lexer):
curcode += line[end:]
else:
if curcode:
- for item in do_insertions(
- insertions, exlexer.get_tokens_unprocessed(curcode)):
- yield item
+ yield from do_insertions(
+ insertions, exlexer.get_tokens_unprocessed(curcode))
curcode = ''
insertions = []
token = Generic.Error if in_error else Generic.Output
yield match.start(), token, line
if curcode:
- for item in do_insertions(
- insertions, exlexer.get_tokens_unprocessed(curcode)):
- yield item
+ yield from do_insertions(
+ insertions, exlexer.get_tokens_unprocessed(curcode))
diff --git a/pygments/lexers/esoteric.py b/pygments/lexers/esoteric.py
index 6946c902..6ac3dd9d 100644
--- a/pygments/lexers/esoteric.py
+++ b/pygments/lexers/esoteric.py
@@ -5,7 +5,7 @@
Lexers for esoteric languages.
- :copyright: Copyright 2006-2019 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2020 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
diff --git a/pygments/lexers/ezhil.py b/pygments/lexers/ezhil.py
index ee465885..37d793dd 100644
--- a/pygments/lexers/ezhil.py
+++ b/pygments/lexers/ezhil.py
@@ -5,7 +5,7 @@
Pygments lexers for Ezhil language.
- :copyright: Copyright 2006-2019 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2020 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
@@ -65,5 +65,5 @@ class EzhilLexer(RegexLexer):
}
def __init__(self, **options):
- super(EzhilLexer, self).__init__(**options)
+ super().__init__(**options)
self.encoding = options.get('encoding', 'utf-8')
diff --git a/pygments/lexers/factor.py b/pygments/lexers/factor.py
index 4aed8f6e..60160d68 100644
--- a/pygments/lexers/factor.py
+++ b/pygments/lexers/factor.py
@@ -5,7 +5,7 @@
Lexers for the Factor language.
- :copyright: Copyright 2006-2019 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2020 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
diff --git a/pygments/lexers/fantom.py b/pygments/lexers/fantom.py
index b1b0dd94..a47879da 100644
--- a/pygments/lexers/fantom.py
+++ b/pygments/lexers/fantom.py
@@ -5,7 +5,7 @@
Lexer for the Fantom language.
- :copyright: Copyright 2006-2019 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2020 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
diff --git a/pygments/lexers/felix.py b/pygments/lexers/felix.py
index cf768a45..8fea79a2 100644
--- a/pygments/lexers/felix.py
+++ b/pygments/lexers/felix.py
@@ -5,7 +5,7 @@
Lexer for the Felix language.
- :copyright: Copyright 2006-2019 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2020 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
diff --git a/pygments/lexers/floscript.py b/pygments/lexers/floscript.py
index d5744331..c3ac887d 100644
--- a/pygments/lexers/floscript.py
+++ b/pygments/lexers/floscript.py
@@ -5,7 +5,7 @@
Lexer for FloScript
- :copyright: Copyright 2006-2019 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2020 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
diff --git a/pygments/lexers/forth.py b/pygments/lexers/forth.py
index 934bf478..14834178 100644
--- a/pygments/lexers/forth.py
+++ b/pygments/lexers/forth.py
@@ -5,15 +5,14 @@
Lexer for the Forth language.
- :copyright: Copyright 2006-2019 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2020 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
import re
-from pygments.lexer import RegexLexer, include, bygroups
-from pygments.token import Error, Punctuation, Literal, Token, \
- Text, Comment, Operator, Keyword, Name, String, Number, Generic
+from pygments.lexer import RegexLexer, bygroups
+from pygments.token import Text, Comment, Keyword, Name, String, Number
__all__ = ['ForthLexer']
@@ -30,12 +29,6 @@ class ForthLexer(RegexLexer):
filenames = ['*.frt', '*.fs']
mimetypes = ['application/x-forth']
- delimiter = r'\s'
- delimiter_end = r'(?=[%s])' % delimiter
-
- valid_name_chars = r'[^%s]' % delimiter
- valid_name = r"%s+%s" % (valid_name_chars, delimiter_end)
-
flags = re.IGNORECASE | re.MULTILINE
tokens = {
@@ -71,7 +64,7 @@ class ForthLexer(RegexLexer):
r'then|type|u\.|u\<|um\*|um\/mod|unloop|until|'
r'variable|while|word|xor|\[char\]|\[\'\]|'
r'@|!|\#|<\#|\#>|:|;|\+|-|\*|\/|,|<|>|\|1\+|1-|\.|'
- # *** Wordset CORE-EXT
+ # *** Wordset CORE-EXT
r'\.r|0<>|'
r'0>|2>r|2r>|2r@|:noname|\?do|again|c\"|'
r'case|compile,|endcase|endof|erase|false|'
@@ -79,38 +72,38 @@ class ForthLexer(RegexLexer):
r'restore-input|roll|save-input|source-id|to|'
r'true|tuck|u\.r|u>|unused|value|within|'
r'\[compile\]|'
- # *** Wordset CORE-EXT-obsolescent
+ # *** Wordset CORE-EXT-obsolescent
r'\#tib|convert|expect|query|span|'
r'tib|'
- # *** Wordset DOUBLE
+ # *** Wordset DOUBLE
r'2constant|2literal|2variable|d\+|d-|'
r'd\.|d\.r|d0<|d0=|d2\*|d2\/|d<|d=|d>s|'
r'dabs|dmax|dmin|dnegate|m\*\/|m\+|'
- # *** Wordset DOUBLE-EXT
+ # *** Wordset DOUBLE-EXT
r'2rot|du<|'
- # *** Wordset EXCEPTION
+ # *** Wordset EXCEPTION
r'catch|throw|'
- # *** Wordset EXCEPTION-EXT
+ # *** Wordset EXCEPTION-EXT
r'abort|abort\"|'
- # *** Wordset FACILITY
+ # *** Wordset FACILITY
r'at-xy|key\?|page|'
- # *** Wordset FACILITY-EXT
+ # *** Wordset FACILITY-EXT
r'ekey|ekey>char|ekey\?|emit\?|ms|time&date|'
- # *** Wordset FILE
+ # *** Wordset FILE
r'BIN|CLOSE-FILE|CREATE-FILE|DELETE-FILE|FILE-POSITION|'
r'FILE-SIZE|INCLUDE-FILE|INCLUDED|OPEN-FILE|R\/O|'
r'R\/W|READ-FILE|READ-LINE|REPOSITION-FILE|RESIZE-FILE|'
r'S\"|SOURCE-ID|W/O|WRITE-FILE|WRITE-LINE|'
- # *** Wordset FILE-EXT
+ # *** Wordset FILE-EXT
r'FILE-STATUS|FLUSH-FILE|REFILL|RENAME-FILE|'
- # *** Wordset FLOAT
+ # *** Wordset FLOAT
r'>float|d>f|'
r'f!|f\*|f\+|f-|f\/|f0<|f0=|f<|f>d|f@|'
r'falign|faligned|fconstant|fdepth|fdrop|fdup|'
r'fliteral|float\+|floats|floor|fmax|fmin|'
r'fnegate|fover|frot|fround|fswap|fvariable|'
r'represent|'
- # *** Wordset FLOAT-EXT
+ # *** Wordset FLOAT-EXT
r'df!|df@|dfalign|dfaligned|dfloat\+|'
r'dfloats|f\*\*|f\.|fabs|facos|facosh|falog|'
r'fasin|fasinh|fatan|fatan2|fatanh|fcos|fcosh|'
@@ -118,34 +111,34 @@ class ForthLexer(RegexLexer):
r'fsincos|fsinh|fsqrt|ftan|ftanh|f~|precision|'
r'set-precision|sf!|sf@|sfalign|sfaligned|sfloat\+|'
r'sfloats|'
- # *** Wordset LOCAL
+ # *** Wordset LOCAL
r'\(local\)|to|'
- # *** Wordset LOCAL-EXT
+ # *** Wordset LOCAL-EXT
r'locals\||'
- # *** Wordset MEMORY
+ # *** Wordset MEMORY
r'allocate|free|resize|'
- # *** Wordset SEARCH
+ # *** Wordset SEARCH
r'definitions|find|forth-wordlist|get-current|'
r'get-order|search-wordlist|set-current|set-order|'
r'wordlist|'
- # *** Wordset SEARCH-EXT
+ # *** Wordset SEARCH-EXT
r'also|forth|only|order|previous|'
- # *** Wordset STRING
+ # *** Wordset STRING
r'-trailing|\/string|blank|cmove|cmove>|compare|'
r'search|sliteral|'
- # *** Wordset TOOLS
+ # *** Wordset TOOLS
r'.s|dump|see|words|'
- # *** Wordset TOOLS-EXT
+ # *** Wordset TOOLS-EXT
r';code|'
r'ahead|assembler|bye|code|cs-pick|cs-roll|'
r'editor|state|\[else\]|\[if\]|\[then\]|'
- # *** Wordset TOOLS-EXT-obsolescent
- r'forget|'
- # Forth 2012
- r'defer|defer@|defer!|action-of|begin-structure|field:|buffer:|'
- r'parse-name|buffer:|traverse-wordlist|n>r|nr>|2value|fvalue|'
- r'name>interpret|name>compile|name>string|'
- r'cfield:|end-structure)'+delimiter, Keyword),
+ # *** Wordset TOOLS-EXT-obsolescent
+ r'forget|'
+ # Forth 2012
+ r'defer|defer@|defer!|action-of|begin-structure|field:|buffer:|'
+ r'parse-name|buffer:|traverse-wordlist|n>r|nr>|2value|fvalue|'
+ r'name>interpret|name>compile|name>string|'
+ r'cfield:|end-structure)(?!\S)', Keyword),
# Numbers
(r'(\$[0-9A-F]+)', Number.Hex),
@@ -156,18 +149,18 @@ class ForthLexer(RegexLexer):
r'itype|icompare|sp@|sp!|rp@|rp!|up@|up!|'
r'>a|a>|a@|a!|a@+|a@-|>b|b>|b@|b!|b@+|b@-|'
r'find-name|1ms|'
- r'sp0|rp0|\(evaluate\)|int-trap|int!)' + delimiter,
+ r'sp0|rp0|\(evaluate\)|int-trap|int!)(?!\S)',
Name.Constant),
# a proposal
(r'(do-recognizer|r:fail|recognizer:|get-recognizers|'
r'set-recognizers|r:float|r>comp|r>int|r>post|'
r'r:name|r:word|r:dnum|r:num|recognizer|forth-recognizer|'
- r'rec:num|rec:float|rec:word)' + delimiter, Name.Decorator),
+ r'rec:num|rec:float|rec:word)(?!\S)', Name.Decorator),
# defining words. The next word is a new command name
(r'(Evalue|Rvalue|Uvalue|Edefer|Rdefer|Udefer)(\s+)',
bygroups(Keyword.Namespace, Text), 'worddef'),
- (valid_name, Name.Function), # Anything else is executed
+ (r'\S+', Name.Function), # Anything else is executed
],
'worddef': [
diff --git a/pygments/lexers/fortran.py b/pygments/lexers/fortran.py
index 75348b1e..e0619c7f 100644
--- a/pygments/lexers/fortran.py
+++ b/pygments/lexers/fortran.py
@@ -5,7 +5,7 @@
Lexers for Fortran languages.
- :copyright: Copyright 2006-2019 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2020 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
diff --git a/pygments/lexers/foxpro.py b/pygments/lexers/foxpro.py
index 868a44d8..d6ccc62d 100644
--- a/pygments/lexers/foxpro.py
+++ b/pygments/lexers/foxpro.py
@@ -5,7 +5,7 @@
Simple lexer for Microsoft Visual FoxPro source code.
- :copyright: Copyright 2006-2019 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2020 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
@@ -36,7 +36,7 @@ class FoxProLexer(RegexLexer):
tokens = {
'root': [
- (r';\s*\n', Punctuation), # consume newline
+ (r';\s*\n', Punctuation), # consume newline
(r'(^|\n)\s*', Text, 'newline'),
# Square brackets may be used for array indices
diff --git a/pygments/lexers/freefem.py b/pygments/lexers/freefem.py
index 3e9ac8e8..ca182513 100644
--- a/pygments/lexers/freefem.py
+++ b/pygments/lexers/freefem.py
@@ -5,7 +5,7 @@
Lexer for FreeFem++ language.
- :copyright: Copyright 2006-2019 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2020 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
diff --git a/pygments/lexers/functional.py b/pygments/lexers/functional.py
index 2d94aca3..d50038c7 100644
--- a/pygments/lexers/functional.py
+++ b/pygments/lexers/functional.py
@@ -5,7 +5,7 @@
Just export lexer classes previously contained in this module.
- :copyright: Copyright 2006-2019 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2020 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
diff --git a/pygments/lexers/gdscript.py b/pygments/lexers/gdscript.py
index 8dec78fb..7d9b9e6e 100644
--- a/pygments/lexers/gdscript.py
+++ b/pygments/lexers/gdscript.py
@@ -1,36 +1,23 @@
# -*- coding: utf-8 -*-
"""
pygments.lexers.gdscript
- ~~~~~~~~~~~~~~~~~~~~~~
+ ~~~~~~~~~~~~~~~~~~~~~~~~
Lexer for GDScript.
- :copyright: Copyright 2xxx by The Godot Engine Community
- :license: BSD, see LICENSE for details.
+ Modified by Daniel J. Ramirez <djrmuv@gmail.com> based on the original
+ python.py.
- modified by Daniel J. Ramirez <djrmuv@gmail.com> based on the original python.py pygment
+ :copyright: Copyright 2006-2020 by the Pygments team, see AUTHORS.
+ :license: BSD, see LICENSE for details.
"""
import re
-from pygments.lexer import (
- RegexLexer,
- include,
- bygroups,
- default,
- words,
- combined,
-)
-from pygments.token import (
- Text,
- Comment,
- Operator,
- Keyword,
- Name,
- String,
- Number,
- Punctuation,
-)
+from pygments.lexer import RegexLexer, include, bygroups, default, words, \
+ combined
+from pygments.token import Text, Comment, Operator, Keyword, Name, String, \
+ Number, Punctuation
__all__ = ["GDScriptLexer"]
diff --git a/pygments/lexers/go.py b/pygments/lexers/go.py
index f6bb7fc8..3dc8df82 100644
--- a/pygments/lexers/go.py
+++ b/pygments/lexers/go.py
@@ -5,7 +5,7 @@
Lexers for the Google Go language.
- :copyright: Copyright 2006-2019 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2020 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
diff --git a/pygments/lexers/grammar_notation.py b/pygments/lexers/grammar_notation.py
index 3cafd32c..ffe2ab14 100644
--- a/pygments/lexers/grammar_notation.py
+++ b/pygments/lexers/grammar_notation.py
@@ -5,7 +5,7 @@
Lexers for grammer notations like BNF.
- :copyright: Copyright 2006-2019 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2020 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
diff --git a/pygments/lexers/graph.py b/pygments/lexers/graph.py
index f7b45e29..24095d82 100644
--- a/pygments/lexers/graph.py
+++ b/pygments/lexers/graph.py
@@ -5,7 +5,7 @@
Lexers for graph query languages.
- :copyright: Copyright 2006-2019 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2020 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
diff --git a/pygments/lexers/graphics.py b/pygments/lexers/graphics.py
index b0b9145e..fafc155f 100644
--- a/pygments/lexers/graphics.py
+++ b/pygments/lexers/graphics.py
@@ -5,7 +5,7 @@
Lexers for computer graphics and plotting related languages.
- :copyright: Copyright 2006-2019 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2020 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
diff --git a/pygments/lexers/haskell.py b/pygments/lexers/haskell.py
index 1a284078..da86a507 100644
--- a/pygments/lexers/haskell.py
+++ b/pygments/lexers/haskell.py
@@ -5,7 +5,7 @@
Lexers for Haskell and related languages.
- :copyright: Copyright 2006-2019 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2020 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
@@ -559,8 +559,7 @@ class LiterateLexer(Lexer):
latex += line
insertions.append((len(code),
list(lxlexer.get_tokens_unprocessed(latex))))
- for item in do_insertions(insertions, self.baselexer.get_tokens_unprocessed(code)):
- yield item
+ yield from do_insertions(insertions, self.baselexer.get_tokens_unprocessed(code))
class LiterateHaskellLexer(LiterateLexer):
diff --git a/pygments/lexers/haxe.py b/pygments/lexers/haxe.py
index b3575080..0a58aefc 100644
--- a/pygments/lexers/haxe.py
+++ b/pygments/lexers/haxe.py
@@ -5,7 +5,7 @@
Lexers for Haxe and related stuff.
- :copyright: Copyright 2006-2019 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2020 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
diff --git a/pygments/lexers/hdl.py b/pygments/lexers/hdl.py
index a44ff9e5..a8a4fc6b 100644
--- a/pygments/lexers/hdl.py
+++ b/pygments/lexers/hdl.py
@@ -5,7 +5,7 @@
Lexers for hardware descriptor languages.
- :copyright: Copyright 2006-2019 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2020 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
@@ -183,7 +183,7 @@ class SystemVerilogLexer(RegexLexer):
(r'[~!%^&*+=|?:<>/-]', Operator),
(words(('inside', 'dist'), suffix=r'\b'), Operator.Word),
- (r'[()\[\],.;\']', Punctuation),
+ (r'[()\[\],.;\'$]', Punctuation),
(r'`[a-zA-Z_]\w*', Name.Constant),
(words((
@@ -234,8 +234,8 @@ class SystemVerilogLexer(RegexLexer):
bygroups(Keyword.Declaration, Text, Name.Class)),
(r'(extends)(\s+)([a-zA-Z_]\w*)',
bygroups(Keyword.Declaration, Text, Name.Class)),
- (r'(endclass\b)((\s*)(:)(\s*)([a-zA-Z_]\w*))?',
- bygroups(Keyword.Declaration, None, Text, Punctuation, Text, Name.Class)),
+ (r'(endclass\b)(?:(\s*)(:)(\s*)([a-zA-Z_]\w*))?',
+ bygroups(Keyword.Declaration, Text, Punctuation, Text, Name.Class)),
(words((
# Variable types
diff --git a/pygments/lexers/hexdump.py b/pygments/lexers/hexdump.py
index da285432..4ae57e8b 100644
--- a/pygments/lexers/hexdump.py
+++ b/pygments/lexers/hexdump.py
@@ -5,7 +5,7 @@
Lexers for hexadecimal dumps.
- :copyright: Copyright 2006-2019 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2020 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
diff --git a/pygments/lexers/html.py b/pygments/lexers/html.py
index cbef4f7e..27714808 100644
--- a/pygments/lexers/html.py
+++ b/pygments/lexers/html.py
@@ -5,7 +5,7 @@
Lexers for HTML, XML and related markup.
- :copyright: Copyright 2006-2019 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2020 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
diff --git a/pygments/lexers/idl.py b/pygments/lexers/idl.py
index 292f2de7..0ca37088 100644
--- a/pygments/lexers/idl.py
+++ b/pygments/lexers/idl.py
@@ -5,7 +5,7 @@
Lexers for IDL.
- :copyright: Copyright 2006-2019 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2020 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
diff --git a/pygments/lexers/igor.py b/pygments/lexers/igor.py
index f4a22e1e..666d2b65 100644
--- a/pygments/lexers/igor.py
+++ b/pygments/lexers/igor.py
@@ -5,7 +5,7 @@
Lexers for Igor Pro.
- :copyright: Copyright 2006-2019 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2020 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
@@ -391,15 +391,10 @@ class IgorLexer(RegexLexer):
'WaveRefIndexedDFR', 'WaveRefsEqual', 'WaveRefWaveToList', 'WaveTextEncoding',
'WaveType', 'WaveUnits', 'WhichListItem', 'WinList', 'WinName', 'WinRecreation',
'WinType', 'wnoise', 'xcsr', 'XWaveName', 'XWaveRefFromTrace', 'x2pnt', 'zcsr',
- 'ZernikeR', 'zeromq_client_connect', 'zeromq_client_connect',
- 'zeromq_client_recv', 'zeromq_client_recv', 'zeromq_client_send',
- 'zeromq_client_send', 'zeromq_handler_start', 'zeromq_handler_start',
- 'zeromq_handler_stop', 'zeromq_handler_stop', 'zeromq_server_bind',
- 'zeromq_server_bind', 'zeromq_server_recv', 'zeromq_server_recv',
- 'zeromq_server_send', 'zeromq_server_send', 'zeromq_set', 'zeromq_set',
- 'zeromq_stop', 'zeromq_stop', 'zeromq_test_callfunction',
- 'zeromq_test_callfunction', 'zeromq_test_serializeWave',
- 'zeromq_test_serializeWave', 'zeta'
+ 'ZernikeR', 'zeromq_client_connect', 'zeromq_client_recv',
+ 'zeromq_client_send', 'zeromq_handler_start', 'zeromq_handler_stop',
+ 'zeromq_server_bind', 'zeromq_server_recv', 'zeromq_server_send', 'zeromq_set',
+ 'zeromq_stop', 'zeromq_test_callfunction', 'zeromq_test_serializeWave', 'zeta'
)
tokens = {
diff --git a/pygments/lexers/inferno.py b/pygments/lexers/inferno.py
index f29808cf..cdadd626 100644
--- a/pygments/lexers/inferno.py
+++ b/pygments/lexers/inferno.py
@@ -5,7 +5,7 @@
Lexers for Inferno os and all the related stuff.
- :copyright: Copyright 2006-2019 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2020 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
diff --git a/pygments/lexers/installers.py b/pygments/lexers/installers.py
index 8c8c39c8..d0aa4fcd 100644
--- a/pygments/lexers/installers.py
+++ b/pygments/lexers/installers.py
@@ -5,7 +5,7 @@
Lexers for installer/packager DSLs and formats.
- :copyright: Copyright 2006-2019 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2020 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
diff --git a/pygments/lexers/int_fiction.py b/pygments/lexers/int_fiction.py
index 393438f8..e3d35088 100644
--- a/pygments/lexers/int_fiction.py
+++ b/pygments/lexers/int_fiction.py
@@ -5,7 +5,7 @@
Lexers for interactive fiction languages.
- :copyright: Copyright 2006-2019 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2020 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
diff --git a/pygments/lexers/iolang.py b/pygments/lexers/iolang.py
index f33c8713..d6c022d2 100644
--- a/pygments/lexers/iolang.py
+++ b/pygments/lexers/iolang.py
@@ -5,7 +5,7 @@
Lexers for the Io language.
- :copyright: Copyright 2006-2019 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2020 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
diff --git a/pygments/lexers/j.py b/pygments/lexers/j.py
index baec89ef..b0103a48 100644
--- a/pygments/lexers/j.py
+++ b/pygments/lexers/j.py
@@ -5,7 +5,7 @@
Lexer for the J programming language.
- :copyright: Copyright 2006-2019 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2020 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
diff --git a/pygments/lexers/javascript.py b/pygments/lexers/javascript.py
index 48c6aafd..f36863dc 100644
--- a/pygments/lexers/javascript.py
+++ b/pygments/lexers/javascript.py
@@ -5,7 +5,7 @@
Lexers for JavaScript and related languages.
- :copyright: Copyright 2006-2019 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2020 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
@@ -64,11 +64,17 @@ class JavascriptLexer(RegexLexer):
(r'\A#! ?/.*?\n', Comment.Hashbang), # recognized by node.js
(r'^(?=\s|/|<!--)', Text, 'slashstartsregex'),
include('commentsandwhitespace'),
- (r'(\.\d+|[0-9]+\.[0-9]*)([eE][-+]?[0-9]+)?', Number.Float),
- (r'0[bB][01]+', Number.Bin),
- (r'0[oO][0-7]+', Number.Oct),
- (r'0[xX][0-9a-fA-F]+', Number.Hex),
- (r'[0-9]+', Number.Integer),
+
+ # Numeric literals
+ (r'0[bB][01]+n?', Number.Bin),
+ (r'0[oO]?[0-7]+n?', Number.Oct), # Browsers support "0o7" and "07" notations
+ (r'0[xX][0-9a-fA-F]+n?', Number.Hex),
+ (r'[0-9]+n', Number.Integer), # Javascript BigInt requires an "n" postfix
+ # Javascript doesn't have actual integer literals, so every other
+ # numeric literal is handled by the regex below (including "normal")
+ # integers
+ (r'(\.[0-9]+|[0-9]+\.[0-9]*|[0-9]+)([eE][-+]?[0-9]+)?', Number.Float),
+
(r'\.\.\.|=>', Punctuation),
(r'\+\+|--|~|&&|\?|:|\|\||\\(?=\n)|'
r'(<<|>>>?|==?|!=?|[-<>+*%&|^/])=?', Operator, 'slashstartsregex'),
@@ -263,7 +269,7 @@ class LiveScriptLexer(RegexLexer):
default('#pop'),
],
'root': [
- (r'^(?=\s|/)', Text, 'slashstartsregex'),
+ (r'\A(?=\s|/)', Text, 'slashstartsregex'),
include('commentsandwhitespace'),
(r'(?:\([^()]+\))?[ ]*[~-]{1,2}>|'
r'(?:\(?[^()\n]+\)?)?[ ]*<[~-]{1,2}', Name.Function),
@@ -1038,7 +1044,7 @@ class CoffeeScriptLexer(RegexLexer):
_operator_re = (
r'\+\+|~|&&|\band\b|\bor\b|\bis\b|\bisnt\b|\bnot\b|\?|:|'
r'\|\||\\(?=\n)|'
- r'(<<|>>>?|==?(?!>)|!=?|=(?!>)|-(?!>)|[<>+*`%&\|\^/])=?')
+ r'(<<|>>>?|==?(?!>)|!=?|=(?!>)|-(?!>)|[<>+*`%&|\^/])=?')
flags = re.DOTALL
tokens = {
@@ -1066,7 +1072,7 @@ class CoffeeScriptLexer(RegexLexer):
],
'root': [
include('commentsandwhitespace'),
- (r'^(?=\s|/)', Text, 'slashstartsregex'),
+ (r'\A(?=\s|/)', Text, 'slashstartsregex'),
(_operator_re, Operator, 'slashstartsregex'),
(r'(?:\([^()]*\))?\s*[=-]>', Name.Function, 'slashstartsregex'),
(r'[{(\[;,]', Punctuation, 'slashstartsregex'),
diff --git a/pygments/lexers/julia.py b/pygments/lexers/julia.py
index aa1609da..7978074a 100644
--- a/pygments/lexers/julia.py
+++ b/pygments/lexers/julia.py
@@ -5,7 +5,7 @@
Lexers for the Julia language.
- :copyright: Copyright 2006-2019 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2020 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
@@ -145,7 +145,7 @@ class JuliaLexer(RegexLexer):
# operators
# see: https://github.com/JuliaLang/julia/blob/master/src/julia-parser.scm
- (words([
+ (words((
# prec-assignment
u'=', u':=', u'+=', u'-=', u'*=', u'/=', u'//=', u'.//=', u'.*=', u'./=',
u'\\=', u'.\\=', u'^=', u'.^=', u'÷=', u'.÷=', u'%=', u'.%=', u'|=', u'&=',
@@ -169,7 +169,7 @@ class JuliaLexer(RegexLexer):
# prec-colon
u':',
# prec-plus
- u'+', u'-', u'.+', u'.-', u'|', u'∪', u'$',
+ u'.+', u'.-', u'|', u'∪', u'$',
# prec-bitshift
u'<<', u'>>', u'>>>', u'.<<', u'.>>', u'.>>>',
# prec-times
@@ -184,7 +184,7 @@ class JuliaLexer(RegexLexer):
u'.',
# unary op
u'+', u'-', u'!', u'√', u'∛', u'∜'
- ]), Operator),
+ )), Operator),
# chars
(r"'(\\.|\\[0-7]{1,3}|\\x[a-fA-F0-9]{1,3}|\\u[a-fA-F0-9]{1,4}|"
@@ -316,9 +316,8 @@ class JuliaConsoleLexer(Lexer):
curcode += line[6:]
else:
if curcode:
- for item in do_insertions(
- insertions, jllexer.get_tokens_unprocessed(curcode)):
- yield item
+ yield from do_insertions(
+ insertions, jllexer.get_tokens_unprocessed(curcode))
curcode = ''
insertions = []
if line.startswith('ERROR: ') or error:
@@ -330,6 +329,5 @@ class JuliaConsoleLexer(Lexer):
start += len(line)
if curcode:
- for item in do_insertions(
- insertions, jllexer.get_tokens_unprocessed(curcode)):
- yield item
+ yield from do_insertions(
+ insertions, jllexer.get_tokens_unprocessed(curcode))
diff --git a/pygments/lexers/jvm.py b/pygments/lexers/jvm.py
index 5588b796..86af6d9f 100644
--- a/pygments/lexers/jvm.py
+++ b/pygments/lexers/jvm.py
@@ -5,7 +5,7 @@
Pygments lexers for JVM languages.
- :copyright: Copyright 2006-2019 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2020 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
@@ -278,7 +278,7 @@ class ScalaLexer(RegexLexer):
u'lazy|match|new|override|pr(?:ivate|otected)'
u'|re(?:quires|turn)|s(?:ealed|uper)|'
u't(?:h(?:is|row)|ry)|va[lr]|w(?:hile|ith)|yield)\\b|'
- u'(<[%:-]|=>|>:|[#=@_\u21D2\u2190])(\\b|(?=\\s)|$)', Keyword),
+ u'(<[%:-]|=>|>:|[#=@_\u21D2\u2190])\\b', Keyword),
(u':(?!%s)' % op, Keyword, 'type'),
(u'%s%s\\b' % (upper, idrest), Name.Class),
(r'(true|false|null)\b', Keyword.Constant),
@@ -331,7 +331,7 @@ class ScalaLexer(RegexLexer):
(r'\s+', Text),
include('comments'),
(r',+', Punctuation),
- (u'<[%:]|=>|>:|[#_\u21D2]|\bforSome\b|\btype\b', Keyword),
+ (r'<[%:]|=>|>:|[#_\u21D2]|\bforSome\b|\btype\b', Keyword),
(r'([\])}])', Operator, '#pop'),
(r'[(\[{]', Operator, '#push'),
(u'\\.|%s|%s|`[^`]+`' % (idrest, op), Keyword.Type)
@@ -467,8 +467,7 @@ class GosuTemplateLexer(Lexer):
def get_tokens_unprocessed(self, text):
lexer = GosuLexer()
stack = ['templateText']
- for item in lexer.get_tokens_unprocessed(text, stack):
- yield item
+ yield from lexer.get_tokens_unprocessed(text, stack)
class GroovyLexer(RegexLexer):
diff --git a/pygments/lexers/lisp.py b/pygments/lexers/lisp.py
index 601d5a5f..5dee6c6f 100644
--- a/pygments/lexers/lisp.py
+++ b/pygments/lexers/lisp.py
@@ -5,7 +5,7 @@
Lexers for Lispy languages.
- :copyright: Copyright 2006-2019 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2020 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
@@ -382,7 +382,7 @@ class HyLexer(RegexLexer):
# valid names for identifiers
# well, names can only not consist fully of numbers
# but this should be good enough for now
- valid_name = r'(?!#)[\w!$%*+<=>?/.#-:]+'
+ valid_name = r'(?!#)[\w!$%*+<=>?/.#:-]+'
def _multi_escape(entries):
return words(entries, suffix=' ')
@@ -2277,9 +2277,7 @@ class ShenLexer(RegexLexer):
if self._relevant(token):
if opening_paren and token == Keyword and value in self.DECLARATIONS:
declaration = value
- for index, token, value in \
- self._process_declaration(declaration, tokens):
- yield index, token, value
+ yield from self._process_declaration(declaration, tokens)
opening_paren = value == '(' and token == Punctuation
def _process_symbols(self, tokens):
@@ -2636,29 +2634,34 @@ class FennelLexer(RegexLexer):
# these two lists are taken from fennel-mode.el:
# https://gitlab.com/technomancy/fennel-mode
- # this list is current as of Fennel version 0.1.0.
+ # this list is current as of Fennel version 0.6.0.
special_forms = (
- u'require-macros', u'eval-compiler',
- u'do', u'values', u'if', u'when', u'each', u'for', u'fn', u'lambda',
- u'λ', u'set', u'global', u'var', u'local', u'let', u'tset', u'doto',
- u'set-forcibly!', u'defn', u'partial', u'while', u'or', u'and', u'true',
- u'false', u'nil', u'.', u'+', u'..', u'^', u'-', u'*', u'%', u'/', u'>',
- u'<', u'>=', u'<=', u'=', u'~=', u'#', u'...', u':', u'->', u'->>',
+ 'require-macros', 'eval-compiler', 'doc', 'lua', 'hashfn',
+ 'macro', 'macros', 'import-macros', 'pick-args', 'pick-values',
+ 'macroexpand', 'macrodebug', 'do', 'values', 'if', 'when',
+ 'each', 'for', 'fn', 'lambda', 'λ', 'partial', 'while',
+ 'set', 'global', 'var', 'local', 'let', 'tset', 'set-forcibly!',
+ 'doto', 'match', 'or', 'and', 'true', 'false', 'nil', 'not',
+ 'not=', '.', '+', '..', '^', '-', '*', '%', '/', '>',
+ '<', '>=', '<=', '=', '...', ':', '->', '->>', '-?>',
+ '-?>>', 'rshift', 'lshift', 'bor', 'band', 'bnot', 'bxor',
+ 'with-open', 'length'
)
# Might be nicer to use the list from _lua_builtins.py but it's unclear how?
builtins = (
- u'_G', u'_VERSION', u'arg', u'assert', u'bit32', u'collectgarbage',
- u'coroutine', u'debug', u'dofile', u'error', u'getfenv',
- u'getmetatable', u'io', u'ipairs', u'load', u'loadfile', u'loadstring',
- u'math', u'next', u'os', u'package', u'pairs', u'pcall', u'print',
- u'rawequal', u'rawget', u'rawlen', u'rawset', u'require', u'select',
- u'setfenv', u'setmetatable', u'string', u'table', u'tonumber',
- u'tostring', u'type', u'unpack', u'xpcall'
+ '_G', '_VERSION', 'arg', 'assert', 'bit32', 'collectgarbage',
+ 'coroutine', 'debug', 'dofile', 'error', 'getfenv',
+ 'getmetatable', 'io', 'ipairs', 'load', 'loadfile', 'loadstring',
+ 'math', 'next', 'os', 'package', 'pairs', 'pcall', 'print',
+ 'rawequal', 'rawget', 'rawlen', 'rawset', 'require', 'select',
+ 'setfenv', 'setmetatable', 'string', 'table', 'tonumber',
+ 'tostring', 'type', 'unpack', 'xpcall'
)
- # based on the scheme definition, but disallowing leading digits and commas
- valid_name = r'[a-zA-Z_!$%&*+/:<=>?@^~|-][\w!$%&*+/:<=>?@^~|\.-]*'
+ # based on the scheme definition, but disallowing leading digits and
+ # commas, and @ is not allowed.
+ valid_name = r'[a-zA-Z_!$%&*+/:<=>?^~|-][\w!$%&*+/:<=>?^~|\.-]*'
tokens = {
'root': [
@@ -2669,8 +2672,7 @@ class FennelLexer(RegexLexer):
(r'-?\d+\.\d+', Number.Float),
(r'-?\d+', Number.Integer),
- (r'"(\\\\|\\"|[^"])*"', String),
- (r"'(\\\\|\\'|[^'])*'", String),
+ (r'"(\\\\|\\"|\\|[^"\\])*"', String),
# these are technically strings, but it's worth visually
# distinguishing them because their intent is different
@@ -2690,5 +2692,8 @@ class FennelLexer(RegexLexer):
(r'(\(|\))', Punctuation),
(r'(\[|\])', Punctuation),
(r'(\{|\})', Punctuation),
+
+ # the # symbol is shorthand for a lambda function
+ (r'#', Punctuation),
]
}
diff --git a/pygments/lexers/make.py b/pygments/lexers/make.py
index f67f1095..6e63b5aa 100644
--- a/pygments/lexers/make.py
+++ b/pygments/lexers/make.py
@@ -5,7 +5,7 @@
Lexers for Makefiles and similar.
- :copyright: Copyright 2006-2019 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2020 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
@@ -57,8 +57,7 @@ class MakefileLexer(Lexer):
ins.append((len(done), [(0, Comment, line)]))
else:
done += line
- for item in do_insertions(ins, lex.get_tokens_unprocessed(done)):
- yield item
+ yield from do_insertions(ins, lex.get_tokens_unprocessed(done))
def analyse_text(text):
# Many makefiles have $(BIG_CAPS) style variables
@@ -196,7 +195,12 @@ class CMakeLexer(RegexLexer):
}
def analyse_text(text):
- exp = r'^ *CMAKE_MINIMUM_REQUIRED *\( *VERSION *\d(\.\d)* *( FATAL_ERROR)? *\) *$'
+ exp = (
+ r'^[ \t]*CMAKE_MINIMUM_REQUIRED[ \t]*'
+ r'\([ \t]*VERSION[ \t]*\d+(\.\d+)*[ \t]*'
+ r'([ \t]FATAL_ERROR)?[ \t]*\)[ \t]*'
+ r'(#[^\n]*)?$'
+ )
if re.search(exp, text, flags=re.MULTILINE | re.IGNORECASE):
return 0.8
return 0.0
diff --git a/pygments/lexers/markup.py b/pygments/lexers/markup.py
index f185ce9e..bd814a54 100644
--- a/pygments/lexers/markup.py
+++ b/pygments/lexers/markup.py
@@ -5,7 +5,7 @@
Lexers for non-HTML markup languages.
- :copyright: Copyright 2006-2019 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2020 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
@@ -165,8 +165,7 @@ class RstLexer(RegexLexer):
code += line[indention_size:]
else:
code += line
- for item in do_insertions(ins, lexer.get_tokens_unprocessed(code)):
- yield item
+ yield from do_insertions(ins, lexer.get_tokens_unprocessed(code))
# from docutils.parsers.rst.states
closers = u'\'")]}>\u2019\u201d\xbb!?'
@@ -204,7 +203,7 @@ class RstLexer(RegexLexer):
bygroups(Text, Operator, using(this, state='inline'))),
# Sourcecode directives
(r'^( *\.\.)(\s*)((?:source)?code(?:-block)?)(::)([ \t]*)([^\n]+)'
- r'(\n[ \t]*\n)([ \t]+)(.*)(\n)((?:(?:\8.*|)\n)+)',
+ r'(\n[ \t]*\n)([ \t]+)(.*)(\n)((?:(?:\8.*)?\n)+)',
_handle_sourcecode),
# A directive
(r'^( *\.\.)(\s*)([\w:-]+?)(::)(?:([ \t]*)(.*))',
@@ -229,7 +228,7 @@ class RstLexer(RegexLexer):
(r'^(\S.*(?<!::)\n)((?:(?: +.*)\n)+)',
bygroups(using(this, state='inline'), using(this, state='inline'))),
# Code blocks
- (r'(::)(\n[ \t]*\n)([ \t]+)(.*)(\n)((?:(?:\3.*|)\n)+)',
+ (r'(::)(\n[ \t]*\n)([ \t]+)(.*)(\n)((?:(?:\3.*)?\n)+)',
bygroups(String.Escape, Text, String, String, Text, String)),
include('inline'),
],
@@ -462,8 +461,7 @@ class MozPreprocXulLexer(DelegatingLexer):
mimetypes = []
def __init__(self, **options):
- super(MozPreprocXulLexer, self).__init__(
- XmlLexer, MozPreprocHashLexer, **options)
+ super().__init__(XmlLexer, MozPreprocHashLexer, **options)
class MozPreprocJavascriptLexer(DelegatingLexer):
@@ -479,8 +477,7 @@ class MozPreprocJavascriptLexer(DelegatingLexer):
mimetypes = []
def __init__(self, **options):
- super(MozPreprocJavascriptLexer, self).__init__(
- JavascriptLexer, MozPreprocHashLexer, **options)
+ super().__init__(JavascriptLexer, MozPreprocHashLexer, **options)
class MozPreprocCssLexer(DelegatingLexer):
@@ -496,8 +493,7 @@ class MozPreprocCssLexer(DelegatingLexer):
mimetypes = []
def __init__(self, **options):
- super(MozPreprocCssLexer, self).__init__(
- CssLexer, MozPreprocPercentLexer, **options)
+ super().__init__(CssLexer, MozPreprocPercentLexer, **options)
class MarkdownLexer(RegexLexer):
@@ -508,7 +504,7 @@ class MarkdownLexer(RegexLexer):
"""
name = 'markdown'
aliases = ['md']
- filenames = ['*.md']
+ filenames = ['*.md', '*.markdown']
mimetypes = ["text/x-markdown"]
flags = re.MULTILINE
@@ -536,8 +532,7 @@ class MarkdownLexer(RegexLexer):
if lexer is None:
yield match.start(4), String, code
else:
- for item in do_insertions([], lexer.get_tokens_unprocessed(code)):
- yield item
+ yield from do_insertions([], lexer.get_tokens_unprocessed(code))
yield match.start(5), String.Backtick, match.group(5)
@@ -579,24 +574,27 @@ class MarkdownLexer(RegexLexer):
# warning: the following rules eat outer tags.
# eg. **foo _bar_ baz** => foo and baz are not recognized as bold
# bold fenced by '**'
- (r'(\*\*[^\*\n\ ][^\*\n]*\*\*)', bygroups(Generic.Strong)),
+ (r'(\*\*[^* \n][^*\n]*\*\*)', bygroups(Generic.Strong)),
# # bold fenced by '__'
- (r'(\_\_[^\_\n\ ][^\_\n]*\_\_)', bygroups(Generic.Strong)),
+ (r'(\_\_[^_ \n][^_\n]*\_\_)', bygroups(Generic.Strong)),
# italics fenced by '*'
- (r'(\*[^\*\n\ ][^\*\n]*\*)', bygroups(Generic.Emph)),
+ (r'(\*[^* \n][^*\n]*\*)', bygroups(Generic.Emph)),
# italics fenced by '_'
- (r'(\_[^\_\n\ ][^\_\n]*\_)', bygroups(Generic.Emph)),
+ (r'(\_[^_ \n][^_\n]*\_)', bygroups(Generic.Emph)),
# strikethrough
(r'([^~]*)(~~[^~]+~~)', bygroups(Text, Generic.Deleted)),
# mentions and topics (twitter and github stuff)
(r'[@#][\w/:]+', Name.Entity),
# (image?) links eg: ![Image of Yaktocat](https://octodex.github.com/images/yaktocat.png)
- (r'(!?\[)([^]]+)(\])(\()([^)]+)(\))', bygroups(Text, Name.Tag, Text, Text, Name.Attribute, Text)),
+ (r'(!?\[)([^]]+)(\])(\()([^)]+)(\))',
+ bygroups(Text, Name.Tag, Text, Text, Name.Attribute, Text)),
# reference-style links, e.g.:
# [an example][id]
# [id]: http://example.com/
- (r'(\[)([^]]+)(\])(\[)([^]]*)(\])', bygroups(Text, Name.Tag, Text, Text, Name.Label, Text)),
- (r'^(\s*\[)([^]]*)(\]:\s*)(.+)', bygroups(Text, Name.Label, Text, Name.Attribute)),
+ (r'(\[)([^]]+)(\])(\[)([^]]*)(\])',
+ bygroups(Text, Name.Tag, Text, Text, Name.Label, Text)),
+ (r'^(\s*\[)([^]]*)(\]:\s*)(.+)',
+ bygroups(Text, Name.Label, Text, Name.Attribute)),
# general text, must come last!
(r'[^\\\s]+', Text),
@@ -608,6 +606,7 @@ class MarkdownLexer(RegexLexer):
self.handlecodeblocks = get_bool_opt(options, 'handlecodeblocks', True)
RegexLexer.__init__(self, **options)
+
class TiddlyWiki5Lexer(RegexLexer):
"""
For `TiddlyWiki5 <https://tiddlywiki.com/#TiddlerFiles>`_ markup.
@@ -627,15 +626,15 @@ class TiddlyWiki5Lexer(RegexLexer):
from pygments.lexers import get_lexer_by_name
# section header
- yield match.start(1), String , match.group(1)
- yield match.start(2), String , match.group(2)
- yield match.start(3), Text , match.group(3)
+ yield match.start(1), String, match.group(1)
+ yield match.start(2), String, match.group(2)
+ yield match.start(3), Text, match.group(3)
# lookup lexer if wanted and existing
lexer = None
if self.handlecodeblocks:
try:
- lexer = get_lexer_by_name( match.group(2).strip() )
+ lexer = get_lexer_by_name(match.group(2).strip())
except ClassNotFound:
pass
code = match.group(4)
@@ -645,10 +644,9 @@ class TiddlyWiki5Lexer(RegexLexer):
yield match.start(4), String, code
return
- for item in do_insertions([], lexer.get_tokens_unprocessed(code)):
- yield item
+ yield from do_insertions([], lexer.get_tokens_unprocessed(code))
- yield match.start(5), String , match.group(5)
+ yield match.start(5), String, match.group(5)
def _handle_cssblock(self, match):
"""
@@ -657,13 +655,13 @@ class TiddlyWiki5Lexer(RegexLexer):
from pygments.lexers import get_lexer_by_name
# section header
- yield match.start(1), String , match.group(1)
- yield match.start(2), String , match.group(2)
+ yield match.start(1), String, match.group(1)
+ yield match.start(2), String, match.group(2)
lexer = None
if self.handlecodeblocks:
try:
- lexer = get_lexer_by_name( 'css' )
+ lexer = get_lexer_by_name('css')
except ClassNotFound:
pass
code = match.group(3)
@@ -673,10 +671,9 @@ class TiddlyWiki5Lexer(RegexLexer):
yield match.start(3), String, code
return
- for item in do_insertions([], lexer.get_tokens_unprocessed(code)):
- yield item
+ yield from do_insertions([], lexer.get_tokens_unprocessed(code))
- yield match.start(4), String , match.group(4)
+ yield match.start(4), String, match.group(4)
tokens = {
'root': [
@@ -688,7 +685,7 @@ class TiddlyWiki5Lexer(RegexLexer):
# bulleted or numbered lists or single-line block quotes
# (can be mixed)
(r'^(\s*)([*#>]+)(\s*)(.+\n)',
- bygroups(Text, Keyword, Text, using(this, state='inline'))),
+ bygroups(Text, Keyword, Text, using(this, state='inline'))),
# multi-line block quotes
(r'^(<<<.*\n)([\w\W]*?)(^<<<.*$)', bygroups(String, Text, String)),
# table header
@@ -722,7 +719,7 @@ class TiddlyWiki5Lexer(RegexLexer):
(r'\d{17}', Number.Integer),
# italics
(r'(\s)(//[^/]+//)((?=\W|\n))',
- bygroups(Text, Generic.Emph, Text)),
+ bygroups(Text, Generic.Emph, Text)),
# superscript
(r'(\s)(\^\^[^\^]+\^\^)', bygroups(Text, Generic.Emph)),
# subscript
@@ -731,13 +728,13 @@ class TiddlyWiki5Lexer(RegexLexer):
(r'(\s)(__[^_]+__)', bygroups(Text, Generic.Strong)),
# bold
(r"(\s)(''[^']+'')((?=\W|\n))",
- bygroups(Text, Generic.Strong, Text)),
+ bygroups(Text, Generic.Strong, Text)),
# strikethrough
(r'(\s)(~~[^~]+~~)((?=\W|\n))',
- bygroups(Text, Generic.Deleted, Text)),
+ bygroups(Text, Generic.Deleted, Text)),
# TiddlyWiki variables
(r'<<[^>]+>>', Name.Tag),
- (r'\$\$[^\$]+\$\$', Name.Tag),
+ (r'\$\$[^$]+\$\$', Name.Tag),
(r'\$\([^)]+\)\$', Name.Tag),
# TiddlyWiki style or class
(r'^@@.*$', Name.Tag),
diff --git a/pygments/lexers/math.py b/pygments/lexers/math.py
index 73115082..a4493f74 100644
--- a/pygments/lexers/math.py
+++ b/pygments/lexers/math.py
@@ -5,7 +5,7 @@
Just export lexers that were contained in this module.
- :copyright: Copyright 2006-2019 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2020 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
diff --git a/pygments/lexers/matlab.py b/pygments/lexers/matlab.py
index f7f77ac9..0e24f6e9 100644
--- a/pygments/lexers/matlab.py
+++ b/pygments/lexers/matlab.py
@@ -5,13 +5,14 @@
Lexers for Matlab and related languages.
- :copyright: Copyright 2006-2019 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2020 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
import re
-from pygments.lexer import Lexer, RegexLexer, bygroups, words, do_insertions
+from pygments.lexer import Lexer, RegexLexer, bygroups, default, words, \
+ do_insertions
from pygments.token import Text, Comment, Operator, Keyword, Name, String, \
Number, Punctuation, Generic, Whitespace
@@ -72,7 +73,7 @@ class MatlabLexer(RegexLexer):
"hilb", "invhilb", "magic", "pascal", "rosser", "toeplitz", "vander",
"wilkinson")
- _operators = r'-|==|~=|<=|>=|<|>|&&|&|~|\|\|?|\.\*|\*|\+|\.\^|\.\\|\.\/|\/|\\'
+ _operators = r'-|==|~=|<=|>=|<|>|&&|&|~|\|\|?|\.\*|\*|\+|\.\^|\.\\|\./|/|\\'
tokens = {
'root': [
@@ -104,7 +105,7 @@ class MatlabLexer(RegexLexer):
# is recognized if it is either surrounded by spaces or by no
# spaces on both sides; only the former case matters for us. (This
# allows distinguishing `cd ./foo` from `cd ./ foo`.)
- (r'(?:^|(?<=;))(\s*)(\w+)(\s+)(?!=|\(|(%s)\s+)' % _operators,
+ (r'(?:^|(?<=;))(\s*)(\w+)(\s+)(?!=|\(|(?:%s)\s+)' % _operators,
bygroups(Text, Name, Text), 'commandargs'),
# operators:
@@ -156,7 +157,8 @@ class MatlabLexer(RegexLexer):
(r"[ \t]+", Text),
("'[^']*'", String),
(r"[^';\s]+", String),
- (";?", Punctuation, '#pop'),
+ (";", Punctuation, '#pop'),
+ default('#pop'),
]
}
@@ -229,9 +231,8 @@ class MatlabSessionLexer(Lexer):
curcode += line
else:
if curcode:
- for item in do_insertions(
- insertions, mlexer.get_tokens_unprocessed(curcode)):
- yield item
+ yield from do_insertions(
+ insertions, mlexer.get_tokens_unprocessed(curcode))
curcode = ''
insertions = []
@@ -245,9 +246,8 @@ class MatlabSessionLexer(Lexer):
continuation = False
if curcode: # or item:
- for item in do_insertions(
- insertions, mlexer.get_tokens_unprocessed(curcode)):
- yield item
+ yield from do_insertions(
+ insertions, mlexer.get_tokens_unprocessed(curcode))
class OctaveLexer(RegexLexer):
diff --git a/pygments/lexers/mime.py b/pygments/lexers/mime.py
index 95979f35..f5bae8bd 100644
--- a/pygments/lexers/mime.py
+++ b/pygments/lexers/mime.py
@@ -5,7 +5,7 @@
Lexer for Multipurpose Internet Mail Extensions (MIME) data.
- :copyright: Copyright 2006-2019 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2020 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
@@ -58,7 +58,7 @@ class MIMELexer(RegexLexer):
"multipart/alternative"]
def __init__(self, **options):
- super(MIMELexer, self).__init__(**options)
+ super().__init__(**options)
self.boundary = options.get("Multipart-Boundary")
self.content_transfer_encoding = options.get("Content_Transfer_Encoding")
self.content_type = options.get("Content_Type", "text/plain")
diff --git a/pygments/lexers/ml.py b/pygments/lexers/ml.py
index e9ab61e6..8ca8ce3e 100644
--- a/pygments/lexers/ml.py
+++ b/pygments/lexers/ml.py
@@ -5,7 +5,7 @@
Lexers for ML family languages.
- :copyright: Copyright 2006-2019 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2020 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
@@ -767,6 +767,7 @@ class OpaLexer(RegexLexer):
],
}
+
class ReasonLexer(RegexLexer):
"""
For the ReasonML language (https://reasonml.github.io/).
@@ -780,18 +781,18 @@ class ReasonLexer(RegexLexer):
mimetypes = ['text/x-reasonml']
keywords = (
- 'as', 'assert', 'begin', 'class', 'constraint', 'do', 'done', 'downto',
- 'else', 'end', 'exception', 'external', 'false', 'for', 'fun', 'esfun',
- 'function', 'functor', 'if', 'in', 'include', 'inherit', 'initializer', 'lazy',
- 'let', 'switch', 'module', 'pub', 'mutable', 'new', 'nonrec', 'object', 'of',
- 'open', 'pri', 'rec', 'sig', 'struct', 'then', 'to', 'true', 'try',
- 'type', 'val', 'virtual', 'when', 'while', 'with'
+ 'as', 'assert', 'begin', 'class', 'constraint', 'do', 'done', 'downto',
+ 'else', 'end', 'exception', 'external', 'false', 'for', 'fun', 'esfun',
+ 'function', 'functor', 'if', 'in', 'include', 'inherit', 'initializer', 'lazy',
+ 'let', 'switch', 'module', 'pub', 'mutable', 'new', 'nonrec', 'object', 'of',
+ 'open', 'pri', 'rec', 'sig', 'struct', 'then', 'to', 'true', 'try',
+ 'type', 'val', 'virtual', 'when', 'while', 'with',
)
keyopts = (
'!=', '#', '&', '&&', r'\(', r'\)', r'\*', r'\+', ',', '-',
r'-\.', '=>', r'\.', r'\.\.', r'\.\.\.', ':', '::', ':=', ':>', ';', ';;', '<',
'<-', '=', '>', '>]', r'>\}', r'\?', r'\?\?', r'\[', r'\[<', r'\[>',
- r'\[\|', ']', '_', '`', r'\{', r'\{<', r'\|\|', r'\|', r'\|]', r'\}', '~'
+ r'\[\|', ']', '_', '`', r'\{', r'\{<', r'\|', r'\|\|', r'\|]', r'\}', '~'
)
operators = r'[!$%&*+\./:<=>?@^|~-]'
@@ -812,7 +813,7 @@ class ReasonLexer(RegexLexer):
(r'\b([A-Z][\w\']*)(?=\s*\.)', Name.Namespace, 'dotted'),
(r'\b([A-Z][\w\']*)', Name.Class),
(r'//.*?\n', Comment.Single),
- (r'\/\*(?![\/])', Comment.Multiline, 'comment'),
+ (r'\/\*(?!/)', Comment.Multiline, 'comment'),
(r'\b(%s)\b' % '|'.join(keywords), Keyword),
(r'(%s)' % '|'.join(keyopts[::-1]), Operator.Word),
(r'(%s|%s)?%s' % (infix_syms, prefix_syms, operators), Operator),
@@ -837,10 +838,10 @@ class ReasonLexer(RegexLexer):
(r'[~?][a-z][\w\']*:', Name.Variable),
],
'comment': [
- (r'[^\/*]+', Comment.Multiline),
+ (r'[^/*]+', Comment.Multiline),
(r'\/\*', Comment.Multiline, '#push'),
(r'\*\/', Comment.Multiline, '#pop'),
- (r'[\*]', Comment.Multiline),
+ (r'\*', Comment.Multiline),
],
'string': [
(r'[^\\"]+', String.Double),
@@ -885,10 +886,10 @@ class FStarLexer(RegexLexer):
assume_keywords = ('assume', 'admit', 'assert', 'calc')
keyopts = (
r'~', r'-', r'/\\', r'\\/', r'<:', r'<@', r'\(\|', r'\|\)', r'#', r'u#',
- r'&', r'\(\)', r'\(', r'\)', r',', r'~>', r'->', r'<--', r'<-', r'<==>',
- r'==>', r'\.', r'\?\.', r'\?', r'\.\[', r'\.\(\|', r'\.\(', r'\.\[\|',
- r'\{:pattern', r':', r'::', r':=', r';;', r';', r'=', r'%\[', r'!\{',
- r'\[@', r'\[', r'\[\|', r'\|>', r'\]', r'\|\]', r'\{', r'\|', r'\}', r'\$'
+ r'&', r'\(', r'\)', r'\(\)', r',', r'~>', r'->', r'<-', r'<--', r'<==>',
+ r'==>', r'\.', r'\?', r'\?\.', r'\.\[', r'\.\(', r'\.\(\|', r'\.\[\|',
+ r'\{:pattern', r':', r'::', r':=', r';', r';;', r'=', r'%\[', r'!\{',
+ r'\[', r'\[@', r'\[\|', r'\|>', r'\]', r'\|\]', r'\{', r'\|', r'\}', r'\$'
)
operators = r'[!$%&*+\./:<=>?@^|~-]'
@@ -928,7 +929,7 @@ class FStarLexer(RegexLexer):
String.Char),
(r"'.'", String.Char),
(r"'", Keyword), # a stray quote is another syntax element
- (r"\`([\w\'\.]+)\`", Operator.Word), # for infix applications
+ (r"\`([\w\'.]+)\`", Operator.Word), # for infix applications
(r"\`", Keyword), # for quoting
(r'"', String.Double, 'string'),
diff --git a/pygments/lexers/modeling.py b/pygments/lexers/modeling.py
index f4dca4a9..167ec86a 100644
--- a/pygments/lexers/modeling.py
+++ b/pygments/lexers/modeling.py
@@ -5,7 +5,7 @@
Lexers for modeling languages.
- :copyright: Copyright 2006-2019 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2020 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
diff --git a/pygments/lexers/modula2.py b/pygments/lexers/modula2.py
index 4fd84dab..b95bfaec 100644
--- a/pygments/lexers/modula2.py
+++ b/pygments/lexers/modula2.py
@@ -5,7 +5,7 @@
Multi-Dialect Lexer for Modula-2.
- :copyright: Copyright 2006-2019 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2020 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
diff --git a/pygments/lexers/monte.py b/pygments/lexers/monte.py
index e181c940..311fa172 100644
--- a/pygments/lexers/monte.py
+++ b/pygments/lexers/monte.py
@@ -5,7 +5,7 @@
Lexer for the Monte programming language.
- :copyright: Copyright 2006-2019 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2020 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
diff --git a/pygments/lexers/mosel.py b/pygments/lexers/mosel.py
index 1dbda1dc..02cc5e3e 100644
--- a/pygments/lexers/mosel.py
+++ b/pygments/lexers/mosel.py
@@ -6,7 +6,7 @@
Lexers for the mosel language.
http://www.fico.com/en/products/fico-xpress-optimization
- :copyright: Copyright 2006-2019 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2020 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
diff --git a/pygments/lexers/ncl.py b/pygments/lexers/ncl.py
index e2edd6cc..6e094e00 100644
--- a/pygments/lexers/ncl.py
+++ b/pygments/lexers/ncl.py
@@ -5,7 +5,7 @@
Lexers for NCAR Command Language.
- :copyright: Copyright 2006-2019 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2020 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
diff --git a/pygments/lexers/nimrod.py b/pygments/lexers/nimrod.py
index 6391aa54..14816eba 100644
--- a/pygments/lexers/nimrod.py
+++ b/pygments/lexers/nimrod.py
@@ -5,7 +5,7 @@
Lexer for the Nim language (formerly known as Nimrod).
- :copyright: Copyright 2006-2019 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2020 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
diff --git a/pygments/lexers/nit.py b/pygments/lexers/nit.py
index 42167ece..d96cef59 100644
--- a/pygments/lexers/nit.py
+++ b/pygments/lexers/nit.py
@@ -5,7 +5,7 @@
Lexer for the Nit language.
- :copyright: Copyright 2006-2019 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2020 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
diff --git a/pygments/lexers/nix.py b/pygments/lexers/nix.py
index 50210c48..713348e8 100644
--- a/pygments/lexers/nix.py
+++ b/pygments/lexers/nix.py
@@ -5,7 +5,7 @@
Lexers for the NixOS Nix language.
- :copyright: Copyright 2006-2019 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2020 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
diff --git a/pygments/lexers/oberon.py b/pygments/lexers/oberon.py
index 1c18488a..01fef038 100644
--- a/pygments/lexers/oberon.py
+++ b/pygments/lexers/oberon.py
@@ -5,7 +5,7 @@
Lexers for Oberon family languages.
- :copyright: Copyright 2006-2019 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2020 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
diff --git a/pygments/lexers/objective.py b/pygments/lexers/objective.py
index 777d8d4d..3a1c3f65 100644
--- a/pygments/lexers/objective.py
+++ b/pygments/lexers/objective.py
@@ -5,7 +5,7 @@
Lexers for Objective-C family languages.
- :copyright: Copyright 2006-2019 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2020 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
diff --git a/pygments/lexers/ooc.py b/pygments/lexers/ooc.py
index 438719cd..e0e0f249 100644
--- a/pygments/lexers/ooc.py
+++ b/pygments/lexers/ooc.py
@@ -5,7 +5,7 @@
Lexers for the Ooc language.
- :copyright: Copyright 2006-2019 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2020 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
diff --git a/pygments/lexers/other.py b/pygments/lexers/other.py
index c3a60cef..8cdedcfc 100644
--- a/pygments/lexers/other.py
+++ b/pygments/lexers/other.py
@@ -5,7 +5,7 @@
Just export lexer classes previously contained in this module.
- :copyright: Copyright 2006-2019 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2020 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
diff --git a/pygments/lexers/parasail.py b/pygments/lexers/parasail.py
index 7f8cf073..1b626b08 100644
--- a/pygments/lexers/parasail.py
+++ b/pygments/lexers/parasail.py
@@ -5,7 +5,7 @@
Lexer for ParaSail.
- :copyright: Copyright 2006-2019 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2020 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
diff --git a/pygments/lexers/parsers.py b/pygments/lexers/parsers.py
index 8bcbfc50..13a3a83c 100644
--- a/pygments/lexers/parsers.py
+++ b/pygments/lexers/parsers.py
@@ -5,7 +5,7 @@
Lexers for parser generators.
- :copyright: Copyright 2006-2019 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2020 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
@@ -28,7 +28,6 @@ __all__ = ['RagelLexer', 'RagelEmbeddedLexer', 'RagelCLexer', 'RagelDLexer',
'RagelCppLexer', 'RagelObjectiveCLexer', 'RagelRubyLexer',
'RagelJavaLexer', 'AntlrLexer', 'AntlrPythonLexer',
'AntlrPerlLexer', 'AntlrRubyLexer', 'AntlrCppLexer',
- # 'AntlrCLexer',
'AntlrCSharpLexer', 'AntlrObjectiveCLexer',
'AntlrJavaLexer', 'AntlrActionScriptLexer',
'TreetopLexer', 'EbnfLexer']
@@ -222,8 +221,7 @@ class RagelRubyLexer(DelegatingLexer):
filenames = ['*.rl']
def __init__(self, **options):
- super(RagelRubyLexer, self).__init__(RubyLexer, RagelEmbeddedLexer,
- **options)
+ super().__init__(RubyLexer, RagelEmbeddedLexer, **options)
def analyse_text(text):
return '@LANG: ruby' in text
@@ -241,8 +239,7 @@ class RagelCLexer(DelegatingLexer):
filenames = ['*.rl']
def __init__(self, **options):
- super(RagelCLexer, self).__init__(CLexer, RagelEmbeddedLexer,
- **options)
+ super().__init__(CLexer, RagelEmbeddedLexer, **options)
def analyse_text(text):
return '@LANG: c' in text
@@ -260,7 +257,7 @@ class RagelDLexer(DelegatingLexer):
filenames = ['*.rl']
def __init__(self, **options):
- super(RagelDLexer, self).__init__(DLexer, RagelEmbeddedLexer, **options)
+ super().__init__(DLexer, RagelEmbeddedLexer, **options)
def analyse_text(text):
return '@LANG: d' in text
@@ -278,7 +275,7 @@ class RagelCppLexer(DelegatingLexer):
filenames = ['*.rl']
def __init__(self, **options):
- super(RagelCppLexer, self).__init__(CppLexer, RagelEmbeddedLexer, **options)
+ super().__init__(CppLexer, RagelEmbeddedLexer, **options)
def analyse_text(text):
return '@LANG: c++' in text
@@ -296,9 +293,7 @@ class RagelObjectiveCLexer(DelegatingLexer):
filenames = ['*.rl']
def __init__(self, **options):
- super(RagelObjectiveCLexer, self).__init__(ObjectiveCLexer,
- RagelEmbeddedLexer,
- **options)
+ super().__init__(ObjectiveCLexer, RagelEmbeddedLexer, **options)
def analyse_text(text):
return '@LANG: objc' in text
@@ -316,8 +311,7 @@ class RagelJavaLexer(DelegatingLexer):
filenames = ['*.rl']
def __init__(self, **options):
- super(RagelJavaLexer, self).__init__(JavaLexer, RagelEmbeddedLexer,
- **options)
+ super().__init__(JavaLexer, RagelEmbeddedLexer, **options)
def analyse_text(text):
return '@LANG: java' in text
@@ -515,30 +509,8 @@ class AntlrLexer(RegexLexer):
def analyse_text(text):
return re.search(r'^\s*grammar\s+[a-zA-Z0-9]+\s*;', text, re.M)
-# http://www.antlr.org/wiki/display/ANTLR3/Code+Generation+Targets
-
-# TH: I'm not aware of any language features of C++ that will cause
-# incorrect lexing of C files. Antlr doesn't appear to make a distinction,
-# so just assume they're C++. No idea how to make Objective C work in the
-# future.
-
-# class AntlrCLexer(DelegatingLexer):
-# """
-# ANTLR with C Target
-#
-# .. versionadded:: 1.1
-# """
-#
-# name = 'ANTLR With C Target'
-# aliases = ['antlr-c']
-# filenames = ['*.G', '*.g']
-#
-# def __init__(self, **options):
-# super(AntlrCLexer, self).__init__(CLexer, AntlrLexer, **options)
-#
-# def analyse_text(text):
-# return re.match(r'^\s*language\s*=\s*C\s*;', text)
+# http://www.antlr.org/wiki/display/ANTLR3/Code+Generation+Targets
class AntlrCppLexer(DelegatingLexer):
"""
@@ -552,7 +524,7 @@ class AntlrCppLexer(DelegatingLexer):
filenames = ['*.G', '*.g']
def __init__(self, **options):
- super(AntlrCppLexer, self).__init__(CppLexer, AntlrLexer, **options)
+ super().__init__(CppLexer, AntlrLexer, **options)
def analyse_text(text):
return AntlrLexer.analyse_text(text) and \
@@ -571,8 +543,7 @@ class AntlrObjectiveCLexer(DelegatingLexer):
filenames = ['*.G', '*.g']
def __init__(self, **options):
- super(AntlrObjectiveCLexer, self).__init__(ObjectiveCLexer,
- AntlrLexer, **options)
+ super().__init__(ObjectiveCLexer, AntlrLexer, **options)
def analyse_text(text):
return AntlrLexer.analyse_text(text) and \
@@ -591,8 +562,7 @@ class AntlrCSharpLexer(DelegatingLexer):
filenames = ['*.G', '*.g']
def __init__(self, **options):
- super(AntlrCSharpLexer, self).__init__(CSharpLexer, AntlrLexer,
- **options)
+ super().__init__(CSharpLexer, AntlrLexer, **options)
def analyse_text(text):
return AntlrLexer.analyse_text(text) and \
@@ -611,8 +581,7 @@ class AntlrPythonLexer(DelegatingLexer):
filenames = ['*.G', '*.g']
def __init__(self, **options):
- super(AntlrPythonLexer, self).__init__(PythonLexer, AntlrLexer,
- **options)
+ super().__init__(PythonLexer, AntlrLexer, **options)
def analyse_text(text):
return AntlrLexer.analyse_text(text) and \
@@ -631,8 +600,7 @@ class AntlrJavaLexer(DelegatingLexer):
filenames = ['*.G', '*.g']
def __init__(self, **options):
- super(AntlrJavaLexer, self).__init__(JavaLexer, AntlrLexer,
- **options)
+ super().__init__(JavaLexer, AntlrLexer, **options)
def analyse_text(text):
# Antlr language is Java by default
@@ -651,8 +619,7 @@ class AntlrRubyLexer(DelegatingLexer):
filenames = ['*.G', '*.g']
def __init__(self, **options):
- super(AntlrRubyLexer, self).__init__(RubyLexer, AntlrLexer,
- **options)
+ super().__init__(RubyLexer, AntlrLexer, **options)
def analyse_text(text):
return AntlrLexer.analyse_text(text) and \
@@ -671,8 +638,7 @@ class AntlrPerlLexer(DelegatingLexer):
filenames = ['*.G', '*.g']
def __init__(self, **options):
- super(AntlrPerlLexer, self).__init__(PerlLexer, AntlrLexer,
- **options)
+ super().__init__(PerlLexer, AntlrLexer, **options)
def analyse_text(text):
return AntlrLexer.analyse_text(text) and \
@@ -692,8 +658,7 @@ class AntlrActionScriptLexer(DelegatingLexer):
def __init__(self, **options):
from pygments.lexers.actionscript import ActionScriptLexer
- super(AntlrActionScriptLexer, self).__init__(ActionScriptLexer,
- AntlrLexer, **options)
+ super().__init__(ActionScriptLexer, AntlrLexer, **options)
def analyse_text(text):
return AntlrLexer.analyse_text(text) and \
@@ -781,7 +746,7 @@ class TreetopLexer(DelegatingLexer):
filenames = ['*.treetop', '*.tt']
def __init__(self, **options):
- super(TreetopLexer, self).__init__(RubyLexer, TreetopBaseLexer, **options)
+ super().__init__(RubyLexer, TreetopBaseLexer, **options)
class EbnfLexer(RegexLexer):
diff --git a/pygments/lexers/pascal.py b/pygments/lexers/pascal.py
index d4b43fd3..7e0223d6 100644
--- a/pygments/lexers/pascal.py
+++ b/pygments/lexers/pascal.py
@@ -5,7 +5,7 @@
Lexers for Pascal family languages.
- :copyright: Copyright 2006-2019 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2020 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
diff --git a/pygments/lexers/pawn.py b/pygments/lexers/pawn.py
index 3cdfbd03..2721084b 100644
--- a/pygments/lexers/pawn.py
+++ b/pygments/lexers/pawn.py
@@ -5,7 +5,7 @@
Lexers for the Pawn languages.
- :copyright: Copyright 2006-2019 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2020 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
diff --git a/pygments/lexers/perl.py b/pygments/lexers/perl.py
index a209ab6a..4a3ca300 100644
--- a/pygments/lexers/perl.py
+++ b/pygments/lexers/perl.py
@@ -5,7 +5,7 @@
Lexers for Perl, Raku and related languages.
- :copyright: Copyright 2006-2019 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2020 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
@@ -714,5 +714,5 @@ class Perl6Lexer(ExtendedRegexLexer):
return rating
def __init__(self, **options):
- super(Perl6Lexer, self).__init__(**options)
+ super().__init__(**options)
self.encoding = options.get('encoding', 'utf-8')
diff --git a/pygments/lexers/php.py b/pygments/lexers/php.py
index 4f06d216..2bad339a 100644
--- a/pygments/lexers/php.py
+++ b/pygments/lexers/php.py
@@ -5,19 +5,21 @@
Lexers for PHP and related languages.
- :copyright: Copyright 2006-2019 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2020 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
import re
-from pygments.lexer import RegexLexer, include, bygroups, default, using, \
- this, words
+from pygments.lexer import Lexer, RegexLexer, include, bygroups, default, \
+ using, this, words, do_insertions
from pygments.token import Text, Comment, Operator, Keyword, Name, String, \
- Number, Punctuation, Other
+ Number, Punctuation, Other, Generic
from pygments.util import get_bool_opt, get_list_opt, shebang_matches
-__all__ = ['ZephirLexer', 'PhpLexer']
+__all__ = ['ZephirLexer', 'PsyshConsoleLexer', 'PhpLexer']
+
+line_re = re.compile('.*?\n')
class ZephirLexer(RegexLexer):
@@ -85,6 +87,55 @@ class ZephirLexer(RegexLexer):
}
class PsyshConsoleLexer(Lexer):
    """
    For `PsySH`_ console output, such as:

    .. sourcecode:: psysh

        >>> $greeting = function($name): string {
        ... return "Hello, {$name}";
        ... };
        => Closure($name): string {#2371 …3}
        >>> $greeting('World')
        => "Hello, World"

    .. _PsySH: https://psysh.org/
    .. versionadded:: 2.7
    """
    name = 'PsySH console session for PHP'
    aliases = ['psysh']

    def __init__(self, **options):
        # PsySH input is always PHP code without a leading <?php tag,
        # so force the inner PHP lexer to start in PHP mode.
        options['startinline'] = True
        # Use super() for consistency with the other lexers in this module
        # (the old explicit Lexer.__init__(self, ...) form is deprecated style).
        super().__init__(**options)

    def get_tokens_unprocessed(self, text):
        """Split prompt markers from code, delegating the code to PhpLexer.

        Lines starting with '>>> ' or '... ' are console input: the 4-char
        prompt is emitted as Generic.Prompt and the rest is buffered as PHP
        source. A bare '...' continuation is handled likewise. Anything else
        (e.g. '=> ...' result lines) is emitted verbatim as Generic.Output.
        """
        phplexer = PhpLexer(**self.options)
        curcode = ''          # buffered PHP source awaiting tokenization
        insertions = []       # (position, [(0, token, text)]) prompt insertions
        for match in line_re.finditer(text):
            line = match.group()
            if line.startswith('>>> ') or line.startswith('... '):
                insertions.append((len(curcode),
                                   [(0, Generic.Prompt, line[:4])]))
                curcode += line[4:]
            elif line.rstrip() == '...':
                insertions.append((len(curcode),
                                   [(0, Generic.Prompt, '...')]))
                curcode += line[3:]
            else:
                # Output line: flush any buffered code first so token
                # positions stay in document order.
                if curcode:
                    yield from do_insertions(
                        insertions, phplexer.get_tokens_unprocessed(curcode))
                    curcode = ''
                    insertions = []
                yield match.start(), Generic.Output, line
        if curcode:
            yield from do_insertions(insertions,
                                     phplexer.get_tokens_unprocessed(curcode))
+
+
class PhpLexer(RegexLexer):
"""
For `PHP <http://www.php.net/>`_ source code.
diff --git a/pygments/lexers/pointless.py b/pygments/lexers/pointless.py
new file mode 100644
index 00000000..9acc3ffb
--- /dev/null
+++ b/pygments/lexers/pointless.py
@@ -0,0 +1,71 @@
+# -*- coding: utf-8 -*-
+"""
+ pygments.lexers.pointless
+ ~~~~~~~~~~~~~~~~~~~~~~~~~
+
+ Lexers for Pointless.
+
+ :copyright: Copyright 2006-2020 by the Pygments team, see AUTHORS.
+ :license: BSD, see LICENSE for details.
+"""
+
+from pygments.lexer import RegexLexer, words
+from pygments.token import Comment, Error, Keyword, Name, Number, Operator, \
+ Punctuation, String, Text
+
+__all__ = ['PointlessLexer']
+
+
class PointlessLexer(RegexLexer):
    """
    For `Pointless <https://ptls.dev>`_ source code.

    .. versionadded:: 2.7
    """

    name = 'Pointless'
    aliases = ['pointless']
    filenames = ['*.ptls']

    # Operators; words() regex-escapes each entry, so '*' etc. are safe.
    ops = words([
        "+", "-", "*", "/", "**", "%", "+=", "-=", "*=",
        "/=", "**=", "%=", "|>", "=", "==", "!=", "<", ">",
        "<=", ">=", "=>", "$", "++",
    ])

    keywords = words([
        "if", "then", "else", "where", "with", "cond",
        "case", "and", "or", "not", "in", "as", "for",
        "requires", "throw", "try", "catch", "when",
        "yield", "upval",
    ], suffix=r'\b')

    tokens = {
        'root': [
            (r'[ \n\r]+', Text),
            (r'--.*$', Comment.Single),
            (r'"""', String, 'multiString'),
            (r'"', String, 'string'),
            (r'[\[\](){}:;,.]', Punctuation),
            (ops, Operator),
            (keywords, Keyword),
            # Float alternative must come first: with r'\d+|\d*\.\d+' the
            # integer branch would match the "3" of "3.14" and the float
            # form could never win.
            (r'\d*\.\d+|\d+', Number),
            (r'(true|false)\b', Name.Builtin),
            (r'[A-Z][a-zA-Z0-9]*\b', String.Symbol),
            (r'output\b', Name.Variable.Magic),
            (r'(export|import)\b', Keyword.Namespace),
            (r'[a-z][a-zA-Z0-9]*\b', Name.Variable)
        ],
        # Triple-quoted strings: may contain newlines and lone '"'.
        'multiString': [
            (r'\\.', String.Escape),
            (r'"""', String, '#pop'),
            (r'"', String),
            (r'[^\\"]+', String),
        ],
        # Single-line strings: a raw newline is an error.
        'string': [
            (r'\\.', String.Escape),
            (r'"', String, '#pop'),
            (r'\n', Error),
            (r'[^\\"]+', String),
        ],
    }
diff --git a/pygments/lexers/pony.py b/pygments/lexers/pony.py
index 8f5d4281..d13338b2 100644
--- a/pygments/lexers/pony.py
+++ b/pygments/lexers/pony.py
@@ -5,7 +5,7 @@
Lexers for Pony and related languages.
- :copyright: Copyright 2006-2019 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2020 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
@@ -70,7 +70,7 @@ class PonyLexer(RegexLexer):
(r'\d+', Number.Integer),
(r'(true|false)\b', Name.Builtin),
(r'_\d*', Name),
- (r'_?[a-z][\w\'_]*', Name)
+ (r'_?[a-z][\w\']*', Name)
],
'typename': [
(_caps + r'?((?:\s)*)(_?[A-Z]\w*)',
diff --git a/pygments/lexers/praat.py b/pygments/lexers/praat.py
index 4a6a14f0..36c6d69d 100644
--- a/pygments/lexers/praat.py
+++ b/pygments/lexers/praat.py
@@ -5,7 +5,7 @@
Lexer for Praat
- :copyright: Copyright 2006-2019 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2020 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
@@ -214,13 +214,13 @@ class PraatLexer(RegexLexer):
(r'\b\d+(\.\d*)?([eE][-+]?\d+)?%?', Number),
],
'object_reference': [
- include('string_interpolated'),
- (r'([a-z][a-zA-Z0-9_]*|\d+)', Name.Builtin),
+ include('string_interpolated'),
+ (r'([a-z][a-zA-Z0-9_]*|\d+)', Name.Builtin),
- (words(object_attributes, prefix=r'\.'), Name.Builtin, '#pop'),
+ (words(object_attributes, prefix=r'\.'), Name.Builtin, '#pop'),
- (r'\$', Name.Builtin),
- (r'\[', Text, '#pop'),
+ (r'\$', Name.Builtin),
+ (r'\[', Text, '#pop'),
],
'variable_name': [
include('operator'),
@@ -228,7 +228,7 @@ class PraatLexer(RegexLexer):
(words(variables_string, suffix=r'\$'), Name.Variable.Global),
(words(variables_numeric,
- suffix=r'(?=[^a-zA-Z0-9\._"\'\$#\[:\(]|\s|^|$)'),
+ suffix=r'(?=[^a-zA-Z0-9_."\'$#\[:(]|\s|^|$)'),
Name.Variable.Global),
(words(objects, prefix=r'\b', suffix=r"(_)"),
@@ -245,7 +245,7 @@ class PraatLexer(RegexLexer):
(r'(?<![\w.])(and|or|not|div|mod)(?![\w.])', Operator.Word),
],
'string_interpolated': [
- (r'\'[_a-z][^\[\]\'":]*(\[([\d,]+|"[\w\d,]+")\])?(:[0-9]+)?\'',
+ (r'\'[_a-z][^\[\]\'":]*(\[([\d,]+|"[\w,]+")\])?(:[0-9]+)?\'',
String.Interpol),
],
'string_unquoted': [
diff --git a/pygments/lexers/prolog.py b/pygments/lexers/prolog.py
index 70783625..40ef0df3 100644
--- a/pygments/lexers/prolog.py
+++ b/pygments/lexers/prolog.py
@@ -5,7 +5,7 @@
Lexers for Prolog and Prolog-like languages.
- :copyright: Copyright 2006-2019 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2020 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
diff --git a/pygments/lexers/promql.py b/pygments/lexers/promql.py
new file mode 100644
index 00000000..18069208
--- /dev/null
+++ b/pygments/lexers/promql.py
@@ -0,0 +1,183 @@
+# -*- coding: utf-8 -*-
+"""
+ pygments.lexers.promql
+ ~~~~~~~~~~~~~~~~~~~~~~
+
+ Lexer for Prometheus Query Language.
+
+ :copyright: Copyright 2006-2020 by the Pygments team, see AUTHORS.
+ :license: BSD, see LICENSE for details.
+"""
+
+from pygments.lexer import RegexLexer, bygroups, default, words
+from pygments.token import (
+ Comment,
+ Keyword,
+ Name,
+ Number,
+ Operator,
+ Punctuation,
+ String,
+ Whitespace,
+)
+
+__all__ = ["PromQLLexer"]
+
+
class PromQLLexer(RegexLexer):
    """
    For `PromQL <https://prometheus.io/docs/prometheus/latest/querying/basics/>`_ queries.

    For details about the grammar see:
    https://github.com/prometheus/prometheus/tree/master/promql/parser

    .. versionadded:: 2.7
    """

    name = "PromQL"
    aliases = ["promql"]
    filenames = ["*.promql"]

    # Binary-expression modifiers and vector-matching keywords.
    base_keywords = (
        words(
            (
                "bool",
                "by",
                "group_left",
                "group_right",
                "ignoring",
                "offset",
                "on",
                "without",
            ),
            suffix=r"\b",
        ),
        Keyword,
    )

    # Aggregation operators (sum(...) by (...), topk, ...).
    aggregator_keywords = (
        words(
            (
                "sum",
                "min",
                "max",
                "avg",
                "group",
                "stddev",
                "stdvar",
                "count",
                "count_values",
                "bottomk",
                "topk",
                "quantile",
            ),
            suffix=r"\b",
        ),
        Keyword,
    )

    # Built-in PromQL functions.
    function_keywords = (
        words(
            (
                "abs",
                "absent",
                "absent_over_time",
                "avg_over_time",
                "ceil",
                "changes",
                "clamp_max",
                "clamp_min",
                "count_over_time",
                "day_of_month",
                "day_of_week",
                "days_in_month",
                "delta",
                "deriv",
                "exp",
                "floor",
                "histogram_quantile",
                "holt_winters",
                "hour",
                "idelta",
                "increase",
                "irate",
                "label_join",
                "label_replace",
                "ln",
                "log10",
                "log2",
                "max_over_time",
                "min_over_time",
                "minute",
                "month",
                "predict_linear",
                "quantile_over_time",
                "rate",
                "resets",
                "round",
                "scalar",
                "sort",
                "sort_desc",
                "sqrt",
                "stddev_over_time",
                "stdvar_over_time",
                "sum_over_time",
                "time",
                "timestamp",
                "vector",
                "year",
            ),
            suffix=r"\b",
        ),
        Keyword.Reserved,
    )

    tokens = {
        "root": [
            (r"\n", Whitespace),
            (r"\s+", Whitespace),
            (r",", Punctuation),
            # Keywords
            base_keywords,
            aggregator_keywords,
            function_keywords,
            # Offsets
            (r"[1-9][0-9]*[smhdwy]", String),
            # Numbers (float branch must precede the integer branch)
            (r"-?[0-9]+\.[0-9]+", Number.Float),
            (r"-?[0-9]+", Number.Integer),
            # Comments
            (r"#.*?$", Comment.Single),
            # Operators
            (r"(\+|\-|\*|\/|\%|\^)", Operator),
            (r"==|!=|>=|<=|<|>", Operator),
            (r"and|or|unless", Operator.Word),
            # Metrics
            (r"[_a-zA-Z][a-zA-Z0-9_]+", Name.Variable),
            # Params
            (r'(["\'])(.*?)(["\'])', bygroups(Punctuation, String, Punctuation)),
            # Other states
            (r"\(", Operator, "function"),
            (r"\)", Operator),
            (r"\{", Punctuation, "labels"),
            (r"\[", Punctuation, "range"),
        ],
        # Label matcher lists inside {...}.
        "labels": [
            (r"\}", Punctuation, "#pop"),
            (r"\n", Whitespace),
            (r"\s+", Whitespace),
            (r",", Punctuation),
            # PromQL label-matching operators are =, !=, =~ and !~
            # (the regex-negation operator is '!~', not '~!'); longer
            # alternatives come first so '=' cannot shadow '=~'.
            (r'([_a-zA-Z][a-zA-Z0-9_]*?)(\s*?)(=~|!~|!=|=)(\s*?)(")(.*?)(")',
             bygroups(Name.Label, Whitespace, Operator, Whitespace,
                      Punctuation, String, Punctuation)),
        ],
        # Range selectors such as [5m].
        "range": [
            (r"\]", Punctuation, "#pop"),
            (r"[1-9][0-9]*[smhdwy]", String),
        ],
        # Parenthesized argument lists; nested parens push/pop this state.
        "function": [
            (r"\)", Operator, "#pop"),
            (r"\(", Operator, "#push"),
            default("#pop"),
        ],
    }
diff --git a/pygments/lexers/python.py b/pygments/lexers/python.py
index 334a6b34..22d21430 100644
--- a/pygments/lexers/python.py
+++ b/pygments/lexers/python.py
@@ -5,7 +5,7 @@
Lexers for Python and related languages.
- :copyright: Copyright 2006-2019 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2020 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
@@ -172,19 +172,19 @@ class PythonLexer(RegexLexer):
# without format specifier
(r'(=\s*)?' # debug (https://bugs.python.org/issue36817)
r'(\![sraf])?' # conversion
- r'}', String.Interpol, '#pop'),
+ r'\}', String.Interpol, '#pop'),
# with format specifier
# we'll catch the remaining '}' in the outer scope
(r'(=\s*)?' # debug (https://bugs.python.org/issue36817)
r'(\![sraf])?' # conversion
r':', String.Interpol, '#pop'),
- (r'[^\S]+', Text), # allow new lines
+ (r'\s+', Text), # allow new lines
include('expr'),
],
'expr-inside-fstring-inner': [
(r'[{([]', Punctuation, 'expr-inside-fstring-inner'),
(r'[])}]', Punctuation, '#pop'),
- (r'[^\S]+', Text), # allow new lines
+ (r'\s+', Text), # allow new lines
include('expr'),
],
'expr-keywords': [
@@ -317,8 +317,8 @@ class PythonLexer(RegexLexer):
default('#pop'),
],
'fstringescape': [
- ('{{', String.Escape),
- ('}}', String.Escape),
+ (r'\{\{', String.Escape),
+ (r'\}\}', String.Escape),
include('stringescape'),
],
'stringescape': [
@@ -660,9 +660,8 @@ class PythonConsoleLexer(Lexer):
curcode += line[3:]
else:
if curcode:
- for item in do_insertions(
- insertions, pylexer.get_tokens_unprocessed(curcode)):
- yield item
+ yield from do_insertions(
+ insertions, pylexer.get_tokens_unprocessed(curcode))
curcode = ''
insertions = []
if (line.startswith(u'Traceback (most recent call last):') or
@@ -682,9 +681,8 @@ class PythonConsoleLexer(Lexer):
else:
yield match.start(), Generic.Output, line
if curcode:
- for item in do_insertions(insertions,
- pylexer.get_tokens_unprocessed(curcode)):
- yield item
+ yield from do_insertions(insertions,
+ pylexer.get_tokens_unprocessed(curcode))
if curtb:
for i, t, v in tblexer.get_tokens_unprocessed(curtb):
yield tbindex+i, t, v
@@ -832,7 +830,7 @@ class CythonLexer(RegexLexer):
],
'keywords': [
(words((
- 'assert', 'break', 'by', 'continue', 'ctypedef', 'del', 'elif',
+ 'assert', 'async', 'await', 'break', 'by', 'continue', 'ctypedef', 'del', 'elif',
'else', 'except', 'except?', 'exec', 'finally', 'for', 'fused', 'gil',
'global', 'if', 'include', 'lambda', 'nogil', 'pass', 'print',
'raise', 'return', 'try', 'while', 'yield', 'as', 'with'), suffix=r'\b'),
diff --git a/pygments/lexers/qvt.py b/pygments/lexers/qvt.py
index b1c1495c..515d7270 100644
--- a/pygments/lexers/qvt.py
+++ b/pygments/lexers/qvt.py
@@ -5,7 +5,7 @@
Lexer for QVT Operational language.
- :copyright: Copyright 2006-2019 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2020 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
diff --git a/pygments/lexers/r.py b/pygments/lexers/r.py
index 33e57b3c..6d841a3a 100644
--- a/pygments/lexers/r.py
+++ b/pygments/lexers/r.py
@@ -5,7 +5,7 @@
Lexers for the R/S languages.
- :copyright: Copyright 2006-2019 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2020 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
@@ -49,9 +49,8 @@ class RConsoleLexer(Lexer):
# If we have stored prompt lines, need to process them first.
if current_code_block:
# Weave together the prompts and highlight code.
- for item in do_insertions(
- insertions, slexer.get_tokens_unprocessed(current_code_block)):
- yield item
+ yield from do_insertions(
+ insertions, slexer.get_tokens_unprocessed(current_code_block))
# Reset vars for next code block.
current_code_block = ''
insertions = []
@@ -62,9 +61,8 @@ class RConsoleLexer(Lexer):
# process the last code block. This is neither elegant nor DRY so
# should be changed.
if current_code_block:
- for item in do_insertions(
- insertions, slexer.get_tokens_unprocessed(current_code_block)):
- yield item
+ yield from do_insertions(
+ insertions, slexer.get_tokens_unprocessed(current_code_block))
class SLexer(RegexLexer):
@@ -80,7 +78,7 @@ class SLexer(RegexLexer):
mimetypes = ['text/S-plus', 'text/S', 'text/x-r-source', 'text/x-r',
'text/x-R', 'text/x-r-history', 'text/x-r-profile']
- valid_name = r'`[^`\\]*(?:\\.[^`\\]*)*`|(?:[a-zA-Z]|\.[A-Za-z_.])[\w_.]*|\.'
+ valid_name = r'`[^`\\]*(?:\\.[^`\\]*)*`|(?:[a-zA-Z]|\.[A-Za-z_.])[\w.]*|\.'
tokens = {
'comments': [
(r'#.*$', Comment.Single),
diff --git a/pygments/lexers/rdf.py b/pygments/lexers/rdf.py
index 5927a686..8bbd4b0c 100644
--- a/pygments/lexers/rdf.py
+++ b/pygments/lexers/rdf.py
@@ -5,7 +5,7 @@
Lexers for semantic web and RDF query languages and markup.
- :copyright: Copyright 2006-2019 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2020 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
diff --git a/pygments/lexers/rebol.py b/pygments/lexers/rebol.py
index 1b3d90f5..211060b4 100644
--- a/pygments/lexers/rebol.py
+++ b/pygments/lexers/rebol.py
@@ -5,7 +5,7 @@
Lexers for the REBOL and related languages.
- :copyright: Copyright 2006-2019 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2020 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
diff --git a/pygments/lexers/resource.py b/pygments/lexers/resource.py
index ccd4e5f6..28dff49c 100644
--- a/pygments/lexers/resource.py
+++ b/pygments/lexers/resource.py
@@ -5,7 +5,7 @@
Lexer for resource definition files.
- :copyright: Copyright 2006-2019 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2020 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
diff --git a/pygments/lexers/ride.py b/pygments/lexers/ride.py
index 17bc2466..490d1e07 100644
--- a/pygments/lexers/ride.py
+++ b/pygments/lexers/ride.py
@@ -5,7 +5,7 @@
Lexer for the Ride programming language.
- :copyright: Copyright 2006-2019 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2020 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
diff --git a/pygments/lexers/rnc.py b/pygments/lexers/rnc.py
index 8f0ba5cf..9fb8ab52 100644
--- a/pygments/lexers/rnc.py
+++ b/pygments/lexers/rnc.py
@@ -5,7 +5,7 @@
Lexer for Relax-NG Compact syntax
- :copyright: Copyright 2006-2019 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2020 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
diff --git a/pygments/lexers/roboconf.py b/pygments/lexers/roboconf.py
index f820fe1e..2419ee51 100644
--- a/pygments/lexers/roboconf.py
+++ b/pygments/lexers/roboconf.py
@@ -5,7 +5,7 @@
Lexers for Roboconf DSL.
- :copyright: Copyright 2006-2019 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2020 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
diff --git a/pygments/lexers/robotframework.py b/pygments/lexers/robotframework.py
index ddaddb22..cd808292 100644
--- a/pygments/lexers/robotframework.py
+++ b/pygments/lexers/robotframework.py
@@ -5,7 +5,7 @@
Lexer for Robot Framework.
- :copyright: Copyright 2006-2019 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2020 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
@@ -98,16 +98,13 @@ class VariableTokenizer:
before = string[:var.start]
yield before, orig_token
yield var.identifier + '{', SYNTAX
- for value, token in self.tokenize(var.base, VARIABLE):
- yield value, token
+ yield from self.tokenize(var.base, VARIABLE)
yield '}', SYNTAX
if var.index:
yield '[', SYNTAX
- for value, token in self.tokenize(var.index, VARIABLE):
- yield value, token
+ yield from self.tokenize(var.index, VARIABLE)
yield ']', SYNTAX
- for value, token in self.tokenize(string[var.end:], orig_token):
- yield value, token
+ yield from self.tokenize(string[var.end:], orig_token)
class RowTokenizer:
@@ -123,6 +120,7 @@ class RowTokenizer:
'metadata': settings,
'variables': variables, 'variable': variables,
'testcases': testcases, 'testcase': testcases,
+ 'tasks': testcases, 'task': testcases,
'keywords': keywords, 'keyword': keywords,
'userkeywords': keywords, 'userkeyword': keywords}
@@ -137,9 +135,8 @@ class RowTokenizer:
elif index == 0 and value.startswith('*'):
self._table = self._start_table(value)
heading = True
- for value, token in self._tokenize(value, index, commented,
- separator, heading):
- yield value, token
+ yield from self._tokenize(value, index, commented,
+ separator, heading)
self._table.end_row()
def _start_table(self, header):
@@ -154,8 +151,7 @@ class RowTokenizer:
elif heading:
yield value, HEADING
else:
- for value, token in self._table.tokenize(value, index):
- yield value, token
+ yield from self._table.tokenize(value, index)
class RowSplitter:
@@ -165,14 +161,12 @@ class RowSplitter:
def split(self, row):
splitter = (row.startswith('| ') and self._split_from_pipes
or self._split_from_spaces)
- for value in splitter(row):
- yield value
+ yield from splitter(row)
yield '\n'
def _split_from_spaces(self, row):
yield '' # Start with (pseudo)separator similarly as with pipes
- for value in self._space_splitter.split(row):
- yield value
+ yield from self._space_splitter.split(row)
def _split_from_pipes(self, row):
_, separator, rest = self._pipe_splitter.split(row, 1)
@@ -215,11 +209,11 @@ class Comment(Tokenizer):
class Setting(Tokenizer):
_tokens = (SETTING, ARGUMENT)
_keyword_settings = ('suitesetup', 'suiteprecondition', 'suiteteardown',
- 'suitepostcondition', 'testsetup', 'testprecondition',
- 'testteardown', 'testpostcondition', 'testtemplate')
+ 'suitepostcondition', 'testsetup', 'tasksetup', 'testprecondition',
+ 'testteardown','taskteardown', 'testpostcondition', 'testtemplate', 'tasktemplate')
_import_settings = ('library', 'resource', 'variables')
_other_settings = ('documentation', 'metadata', 'forcetags', 'defaulttags',
- 'testtimeout')
+ 'testtimeout','tasktimeout')
_custom_tokenizer = None
def __init__(self, template_setter=None):
@@ -332,8 +326,7 @@ class _Table:
self._tokenizer = self._prev_tokenizer
yield value, SYNTAX
else:
- for value_and_token in self._tokenize(value, index):
- yield value_and_token
+ yield from self._tokenize(value, index)
self._prev_values_on_row.append(value)
def _continues(self, value, index):
diff --git a/pygments/lexers/ruby.py b/pygments/lexers/ruby.py
index 8bcbde67..e16cd711 100644
--- a/pygments/lexers/ruby.py
+++ b/pygments/lexers/ruby.py
@@ -5,7 +5,7 @@
Lexers for Ruby and related languages.
- :copyright: Copyright 2006-2019 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2020 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
@@ -58,8 +58,7 @@ class RubyLexer(ExtendedRegexLexer):
ctx.pos = match.start(5)
ctx.end = match.end(5)
# this may find other heredocs
- for i, t, v in self.get_tokens_unprocessed(context=ctx):
- yield i, t, v
+ yield from self.get_tokens_unprocessed(context=ctx)
ctx.pos = match.end()
if outermost:
@@ -109,16 +108,17 @@ class RubyLexer(ExtendedRegexLexer):
(r'\:@{0,2}[a-zA-Z_]\w*[!?]?', String.Symbol),
(words(RUBY_OPERATORS, prefix=r'\:@{0,2}'), String.Symbol),
(r":'(\\\\|\\'|[^'])*'", String.Symbol),
- (r"'(\\\\|\\'|[^'])*'", String.Single),
(r':"', String.Symbol, 'simple-sym'),
(r'([a-zA-Z_]\w*)(:)(?!:)',
bygroups(String.Symbol, Punctuation)), # Since Ruby 1.9
- (r'"', String.Double, 'simple-string'),
+ (r'"', String.Double, 'simple-string-double'),
+ (r"'", String.Single, 'simple-string-single'),
(r'(?<!\.)`', String.Backtick, 'simple-backtick'),
]
- # double-quoted string and symbol
- for name, ttype, end in ('string', String.Double, '"'), \
+ # quoted string and symbol
+ for name, ttype, end in ('string-double', String.Double, '"'), \
+ ('string-single', String.Single, "'"),\
('sym', String.Symbol, '"'), \
('backtick', String.Backtick, '`'):
states['simple-'+name] = [
@@ -421,16 +421,14 @@ class RubyConsoleLexer(Lexer):
curcode += line[end:]
else:
if curcode:
- for item in do_insertions(
- insertions, rblexer.get_tokens_unprocessed(curcode)):
- yield item
+ yield from do_insertions(
+ insertions, rblexer.get_tokens_unprocessed(curcode))
curcode = ''
insertions = []
yield match.start(), Generic.Output, line
if curcode:
- for item in do_insertions(
- insertions, rblexer.get_tokens_unprocessed(curcode)):
- yield item
+ yield from do_insertions(
+ insertions, rblexer.get_tokens_unprocessed(curcode))
class FancyLexer(RegexLexer):
diff --git a/pygments/lexers/rust.py b/pygments/lexers/rust.py
index 9d91f7fd..6a28a880 100644
--- a/pygments/lexers/rust.py
+++ b/pygments/lexers/rust.py
@@ -5,7 +5,7 @@
Lexers for the Rust language.
- :copyright: Copyright 2006-2019 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2020 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
@@ -40,7 +40,7 @@ class RustLexer(RegexLexer):
'ExactSizeIterator', 'Option', 'Result',
'Box', 'ToOwned', 'String', 'ToString', 'Vec',
'Clone', 'Copy', 'Default', 'Eq', 'Hash', 'Ord', 'PartialEq',
- 'PartialOrd', 'Eq', 'Ord',
+ 'PartialOrd', 'Ord',
), suffix=r'\b'), Name.Builtin)
builtin_funcs_macros = (words((
diff --git a/pygments/lexers/sas.py b/pygments/lexers/sas.py
index 38d80774..85b07adb 100644
--- a/pygments/lexers/sas.py
+++ b/pygments/lexers/sas.py
@@ -5,7 +5,7 @@
Lexer for SAS.
- :copyright: Copyright 2006-2019 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2020 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
diff --git a/pygments/lexers/scdoc.py b/pygments/lexers/scdoc.py
index 4916393f..930060bd 100644
--- a/pygments/lexers/scdoc.py
+++ b/pygments/lexers/scdoc.py
@@ -5,7 +5,7 @@
Lexer for scdoc, a simple man page generator.
- :copyright: Copyright 2006-2019 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2020 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
@@ -59,7 +59,7 @@ class ScdocLexer(RegexLexer):
# underlines
(r'(\s)(_[^_]+_)(\W|\n)', bygroups(Text, Generic.Emph, Text)),
# bold
- (r'(\s)(\*[^\*]+\*)(\W|\n)', bygroups(Text, Generic.Strong, Text)),
+ (r'(\s)(\*[^*]+\*)(\W|\n)', bygroups(Text, Generic.Strong, Text)),
# inline code
(r'`[^`]+`', String.Backtick),
diff --git a/pygments/lexers/scripting.py b/pygments/lexers/scripting.py
index a20c54be..843cddfc 100644
--- a/pygments/lexers/scripting.py
+++ b/pygments/lexers/scripting.py
@@ -5,7 +5,7 @@
Lexer for scripting and embedded languages.
- :copyright: Copyright 2006-2019 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2020 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
@@ -1253,13 +1253,13 @@ class MiniScriptLexer(RegexLexer):
'in', 'isa', 'then', 'repeat', 'return', 'while'), suffix=r'\b'),
Keyword),
(words((
- 'abs', 'acos', 'asin', 'atan', 'ceil', 'char', 'cos', 'floor',
- 'log', 'round', 'rnd', 'pi', 'sign', 'sin', 'sqrt', 'str', 'tan',
- 'hasIndex', 'indexOf', 'len', 'val', 'code', 'remove', 'lower',
- 'upper', 'replace', 'split', 'indexes', 'values', 'join', 'sum',
- 'sort', 'shuffle', 'push', 'pop', 'pull', 'range',
- 'print', 'input', 'time', 'wait', 'locals', 'globals', 'outer',
- 'yield'), suffix=r'\b'),
+ 'abs', 'acos', 'asin', 'atan', 'ceil', 'char', 'cos', 'floor',
+ 'log', 'round', 'rnd', 'pi', 'sign', 'sin', 'sqrt', 'str', 'tan',
+ 'hasIndex', 'indexOf', 'len', 'val', 'code', 'remove', 'lower',
+ 'upper', 'replace', 'split', 'indexes', 'values', 'join', 'sum',
+ 'sort', 'shuffle', 'push', 'pop', 'pull', 'range',
+ 'print', 'input', 'time', 'wait', 'locals', 'globals', 'outer',
+ 'yield'), suffix=r'\b'),
Name.Builtin),
(r'(true|false|null)\b', Keyword.Constant),
(r'(and|or|not|new)\b', Operator.Word),
diff --git a/pygments/lexers/sgf.py b/pygments/lexers/sgf.py
index fed864a6..6dfd275a 100644
--- a/pygments/lexers/sgf.py
+++ b/pygments/lexers/sgf.py
@@ -5,7 +5,7 @@
Lexer for Smart Game Format (sgf) file format.
- :copyright: Copyright 2006-2019 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2020 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
diff --git a/pygments/lexers/shell.py b/pygments/lexers/shell.py
index bc55a52a..ce6bf6d2 100644
--- a/pygments/lexers/shell.py
+++ b/pygments/lexers/shell.py
@@ -5,7 +5,7 @@
Lexers for various shells.
- :copyright: Copyright 2006-2019 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2020 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
@@ -172,7 +172,7 @@ class ShellSessionBaseLexer(Lexer):
curcode += line
backslash_continuation = curcode.endswith('\\\n')
continue
-
+
venv_match = self._venv.match(line)
if venv_match:
venv = venv_match.group(1)
@@ -250,14 +250,14 @@ class BatchLexer(RegexLexer):
_nl = r'\n\x1a'
_punct = r'&<>|'
_ws = r'\t\v\f\r ,;=\xa0'
+ _nlws = r'\s\x1a\xa0,;='
_space = r'(?:(?:(?:\^[%s])?[%s])+)' % (_nl, _ws)
_keyword_terminator = (r'(?=(?:\^[%s]?)?[%s+./:[\\\]]|[%s%s(])' %
(_nl, _ws, _nl, _punct))
_token_terminator = r'(?=\^?[%s]|[%s%s])' % (_ws, _punct, _nl)
_start_label = r'((?:(?<=^[^:])|^[^:]?)[%s]*)(:)' % _ws
- _label = r'(?:(?:[^%s%s%s+:^]|\^[%s]?[\w\W])*)' % (_nl, _punct, _ws, _nl)
- _label_compound = (r'(?:(?:[^%s%s%s+:^)]|\^[%s]?[^)])*)' %
- (_nl, _punct, _ws, _nl))
+ _label = r'(?:(?:[^%s%s+:^]|\^[%s]?[\w\W])*)' % (_nlws, _punct, _nl)
+ _label_compound = r'(?:(?:[^%s%s+:^)]|\^[%s]?[^)])*)' % (_nlws, _punct, _nl)
_number = r'(?:-?(?:0[0-7]+|0x[\da-f]+|\d+)%s)' % _token_terminator
_opword = r'(?:equ|geq|gtr|leq|lss|neq)'
_string = r'(?:"[^%s"]*(?:"|(?=[%s])))' % (_nl, _nl)
@@ -267,9 +267,8 @@ class BatchLexer(RegexLexer):
r'(?:\^?![^!:%s]+(?::(?:~(?:-?\d+)?(?:,(?:-?\d+)?)?|(?:'
r'[^!%s^]|\^[^!%s])[^=%s]*=(?:[^!%s^]|\^[^!%s])*)?)?\^?!))' %
(_nl, _nl, _nl, _nl, _nl, _nl, _nl, _nl, _nl, _nl, _nl, _nl))
- _core_token = r'(?:(?:(?:\^[%s]?)?[^"%s%s%s])+)' % (_nl, _nl, _punct, _ws)
- _core_token_compound = r'(?:(?:(?:\^[%s]?)?[^"%s%s%s)])+)' % (_nl, _nl,
- _punct, _ws)
+ _core_token = r'(?:(?:(?:\^[%s]?)?[^"%s%s])+)' % (_nl, _nlws, _punct)
+ _core_token_compound = r'(?:(?:(?:\^[%s]?)?[^"%s%s)])+)' % (_nl, _nlws, _punct)
_token = r'(?:[%s]+|%s)' % (_punct, _core_token)
_token_compound = r'(?:[%s]+|%s)' % (_punct, _core_token_compound)
_stoken = (r'(?:[%s]+|(?:%s|%s|%s)+)' %
@@ -380,7 +379,8 @@ class BatchLexer(RegexLexer):
return state
def _make_arithmetic_state(compound, _nl=_nl, _punct=_punct,
- _string=_string, _variable=_variable, _ws=_ws):
+ _string=_string, _variable=_variable,
+ _ws=_ws, _nlws=_nlws):
op = r'=+\-*/!~'
state = []
if compound:
@@ -391,8 +391,8 @@ class BatchLexer(RegexLexer):
(r'\d+', Number.Integer),
(r'[(),]+', Punctuation),
(r'([%s]|%%|\^\^)+' % op, Operator),
- (r'(%s|%s|(\^[%s]?)?[^()%s%%^"%s%s%s]|\^[%s%s]?%s)+' %
- (_string, _variable, _nl, op, _nl, _punct, _ws, _nl, _ws,
+ (r'(%s|%s|(\^[%s]?)?[^()%s%%\^"%s%s]|\^[%s]?%s)+' %
+ (_string, _variable, _nl, op, _nlws, _punct, _nlws,
r'[^)]' if compound else r'[\w\W]'),
using(this, state='variable')),
(r'(?=[\x00|&])', Text, '#pop'),
@@ -426,15 +426,15 @@ class BatchLexer(RegexLexer):
_core_token_compound=_core_token_compound,
_nl=_nl, _punct=_punct, _stoken=_stoken,
_string=_string, _space=_space,
- _variable=_variable, _ws=_ws):
+ _variable=_variable, _nlws=_nlws):
stoken_compound = (r'(?:[%s]+|(?:%s|%s|%s)+)' %
(_punct, _string, _variable, _core_token_compound))
return [
- (r'((?:(?<=[%s%s])\d)?)(>>?&|<&)([%s%s]*)(\d)' %
- (_nl, _ws, _nl, _ws),
+ (r'((?:(?<=[%s])\d)?)(>>?&|<&)([%s]*)(\d)' %
+ (_nlws, _nlws),
bygroups(Number.Integer, Punctuation, Text, Number.Integer)),
- (r'((?:(?<=[%s%s])(?<!\^[%s])\d)?)(>>?|<)(%s?%s)' %
- (_nl, _ws, _nl, _space, stoken_compound if compound else _stoken),
+ (r'((?:(?<=[%s])(?<!\^[%s])\d)?)(>>?|<)(%s?%s)' %
+ (_nlws, _nl, _space, stoken_compound if compound else _stoken),
bygroups(Number.Integer, Punctuation, using(this, state='text')))
]
@@ -473,7 +473,7 @@ class BatchLexer(RegexLexer):
'text': [
(r'"', String.Double, 'string'),
include('variable-or-escape'),
- (r'[^"%%^%s%s%s\d)]+|.' % (_nl, _punct, _ws), Text)
+ (r'[^"%%^%s%s\d)]+|.' % (_nlws, _punct), Text)
],
'variable': [
(r'"', String.Double, 'string'),
@@ -494,13 +494,13 @@ class BatchLexer(RegexLexer):
include('follow')
],
'for/f': [
- (r'(")((?:%s|[^"])*?")([%s%s]*)(\))' % (_variable, _nl, _ws),
+ (r'(")((?:%s|[^"])*?")([%s]*)(\))' % (_variable, _nlws),
bygroups(String.Double, using(this, state='string'), Text,
Punctuation)),
(r'"', String.Double, ('#pop', 'for2', 'string')),
- (r"('(?:%%%%|%s|[\w\W])*?')([%s%s]*)(\))" % (_variable, _nl, _ws),
+ (r"('(?:%%%%|%s|[\w\W])*?')([%s]*)(\))" % (_variable, _nlws),
bygroups(using(this, state='sqstring'), Text, Punctuation)),
- (r'(`(?:%%%%|%s|[\w\W])*?`)([%s%s]*)(\))' % (_variable, _nl, _ws),
+ (r'(`(?:%%%%|%s|[\w\W])*?`)([%s]*)(\))' % (_variable, _nlws),
bygroups(using(this, state='bqstring'), Text, Punctuation)),
include('for2')
],
@@ -773,7 +773,7 @@ class PowerShellSessionLexer(ShellSessionBaseLexer):
mimetypes = []
_innerLexerCls = PowerShellLexer
- _ps1rgx = re.compile(r'^((?:\[[^]]+\]: )?PS [^>]+> ?)(.*\n?)')
+ _ps1rgx = re.compile(r'^((?:\[[^]]+\]: )?PS[^>]*> ?)(.*\n?)')
_ps2 = '>> '
@@ -907,4 +907,3 @@ class ExeclineLexer(RegexLexer):
def analyse_text(text):
if shebang_matches(text, r'execlineb'):
return 1
-
diff --git a/pygments/lexers/sieve.py b/pygments/lexers/sieve.py
index 814cb102..9d0b16e5 100644
--- a/pygments/lexers/sieve.py
+++ b/pygments/lexers/sieve.py
@@ -14,7 +14,7 @@
https://tools.ietf.org/html/rfc5429
https://tools.ietf.org/html/rfc8580
- :copyright: Copyright 2006-2019 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2020 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
diff --git a/pygments/lexers/slash.py b/pygments/lexers/slash.py
index 76e5929d..8c9d53d2 100644
--- a/pygments/lexers/slash.py
+++ b/pygments/lexers/slash.py
@@ -6,7 +6,7 @@
Lexer for the `Slash <https://github.com/arturadib/Slash-A>`_ programming
language.
- :copyright: Copyright 2006-2019 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2020 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
@@ -182,4 +182,4 @@ class SlashLexer(DelegatingLexer):
def __init__(self, **options):
from pygments.lexers.web import HtmlLexer
- super(SlashLexer, self).__init__(HtmlLexer, SlashLanguageLexer, **options)
+ super().__init__(HtmlLexer, SlashLanguageLexer, **options)
diff --git a/pygments/lexers/smalltalk.py b/pygments/lexers/smalltalk.py
index 1f63fa85..b7df5f33 100644
--- a/pygments/lexers/smalltalk.py
+++ b/pygments/lexers/smalltalk.py
@@ -5,7 +5,7 @@
Lexers for Smalltalk and related languages.
- :copyright: Copyright 2006-2019 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2020 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
diff --git a/pygments/lexers/smv.py b/pygments/lexers/smv.py
index 6dac4af3..a5500d93 100644
--- a/pygments/lexers/smv.py
+++ b/pygments/lexers/smv.py
@@ -5,7 +5,7 @@
Lexers for the SMV languages.
- :copyright: Copyright 2006-2019 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2020 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
diff --git a/pygments/lexers/snobol.py b/pygments/lexers/snobol.py
index ce52f7c7..b0fdb01c 100644
--- a/pygments/lexers/snobol.py
+++ b/pygments/lexers/snobol.py
@@ -5,7 +5,7 @@
Lexers for the SNOBOL language.
- :copyright: Copyright 2006-2019 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2020 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
diff --git a/pygments/lexers/solidity.py b/pygments/lexers/solidity.py
index 99668371..af0672ee 100644
--- a/pygments/lexers/solidity.py
+++ b/pygments/lexers/solidity.py
@@ -5,7 +5,7 @@
Lexers for Solidity.
- :copyright: Copyright 2006-2019 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2020 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
@@ -13,7 +13,7 @@ import re
from pygments.lexer import RegexLexer, bygroups, include, words
from pygments.token import Text, Comment, Operator, Keyword, Name, String, \
- Number, Punctuation
+ Number, Punctuation, Whitespace
__all__ = ['SolidityLexer']
@@ -33,7 +33,7 @@ class SolidityLexer(RegexLexer):
flags = re.MULTILINE | re.UNICODE
datatype = (
- r'\b(address|bool|((bytes|hash|int|string|uint)(8|16|24|32|40|48|56|64'
+ r'\b(address|bool|(?:(?:bytes|hash|int|string|uint)(?:8|16|24|32|40|48|56|64'
r'|72|80|88|96|104|112|120|128|136|144|152|160|168|176|184|192|200|208'
r'|216|224|232|240|248|256)?))\b'
)
@@ -44,14 +44,13 @@ class SolidityLexer(RegexLexer):
include('comments'),
(r'\bpragma\s+solidity\b', Keyword, 'pragma'),
(r'\b(contract)(\s+)([a-zA-Z_]\w*)',
- bygroups(Keyword, Text.WhiteSpace, Name.Entity)),
- (datatype + r'(\s+)((external|public|internal|private)\s+)?' +
+ bygroups(Keyword, Whitespace, Name.Entity)),
+ (datatype + r'(\s+)((?:external|public|internal|private)\s+)?' +
r'([a-zA-Z_]\w*)',
- bygroups(Keyword.Type, None, None, None, Text.WhiteSpace, Keyword,
- None, Name.Variable)),
+ bygroups(Keyword.Type, Whitespace, Keyword, Name.Variable)),
(r'\b(enum|event|function|struct)(\s+)([a-zA-Z_]\w*)',
- bygroups(Keyword.Type, Text.WhiteSpace, Name.Variable)),
- (r'\b(msg|block|tx)\.([A-Za-z_][A-Za-z0-9_]*)\b', Keyword),
+ bygroups(Keyword.Type, Whitespace, Name.Variable)),
+ (r'\b(msg|block|tx)\.([A-Za-z_][a-zA-Z0-9_]*)\b', Keyword),
(words((
'block', 'break', 'constant', 'constructor', 'continue',
'contract', 'do', 'else', 'external', 'false', 'for',
@@ -74,8 +73,8 @@ class SolidityLexer(RegexLexer):
(r'/(\\\n)?[*][\w\W]*', Comment.Multiline)
],
'constants': [
- (r'("([\\]"|.)*?")', String.Double),
- (r"('([\\]'|.)*?')", String.Single),
+ (r'("(\\"|.)*?")', String.Double),
+ (r"('(\\'|.)*?')", String.Single),
(r'\b0[xX][0-9a-fA-F]+\b', Number.Hex),
(r'\b\d+\b', Number.Decimal),
],
@@ -83,11 +82,11 @@ class SolidityLexer(RegexLexer):
include('whitespace'),
include('comments'),
(r'(\^|>=|<)(\s*)(\d+\.\d+\.\d+)',
- bygroups(Operator, Text.WhiteSpace, Keyword)),
+ bygroups(Operator, Whitespace, Keyword)),
(r';', Punctuation, '#pop')
],
'whitespace': [
- (r'\s+', Text.WhiteSpace),
- (r'\n', Text.WhiteSpace)
+ (r'\s+', Whitespace),
+ (r'\n', Whitespace)
]
}
diff --git a/pygments/lexers/special.py b/pygments/lexers/special.py
index e97f1944..84a924d9 100644
--- a/pygments/lexers/special.py
+++ b/pygments/lexers/special.py
@@ -5,7 +5,7 @@
Special lexers.
- :copyright: Copyright 2006-2019 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2020 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
diff --git a/pygments/lexers/sql.py b/pygments/lexers/sql.py
index da7c9863..98d53c5c 100644
--- a/pygments/lexers/sql.py
+++ b/pygments/lexers/sql.py
@@ -34,7 +34,7 @@
The ``tests/examplefiles`` contains a few test files with data to be
parsed by these lexers.
- :copyright: Copyright 2006-2019 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2020 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
@@ -42,11 +42,18 @@ import re
from pygments.lexer import Lexer, RegexLexer, do_insertions, bygroups, words
from pygments.token import Punctuation, Whitespace, Text, Comment, Operator, \
- Keyword, Name, String, Number, Generic
+ Keyword, Name, String, Number, Generic, Literal
from pygments.lexers import get_lexer_by_name, ClassNotFound
from pygments.lexers._postgres_builtins import KEYWORDS, DATATYPES, \
PSEUDO_TYPES, PLPGSQL_KEYWORDS
+from pygments.lexers._mysql_builtins import \
+ MYSQL_CONSTANTS, \
+ MYSQL_DATATYPES, \
+ MYSQL_FUNCTIONS, \
+ MYSQL_KEYWORDS, \
+ MYSQL_OPTIMIZER_HINTS
+
from pygments.lexers import _tsql_builtins
@@ -117,9 +124,7 @@ class PostgresBase:
def get_tokens_unprocessed(self, text, *args):
# Have a copy of the entire text to be used by `language_callback`.
self.text = text
- for x in super(PostgresBase, self).get_tokens_unprocessed(
- text, *args):
- yield x
+ yield from super().get_tokens_unprocessed(text, *args)
def _get_lexer(self, lang):
if lang.lower() == 'sql':
@@ -319,8 +324,7 @@ class PostgresConsoleLexer(Lexer):
# Identify a shell prompt in case of psql commandline example
if line.startswith('$') and not curcode:
lexer = get_lexer_by_name('console', **self.options)
- for x in lexer.get_tokens_unprocessed(line):
- yield x
+ yield from lexer.get_tokens_unprocessed(line)
break
# Identify a psql prompt
@@ -340,9 +344,8 @@ class PostgresConsoleLexer(Lexer):
break
# Emit the combined stream of command and prompt(s)
- for item in do_insertions(insertions,
- sql.get_tokens_unprocessed(curcode)):
- yield item
+ yield from do_insertions(insertions,
+ sql.get_tokens_unprocessed(curcode))
# Emit the output lines
out_token = Generic.Output
@@ -516,7 +519,7 @@ class TransactSqlLexer(RegexLexer):
tokens = {
'root': [
(r'\s+', Whitespace),
- (r'(?m)--.*?$\n?', Comment.Single),
+ (r'--.*?$\n?', Comment.Single),
(r'/\*', Comment.Multiline, 'multiline-comments'),
(words(_tsql_builtins.OPERATORS), Operator),
(words(_tsql_builtins.OPERATOR_WORDS, suffix=r'\b'), Operator.Word),
@@ -583,8 +586,12 @@ class TransactSqlLexer(RegexLexer):
class MySqlLexer(RegexLexer):
- """
- Special lexer for MySQL.
+ """The Oracle MySQL lexer.
+
+ This lexer does not attempt to maintain strict compatibility with
+ MariaDB syntax or keywords. Although MySQL and MariaDB's common code
+ history suggests there may be significant overlap between the two,
+ compatibility between the two is not a target for this lexer.
"""
name = 'MySQL'
@@ -595,63 +602,151 @@ class MySqlLexer(RegexLexer):
tokens = {
'root': [
(r'\s+', Text),
- (r'(#|--\s+).*\n?', Comment.Single),
- (r'/\*', Comment.Multiline, 'multiline-comments'),
+
+ # Comments
+ (r'(?:#|--\s+).*', Comment.Single),
+ (r'/\*\+', Comment.Special, 'optimizer-hints'),
+ (r'/\*', Comment.Multiline, 'multiline-comment'),
+
+ # Hexadecimal literals
+ (r"x'([0-9a-f]{2})+'", Number.Hex), # MySQL requires paired hex characters in this form.
+ (r'0x[0-9a-f]+', Number.Hex),
+
+ # Binary literals
+ (r"b'[01]+'", Number.Bin),
+ (r'0b[01]+', Number.Bin),
+
+ # Numeric literals
+ (r'[0-9]+\.[0-9]*(e[+-]?[0-9]+)?', Number.Float), # Mandatory integer, optional fraction and exponent
+ (r'[0-9]*\.[0-9]+(e[+-]?[0-9]+)?', Number.Float), # Mandatory fraction, optional integer and exponent
+ (r'[0-9]+e[+-]?[0-9]+', Number.Float), # Exponents with integer significands are still floats
(r'[0-9]+', Number.Integer),
- (r'[0-9]*\.[0-9]+(e[+-][0-9]+)', Number.Float),
- (r"'(\\\\|\\'|''|[^'])*'", String.Single),
- (r'"(\\\\|\\"|""|[^"])*"', String.Double),
- (r"`(\\\\|\\`|``|[^`])*`", String.Symbol),
- (r'[+*/<>=~!@#%^&|`?-]', Operator),
- (r'\b(tinyint|smallint|mediumint|int|integer|bigint|date|'
- r'datetime|time|bit|bool|tinytext|mediumtext|longtext|text|'
- r'tinyblob|mediumblob|longblob|blob|float|double|double\s+'
- r'precision|real|numeric|dec|decimal|timestamp|year|char|'
- r'varchar|varbinary|varcharacter|enum|set)(\b\s*)(\()?',
- bygroups(Keyword.Type, Text, Punctuation)),
- (r'\b(add|all|alter|analyze|and|as|asc|asensitive|before|between|'
- r'bigint|binary|blob|both|by|call|cascade|case|change|char|'
- r'character|check|collate|column|condition|constraint|continue|'
- r'convert|create|cross|current_date|current_time|'
- r'current_timestamp|current_user|cursor|database|databases|'
- r'day_hour|day_microsecond|day_minute|day_second|dec|decimal|'
- r'declare|default|delayed|delete|desc|describe|deterministic|'
- r'distinct|distinctrow|div|double|drop|dual|each|else|elseif|'
- r'enclosed|escaped|exists|exit|explain|fetch|flush|float|float4|'
- r'float8|for|force|foreign|from|fulltext|grant|group|having|'
- r'high_priority|hour_microsecond|hour_minute|hour_second|if|'
- r'ignore|in|index|infile|inner|inout|insensitive|insert|int|'
- r'int1|int2|int3|int4|int8|integer|interval|into|is|iterate|'
- r'join|key|keys|kill|leading|leave|left|like|limit|lines|load|'
- r'localtime|localtimestamp|lock|long|loop|low_priority|match|'
- r'minute_microsecond|minute_second|mod|modifies|natural|'
- r'no_write_to_binlog|not|numeric|on|optimize|option|optionally|'
- r'or|order|out|outer|outfile|precision|primary|procedure|purge|'
- r'raid0|read|reads|real|references|regexp|release|rename|repeat|'
- r'replace|require|restrict|return|revoke|right|rlike|schema|'
- r'schemas|second_microsecond|select|sensitive|separator|set|'
- r'show|smallint|soname|spatial|specific|sql|sql_big_result|'
- r'sql_calc_found_rows|sql_small_result|sqlexception|sqlstate|'
- r'sqlwarning|ssl|starting|straight_join|table|terminated|then|'
- r'to|trailing|trigger|undo|union|unique|unlock|unsigned|update|'
- r'usage|use|using|utc_date|utc_time|utc_timestamp|values|'
- r'varying|when|where|while|with|write|x509|xor|year_month|'
- r'zerofill)\b', Keyword),
- # TODO: this list is not complete
- (r'\b(auto_increment|engine|charset|tables)\b', Keyword.Pseudo),
- (r'(true|false|null)', Name.Constant),
- (r'([a-z_]\w*)(\s*)(\()',
+
+ # Date literals
+ (r"\{\s*d\s*(?P<quote>['\"])\s*\d{2}(\d{2})?.?\d{2}.?\d{2}\s*(?P=quote)\s*\}",
+ Literal.Date),
+
+ # Time literals
+ (r"\{\s*t\s*(?P<quote>['\"])\s*(?:\d+\s+)?\d{1,2}.?\d{1,2}.?\d{1,2}(\.\d*)?\s*(?P=quote)\s*\}",
+ Literal.Date),
+
+ # Timestamp literals
+ (
+ r"\{\s*ts\s*(?P<quote>['\"])\s*"
+ r"\d{2}(?:\d{2})?.?\d{2}.?\d{2}" # Date part
+ r"\s+" # Whitespace between date and time
+ r"\d{1,2}.?\d{1,2}.?\d{1,2}(\.\d*)?" # Time part
+ r"\s*(?P=quote)\s*\}",
+ Literal.Date
+ ),
+
+ # String literals
+ (r"'", String.Single, 'single-quoted-string'),
+ (r'"', String.Double, 'double-quoted-string'),
+
+ # Variables
+ (r'@@(?:global\.|persist\.|persist_only\.|session\.)?[a-z_]+', Name.Variable),
+ (r'@[a-z0-9_$.]+', Name.Variable),
+ (r"@'", Name.Variable, 'single-quoted-variable'),
+ (r'@"', Name.Variable, 'double-quoted-variable'),
+ (r"@`", Name.Variable, 'backtick-quoted-variable'),
+ (r'\?', Name.Variable), # For demonstrating prepared statements
+
+ # Operators
+ (r'[!%&*+/:<=>^|~-]+', Operator),
+
+ # Exceptions; these words tokenize differently in different contexts.
+ (r'\b(set)(?!\s*\()', Keyword),
+ (r'\b(character)(\s+)(set)\b', bygroups(Keyword, Text, Keyword)),
+ # In all other known cases, "SET" is tokenized by MYSQL_DATATYPES.
+
+ (words(MYSQL_CONSTANTS, prefix=r'\b', suffix=r'\b'), Name.Constant),
+ (words(MYSQL_DATATYPES, prefix=r'\b', suffix=r'\b'), Keyword.Type),
+ (words(MYSQL_KEYWORDS, prefix=r'\b', suffix=r'\b'), Keyword),
+ (words(MYSQL_FUNCTIONS, prefix=r'\b', suffix=r'\b(\s*)(\()'),
bygroups(Name.Function, Text, Punctuation)),
- (r'[a-z_]\w*', Name),
- (r'@[a-z0-9]*[._]*[a-z0-9]*', Name.Variable),
- (r'[;:()\[\],.]', Punctuation)
+
+ # Schema object names
+ #
+ # Note: Although the first regex supports unquoted all-numeric
+ # identifiers, this will not be a problem in practice because
+ # numeric literals have already been handled above.
+ #
+ ('[0-9a-z$_\u0080-\uffff]+', Name),
+ (r'`', Name, 'schema-object-name'),
+
+ # Punctuation
+ (r'[(),.;]', Punctuation),
],
- 'multiline-comments': [
- (r'/\*', Comment.Multiline, 'multiline-comments'),
+
+ # Multiline comment substates
+ # ---------------------------
+
+ 'optimizer-hints': [
+ (r'[^*a-z]+', Comment.Special),
+ (r'\*/', Comment.Special, '#pop'),
+ (words(MYSQL_OPTIMIZER_HINTS, suffix=r'\b'), Comment.Preproc),
+ ('[a-z]+', Comment.Special),
+ (r'\*', Comment.Special),
+ ],
+
+ 'multiline-comment': [
+ (r'[^*]+', Comment.Multiline),
(r'\*/', Comment.Multiline, '#pop'),
- (r'[^/*]+', Comment.Multiline),
- (r'[/*]', Comment.Multiline)
- ]
+ (r'\*', Comment.Multiline),
+ ],
+
+ # String substates
+ # ----------------
+
+ 'single-quoted-string': [
+ (r"[^'\\]+", String.Single),
+ (r"''", String.Escape),
+ (r"""\\[0'"bnrtZ\\%_]""", String.Escape),
+ (r"'", String.Single, '#pop'),
+ ],
+
+ 'double-quoted-string': [
+ (r'[^"\\]+', String.Double),
+ (r'""', String.Escape),
+ (r"""\\[0'"bnrtZ\\%_]""", String.Escape),
+ (r'"', String.Double, '#pop'),
+ ],
+
+ # Variable substates
+ # ------------------
+
+ 'single-quoted-variable': [
+ (r"[^']+", Name.Variable),
+ (r"''", Name.Variable),
+ (r"'", Name.Variable, '#pop'),
+ ],
+
+ 'double-quoted-variable': [
+ (r'[^"]+', Name.Variable),
+ (r'""', Name.Variable),
+ (r'"', Name.Variable, '#pop'),
+ ],
+
+ 'backtick-quoted-variable': [
+ (r'[^`]+', Name.Variable),
+ (r'``', Name.Variable),
+ (r'`', Name.Variable, '#pop'),
+ ],
+
+ # Schema object name substates
+ # ----------------------------
+ #
+ # Backtick-quoted schema object names support escape characters.
+ # It may be desirable to tokenize escape sequences differently,
+ # but currently Pygments does not have an obvious token type for
+ # this unique situation (for example, "Name.Escape").
+ #
+ 'schema-object-name': [
+ (r'[^`\\]+', Name),
+ (r'(?:\\\\|\\`|``)', Name), # This could be an escaped name token type.
+ (r'`', Name, '#pop'),
+ ],
}
def analyse_text(text):
@@ -698,9 +793,8 @@ class SqliteConsoleLexer(Lexer):
curcode += line[8:]
else:
if curcode:
- for item in do_insertions(insertions,
- sql.get_tokens_unprocessed(curcode)):
- yield item
+ yield from do_insertions(insertions,
+ sql.get_tokens_unprocessed(curcode))
curcode = ''
insertions = []
if line.startswith('SQL error: '):
@@ -708,9 +802,8 @@ class SqliteConsoleLexer(Lexer):
else:
yield (match.start(), Generic.Output, line)
if curcode:
- for item in do_insertions(insertions,
- sql.get_tokens_unprocessed(curcode)):
- yield item
+ yield from do_insertions(insertions,
+ sql.get_tokens_unprocessed(curcode))
class RqlLexer(RegexLexer):
diff --git a/pygments/lexers/stata.py b/pygments/lexers/stata.py
index c6344fc6..fbb5fdcf 100644
--- a/pygments/lexers/stata.py
+++ b/pygments/lexers/stata.py
@@ -5,12 +5,12 @@
Lexer for Stata
- :copyright: Copyright 2006-2019 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2020 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
import re
-from pygments.lexer import RegexLexer, include, words
+from pygments.lexer import RegexLexer, default, include, words
from pygments.token import Comment, Keyword, Name, Number, \
String, Text, Operator
@@ -118,27 +118,27 @@ class StataLexer(RegexLexer):
# A global is more restricted, so we do follow rules. Note only
# locals explicitly enclosed ${} can be nested.
'macros': [
- (r'\$(\{|(?=[\$`]))', Name.Variable.Global, 'macro-global-nested'),
+ (r'\$(\{|(?=[$`]))', Name.Variable.Global, 'macro-global-nested'),
(r'\$', Name.Variable.Global, 'macro-global-name'),
(r'`', Name.Variable, 'macro-local'),
],
'macro-local': [
(r'`', Name.Variable, '#push'),
(r"'", Name.Variable, '#pop'),
- (r'\$(\{|(?=[\$`]))', Name.Variable.Global, 'macro-global-nested'),
+ (r'\$(\{|(?=[$`]))', Name.Variable.Global, 'macro-global-nested'),
(r'\$', Name.Variable.Global, 'macro-global-name'),
(r'.', Name.Variable), # fallback
],
'macro-global-nested': [
- (r'\$(\{|(?=[\$`]))', Name.Variable.Global, '#push'),
+ (r'\$(\{|(?=[$`]))', Name.Variable.Global, '#push'),
(r'\}', Name.Variable.Global, '#pop'),
(r'\$', Name.Variable.Global, 'macro-global-name'),
(r'`', Name.Variable, 'macro-local'),
(r'\w', Name.Variable.Global), # fallback
- (r'(?!\w)', Name.Variable.Global, '#pop'),
+ default('#pop'),
],
'macro-global-name': [
- (r'\$(\{|(?=[\$`]))', Name.Variable.Global, 'macro-global-nested', '#pop'),
+ (r'\$(\{|(?=[$`]))', Name.Variable.Global, 'macro-global-nested', '#pop'),
(r'\$', Name.Variable.Global, 'macro-global-name', '#pop'),
(r'`', Name.Variable, 'macro-local', '#pop'),
(r'\w{1,32}', Name.Variable.Global, '#pop'),
diff --git a/pygments/lexers/supercollider.py b/pygments/lexers/supercollider.py
index d0d033a0..c0dc04be 100644
--- a/pygments/lexers/supercollider.py
+++ b/pygments/lexers/supercollider.py
@@ -5,7 +5,7 @@
Lexer for SuperCollider
- :copyright: Copyright 2006-2019 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2020 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
diff --git a/pygments/lexers/tcl.py b/pygments/lexers/tcl.py
index ea37c824..0c679d68 100644
--- a/pygments/lexers/tcl.py
+++ b/pygments/lexers/tcl.py
@@ -5,7 +5,7 @@
Lexers for Tcl and related languages.
- :copyright: Copyright 2006-2019 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2020 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
diff --git a/pygments/lexers/templates.py b/pygments/lexers/templates.py
index fce41ec1..accd1f71 100644
--- a/pygments/lexers/templates.py
+++ b/pygments/lexers/templates.py
@@ -5,7 +5,7 @@
Lexers for various template engines' markup.
- :copyright: Copyright 2006-2019 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2020 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
@@ -293,8 +293,7 @@ class VelocityHtmlLexer(DelegatingLexer):
mimetypes = ['text/html+velocity']
def __init__(self, **options):
- super(VelocityHtmlLexer, self).__init__(HtmlLexer, VelocityLexer,
- **options)
+ super().__init__(HtmlLexer, VelocityLexer, **options)
class VelocityXmlLexer(DelegatingLexer):
@@ -310,8 +309,7 @@ class VelocityXmlLexer(DelegatingLexer):
mimetypes = ['application/xml+velocity']
def __init__(self, **options):
- super(VelocityXmlLexer, self).__init__(XmlLexer, VelocityLexer,
- **options)
+ super().__init__(XmlLexer, VelocityLexer, **options)
def analyse_text(text):
rv = VelocityLexer.analyse_text(text) - 0.01
@@ -464,8 +462,7 @@ class MyghtyHtmlLexer(DelegatingLexer):
mimetypes = ['text/html+myghty']
def __init__(self, **options):
- super(MyghtyHtmlLexer, self).__init__(HtmlLexer, MyghtyLexer,
- **options)
+ super().__init__(HtmlLexer, MyghtyLexer, **options)
class MyghtyXmlLexer(DelegatingLexer):
@@ -481,8 +478,7 @@ class MyghtyXmlLexer(DelegatingLexer):
mimetypes = ['application/xml+myghty']
def __init__(self, **options):
- super(MyghtyXmlLexer, self).__init__(XmlLexer, MyghtyLexer,
- **options)
+ super().__init__(XmlLexer, MyghtyLexer, **options)
class MyghtyJavascriptLexer(DelegatingLexer):
@@ -500,8 +496,7 @@ class MyghtyJavascriptLexer(DelegatingLexer):
'text/javascript+mygthy']
def __init__(self, **options):
- super(MyghtyJavascriptLexer, self).__init__(JavascriptLexer,
- MyghtyLexer, **options)
+ super().__init__(JavascriptLexer, MyghtyLexer, **options)
class MyghtyCssLexer(DelegatingLexer):
@@ -517,8 +512,7 @@ class MyghtyCssLexer(DelegatingLexer):
mimetypes = ['text/css+myghty']
def __init__(self, **options):
- super(MyghtyCssLexer, self).__init__(CssLexer, MyghtyLexer,
- **options)
+ super().__init__(CssLexer, MyghtyLexer, **options)
class MasonLexer(RegexLexer):
@@ -659,8 +653,7 @@ class MakoHtmlLexer(DelegatingLexer):
mimetypes = ['text/html+mako']
def __init__(self, **options):
- super(MakoHtmlLexer, self).__init__(HtmlLexer, MakoLexer,
- **options)
+ super().__init__(HtmlLexer, MakoLexer, **options)
class MakoXmlLexer(DelegatingLexer):
@@ -676,8 +669,7 @@ class MakoXmlLexer(DelegatingLexer):
mimetypes = ['application/xml+mako']
def __init__(self, **options):
- super(MakoXmlLexer, self).__init__(XmlLexer, MakoLexer,
- **options)
+ super().__init__(XmlLexer, MakoLexer, **options)
class MakoJavascriptLexer(DelegatingLexer):
@@ -695,8 +687,7 @@ class MakoJavascriptLexer(DelegatingLexer):
'text/javascript+mako']
def __init__(self, **options):
- super(MakoJavascriptLexer, self).__init__(JavascriptLexer,
- MakoLexer, **options)
+ super().__init__(JavascriptLexer, MakoLexer, **options)
class MakoCssLexer(DelegatingLexer):
@@ -712,8 +703,7 @@ class MakoCssLexer(DelegatingLexer):
mimetypes = ['text/css+mako']
def __init__(self, **options):
- super(MakoCssLexer, self).__init__(CssLexer, MakoLexer,
- **options)
+ super().__init__(CssLexer, MakoLexer, **options)
# Genshi and Cheetah lexers courtesy of Matt Good.
@@ -786,8 +776,7 @@ class CheetahHtmlLexer(DelegatingLexer):
mimetypes = ['text/html+cheetah', 'text/html+spitfire']
def __init__(self, **options):
- super(CheetahHtmlLexer, self).__init__(HtmlLexer, CheetahLexer,
- **options)
+ super().__init__(HtmlLexer, CheetahLexer, **options)
class CheetahXmlLexer(DelegatingLexer):
@@ -801,8 +790,7 @@ class CheetahXmlLexer(DelegatingLexer):
mimetypes = ['application/xml+cheetah', 'application/xml+spitfire']
def __init__(self, **options):
- super(CheetahXmlLexer, self).__init__(XmlLexer, CheetahLexer,
- **options)
+ super().__init__(XmlLexer, CheetahLexer, **options)
class CheetahJavascriptLexer(DelegatingLexer):
@@ -822,8 +810,7 @@ class CheetahJavascriptLexer(DelegatingLexer):
'text/javascript+spitfire']
def __init__(self, **options):
- super(CheetahJavascriptLexer, self).__init__(JavascriptLexer,
- CheetahLexer, **options)
+ super().__init__(JavascriptLexer, CheetahLexer, **options)
class GenshiTextLexer(RegexLexer):
@@ -937,8 +924,7 @@ class HtmlGenshiLexer(DelegatingLexer):
mimetypes = ['text/html+genshi']
def __init__(self, **options):
- super(HtmlGenshiLexer, self).__init__(HtmlLexer, GenshiMarkupLexer,
- **options)
+ super().__init__(HtmlLexer, GenshiMarkupLexer, **options)
def analyse_text(text):
rv = 0.0
@@ -962,8 +948,7 @@ class GenshiLexer(DelegatingLexer):
mimetypes = ['application/x-genshi', 'application/x-kid']
def __init__(self, **options):
- super(GenshiLexer, self).__init__(XmlLexer, GenshiMarkupLexer,
- **options)
+ super().__init__(XmlLexer, GenshiMarkupLexer, **options)
def analyse_text(text):
rv = 0.0
@@ -988,9 +973,7 @@ class JavascriptGenshiLexer(DelegatingLexer):
'text/javascript+genshi']
def __init__(self, **options):
- super(JavascriptGenshiLexer, self).__init__(JavascriptLexer,
- GenshiTextLexer,
- **options)
+ super().__init__(JavascriptLexer, GenshiTextLexer, **options)
def analyse_text(text):
return GenshiLexer.analyse_text(text) - 0.05
@@ -1007,8 +990,7 @@ class CssGenshiLexer(DelegatingLexer):
mimetypes = ['text/css+genshi']
def __init__(self, **options):
- super(CssGenshiLexer, self).__init__(CssLexer, GenshiTextLexer,
- **options)
+ super().__init__(CssLexer, GenshiTextLexer, **options)
def analyse_text(text):
return GenshiLexer.analyse_text(text) - 0.05
@@ -1029,7 +1011,7 @@ class RhtmlLexer(DelegatingLexer):
mimetypes = ['text/html+ruby']
def __init__(self, **options):
- super(RhtmlLexer, self).__init__(HtmlLexer, ErbLexer, **options)
+ super().__init__(HtmlLexer, ErbLexer, **options)
def analyse_text(text):
rv = ErbLexer.analyse_text(text) - 0.01
@@ -1051,7 +1033,7 @@ class XmlErbLexer(DelegatingLexer):
mimetypes = ['application/xml+ruby']
def __init__(self, **options):
- super(XmlErbLexer, self).__init__(XmlLexer, ErbLexer, **options)
+ super().__init__(XmlLexer, ErbLexer, **options)
def analyse_text(text):
rv = ErbLexer.analyse_text(text) - 0.01
@@ -1071,7 +1053,7 @@ class CssErbLexer(DelegatingLexer):
mimetypes = ['text/css+ruby']
def __init__(self, **options):
- super(CssErbLexer, self).__init__(CssLexer, ErbLexer, **options)
+ super().__init__(CssLexer, ErbLexer, **options)
def analyse_text(text):
return ErbLexer.analyse_text(text) - 0.05
@@ -1091,8 +1073,7 @@ class JavascriptErbLexer(DelegatingLexer):
'text/javascript+ruby']
def __init__(self, **options):
- super(JavascriptErbLexer, self).__init__(JavascriptLexer, ErbLexer,
- **options)
+ super().__init__(JavascriptLexer, ErbLexer, **options)
def analyse_text(text):
return ErbLexer.analyse_text(text) - 0.05
@@ -1115,7 +1096,7 @@ class HtmlPhpLexer(DelegatingLexer):
'application/x-httpd-php4', 'application/x-httpd-php5']
def __init__(self, **options):
- super(HtmlPhpLexer, self).__init__(HtmlLexer, PhpLexer, **options)
+ super().__init__(HtmlLexer, PhpLexer, **options)
def analyse_text(text):
rv = PhpLexer.analyse_text(text) - 0.01
@@ -1135,7 +1116,7 @@ class XmlPhpLexer(DelegatingLexer):
mimetypes = ['application/xml+php']
def __init__(self, **options):
- super(XmlPhpLexer, self).__init__(XmlLexer, PhpLexer, **options)
+ super().__init__(XmlLexer, PhpLexer, **options)
def analyse_text(text):
rv = PhpLexer.analyse_text(text) - 0.01
@@ -1155,7 +1136,7 @@ class CssPhpLexer(DelegatingLexer):
mimetypes = ['text/css+php']
def __init__(self, **options):
- super(CssPhpLexer, self).__init__(CssLexer, PhpLexer, **options)
+ super().__init__(CssLexer, PhpLexer, **options)
def analyse_text(text):
return PhpLexer.analyse_text(text) - 0.05
@@ -1175,8 +1156,7 @@ class JavascriptPhpLexer(DelegatingLexer):
'text/javascript+php']
def __init__(self, **options):
- super(JavascriptPhpLexer, self).__init__(JavascriptLexer, PhpLexer,
- **options)
+ super().__init__(JavascriptLexer, PhpLexer, **options)
def analyse_text(text):
return PhpLexer.analyse_text(text)
@@ -1196,7 +1176,7 @@ class HtmlSmartyLexer(DelegatingLexer):
mimetypes = ['text/html+smarty']
def __init__(self, **options):
- super(HtmlSmartyLexer, self).__init__(HtmlLexer, SmartyLexer, **options)
+ super().__init__(HtmlLexer, SmartyLexer, **options)
def analyse_text(text):
rv = SmartyLexer.analyse_text(text) - 0.01
@@ -1217,7 +1197,7 @@ class XmlSmartyLexer(DelegatingLexer):
mimetypes = ['application/xml+smarty']
def __init__(self, **options):
- super(XmlSmartyLexer, self).__init__(XmlLexer, SmartyLexer, **options)
+ super().__init__(XmlLexer, SmartyLexer, **options)
def analyse_text(text):
rv = SmartyLexer.analyse_text(text) - 0.01
@@ -1238,7 +1218,7 @@ class CssSmartyLexer(DelegatingLexer):
mimetypes = ['text/css+smarty']
def __init__(self, **options):
- super(CssSmartyLexer, self).__init__(CssLexer, SmartyLexer, **options)
+ super().__init__(CssLexer, SmartyLexer, **options)
def analyse_text(text):
return SmartyLexer.analyse_text(text) - 0.05
@@ -1258,8 +1238,7 @@ class JavascriptSmartyLexer(DelegatingLexer):
'text/javascript+smarty']
def __init__(self, **options):
- super(JavascriptSmartyLexer, self).__init__(JavascriptLexer, SmartyLexer,
- **options)
+ super().__init__(JavascriptLexer, SmartyLexer, **options)
def analyse_text(text):
return SmartyLexer.analyse_text(text) - 0.05
@@ -1279,7 +1258,7 @@ class HtmlDjangoLexer(DelegatingLexer):
mimetypes = ['text/html+django', 'text/html+jinja']
def __init__(self, **options):
- super(HtmlDjangoLexer, self).__init__(HtmlLexer, DjangoLexer, **options)
+ super().__init__(HtmlLexer, DjangoLexer, **options)
def analyse_text(text):
rv = DjangoLexer.analyse_text(text) - 0.01
@@ -1300,7 +1279,7 @@ class XmlDjangoLexer(DelegatingLexer):
mimetypes = ['application/xml+django', 'application/xml+jinja']
def __init__(self, **options):
- super(XmlDjangoLexer, self).__init__(XmlLexer, DjangoLexer, **options)
+ super().__init__(XmlLexer, DjangoLexer, **options)
def analyse_text(text):
rv = DjangoLexer.analyse_text(text) - 0.01
@@ -1321,7 +1300,7 @@ class CssDjangoLexer(DelegatingLexer):
mimetypes = ['text/css+django', 'text/css+jinja']
def __init__(self, **options):
- super(CssDjangoLexer, self).__init__(CssLexer, DjangoLexer, **options)
+ super().__init__(CssLexer, DjangoLexer, **options)
def analyse_text(text):
return DjangoLexer.analyse_text(text) - 0.05
@@ -1345,8 +1324,7 @@ class JavascriptDjangoLexer(DelegatingLexer):
'text/javascript+jinja']
def __init__(self, **options):
- super(JavascriptDjangoLexer, self).__init__(JavascriptLexer, DjangoLexer,
- **options)
+ super().__init__(JavascriptLexer, DjangoLexer, **options)
def analyse_text(text):
return DjangoLexer.analyse_text(text) - 0.05
@@ -1389,7 +1367,7 @@ class JspLexer(DelegatingLexer):
mimetypes = ['application/x-jsp']
def __init__(self, **options):
- super(JspLexer, self).__init__(XmlLexer, JspRootLexer, **options)
+ super().__init__(XmlLexer, JspRootLexer, **options)
def analyse_text(text):
rv = JavaLexer.analyse_text(text) - 0.01
@@ -1466,8 +1444,7 @@ class EvoqueHtmlLexer(DelegatingLexer):
mimetypes = ['text/html+evoque']
def __init__(self, **options):
- super(EvoqueHtmlLexer, self).__init__(HtmlLexer, EvoqueLexer,
- **options)
+ super().__init__(HtmlLexer, EvoqueLexer, **options)
class EvoqueXmlLexer(DelegatingLexer):
@@ -1483,8 +1460,7 @@ class EvoqueXmlLexer(DelegatingLexer):
mimetypes = ['application/xml+evoque']
def __init__(self, **options):
- super(EvoqueXmlLexer, self).__init__(XmlLexer, EvoqueLexer,
- **options)
+ super().__init__(XmlLexer, EvoqueLexer, **options)
class ColdfusionLexer(RegexLexer):
@@ -1591,8 +1567,7 @@ class ColdfusionHtmlLexer(DelegatingLexer):
mimetypes = ['application/x-coldfusion']
def __init__(self, **options):
- super(ColdfusionHtmlLexer, self).__init__(HtmlLexer, ColdfusionMarkupLexer,
- **options)
+ super().__init__(HtmlLexer, ColdfusionMarkupLexer, **options)
class ColdfusionCFCLexer(DelegatingLexer):
@@ -1607,8 +1582,7 @@ class ColdfusionCFCLexer(DelegatingLexer):
mimetypes = []
def __init__(self, **options):
- super(ColdfusionCFCLexer, self).__init__(ColdfusionHtmlLexer, ColdfusionLexer,
- **options)
+ super().__init__(ColdfusionHtmlLexer, ColdfusionLexer, **options)
class SspLexer(DelegatingLexer):
@@ -1623,7 +1597,7 @@ class SspLexer(DelegatingLexer):
mimetypes = ['application/x-ssp']
def __init__(self, **options):
- super(SspLexer, self).__init__(XmlLexer, JspRootLexer, **options)
+ super().__init__(XmlLexer, JspRootLexer, **options)
def analyse_text(text):
rv = 0.0
@@ -1670,8 +1644,7 @@ class TeaTemplateLexer(DelegatingLexer):
mimetypes = ['text/x-tea']
def __init__(self, **options):
- super(TeaTemplateLexer, self).__init__(XmlLexer,
- TeaTemplateRootLexer, **options)
+ super().__init__(XmlLexer, TeaTemplateRootLexer, **options)
def analyse_text(text):
rv = TeaLangLexer.analyse_text(text) - 0.01
@@ -1701,7 +1674,7 @@ class LassoHtmlLexer(DelegatingLexer):
'application/x-httpd-lasso[89]']
def __init__(self, **options):
- super(LassoHtmlLexer, self).__init__(HtmlLexer, LassoLexer, **options)
+ super().__init__(HtmlLexer, LassoLexer, **options)
def analyse_text(text):
rv = LassoLexer.analyse_text(text) - 0.01
@@ -1725,7 +1698,7 @@ class LassoXmlLexer(DelegatingLexer):
mimetypes = ['application/xml+lasso']
def __init__(self, **options):
- super(LassoXmlLexer, self).__init__(XmlLexer, LassoLexer, **options)
+ super().__init__(XmlLexer, LassoLexer, **options)
def analyse_text(text):
rv = LassoLexer.analyse_text(text) - 0.01
@@ -1749,7 +1722,7 @@ class LassoCssLexer(DelegatingLexer):
def __init__(self, **options):
options['requiredelimiters'] = True
- super(LassoCssLexer, self).__init__(CssLexer, LassoLexer, **options)
+ super().__init__(CssLexer, LassoLexer, **options)
def analyse_text(text):
rv = LassoLexer.analyse_text(text) - 0.05
@@ -1777,8 +1750,7 @@ class LassoJavascriptLexer(DelegatingLexer):
def __init__(self, **options):
options['requiredelimiters'] = True
- super(LassoJavascriptLexer, self).__init__(JavascriptLexer, LassoLexer,
- **options)
+ super().__init__(JavascriptLexer, LassoLexer, **options)
def analyse_text(text):
rv = LassoLexer.analyse_text(text) - 0.05
@@ -1875,7 +1847,7 @@ class HandlebarsHtmlLexer(DelegatingLexer):
mimetypes = ['text/html+handlebars', 'text/x-handlebars-template']
def __init__(self, **options):
- super(HandlebarsHtmlLexer, self).__init__(HtmlLexer, HandlebarsLexer, **options)
+ super().__init__(HtmlLexer, HandlebarsLexer, **options)
class YamlJinjaLexer(DelegatingLexer):
@@ -1894,7 +1866,7 @@ class YamlJinjaLexer(DelegatingLexer):
mimetypes = ['text/x-yaml+jinja', 'text/x-sls']
def __init__(self, **options):
- super(YamlJinjaLexer, self).__init__(YamlLexer, DjangoLexer, **options)
+ super().__init__(YamlLexer, DjangoLexer, **options)
class LiquidLexer(RegexLexer):
@@ -2200,7 +2172,7 @@ class TwigHtmlLexer(DelegatingLexer):
mimetypes = ['text/html+twig']
def __init__(self, **options):
- super(TwigHtmlLexer, self).__init__(HtmlLexer, TwigLexer, **options)
+ super().__init__(HtmlLexer, TwigLexer, **options)
class Angular2Lexer(RegexLexer):
@@ -2235,9 +2207,9 @@ class Angular2Lexer(RegexLexer):
# *ngIf="..."; #f="ngForm"
(r'([*#])([\w:.-]+)(\s*)(=)(\s*)',
- bygroups(Punctuation, Name.Attribute, Punctuation, Operator), 'attr'),
+ bygroups(Punctuation, Name.Attribute, Text, Operator, Text), 'attr'),
(r'([*#])([\w:.-]+)(\s*)',
- bygroups(Punctuation, Name.Attribute, Punctuation)),
+ bygroups(Punctuation, Name.Attribute, Text)),
],
'ngExpression': [
@@ -2280,4 +2252,4 @@ class Angular2HtmlLexer(DelegatingLexer):
filenames = ['*.ng2']
def __init__(self, **options):
- super(Angular2HtmlLexer, self).__init__(HtmlLexer, Angular2Lexer, **options)
+ super().__init__(HtmlLexer, Angular2Lexer, **options)
diff --git a/pygments/lexers/teraterm.py b/pygments/lexers/teraterm.py
index bd3a9727..ef1a05e3 100644
--- a/pygments/lexers/teraterm.py
+++ b/pygments/lexers/teraterm.py
@@ -5,7 +5,7 @@
Lexer for Tera Term macro files.
- :copyright: Copyright 2006-2019 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2020 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
@@ -40,7 +40,7 @@ class TeraTermLexer(RegexLexer):
include('numeric-literals'),
include('string-literals'),
include('all-whitespace'),
- (r'[^\s]', Text),
+ (r'\S', Text),
],
'comments': [
(r';[^\r\n]*', Comment.Single),
@@ -52,7 +52,7 @@ class TeraTermLexer(RegexLexer):
(r'[*/]', Comment.Multiline)
],
'labels': [
- (r'(?i)^(\s*)(:[0-9a-z_]+)', bygroups(Text, Name.Label)),
+ (r'(?i)^(\s*)(:[a-z0-9_]+)', bygroups(Text, Name.Label)),
],
'commands': [
(
@@ -259,10 +259,8 @@ class TeraTermLexer(RegexLexer):
r')\b',
Keyword,
),
- (
- r'(?i)(call|goto)([ \t]+)([0-9a-z_]+)',
- bygroups(Keyword, Text, Name.Label),
- )
+ (r'(?i)(call|goto)([ \t]+)([a-z0-9_]+)',
+ bygroups(Keyword, Text, Name.Label)),
],
'builtin-variables': [
(
@@ -297,7 +295,7 @@ class TeraTermLexer(RegexLexer):
),
],
'user-variables': [
- (r'(?i)[A-Z_][A-Z0-9_]*', Name.Variable),
+ (r'(?i)[a-z_][a-z0-9_]*', Name.Variable),
],
'numeric-literals': [
(r'(-?)([0-9]+)', bygroups(Operator, Number.Integer)),
@@ -309,7 +307,7 @@ class TeraTermLexer(RegexLexer):
(r'"', String.Double, 'in-double-string'),
],
'in-general-string': [
- (r'[\\][\\nt]', String.Escape), # Only three escapes are supported.
+ (r'\\[\\nt]', String.Escape), # Only three escapes are supported.
(r'.', String),
],
'in-single-string': [
@@ -326,7 +324,7 @@ class TeraTermLexer(RegexLexer):
(r'[()]', String.Symbol),
],
'all-whitespace': [
- (r'[\s]+', Text),
+ (r'\s+', Text),
],
}
diff --git a/pygments/lexers/testing.py b/pygments/lexers/testing.py
index 9ee8a061..914bc1ec 100644
--- a/pygments/lexers/testing.py
+++ b/pygments/lexers/testing.py
@@ -5,7 +5,7 @@
Lexers for testing languages.
- :copyright: Copyright 2006-2019 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2020 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
diff --git a/pygments/lexers/text.py b/pygments/lexers/text.py
index 6b431f69..a2733647 100644
--- a/pygments/lexers/text.py
+++ b/pygments/lexers/text.py
@@ -5,7 +5,7 @@
Lexers for non-source code file types.
- :copyright: Copyright 2006-2019 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2020 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
diff --git a/pygments/lexers/textedit.py b/pygments/lexers/textedit.py
index 3c6fb570..ea2d4cf2 100644
--- a/pygments/lexers/textedit.py
+++ b/pygments/lexers/textedit.py
@@ -5,7 +5,7 @@
Lexers for languages related to text processing.
- :copyright: Copyright 2006-2019 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2020 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
diff --git a/pygments/lexers/textfmts.py b/pygments/lexers/textfmts.py
index 63d1da39..6b3f8d9e 100644
--- a/pygments/lexers/textfmts.py
+++ b/pygments/lexers/textfmts.py
@@ -5,7 +5,7 @@
Lexers for various text formats.
- :copyright: Copyright 2006-2019 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2020 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
@@ -175,11 +175,11 @@ class HttpLexer(RegexLexer):
tokens = {
'root': [
(r'(GET|POST|PUT|DELETE|HEAD|OPTIONS|TRACE|PATCH)( +)([^ ]+)( +)'
- r'(HTTP)(/)(1\.[01]|2|3)(\r?\n|\Z)',
+ r'(HTTP)(/)(1\.[01]|2(?:\.0)?|3)(\r?\n|\Z)',
bygroups(Name.Function, Text, Name.Namespace, Text,
Keyword.Reserved, Operator, Number, Text),
'headers'),
- (r'(HTTP)(/)(1\.[01]|2|3)( +)(\d{3})(?:( +)([^\r\n]*))?(\r?\n|\Z)',
+ (r'(HTTP)(/)(1\.[01]|2(?:\.0)?|3)( +)(\d{3})(?:( +)([^\r\n]*))?(\r?\n|\Z)',
bygroups(Keyword.Reserved, Operator, Number, Text, Number, Text,
Name.Exception, Text),
'headers'),
@@ -326,15 +326,14 @@ class NotmuchLexer(RegexLexer):
except ClassNotFound:
lexer = get_lexer_by_name('text')
- for item in lexer.get_tokens_unprocessed(code):
- yield item
+ yield from lexer.get_tokens_unprocessed(code)
tokens = {
'root': [
- (r'\fmessage{\s*', Keyword, ('message', 'message-attr')),
+ (r'\fmessage\{\s*', Keyword, ('message', 'message-attr')),
],
'message-attr': [
- (r'(\s*id:\s*)([^\s]+)', bygroups(Name.Attribute, String)),
+ (r'(\s*id:\s*)(\S+)', bygroups(Name.Attribute, String)),
(r'(\s*(?:depth|match|excluded):\s*)(\d+)',
bygroups(Name.Attribute, Number.Integer)),
(r'(\s*filename:\s*)(.+\n)',
@@ -342,21 +341,21 @@ class NotmuchLexer(RegexLexer):
default('#pop'),
],
'message': [
- (r'\fmessage}\n', Keyword, '#pop'),
- (r'\fheader{\n', Keyword, 'header'),
- (r'\fbody{\n', Keyword, 'body'),
+ (r'\fmessage\}\n', Keyword, '#pop'),
+ (r'\fheader\{\n', Keyword, 'header'),
+ (r'\fbody\{\n', Keyword, 'body'),
],
'header': [
- (r'\fheader}\n', Keyword, '#pop'),
+ (r'\fheader\}\n', Keyword, '#pop'),
(r'((?:Subject|From|To|Cc|Date):\s*)(.*\n)',
bygroups(Name.Attribute, String)),
(r'(.*)(\s*\(.*\))(\s*\(.*\)\n)',
bygroups(Generic.Strong, Literal, Name.Tag)),
],
'body': [
- (r'\fpart{\n', Keyword, 'part'),
- (r'\f(part|attachment){\s*', Keyword, ('part', 'part-attr')),
- (r'\fbody}\n', Keyword, '#pop'),
+ (r'\fpart\{\n', Keyword, 'part'),
+ (r'\f(part|attachment)\{\s*', Keyword, ('part', 'part-attr')),
+ (r'\fbody\}\n', Keyword, '#pop'),
],
'part-attr': [
(r'(ID:\s*)(\d+)', bygroups(Name.Attribute, Number.Integer)),
@@ -367,10 +366,10 @@ class NotmuchLexer(RegexLexer):
default('#pop'),
],
'part': [
- (r'\f(?:part|attachment)}\n', Keyword, '#pop'),
- (r'\f(?:part|attachment){\s*', Keyword, ('#push', 'part-attr')),
+ (r'\f(?:part|attachment)\}\n', Keyword, '#pop'),
+ (r'\f(?:part|attachment)\{\s*', Keyword, ('#push', 'part-attr')),
(r'^Non-text part: .*\n', Comment),
- (r'(?s)(.*?(?=\f(?:part|attachment)}\n))', _highlight_code),
+ (r'(?s)(.*?(?=\f(?:part|attachment)\}\n))', _highlight_code),
],
}
@@ -408,7 +407,7 @@ class KernelLogLexer(RegexLexer):
default('info'),
],
'base': [
- (r'\[[0-9\. ]+\] ', Number),
+ (r'\[[0-9. ]+\] ', Number),
(r'(?<=\] ).+?:', Keyword),
(r'\n', Text, '#pop'),
],
diff --git a/pygments/lexers/theorem.py b/pygments/lexers/theorem.py
index 2df4a788..fd34de2c 100644
--- a/pygments/lexers/theorem.py
+++ b/pygments/lexers/theorem.py
@@ -5,7 +5,7 @@
Lexers for theorem-proving languages.
- :copyright: Copyright 2006-2019 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2020 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
diff --git a/pygments/lexers/tnt.py b/pygments/lexers/tnt.py
index 13c2b5d7..f62f3ab9 100644
--- a/pygments/lexers/tnt.py
+++ b/pygments/lexers/tnt.py
@@ -5,16 +5,19 @@
Lexer for Typographic Number Theory.
- :copyright: Copyright 2019-2020 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2020 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
+
import re
+
from pygments.lexer import Lexer
from pygments.token import Text, Comment, Operator, Keyword, Name, Number, \
- Punctuation, Error
+ Punctuation, Error
__all__ = ['TNTLexer']
+
class TNTLexer(Lexer):
"""
Lexer for Typographic Number Theory, as described in the book
@@ -77,18 +80,18 @@ class TNTLexer(Lexer):
def term(self, start, text):
"""Tokenize a term."""
- if text[start] == 'S': # S...S(...) or S...0
+ if text[start] == 'S': # S...S(...) or S...0
end = start+1
while text[end] == 'S':
end += 1
self.cur.append((start, Number.Integer, text[start:end]))
return self.term(end, text)
- if text[start] == '0': # the singleton 0
+ if text[start] == '0': # the singleton 0
self.cur.append((start, Number.Integer, text[start]))
return start+1
- if text[start] in self.VARIABLES: # a''...
+ if text[start] in self.VARIABLES: # a''...
return self.variable(start, text)
- if text[start] == '(': # (...+...)
+ if text[start] == '(': # (...+...)
self.cur.append((start, Punctuation, text[start]))
start = self.term(start+1, text)
assert text[start] in self.OPERATORS
@@ -97,26 +100,26 @@ class TNTLexer(Lexer):
assert text[start] == ')'
self.cur.append((start, Punctuation, text[start]))
return start+1
- raise AssertionError # no matches
+ raise AssertionError # no matches
def formula(self, start, text):
"""Tokenize a formula."""
- if text[start] in '[]': # fantasy push or pop
+ if text[start] in '[]': # fantasy push or pop
self.cur.append((start, Keyword, text[start]))
return start+1
- if text[start] in self.NEGATORS: # ~<...>
+ if text[start] in self.NEGATORS: # ~<...>
end = start+1
while text[end] in self.NEGATORS:
end += 1
self.cur.append((start, Operator, text[start:end]))
return self.formula(end, text)
- if text[start] in self.QUANTIFIERS: # Aa:<...>
+ if text[start] in self.QUANTIFIERS: # Aa:<...>
self.cur.append((start, Keyword.Declaration, text[start]))
start = self.variable(start+1, text)
assert text[start] == ':'
self.cur.append((start, Punctuation, text[start]))
return self.formula(start+1, text)
- if text[start] == '<': # <...&...>
+ if text[start] == '<': # <...&...>
self.cur.append((start, Punctuation, text[start]))
start = self.formula(start+1, text)
assert text[start] in self.LOGIC
@@ -136,9 +139,9 @@ class TNTLexer(Lexer):
"""Tokenize a rule."""
match = self.RULES.match(text, start)
assert match is not None
- groups = sorted(match.regs[1:]) # exclude whole match
+ groups = sorted(match.regs[1:]) # exclude whole match
for group in groups:
- if group[0] >= 0: # this group matched
+ if group[0] >= 0: # this group matched
self.cur.append((start, Keyword, text[start:group[0]]))
self.cur.append((group[0], Number.Integer,
text[group[0]:group[1]]))
@@ -169,7 +172,7 @@ class TNTLexer(Lexer):
"""Mark everything from ``start`` to the end of the line as Error."""
end = start
try:
- while text[end] != '\n': # there's whitespace in rules
+ while text[end] != '\n': # there's whitespace in rules
end += 1
except IndexError:
end = len(text)
@@ -186,7 +189,7 @@ class TNTLexer(Lexer):
# try line number
while text[end] in self.NUMBERS:
end += 1
- if end != start: # actual number present
+ if end != start: # actual number present
self.cur.append((start, Number.Integer, text[start:end]))
# whitespace is required after a line number
orig = len(self.cur)
@@ -210,7 +213,7 @@ class TNTLexer(Lexer):
orig = len(self.cur)
try:
start = end = self.formula(start, text)
- except AssertionError: # not well-formed
+ except AssertionError: # not well-formed
del self.cur[orig:]
while text[end] not in self.WHITESPACE:
end += 1
diff --git a/pygments/lexers/trafficscript.py b/pygments/lexers/trafficscript.py
index 9b767253..d10a283b 100644
--- a/pygments/lexers/trafficscript.py
+++ b/pygments/lexers/trafficscript.py
@@ -5,7 +5,7 @@
Lexer for RiverBed's TrafficScript (RTS) language.
- :copyright: Copyright 2006-2019 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2020 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
diff --git a/pygments/lexers/typoscript.py b/pygments/lexers/typoscript.py
index 745292bd..d33f5524 100644
--- a/pygments/lexers/typoscript.py
+++ b/pygments/lexers/typoscript.py
@@ -14,7 +14,7 @@
`TypoScriptHtmlDataLexer`
Lexer that highlights markers, constants and registers within html tags.
- :copyright: Copyright 2006-2019 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2020 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
diff --git a/pygments/lexers/unicon.py b/pygments/lexers/unicon.py
index b67c6895..c27b7fe0 100644
--- a/pygments/lexers/unicon.py
+++ b/pygments/lexers/unicon.py
@@ -5,7 +5,7 @@
Lexers for the Icon and Unicon languages, including ucode VM.
- :copyright: Copyright 2006-2019 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2020 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
@@ -129,17 +129,15 @@ class UniconLexer(RegexLexer):
'WSync'), prefix=r'\b', suffix=r'\b'),
Name.Function),
include('numbers'),
- (r'<@|<<@|>@|>>@|\.>|\->', Operator),
- (r'\*\*|\+\+|\-\-|\.|\=|\~\=|<\=|>\=|\=\=|\~\=\=|<<|<<\=|>>|>>\=', Operator),
- (r':\=|:\=:|\->|<\->|\+:\=|\|', Operator),
- (r'\=\=\=|\~\=\=\=', Operator),
+ (r'<@|<<@|>@|>>@|\.>|->|===|~===|\*\*|\+\+|--|\.|~==|~=|<=|>=|==|'
+ r'=|<<=|<<|>>=|>>|:=:|:=|->|<->|\+:=|\|', Operator),
(r'"(?:[^\\"]|\\.)*"', String),
(r"'(?:[^\\']|\\.)*'", String.Character),
(r'[*<>+=/&!?@~\\-]', Operator),
(r'\^', Operator),
(r'(\w+)(\s*|[(,])', bygroups(Name, using(this))),
- (r"([\[\]])", Punctuation),
- (r"(<>|=>|[()|:;,.'`]|[{}]|[%]|[&?])", Punctuation),
+ (r"[\[\]]", Punctuation),
+ (r"<>|=>|[()|:;,.'`{}%&?]", Punctuation),
(r'\n+', Text),
],
'numbers': [
@@ -272,15 +270,14 @@ class IconLexer(RegexLexer):
'WSync'), prefix=r'\b', suffix=r'\b'),
Name.Function),
include('numbers'),
- (r'\*\*|\+\+|\-\-|\.|\=|\~\=|<\=|>\=|\=\=|\~\=\=|<<|<<\=|>>|>>\=', Operator),
- (r':\=|:\=:|<\-|<\->|\+:\=|\||\|\|', Operator),
- (r'\=\=\=|\~\=\=\=', Operator),
+ (r'===|~===|\*\*|\+\+|--|\.|==|~==|<=|>=|=|~=|<<=|<<|>>=|>>|'
+ r':=:|:=|<->|<-|\+:=|\|\||\|', Operator),
(r'"(?:[^\\"]|\\.)*"', String),
(r"'(?:[^\\']|\\.)*'", String.Character),
(r'[*<>+=/&!?@~\\-]', Operator),
(r'(\w+)(\s*|[(,])', bygroups(Name, using(this))),
- (r"([\[\]])", Punctuation),
- (r"(<>|=>|[()|:;,.'`]|[{}]|[%^]|[&?])", Punctuation),
+ (r"[\[\]]", Punctuation),
+ (r"<>|=>|[()|:;,.'`{}%\^&?]", Punctuation),
(r'\n+', Text),
],
'numbers': [
diff --git a/pygments/lexers/urbi.py b/pygments/lexers/urbi.py
index 72349cbd..b5f5b501 100644
--- a/pygments/lexers/urbi.py
+++ b/pygments/lexers/urbi.py
@@ -5,7 +5,7 @@
Lexers for UrbiScript language.
- :copyright: Copyright 2006-2019 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2020 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
diff --git a/pygments/lexers/usd.py b/pygments/lexers/usd.py
index 89b0300a..d9d3f448 100644
--- a/pygments/lexers/usd.py
+++ b/pygments/lexers/usd.py
@@ -5,7 +5,7 @@
The module that parses Pixar's Universal Scene Description file format.
- :copyright: Copyright 2006-2019 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2020 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
@@ -24,7 +24,7 @@ def _keywords(words, type_):
_TYPE = r"(\w+(?:\[\])?)"
-_BASE_ATTRIBUTE = r"([\w_]+(?:\:[\w_]+)*)(?:(\.)(timeSamples))?"
+_BASE_ATTRIBUTE = r"(\w+(?:\:\w+)*)(?:(\.)(timeSamples))?"
_WHITESPACE = r"([ \t]+)"
@@ -69,7 +69,7 @@ class UsdLexer(RegexLexer):
[(type_ + r"\[\]", Keyword.Type) for type_ in TYPES] +
_keywords(TYPES, Keyword.Type) +
[
- (r"[\(\)\[\]{}]", Punctuation),
+ (r"[(){}\[\]]", Punctuation),
("#.*?$", Comment.Single),
(",", Punctuation),
(";", Punctuation), # ";"s are allowed to combine separate metadata lines
@@ -84,7 +84,7 @@ class UsdLexer(RegexLexer):
(r'\(.*"[.\\n]*".*\)', String.Doc),
(r"\A#usda .+$", Comment.Hashbang),
(r"\s+", Whitespace),
- (r"[\w_]+", Text),
- (r"[_:\.]+", Punctuation),
+ (r"\w+", Text),
+ (r"[_:.]+", Punctuation),
],
}
diff --git a/pygments/lexers/varnish.py b/pygments/lexers/varnish.py
index 4db74e3c..ecb91e13 100644
--- a/pygments/lexers/varnish.py
+++ b/pygments/lexers/varnish.py
@@ -5,7 +5,7 @@
Lexers for Varnish configuration
- :copyright: Copyright 2006-2019 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2020 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
diff --git a/pygments/lexers/verification.py b/pygments/lexers/verification.py
index f70a0a2c..7ae0a243 100644
--- a/pygments/lexers/verification.py
+++ b/pygments/lexers/verification.py
@@ -5,7 +5,7 @@
Lexer for Intermediate Verification Languages (IVLs).
- :copyright: Copyright 2006-2019 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2020 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
diff --git a/pygments/lexers/web.py b/pygments/lexers/web.py
index 587b0cde..0fd61492 100644
--- a/pygments/lexers/web.py
+++ b/pygments/lexers/web.py
@@ -5,7 +5,7 @@
Just export previously exported lexers.
- :copyright: Copyright 2006-2019 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2020 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
diff --git a/pygments/lexers/webidl.py b/pygments/lexers/webidl.py
index 1cc162cf..81ac44c2 100644
--- a/pygments/lexers/webidl.py
+++ b/pygments/lexers/webidl.py
@@ -1,11 +1,11 @@
# -*- coding: utf-8 -*-
"""
pygments.lexers.webidl
- ~~~~~~~~~~~~~~~~~~~
+ ~~~~~~~~~~~~~~~~~~~~~~
Lexers for Web IDL, including some extensions.
- :copyright: Copyright 2006-2016 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2020 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
@@ -32,10 +32,11 @@ _builtin_types = (
# other
'any', 'void', 'object', 'RegExp',
)
-_identifier = r'_?[A-Za-z][0-9A-Z_a-z-]*'
+_identifier = r'_?[A-Za-z][a-zA-Z0-9_-]*'
_keyword_suffix = r'(?![\w-])'
_string = r'"[^"]*"'
+
class WebIDLLexer(RegexLexer):
"""
For Web IDL.
@@ -132,7 +133,8 @@ class WebIDLLexer(RegexLexer):
default(('#pop', 'type_null'))
],
'type_null': [
- (r'\??', Punctuation, '#pop:2'),
+ (r'\?', Punctuation),
+ default('#pop:2'),
],
'default_value': [
include('common'),
diff --git a/pygments/lexers/webmisc.py b/pygments/lexers/webmisc.py
index 448aff50..84ba490c 100644
--- a/pygments/lexers/webmisc.py
+++ b/pygments/lexers/webmisc.py
@@ -5,7 +5,7 @@
Lexers for misc. web stuff.
- :copyright: Copyright 2006-2019 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2020 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
diff --git a/pygments/lexers/whiley.py b/pygments/lexers/whiley.py
index feeeb3b7..255e9d22 100644
--- a/pygments/lexers/whiley.py
+++ b/pygments/lexers/whiley.py
@@ -5,7 +5,7 @@
Lexers for the Whiley language.
- :copyright: Copyright 2006-2019 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2020 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
diff --git a/pygments/lexers/x10.py b/pygments/lexers/x10.py
index eac87b1c..76138c9e 100644
--- a/pygments/lexers/x10.py
+++ b/pygments/lexers/x10.py
@@ -5,7 +5,7 @@
Lexers for the X10 programming language.
- :copyright: Copyright 2006-2019 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2020 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
diff --git a/pygments/lexers/xorg.py b/pygments/lexers/xorg.py
index eadcfe76..8f605be4 100644
--- a/pygments/lexers/xorg.py
+++ b/pygments/lexers/xorg.py
@@ -5,7 +5,7 @@
Lexers for Xorg configs.
- :copyright: Copyright 2006-2019 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2020 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
@@ -29,7 +29,7 @@ class XorgLexer(RegexLexer):
(r'((?:Sub)?Section)(\s+)("\w+")',
bygroups(String.Escape, Text, String.Escape)),
- (r'(End(|Sub)Section)', String.Escape),
+ (r'(End(?:Sub)?Section)', String.Escape),
(r'(\w+)(\s+)([^\n#]+)',
bygroups(Name.Builtin, Text, Name.Constant)),
diff --git a/pygments/lexers/yang.py b/pygments/lexers/yang.py
index b4cb1b7e..b484de64 100644
--- a/pygments/lexers/yang.py
+++ b/pygments/lexers/yang.py
@@ -59,7 +59,7 @@ class YangLexer(RegexLexer):
"int8", "leafref", "string", "uint16", "uint32", "uint64",
"uint8", "union")
- suffix_re_pattern = r'(?=[^\w\-\:])'
+ suffix_re_pattern = r'(?=[^\w\-:])'
tokens = {
'comments': [
@@ -70,7 +70,7 @@ class YangLexer(RegexLexer):
],
"root": [
(r'\s+', Text.Whitespace),
- (r'[\{\}\;]+', Token.Punctuation),
+ (r'[{};]+', Token.Punctuation),
(r'(?<![\-\w])(and|or|not|\+|\.)(?![\-\w])', Token.Operator),
(r'"(?:\\"|[^"])*?"', String.Double),
@@ -84,9 +84,9 @@ class YangLexer(RegexLexer):
bygroups(Name.Namespace, Token.Punctuation, Name.Variable)),
#match BNF stmt `date-arg-str`
- (r'([0-9]{4}\-[0-9]{2}\-[0-9]{2})(?=[\s\{\}\;])', Name.Label),
- (r'([0-9]+\.[0-9]+)(?=[\s\{\}\;])', Number.Float),
- (r'([0-9]+)(?=[\s\{\}\;])', Number.Integer),
+ (r'([0-9]{4}\-[0-9]{2}\-[0-9]{2})(?=[\s{};])', Name.Label),
+ (r'([0-9]+\.[0-9]+)(?=[\s{};])', Number.Float),
+ (r'([0-9]+)(?=[\s{};])', Number.Integer),
(words(TOP_STMTS_KEYWORDS, suffix=suffix_re_pattern), Token.Keyword),
(words(MODULE_HEADER_STMT_KEYWORDS, suffix=suffix_re_pattern), Token.Keyword),
@@ -99,6 +99,6 @@ class YangLexer(RegexLexer):
(words(TYPES, suffix=suffix_re_pattern), Name.Class),
(words(CONSTANTS_KEYWORDS, suffix=suffix_re_pattern), Name.Class),
- (r'[^;{}\s\'\"]+', Name.Variable),
+ (r'[^;{}\s\'"]+', Name.Variable),
]
}
diff --git a/pygments/lexers/zig.py b/pygments/lexers/zig.py
index c9893862..bc7809f5 100644
--- a/pygments/lexers/zig.py
+++ b/pygments/lexers/zig.py
@@ -5,7 +5,7 @@
Lexers for Zig.
- :copyright: Copyright 2006-2019 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2020 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
@@ -102,7 +102,7 @@ class ZigLexer(RegexLexer):
# Characters
(r'\'\\\'\'', String.Escape),
- (r'\'\\(|x[a-fA-F0-9]{2}|u[a-fA-F0-9]{4}|U[a-fA-F0-9]{6}|[nr\\t\'"])\'',
+ (r'\'\\(x[a-fA-F0-9]{2}|u[a-fA-F0-9]{4}|U[a-fA-F0-9]{6}|[nr\\t\'"])\'',
String.Escape),
(r'\'[^\\\']\'', String),
@@ -122,8 +122,3 @@ class ZigLexer(RegexLexer):
(r'"', String, '#pop')
]
}
-
- def get_tokens_unprocessed(self, text):
- for index, token, value in \
- RegexLexer.get_tokens_unprocessed(self, text):
- yield index, token, value
diff --git a/pygments/modeline.py b/pygments/modeline.py
index 31b2e7fb..e76dc622 100644
--- a/pygments/modeline.py
+++ b/pygments/modeline.py
@@ -5,7 +5,7 @@
A simple modeline parser (based on pymodeline).
- :copyright: Copyright 2006-2019 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2020 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
diff --git a/pygments/plugin.py b/pygments/plugin.py
index 3d185efc..76e8f6cb 100644
--- a/pygments/plugin.py
+++ b/pygments/plugin.py
@@ -32,7 +32,7 @@
yourfilter = yourfilter:YourFilter
- :copyright: Copyright 2006-2019 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2020 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
LEXER_ENTRY_POINT = 'pygments.lexers'
diff --git a/pygments/regexopt.py b/pygments/regexopt.py
index 59d77ee0..18b7ca07 100644
--- a/pygments/regexopt.py
+++ b/pygments/regexopt.py
@@ -6,7 +6,7 @@
An algorithm that generates optimized regexes for matching long lists of
literal strings.
- :copyright: Copyright 2006-2019 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2020 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
diff --git a/pygments/scanner.py b/pygments/scanner.py
index c7f9ab50..3842335d 100644
--- a/pygments/scanner.py
+++ b/pygments/scanner.py
@@ -12,7 +12,7 @@
Have a look at the `DelphiLexer` to get an idea of how to use
this scanner.
- :copyright: Copyright 2006-2019 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2020 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
import re
diff --git a/pygments/sphinxext.py b/pygments/sphinxext.py
index 022548a6..1bf49d2e 100644
--- a/pygments/sphinxext.py
+++ b/pygments/sphinxext.py
@@ -6,7 +6,7 @@
Sphinx extension to generate automatic documentation of lexers,
formatters and filters.
- :copyright: Copyright 2006-2019 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2020 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
diff --git a/pygments/style.py b/pygments/style.py
index b2bc9879..9c994c74 100644
--- a/pygments/style.py
+++ b/pygments/style.py
@@ -5,7 +5,7 @@
Basic style object.
- :copyright: Copyright 2006-2019 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2020 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
diff --git a/pygments/styles/__init__.py b/pygments/styles/__init__.py
index c0614718..6bb08154 100644
--- a/pygments/styles/__init__.py
+++ b/pygments/styles/__init__.py
@@ -5,7 +5,7 @@
Contains built-in styles.
- :copyright: Copyright 2006-2019 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2020 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
@@ -81,7 +81,6 @@ def get_style_by_name(name):
def get_all_styles():
"""Return an generator for all styles by name,
both builtin and plugin."""
- for name in STYLE_MAP:
- yield name
+ yield from STYLE_MAP
for name, _ in find_plugin_styles():
yield name
diff --git a/pygments/styles/abap.py b/pygments/styles/abap.py
index d0622d29..6f2eebff 100644
--- a/pygments/styles/abap.py
+++ b/pygments/styles/abap.py
@@ -5,7 +5,7 @@
ABAP workbench like style.
- :copyright: Copyright 2006-2019 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2020 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
diff --git a/pygments/styles/algol.py b/pygments/styles/algol.py
index d4d91870..af06d240 100644
--- a/pygments/styles/algol.py
+++ b/pygments/styles/algol.py
@@ -26,7 +26,7 @@
[1] `Revised Report on the Algorithmic Language Algol-60 <http://www.masswerk.at/algol60/report.htm>`
- :copyright: Copyright 2006-2019 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2020 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
diff --git a/pygments/styles/algol_nu.py b/pygments/styles/algol_nu.py
index 09d69452..73c4ac8c 100644
--- a/pygments/styles/algol_nu.py
+++ b/pygments/styles/algol_nu.py
@@ -26,7 +26,7 @@
[1] `Revised Report on the Algorithmic Language Algol-60 <http://www.masswerk.at/algol60/report.htm>`
- :copyright: Copyright 2006-2019 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2020 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
diff --git a/pygments/styles/arduino.py b/pygments/styles/arduino.py
index 160ca094..c734642f 100644
--- a/pygments/styles/arduino.py
+++ b/pygments/styles/arduino.py
@@ -5,7 +5,7 @@
Arduino® Syntax highlighting style.
- :copyright: Copyright 2006-2019 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2020 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
diff --git a/pygments/styles/autumn.py b/pygments/styles/autumn.py
index d5ee045d..b6ac50ef 100644
--- a/pygments/styles/autumn.py
+++ b/pygments/styles/autumn.py
@@ -5,7 +5,7 @@
A colorful style, inspired by the terminal highlighting style.
- :copyright: Copyright 2006-2019 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2020 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
diff --git a/pygments/styles/borland.py b/pygments/styles/borland.py
index e8ec1229..0c679d18 100644
--- a/pygments/styles/borland.py
+++ b/pygments/styles/borland.py
@@ -5,7 +5,7 @@
Style similar to the style used in the Borland IDEs.
- :copyright: Copyright 2006-2019 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2020 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
diff --git a/pygments/styles/bw.py b/pygments/styles/bw.py
index 54a653b6..ad73a360 100644
--- a/pygments/styles/bw.py
+++ b/pygments/styles/bw.py
@@ -5,7 +5,7 @@
Simple black/white only style.
- :copyright: Copyright 2006-2019 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2020 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
diff --git a/pygments/styles/colorful.py b/pygments/styles/colorful.py
index 6aa493c9..6ba9209f 100644
--- a/pygments/styles/colorful.py
+++ b/pygments/styles/colorful.py
@@ -5,7 +5,7 @@
A colorful style, inspired by CodeRay.
- :copyright: Copyright 2006-2019 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2020 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
diff --git a/pygments/styles/default.py b/pygments/styles/default.py
index c93d29fc..99218091 100644
--- a/pygments/styles/default.py
+++ b/pygments/styles/default.py
@@ -5,7 +5,7 @@
The default highlighting style.
- :copyright: Copyright 2006-2019 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2020 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
diff --git a/pygments/styles/emacs.py b/pygments/styles/emacs.py
index 8408b09a..9768b470 100644
--- a/pygments/styles/emacs.py
+++ b/pygments/styles/emacs.py
@@ -5,7 +5,7 @@
A highlighting style for Pygments, inspired by Emacs.
- :copyright: Copyright 2006-2019 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2020 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
diff --git a/pygments/styles/friendly.py b/pygments/styles/friendly.py
index 4184a8e3..19179530 100644
--- a/pygments/styles/friendly.py
+++ b/pygments/styles/friendly.py
@@ -5,7 +5,7 @@
A modern style based on the VIM pyte theme.
- :copyright: Copyright 2006-2019 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2020 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
diff --git a/pygments/styles/fruity.py b/pygments/styles/fruity.py
index 7f5c0e3f..57cd3f26 100644
--- a/pygments/styles/fruity.py
+++ b/pygments/styles/fruity.py
@@ -5,7 +5,7 @@
pygments version of my "fruity" vim theme.
- :copyright: Copyright 2006-2019 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2020 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
diff --git a/pygments/styles/igor.py b/pygments/styles/igor.py
index 70ee38bc..9d593cee 100644
--- a/pygments/styles/igor.py
+++ b/pygments/styles/igor.py
@@ -5,7 +5,7 @@
Igor Pro default style.
- :copyright: Copyright 2006-2019 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2020 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
diff --git a/pygments/styles/inkpot.py b/pygments/styles/inkpot.py
index 0b7ea74e..1c0e4211 100644
--- a/pygments/styles/inkpot.py
+++ b/pygments/styles/inkpot.py
@@ -5,7 +5,7 @@
A highlighting style for Pygments, inspired by the Inkpot theme for VIM.
- :copyright: Copyright 2006-2019 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2020 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
diff --git a/pygments/styles/lovelace.py b/pygments/styles/lovelace.py
index 65b641ec..2ae6ace3 100644
--- a/pygments/styles/lovelace.py
+++ b/pygments/styles/lovelace.py
@@ -9,7 +9,7 @@
A desaturated, somewhat subdued style created for the Lovelace interactive
learning environment.
- :copyright: Copyright 2006-2019 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2020 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
diff --git a/pygments/styles/manni.py b/pygments/styles/manni.py
index bd14f6a1..772563b9 100644
--- a/pygments/styles/manni.py
+++ b/pygments/styles/manni.py
@@ -8,7 +8,7 @@
This is a port of the style used in the `php port`_ of pygments
by Manni. The style is called 'default' there.
- :copyright: Copyright 2006-2019 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2020 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
diff --git a/pygments/styles/monokai.py b/pygments/styles/monokai.py
index c9db9f22..60677750 100644
--- a/pygments/styles/monokai.py
+++ b/pygments/styles/monokai.py
@@ -7,7 +7,7 @@
http://www.monokai.nl/blog/2006/07/15/textmate-color-theme/
- :copyright: Copyright 2006-2019 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2020 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
diff --git a/pygments/styles/murphy.py b/pygments/styles/murphy.py
index e9b50d7a..20fb9878 100644
--- a/pygments/styles/murphy.py
+++ b/pygments/styles/murphy.py
@@ -5,7 +5,7 @@
Murphy's style from CodeRay.
- :copyright: Copyright 2006-2019 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2020 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
diff --git a/pygments/styles/native.py b/pygments/styles/native.py
index a41852ac..04ec0eca 100644
--- a/pygments/styles/native.py
+++ b/pygments/styles/native.py
@@ -5,7 +5,7 @@
pygments version of my "native" vim theme.
- :copyright: Copyright 2006-2019 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2020 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
diff --git a/pygments/styles/paraiso_dark.py b/pygments/styles/paraiso_dark.py
index 50afc37e..6f62c9c7 100644
--- a/pygments/styles/paraiso_dark.py
+++ b/pygments/styles/paraiso_dark.py
@@ -9,7 +9,7 @@
Created with Base16 Builder by Chris Kempson
(https://github.com/chriskempson/base16-builder).
- :copyright: Copyright 2006-2019 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2020 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
diff --git a/pygments/styles/paraiso_light.py b/pygments/styles/paraiso_light.py
index a5f357ba..e69bff5f 100644
--- a/pygments/styles/paraiso_light.py
+++ b/pygments/styles/paraiso_light.py
@@ -9,7 +9,7 @@
Created with Base16 Builder by Chris Kempson
(https://github.com/chriskempson/base16-builder).
- :copyright: Copyright 2006-2019 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2020 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
diff --git a/pygments/styles/pastie.py b/pygments/styles/pastie.py
index 6b022025..d41c7ed1 100644
--- a/pygments/styles/pastie.py
+++ b/pygments/styles/pastie.py
@@ -7,7 +7,7 @@
.. _pastie: http://pastie.caboo.se/
- :copyright: Copyright 2006-2019 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2020 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
diff --git a/pygments/styles/perldoc.py b/pygments/styles/perldoc.py
index a6c21e90..54edea7d 100644
--- a/pygments/styles/perldoc.py
+++ b/pygments/styles/perldoc.py
@@ -7,7 +7,7 @@
.. _perldoc: http://perldoc.perl.org/
- :copyright: Copyright 2006-2019 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2020 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
diff --git a/pygments/styles/rainbow_dash.py b/pygments/styles/rainbow_dash.py
index c2c2c7c5..b0eb2918 100644
--- a/pygments/styles/rainbow_dash.py
+++ b/pygments/styles/rainbow_dash.py
@@ -7,7 +7,7 @@
.. _theme: http://sanssecours.github.io/Rainbow-Dash.tmbundle
- :copyright: Copyright 2006-2019 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2020 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
diff --git a/pygments/styles/rrt.py b/pygments/styles/rrt.py
index d61085e0..af171744 100644
--- a/pygments/styles/rrt.py
+++ b/pygments/styles/rrt.py
@@ -5,7 +5,7 @@
pygments "rrt" theme, based on Zap and Emacs defaults.
- :copyright: Copyright 2006-2019 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2020 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
diff --git a/pygments/styles/sas.py b/pygments/styles/sas.py
index 3121aa53..89e1b5a7 100644
--- a/pygments/styles/sas.py
+++ b/pygments/styles/sas.py
@@ -7,7 +7,7 @@
meant to be a complete style. It's merely meant to mimic SAS'
program editor syntax highlighting.
- :copyright: Copyright 2006-2019 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2020 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
diff --git a/pygments/styles/solarized.py b/pygments/styles/solarized.py
index ac94a984..50b22bd0 100644
--- a/pygments/styles/solarized.py
+++ b/pygments/styles/solarized.py
@@ -8,7 +8,7 @@
A Pygments style for the Solarized themes (licensed under MIT).
See: https://github.com/altercation/solarized
- :copyright: Copyright 2006-2019 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2020 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
diff --git a/pygments/styles/stata_dark.py b/pygments/styles/stata_dark.py
index 122b3d77..3bec33c0 100644
--- a/pygments/styles/stata_dark.py
+++ b/pygments/styles/stata_dark.py
@@ -7,7 +7,7 @@
meant to be a complete style, just for Stata's file formats.
- :copyright: Copyright 2006-2019 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2020 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
diff --git a/pygments/styles/stata_light.py b/pygments/styles/stata_light.py
index 41009a18..430b8a21 100644
--- a/pygments/styles/stata_light.py
+++ b/pygments/styles/stata_light.py
@@ -6,7 +6,7 @@
Light Style inspired by Stata's do-file editor. Note this is not
meant to be a complete style, just for Stata's file formats.
- :copyright: Copyright 2006-2019 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2020 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
diff --git a/pygments/styles/tango.py b/pygments/styles/tango.py
index 5205244c..71ed52b9 100644
--- a/pygments/styles/tango.py
+++ b/pygments/styles/tango.py
@@ -33,7 +33,7 @@
have been chosen to have the same style. Similarly, keywords (Keyword.*),
and Operator.Word (and, or, in) have been assigned the same style.
- :copyright: Copyright 2006-2019 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2020 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
diff --git a/pygments/styles/trac.py b/pygments/styles/trac.py
index 1e96584d..a5c23ee4 100644
--- a/pygments/styles/trac.py
+++ b/pygments/styles/trac.py
@@ -5,7 +5,7 @@
Port of the default trac highlighter design.
- :copyright: Copyright 2006-2019 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2020 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
diff --git a/pygments/styles/vim.py b/pygments/styles/vim.py
index 954e91d5..46272747 100644
--- a/pygments/styles/vim.py
+++ b/pygments/styles/vim.py
@@ -5,7 +5,7 @@
A highlighting style for Pygments, inspired by vim.
- :copyright: Copyright 2006-2019 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2020 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
diff --git a/pygments/styles/vs.py b/pygments/styles/vs.py
index 111e4aed..da393477 100644
--- a/pygments/styles/vs.py
+++ b/pygments/styles/vs.py
@@ -5,7 +5,7 @@
Simple style with MS Visual Studio colors.
- :copyright: Copyright 2006-2019 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2020 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
diff --git a/pygments/styles/xcode.py b/pygments/styles/xcode.py
index 7e87d083..0f2f1cba 100644
--- a/pygments/styles/xcode.py
+++ b/pygments/styles/xcode.py
@@ -5,7 +5,7 @@
Style similar to the `Xcode` default theme.
- :copyright: Copyright 2006-2019 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2020 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
diff --git a/pygments/token.py b/pygments/token.py
index 5c30eb46..5ed79f74 100644
--- a/pygments/token.py
+++ b/pygments/token.py
@@ -5,7 +5,7 @@
Basic token types and the standard tokens.
- :copyright: Copyright 2006-2019 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2020 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
diff --git a/pygments/unistring.py b/pygments/unistring.py
index 466ebdb6..027df53e 100644
--- a/pygments/unistring.py
+++ b/pygments/unistring.py
@@ -8,7 +8,7 @@
Inspired by chartypes_create.py from the MoinMoin project.
- :copyright: Copyright 2006-2019 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2020 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
diff --git a/pygments/util.py b/pygments/util.py
index 92e4f259..febde921 100644
--- a/pygments/util.py
+++ b/pygments/util.py
@@ -5,7 +5,7 @@
Utility functions.
- :copyright: Copyright 2006-2019 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2020 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
diff --git a/scripts/check_sources.py b/scripts/check_sources.py
index 57151192..4c0a27b5 100755
--- a/scripts/check_sources.py
+++ b/scripts/check_sources.py
@@ -7,7 +7,7 @@
Make sure each Python file has a correct file header
including copyright and license information.
- :copyright: Copyright 2006-2019 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2020 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
@@ -34,7 +34,7 @@ def checker(*suffixes, **kwds):
name_mail_re = r'[\w ]+(<.*?>)?'
-copyright_re = re.compile(r'^ :copyright: Copyright 2006-2019 by '
+copyright_re = re.compile(r'^ :copyright: Copyright 2006-2020 by '
r'the Pygments team, see AUTHORS\.$', re.UNICODE)
copyright_2_re = re.compile(r'^ %s(, %s)*[,.]$' %
(name_mail_re, name_mail_re), re.UNICODE)
diff --git a/scripts/debug_lexer.py b/scripts/debug_lexer.py
index 6963a5c0..d00b6102 100755
--- a/scripts/debug_lexer.py
+++ b/scripts/debug_lexer.py
@@ -8,7 +8,7 @@
the text where Error tokens are being generated, along
with some context.
- :copyright: Copyright 2006-2019 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2020 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
@@ -52,12 +52,10 @@ class DebuggingRegexLexer(ExtendedRegexLexer):
ctx.pos = m.end()
else:
if not isinstance(self, ExtendedRegexLexer):
- for item in action(self, m):
- yield item
+ yield from action(self, m)
ctx.pos = m.end()
else:
- for item in action(self, m, ctx):
- yield item
+ yield from action(self, m, ctx)
if not new_state:
# altered the state stack?
statetokens = tokendefs[ctx.stack[-1]]
diff --git a/scripts/get_vimkw.py b/scripts/get_vimkw.py
index eecbc43f..90c45191 100644
--- a/scripts/get_vimkw.py
+++ b/scripts/get_vimkw.py
@@ -14,7 +14,7 @@ HEADER = '''\
This file is autogenerated by scripts/get_vimkw.py
- :copyright: Copyright 2006-2019 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2020 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
diff --git a/setup.py b/setup.py
index 62db3a56..f25bb92a 100755
--- a/setup.py
+++ b/setup.py
@@ -17,7 +17,7 @@ are:
formats that PIL supports and ANSI sequences
* it is usable as a command-line tool and as a library
-:copyright: Copyright 2006-2019 by the Pygments team, see AUTHORS.
+:copyright: Copyright 2006-2020 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
diff --git a/tests/__init__.py b/tests/__init__.py
index 35c9a781..30516d26 100644
--- a/tests/__init__.py
+++ b/tests/__init__.py
@@ -3,6 +3,6 @@
Pygments test package
~~~~~~~~~~~~~~~~~~~~~
- :copyright: Copyright 2006-2019 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2020 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
diff --git a/tests/examplefiles/bare.bare b/tests/examplefiles/bare.bare
new file mode 100644
index 00000000..ba2725ce
--- /dev/null
+++ b/tests/examplefiles/bare.bare
@@ -0,0 +1,43 @@
+type PublicRSAKey data<128>
+type PublicED25519Key data<32>
+type Time string # ISO 8601
+
+enum Department {
+ ACCOUNTING
+ ADMINISTRATION
+ CUSTOMER_SERVICE
+ DEVELOPMENT # IMPORTANT
+
+ # Reserved for the CEO
+ JSMITH = 99
+}
+
+type Customer {
+ name: string
+ email: string
+ address: Address
+ orders: []{
+ orderId: i64
+ quantity: i32
+ }
+ metadata: map[string]data
+}
+
+type Employee {
+ name: string
+ email: string
+ address: Address
+ department: Department
+ hireDate: Time
+ publicKey: optional<(PublicRSAKey | PublicED25519Key)>
+ metadata: map[string]data
+}
+
+type Person (Customer | Employee)
+
+type Address {
+ address: [4]string
+ city: string
+ state: string
+ country: string
+}
diff --git a/tests/examplefiles/example.promql b/tests/examplefiles/example.promql
new file mode 100644
index 00000000..e2fee087
--- /dev/null
+++ b/tests/examplefiles/example.promql
@@ -0,0 +1,8 @@
+# A metric with label filtering
+go_gc_duration_seconds{instance="localhost:9090", job="alertmanager"}
+
+# Aggregation operators
+sum by (app, proc) (
+ instance_memory_limit_bytes - instance_memory_usage_bytes
+) / 1024 / 1024
+
diff --git a/tests/examplefiles/example.ptls b/tests/examplefiles/example.ptls
new file mode 100644
index 00000000..f3dc641a
--- /dev/null
+++ b/tests/examplefiles/example.ptls
@@ -0,0 +1,30 @@
+
+-----------------------------------------------------------
+-- Print the lyrics to the song '99 bottles of beer'
+
+output =
+ range(99, 1)
+ |> map(showBeer)
+ |> printLines
+
+beerFmt = """{} of beer on the wall!
+{} of beer!
+You take one down, pass it around
+{}"""
+
+showBeer(n) =
+ format(
+ beerFmt,
+ [showBottle(n), showBottle(n), nextBeer(n - 1)]
+ )
+
+nextBeer(n) =
+ if n == 0 then "No more bottles of beer on the wall!"
+ else format("{} of beer on the wall!\n", [showBottle(n)])
+
+-----------------------------------------------------------
+-- Get appropriate singular / plural form of 'n bottle(s)'
+
+showBottle(n) =
+ format("{} {}", [n, bottleStr])
+ where bottleStr = if n == 1 then "bottle" else "bottles"
diff --git a/tests/examplefiles/fennelview.fnl b/tests/examplefiles/fennelview.fnl
index fd0fc648..19d9c167 100644
--- a/tests/examplefiles/fennelview.fnl
+++ b/tests/examplefiles/fennelview.fnl
@@ -1,13 +1,13 @@
;; A pretty-printer that outputs tables in Fennel syntax.
;; Loosely based on inspect.lua: http://github.com/kikito/inspect.lua
-(local quote (fn [str] (.. '"' (: str :gsub '"' '\\"') '"')))
+(fn view-quote [str] (.. "\"" (: str :gsub "\"" "\\\"") "\""))
(local short-control-char-escapes
{"\a" "\\a" "\b" "\\b" "\f" "\\f" "\n" "\\n"
"\r" "\\r" "\t" "\\t" "\v" "\\v"})
-(local long-control-char-esapes
+(local long-control-char-escapes
(let [long {}]
(for [i 0 31]
(let [ch (string.char i)]
@@ -17,9 +17,10 @@
long))
(fn escape [str]
- (let [str (: str :gsub "\\" "\\\\")
- str (: str :gsub "(%c)%f[0-9]" long-control-char-esapes)]
- (: str :gsub "%c" short-control-char-escapes)))
+ (-> str
+ (: :gsub "\\" "\\\\")
+ (: :gsub "(%c)%f[0-9]" long-control-char-escapes)
+ (: :gsub "%c" short-control-char-escapes)))
(fn sequence-key? [k len]
(and (= (type k) "number")
@@ -32,7 +33,7 @@
(fn sort-keys [a b]
(let [ta (type a) tb (type b)]
- (if (and (= ta tb) (~= ta "boolean")
+ (if (and (= ta tb)
(or (= ta "string") (= ta "number")))
(< a b)
(let [dta (. type-order a)
@@ -58,13 +59,12 @@
(values keys sequence-length)))
(fn count-table-appearances [t appearances]
- (if (= (type t) "table")
- (when (not (. appearances t))
- (tset appearances t 1)
- (each [k v (pairs t)]
- (count-table-appearances k appearances)
- (count-table-appearances v appearances)))
- (when (and t (= t t)) ; no nans please
+ (when (= (type t) "table")
+ (if (not (. appearances t))
+ (do (tset appearances t 1)
+ (each [k v (pairs t)]
+ (count-table-appearances k appearances)
+ (count-table-appearances v appearances)))
(tset appearances t (+ (or (. appearances t) 0) 1))))
appearances)
@@ -78,7 +78,7 @@
(fn tabify [self] (puts self "\n" (: self.indent :rep self.level)))
-(fn already-visited? [self v] (~= (. self.ids v) nil))
+(fn already-visited? [self v] (not= (. self.ids v) nil))
(fn get-id [self v]
(var id (. self.ids v))
@@ -89,54 +89,70 @@
(tset self.ids v id)))
(tostring id))
-(fn put-sequential-table [self t length]
+(fn put-sequential-table [self t len]
(puts self "[")
(set self.level (+ self.level 1))
- (for [i 1 length]
- (puts self " ")
+ (for [i 1 len]
+ (when (< 1 i (+ 1 len))
+ (puts self " "))
(put-value self (. t i)))
(set self.level (- self.level 1))
- (puts self " ]"))
+ (puts self "]"))
(fn put-key [self k]
(if (and (= (type k) "string")
- (: k :find "^[-%w?\\^_`!#$%&*+./@~:|<=>]+$"))
+ (: k :find "^[-%w?\\^_!$%&*+./@:|<=>]+$"))
(puts self ":" k)
(put-value self k)))
-(fn put-kv-table [self t]
+(fn put-kv-table [self t ordered-keys]
(puts self "{")
(set self.level (+ self.level 1))
- (each [k v (pairs t)]
- (tabify self)
+ ;; first, output sorted nonsequential keys
+ (each [i k (ipairs ordered-keys)]
+ (when (or self.table-edges (not= i 1))
+ (tabify self))
(put-key self k)
(puts self " ")
+ (put-value self (. t k)))
+ ;; next, output any sequential keys
+ (each [i v (ipairs t)]
+ (tabify self)
+ (put-key self i)
+ (puts self " ")
(put-value self v))
(set self.level (- self.level 1))
- (tabify self)
+ (when self.table-edges
+ (tabify self))
(puts self "}"))
(fn put-table [self t]
- (if (already-visited? self t)
- (puts self "#<table " (get-id self t) ">")
- (>= self.level self.depth)
- (puts self "{...}")
- :else
- (let [(non-seq-keys length) (get-nonsequential-keys t)
- id (get-id self t)]
- (if (> (. self.appearances t) 1)
- (puts self "#<" id ">")
- (and (= (# non-seq-keys) 0) (= (# t) 0))
- (puts self "{}")
- (= (# non-seq-keys) 0)
- (put-sequential-table self t length)
- :else
- (put-kv-table self t)))))
+ (let [metamethod (and self.metamethod? (-?> t getmetatable (. :__fennelview)))]
+ (if (and (already-visited? self t) self.detect-cycles?)
+ (puts self "#<table @" (get-id self t) ">")
+ (>= self.level self.depth)
+ (puts self "{...}")
+ metamethod
+ (puts self (metamethod t self.fennelview))
+ :else
+ (let [(non-seq-keys len) (get-nonsequential-keys t)
+ id (get-id self t)]
+ ;; fancy metatable stuff can result in self.appearances not including
+ ;; a table, so if it's not found, assume we haven't seen it; we can't
+ ;; do cycle detection in that case.
+ (when (and (< 1 (or (. self.appearances t) 0)) self.detect-cycles?)
+ (puts self "@" id))
+ (if (and (= (length non-seq-keys) 0) (= (length t) 0))
+ (puts self (if self.empty-as-square "[]" "{}"))
+ (= (length non-seq-keys) 0)
+ (put-sequential-table self t len)
+ :else
+ (put-kv-table self t non-seq-keys))))))
(set put-value (fn [self v]
(let [tv (type v)]
(if (= tv "string")
- (puts self (quote (escape v)))
+ (puts self (view-quote (escape v)))
(or (= tv "number") (= tv "boolean") (= tv "nil"))
(puts self (tostring v))
(= tv "table")
@@ -146,11 +162,41 @@
-(fn fennelview [root options]
+(fn one-line [str]
+ ;; save return value as local to ignore gsub's extra return value
+ (let [ret (-> str
+ (: :gsub "\n" " ")
+ (: :gsub "%[ " "[") (: :gsub " %]" "]")
+ (: :gsub "%{ " "{") (: :gsub " %}" "}")
+ (: :gsub "%( " "(") (: :gsub " %)" ")"))]
+ ret))
+
+(fn fennelview [x options]
+ "Return a string representation of x.
+
+Can take an options table with these keys:
+* :one-line (boolean: default: false) keep the output string as a one-liner
+* :depth (number, default: 128) limit how many levels to go (default: 128)
+* :indent (string, default: \" \") use this string to indent each level
+* :detect-cycles? (boolean, default: true) don't try to traverse a looping table
+* :metamethod? (boolean: default: true) use the __fennelview metamethod if found
+* :table-edges (boolean: default: true) put {} table brackets on their own line
+* :empty-as-square (boolean: default: false) render empty tables as [], not {}
+
+The __fennelview metamethod should take the table being serialized as its first
+argument and a function as its second arg which can be used on table elements to
+continue the fennelview process on them.
+"
(let [options (or options {})
- inspector {:appearances (count-table-appearances root {})
+ inspector {:appearances (count-table-appearances x {})
:depth (or options.depth 128)
:level 0 :buffer {} :ids {} :max-ids {}
- :indent (or options.indent " ")}]
- (put-value inspector root)
- (table.concat inspector.buffer)))
+ :indent (or options.indent (if options.one-line "" " "))
+ :detect-cycles? (not (= false options.detect-cycles?))
+ :metamethod? (not (= false options.metamethod?))
+ :fennelview #(fennelview $1 options)
+ :table-edges (not= options.table-edges false)
+ :empty-as-square options.empty-as-square}]
+ (put-value inspector x)
+ (let [str (table.concat inspector.buffer)]
+ (if options.one-line (one-line str) str))))
diff --git a/tests/examplefiles/mysql.txt b/tests/examplefiles/mysql.txt
new file mode 100644
index 00000000..4927abd8
--- /dev/null
+++ b/tests/examplefiles/mysql.txt
@@ -0,0 +1,132 @@
+-- Samples of MySQL parsing
+
+
+-- Comments
+# standalone comment line
+-- standalone comment line
+SELECT 1; -- trailing comment
+SELECT 1; # trailing comment
+SELECT 1; /* trailing comment */
+SELECT /* interruption */ /**/ 1;
+ /*
+ Multiline / * / comment
+ */
+ /* /* MySQL does not support nested comments */
+SELECT 'If this line is a comment then nested commenting is enabled (and therefore broken).';
+
+
+-- Optimizer hints
+SELECT /*+ SEMIJOIN(FIRSTMATCH, LOOSESCAN) */ 1;
+SELECT /*+ SET_VAR(foreign_key_checks=OFF) */ 1;
+
+
+-- Literals
+SELECT
+ -- Integers
+ 123,
+
+ -- Floats
+ .123, 1.23, 123.,
+
+ -- Exponents
+ 1e10, 1e-10, 1.e20, .1e-20,
+
+ -- Hexadecimal
+ X'0af019', x'0AF019', 0xaf019,
+
+ -- Binary
+ B'010', b'010', 0b010,
+
+ -- Temporal literals
+ {d'2020-01-01'}, { d ' 2020^01@01 ' },
+ {t'8 9:10:11'}, { t ' 09:10:11.12 ' }, { t ' 091011 ' },
+ {ts"2020-01-01 09:10:11"}, { ts ' 2020@01/01 09:10:11 ' },
+
+ -- Strings
+ '', 'abc', '1''2\03\%4\_5\\6\'7\"8',
+ "", "abc", "1""2\03\%4\_5\\6\'7\"8",
+;
+
+
+-- Variables
+SET @a = 1, @1 = 2, @._.$ = 3;
+SET @'?' = 1, @'abc''def"`ghi' = 2;
+SET @"#" = 1, @"abc""def'`ghi" = 2;
+SET @`^` = 1, @`abc``def'"ghi` = 2;
+SELECT
+ @@timestamp,
+ @@global.auto_increment_offset,
+ @@session.auto_increment_offset,
+ @@auto_increment_offset
+;
+
+
+-- Prepared statements
+SELECT POW(?, 3) AS cubed;
+
+
+-- Constants
+SELECT TRUE, FALSE, NULL, UNKNOWN;
+
+
+-- Data types
+CREATE TABLE table1 (
+ id INT AUTO_INCREMENT PRIMARY KEY,
+ name VARCHAR(20) NOT NULL,
+ birthyear YEAR
+);
+
+
+-- Keywords
+INSERT INTO table1 (person, birthyear) VALUES ('abc', 2020);
+
+WITH RECURSIVE example (n) AS (
+ SELECT 1
+ UNION ALL
+ SELECT n + 1 FROM example
+ WHERE n < 10
+)
+SELECT n FROM example;
+
+SELECT 17 MEMBER OF ('[23, "abc", 17, "ab", 10]');
+
+
+-- Functions
+SELECT CONCAT('function');
+SELECT MAX(quantity) FROM example;
+
+
+-- Schema object names
+CREATE TABLE basic (
+ example INT,
+ 股票编号 INT,
+ `select` INT,
+ `concat(` INT
+);
+
+SELECT e1.`apple` AS a, `example2`.b
+FROM example1 AS e1
+JOIN example2 e2
+ON `example1`.`id` = e2.id;
+
+
+-- Operators
+SELECT 1 + 2 - 3 << 2;
+SELECT 1::DECIMAL(5, 2);
+SET @a = 1;
+SET a := 1;
+SELECT c->>'$.name' FROM example;
+
+
+
+-- Exceptions
+CREATE TABLE t1
+(
+ c1 VARCHAR(5) CHARACTER SET latin1,
+ c2 SET('r', 'g', 'b')
+);
+
+
+-- Introducers
+SELECT _latin1'abc';
+SELECT _binary'abc';
diff --git a/tests/examplefiles/psysh_test.psysh b/tests/examplefiles/psysh_test.psysh
new file mode 100644
index 00000000..4ab9b134
--- /dev/null
+++ b/tests/examplefiles/psysh_test.psysh
@@ -0,0 +1,47 @@
+>>> (int) 10.88
+=> 10
+>>> (string) 10.88
+=> "10.88"
+>>> (bool) 10.88
+=> true
+>>> (array) 10.88
+=> [
+ 10.88,
+ ]
+>>> $object = (object) 10.88
+=> {#2373
+ +"scalar": 10.88,
+ }
+>>> $object->scalar
+=> 10.88
+>>> $fileHandle = fopen('hello.txt', 'w');
+=> stream resource #400
+>>> (int) $fileHandle
+=> 400
+>>> (string) $fileHandle
+=> "Resource id #400"
+>>> $greeting = 'Hello!';
+=> "Hello!"
+>>> $_greeting = 'Hello!';
+=> "Hello!"
+>>> $gruß = 'Hallo!';
+=> "Hallo!"
+>>> namespace Foo\Bar;
+>>> class Baz {
+... public function getBaz(): string {
+... return 'baz';
+... }
+... }
+>>> $baz = new Foo\Bar\Baz();
+PHP Fatal error: Class 'Foo/Bar/Foo/Bar/Baz' not
+ found in Psy Shell code on line 1
+>>> $baz = new Baz();
+=> Foo\Bar\Baz {#2382}
+>>> $baz->getBaz();
+=> "baz"
+>>> $greeting = function($name): string {
+... return "Hello, {$name}";
+... };
+=> Closure($name): string {#2371 …3}
+>>> $greeting('World')
+=> "Hello, World"
diff --git a/tests/examplefiles/test.sco b/tests/examplefiles/test.sco
index cffcfded..4f1298d8 100644
--- a/tests/examplefiles/test.sco
+++ b/tests/examplefiles/test.sco
@@ -3,7 +3,7 @@
*/
; comment
// comment
-a b C d e f i q s t v x y
+a B b C d e f i q s t v x y
z
np0 nP1 Np2 NP3
m/**/label;
diff --git a/tests/html_linenos_expected_output/inline_cls_step_1_start_1_special_0_anchor.html b/tests/html_linenos_expected_output/inline_cls_step_1_start_1_special_0_anchor.html
index 78b7675e..4c247cbc 100644
--- a/tests/html_linenos_expected_output/inline_cls_step_1_start_1_special_0_anchor.html
+++ b/tests/html_linenos_expected_output/inline_cls_step_1_start_1_special_0_anchor.html
@@ -1,8 +1,6 @@
<div class="highlight">
- <pre>
- <span></span>
- <span class="linenos">1</span><span class="c1"># a</span>
- <span class="linenos">2</span><span class="c1"># b</span>
- <span class="linenos">3</span><span class="c1"># c</span>
- </pre>
+ <pre><span></span><span class="linenos">1</span><span class="c1"># a</span>
+<span class="linenos">2</span><span class="c1"># b</span>
+<span class="linenos">3</span><span class="c1"># c</span>
+</pre>
</div>
diff --git a/tests/html_linenos_expected_output/inline_cls_step_1_start_1_special_0_noanchor.html b/tests/html_linenos_expected_output/inline_cls_step_1_start_1_special_0_noanchor.html
index 78b7675e..4c247cbc 100644
--- a/tests/html_linenos_expected_output/inline_cls_step_1_start_1_special_0_noanchor.html
+++ b/tests/html_linenos_expected_output/inline_cls_step_1_start_1_special_0_noanchor.html
@@ -1,8 +1,6 @@
<div class="highlight">
- <pre>
- <span></span>
- <span class="linenos">1</span><span class="c1"># a</span>
- <span class="linenos">2</span><span class="c1"># b</span>
- <span class="linenos">3</span><span class="c1"># c</span>
- </pre>
+ <pre><span></span><span class="linenos">1</span><span class="c1"># a</span>
+<span class="linenos">2</span><span class="c1"># b</span>
+<span class="linenos">3</span><span class="c1"># c</span>
+</pre>
</div>
diff --git a/tests/html_linenos_expected_output/inline_cls_step_1_start_1_special_3_anchor.html b/tests/html_linenos_expected_output/inline_cls_step_1_start_1_special_3_anchor.html
index a133f7a8..0200a290 100644
--- a/tests/html_linenos_expected_output/inline_cls_step_1_start_1_special_3_anchor.html
+++ b/tests/html_linenos_expected_output/inline_cls_step_1_start_1_special_3_anchor.html
@@ -1,8 +1,6 @@
<div class="highlight">
- <pre>
- <span></span>
- <span class="linenos">1</span><span class="c1"># a</span>
- <span class="linenos">2</span><span class="c1"># b</span>
- <span class="linenos special">3</span><span class="c1"># c</span>
- </pre>
+ <pre><span></span><span class="linenos">1</span><span class="c1"># a</span>
+<span class="linenos">2</span><span class="c1"># b</span>
+<span class="linenos special">3</span><span class="c1"># c</span>
+</pre>
</div>
diff --git a/tests/html_linenos_expected_output/inline_cls_step_1_start_1_special_3_noanchor.html b/tests/html_linenos_expected_output/inline_cls_step_1_start_1_special_3_noanchor.html
index a133f7a8..0200a290 100644
--- a/tests/html_linenos_expected_output/inline_cls_step_1_start_1_special_3_noanchor.html
+++ b/tests/html_linenos_expected_output/inline_cls_step_1_start_1_special_3_noanchor.html
@@ -1,8 +1,6 @@
<div class="highlight">
- <pre>
- <span></span>
- <span class="linenos">1</span><span class="c1"># a</span>
- <span class="linenos">2</span><span class="c1"># b</span>
- <span class="linenos special">3</span><span class="c1"># c</span>
- </pre>
+ <pre><span></span><span class="linenos">1</span><span class="c1"># a</span>
+<span class="linenos">2</span><span class="c1"># b</span>
+<span class="linenos special">3</span><span class="c1"># c</span>
+</pre>
</div>
diff --git a/tests/html_linenos_expected_output/inline_cls_step_1_start_8_special_0_anchor.html b/tests/html_linenos_expected_output/inline_cls_step_1_start_8_special_0_anchor.html
index e4000c6e..53dc0e72 100644
--- a/tests/html_linenos_expected_output/inline_cls_step_1_start_8_special_0_anchor.html
+++ b/tests/html_linenos_expected_output/inline_cls_step_1_start_8_special_0_anchor.html
@@ -1,8 +1,6 @@
<div class="highlight">
- <pre>
- <span></span>
- <span class="linenos"> 8</span><span class="c1"># a</span>
- <span class="linenos"> 9</span><span class="c1"># b</span>
- <span class="linenos">10</span><span class="c1"># c</span>
- </pre>
+ <pre><span></span><span class="linenos"> 8</span><span class="c1"># a</span>
+<span class="linenos"> 9</span><span class="c1"># b</span>
+<span class="linenos">10</span><span class="c1"># c</span>
+</pre>
</div>
diff --git a/tests/html_linenos_expected_output/inline_cls_step_1_start_8_special_0_noanchor.html b/tests/html_linenos_expected_output/inline_cls_step_1_start_8_special_0_noanchor.html
index e4000c6e..53dc0e72 100644
--- a/tests/html_linenos_expected_output/inline_cls_step_1_start_8_special_0_noanchor.html
+++ b/tests/html_linenos_expected_output/inline_cls_step_1_start_8_special_0_noanchor.html
@@ -1,8 +1,6 @@
<div class="highlight">
- <pre>
- <span></span>
- <span class="linenos"> 8</span><span class="c1"># a</span>
- <span class="linenos"> 9</span><span class="c1"># b</span>
- <span class="linenos">10</span><span class="c1"># c</span>
- </pre>
+ <pre><span></span><span class="linenos"> 8</span><span class="c1"># a</span>
+<span class="linenos"> 9</span><span class="c1"># b</span>
+<span class="linenos">10</span><span class="c1"># c</span>
+</pre>
</div>
diff --git a/tests/html_linenos_expected_output/inline_cls_step_1_start_8_special_3_anchor.html b/tests/html_linenos_expected_output/inline_cls_step_1_start_8_special_3_anchor.html
index bcbd693c..1f2f5cac 100644
--- a/tests/html_linenos_expected_output/inline_cls_step_1_start_8_special_3_anchor.html
+++ b/tests/html_linenos_expected_output/inline_cls_step_1_start_8_special_3_anchor.html
@@ -1,8 +1,6 @@
<div class="highlight">
- <pre>
- <span></span>
- <span class="linenos"> 8</span><span class="c1"># a</span>
- <span class="linenos special"> 9</span><span class="c1"># b</span>
- <span class="linenos">10</span><span class="c1"># c</span>
- </pre>
+ <pre><span></span><span class="linenos"> 8</span><span class="c1"># a</span>
+<span class="linenos special"> 9</span><span class="c1"># b</span>
+<span class="linenos">10</span><span class="c1"># c</span>
+</pre>
</div>
diff --git a/tests/html_linenos_expected_output/inline_cls_step_1_start_8_special_3_noanchor.html b/tests/html_linenos_expected_output/inline_cls_step_1_start_8_special_3_noanchor.html
index bcbd693c..1f2f5cac 100644
--- a/tests/html_linenos_expected_output/inline_cls_step_1_start_8_special_3_noanchor.html
+++ b/tests/html_linenos_expected_output/inline_cls_step_1_start_8_special_3_noanchor.html
@@ -1,8 +1,6 @@
<div class="highlight">
- <pre>
- <span></span>
- <span class="linenos"> 8</span><span class="c1"># a</span>
- <span class="linenos special"> 9</span><span class="c1"># b</span>
- <span class="linenos">10</span><span class="c1"># c</span>
- </pre>
+ <pre><span></span><span class="linenos"> 8</span><span class="c1"># a</span>
+<span class="linenos special"> 9</span><span class="c1"># b</span>
+<span class="linenos">10</span><span class="c1"># c</span>
+</pre>
</div>
diff --git a/tests/html_linenos_expected_output/inline_cls_step_2_start_1_special_0_anchor.html b/tests/html_linenos_expected_output/inline_cls_step_2_start_1_special_0_anchor.html
index 56275271..911d9b9b 100644
--- a/tests/html_linenos_expected_output/inline_cls_step_2_start_1_special_0_anchor.html
+++ b/tests/html_linenos_expected_output/inline_cls_step_2_start_1_special_0_anchor.html
@@ -1,8 +1,6 @@
<div class="highlight">
- <pre>
- <span></span>
- <span class="linenos"> </span><span class="c1"># a</span>
- <span class="linenos">2</span><span class="c1"># b</span>
- <span class="linenos"> </span><span class="c1"># c</span>
- </pre>
+ <pre><span></span><span class="linenos"> </span><span class="c1"># a</span>
+<span class="linenos">2</span><span class="c1"># b</span>
+<span class="linenos"> </span><span class="c1"># c</span>
+</pre>
</div>
diff --git a/tests/html_linenos_expected_output/inline_cls_step_2_start_1_special_0_noanchor.html b/tests/html_linenos_expected_output/inline_cls_step_2_start_1_special_0_noanchor.html
index 56275271..911d9b9b 100644
--- a/tests/html_linenos_expected_output/inline_cls_step_2_start_1_special_0_noanchor.html
+++ b/tests/html_linenos_expected_output/inline_cls_step_2_start_1_special_0_noanchor.html
@@ -1,8 +1,6 @@
<div class="highlight">
- <pre>
- <span></span>
- <span class="linenos"> </span><span class="c1"># a</span>
- <span class="linenos">2</span><span class="c1"># b</span>
- <span class="linenos"> </span><span class="c1"># c</span>
- </pre>
+ <pre><span></span><span class="linenos"> </span><span class="c1"># a</span>
+<span class="linenos">2</span><span class="c1"># b</span>
+<span class="linenos"> </span><span class="c1"># c</span>
+</pre>
</div>
diff --git a/tests/html_linenos_expected_output/inline_cls_step_2_start_1_special_3_anchor.html b/tests/html_linenos_expected_output/inline_cls_step_2_start_1_special_3_anchor.html
index 9f10418b..7f2c5181 100644
--- a/tests/html_linenos_expected_output/inline_cls_step_2_start_1_special_3_anchor.html
+++ b/tests/html_linenos_expected_output/inline_cls_step_2_start_1_special_3_anchor.html
@@ -1,8 +1,6 @@
<div class="highlight">
- <pre>
- <span></span>
- <span class="linenos"> </span><span class="c1"># a</span>
- <span class="linenos">2</span><span class="c1"># b</span>
- <span class="linenos special"> </span><span class="c1"># c</span>
- </pre>
+ <pre><span></span><span class="linenos"> </span><span class="c1"># a</span>
+<span class="linenos">2</span><span class="c1"># b</span>
+<span class="linenos special"> </span><span class="c1"># c</span>
+</pre>
</div>
diff --git a/tests/html_linenos_expected_output/inline_cls_step_2_start_1_special_3_noanchor.html b/tests/html_linenos_expected_output/inline_cls_step_2_start_1_special_3_noanchor.html
index 9f10418b..7f2c5181 100644
--- a/tests/html_linenos_expected_output/inline_cls_step_2_start_1_special_3_noanchor.html
+++ b/tests/html_linenos_expected_output/inline_cls_step_2_start_1_special_3_noanchor.html
@@ -1,8 +1,6 @@
<div class="highlight">
- <pre>
- <span></span>
- <span class="linenos"> </span><span class="c1"># a</span>
- <span class="linenos">2</span><span class="c1"># b</span>
- <span class="linenos special"> </span><span class="c1"># c</span>
- </pre>
+ <pre><span></span><span class="linenos"> </span><span class="c1"># a</span>
+<span class="linenos">2</span><span class="c1"># b</span>
+<span class="linenos special"> </span><span class="c1"># c</span>
+</pre>
</div>
diff --git a/tests/html_linenos_expected_output/inline_cls_step_2_start_8_special_0_anchor.html b/tests/html_linenos_expected_output/inline_cls_step_2_start_8_special_0_anchor.html
index 490d7484..ce635c0e 100644
--- a/tests/html_linenos_expected_output/inline_cls_step_2_start_8_special_0_anchor.html
+++ b/tests/html_linenos_expected_output/inline_cls_step_2_start_8_special_0_anchor.html
@@ -1,8 +1,6 @@
<div class="highlight">
- <pre>
- <span></span>
- <span class="linenos"> 8</span><span class="c1"># a</span>
- <span class="linenos"> </span><span class="c1"># b</span>
- <span class="linenos">10</span><span class="c1"># c</span>
- </pre>
+ <pre><span></span><span class="linenos"> 8</span><span class="c1"># a</span>
+<span class="linenos"> </span><span class="c1"># b</span>
+<span class="linenos">10</span><span class="c1"># c</span>
+</pre>
</div>
diff --git a/tests/html_linenos_expected_output/inline_cls_step_2_start_8_special_0_noanchor.html b/tests/html_linenos_expected_output/inline_cls_step_2_start_8_special_0_noanchor.html
index 490d7484..ce635c0e 100644
--- a/tests/html_linenos_expected_output/inline_cls_step_2_start_8_special_0_noanchor.html
+++ b/tests/html_linenos_expected_output/inline_cls_step_2_start_8_special_0_noanchor.html
@@ -1,8 +1,6 @@
<div class="highlight">
- <pre>
- <span></span>
- <span class="linenos"> 8</span><span class="c1"># a</span>
- <span class="linenos"> </span><span class="c1"># b</span>
- <span class="linenos">10</span><span class="c1"># c</span>
- </pre>
+ <pre><span></span><span class="linenos"> 8</span><span class="c1"># a</span>
+<span class="linenos"> </span><span class="c1"># b</span>
+<span class="linenos">10</span><span class="c1"># c</span>
+</pre>
</div>
diff --git a/tests/html_linenos_expected_output/inline_cls_step_2_start_8_special_3_anchor.html b/tests/html_linenos_expected_output/inline_cls_step_2_start_8_special_3_anchor.html
index d6b50fbb..08d3da78 100644
--- a/tests/html_linenos_expected_output/inline_cls_step_2_start_8_special_3_anchor.html
+++ b/tests/html_linenos_expected_output/inline_cls_step_2_start_8_special_3_anchor.html
@@ -1,8 +1,6 @@
<div class="highlight">
- <pre>
- <span></span>
- <span class="linenos"> 8</span><span class="c1"># a</span>
- <span class="linenos special"> </span><span class="c1"># b</span>
- <span class="linenos">10</span><span class="c1"># c</span>
- </pre>
+ <pre><span></span><span class="linenos"> 8</span><span class="c1"># a</span>
+<span class="linenos special"> </span><span class="c1"># b</span>
+<span class="linenos">10</span><span class="c1"># c</span>
+</pre>
</div>
diff --git a/tests/html_linenos_expected_output/inline_cls_step_2_start_8_special_3_noanchor.html b/tests/html_linenos_expected_output/inline_cls_step_2_start_8_special_3_noanchor.html
index d6b50fbb..08d3da78 100644
--- a/tests/html_linenos_expected_output/inline_cls_step_2_start_8_special_3_noanchor.html
+++ b/tests/html_linenos_expected_output/inline_cls_step_2_start_8_special_3_noanchor.html
@@ -1,8 +1,6 @@
<div class="highlight">
- <pre>
- <span></span>
- <span class="linenos"> 8</span><span class="c1"># a</span>
- <span class="linenos special"> </span><span class="c1"># b</span>
- <span class="linenos">10</span><span class="c1"># c</span>
- </pre>
+ <pre><span></span><span class="linenos"> 8</span><span class="c1"># a</span>
+<span class="linenos special"> </span><span class="c1"># b</span>
+<span class="linenos">10</span><span class="c1"># c</span>
+</pre>
</div>
diff --git a/tests/html_linenos_expected_output/inline_nocls_step_1_start_1_special_0_anchor.html b/tests/html_linenos_expected_output/inline_nocls_step_1_start_1_special_0_anchor.html
index 6bbdf29c..24fcc2f6 100644
--- a/tests/html_linenos_expected_output/inline_nocls_step_1_start_1_special_0_anchor.html
+++ b/tests/html_linenos_expected_output/inline_nocls_step_1_start_1_special_0_anchor.html
@@ -1,8 +1,6 @@
<div class="highlight" style="background: #f8f8f8">
- <pre style="line-height: 125%; margin: 0;">
- <span></span>
- <span style="color: #000000; background-color: #f0f0f0; padding: 0 5px 0 5px;">1</span><span style="color: #408080; font-style: italic"># a</span>
- <span style="color: #000000; background-color: #f0f0f0; padding: 0 5px 0 5px;">2</span><span style="color: #408080; font-style: italic"># b</span>
- <span style="color: #000000; background-color: #f0f0f0; padding: 0 5px 0 5px;">3</span><span style="color: #408080; font-style: italic"># c</span>
- </pre>
+ <pre style="line-height: 125%; margin: 0;"><span></span><span style="color: #000000; background-color: #f0f0f0; padding: 0 5px 0 5px;">1</span><span style="color: #408080; font-style: italic"># a</span>
+<span style="color: #000000; background-color: #f0f0f0; padding: 0 5px 0 5px;">2</span><span style="color: #408080; font-style: italic"># b</span>
+<span style="color: #000000; background-color: #f0f0f0; padding: 0 5px 0 5px;">3</span><span style="color: #408080; font-style: italic"># c</span>
+</pre>
</div>
diff --git a/tests/html_linenos_expected_output/inline_nocls_step_1_start_1_special_0_noanchor.html b/tests/html_linenos_expected_output/inline_nocls_step_1_start_1_special_0_noanchor.html
index 6bbdf29c..24fcc2f6 100644
--- a/tests/html_linenos_expected_output/inline_nocls_step_1_start_1_special_0_noanchor.html
+++ b/tests/html_linenos_expected_output/inline_nocls_step_1_start_1_special_0_noanchor.html
@@ -1,8 +1,6 @@
<div class="highlight" style="background: #f8f8f8">
- <pre style="line-height: 125%; margin: 0;">
- <span></span>
- <span style="color: #000000; background-color: #f0f0f0; padding: 0 5px 0 5px;">1</span><span style="color: #408080; font-style: italic"># a</span>
- <span style="color: #000000; background-color: #f0f0f0; padding: 0 5px 0 5px;">2</span><span style="color: #408080; font-style: italic"># b</span>
- <span style="color: #000000; background-color: #f0f0f0; padding: 0 5px 0 5px;">3</span><span style="color: #408080; font-style: italic"># c</span>
- </pre>
+ <pre style="line-height: 125%; margin: 0;"><span></span><span style="color: #000000; background-color: #f0f0f0; padding: 0 5px 0 5px;">1</span><span style="color: #408080; font-style: italic"># a</span>
+<span style="color: #000000; background-color: #f0f0f0; padding: 0 5px 0 5px;">2</span><span style="color: #408080; font-style: italic"># b</span>
+<span style="color: #000000; background-color: #f0f0f0; padding: 0 5px 0 5px;">3</span><span style="color: #408080; font-style: italic"># c</span>
+</pre>
</div>
diff --git a/tests/html_linenos_expected_output/inline_nocls_step_1_start_1_special_3_anchor.html b/tests/html_linenos_expected_output/inline_nocls_step_1_start_1_special_3_anchor.html
index 33b6330f..b6245f11 100644
--- a/tests/html_linenos_expected_output/inline_nocls_step_1_start_1_special_3_anchor.html
+++ b/tests/html_linenos_expected_output/inline_nocls_step_1_start_1_special_3_anchor.html
@@ -1,8 +1,6 @@
<div class="highlight" style="background: #f8f8f8">
- <pre style="line-height: 125%; margin: 0;">
- <span></span>
- <span style="color: #000000; background-color: #f0f0f0; padding: 0 5px 0 5px;">1</span><span style="color: #408080; font-style: italic"># a</span>
- <span style="color: #000000; background-color: #f0f0f0; padding: 0 5px 0 5px;">2</span><span style="color: #408080; font-style: italic"># b</span>
- <span style="color: #000000; background-color: #ffffc0; padding: 0 5px 0 5px;">3</span><span style="color: #408080; font-style: italic"># c</span>
- </pre>
+ <pre style="line-height: 125%; margin: 0;"><span></span><span style="color: #000000; background-color: #f0f0f0; padding: 0 5px 0 5px;">1</span><span style="color: #408080; font-style: italic"># a</span>
+<span style="color: #000000; background-color: #f0f0f0; padding: 0 5px 0 5px;">2</span><span style="color: #408080; font-style: italic"># b</span>
+<span style="color: #000000; background-color: #ffffc0; padding: 0 5px 0 5px;">3</span><span style="color: #408080; font-style: italic"># c</span>
+</pre>
</div>
diff --git a/tests/html_linenos_expected_output/inline_nocls_step_1_start_1_special_3_noanchor.html b/tests/html_linenos_expected_output/inline_nocls_step_1_start_1_special_3_noanchor.html
index 33b6330f..b6245f11 100644
--- a/tests/html_linenos_expected_output/inline_nocls_step_1_start_1_special_3_noanchor.html
+++ b/tests/html_linenos_expected_output/inline_nocls_step_1_start_1_special_3_noanchor.html
@@ -1,8 +1,6 @@
<div class="highlight" style="background: #f8f8f8">
- <pre style="line-height: 125%; margin: 0;">
- <span></span>
- <span style="color: #000000; background-color: #f0f0f0; padding: 0 5px 0 5px;">1</span><span style="color: #408080; font-style: italic"># a</span>
- <span style="color: #000000; background-color: #f0f0f0; padding: 0 5px 0 5px;">2</span><span style="color: #408080; font-style: italic"># b</span>
- <span style="color: #000000; background-color: #ffffc0; padding: 0 5px 0 5px;">3</span><span style="color: #408080; font-style: italic"># c</span>
- </pre>
+ <pre style="line-height: 125%; margin: 0;"><span></span><span style="color: #000000; background-color: #f0f0f0; padding: 0 5px 0 5px;">1</span><span style="color: #408080; font-style: italic"># a</span>
+<span style="color: #000000; background-color: #f0f0f0; padding: 0 5px 0 5px;">2</span><span style="color: #408080; font-style: italic"># b</span>
+<span style="color: #000000; background-color: #ffffc0; padding: 0 5px 0 5px;">3</span><span style="color: #408080; font-style: italic"># c</span>
+</pre>
</div>
diff --git a/tests/html_linenos_expected_output/inline_nocls_step_1_start_8_special_0_anchor.html b/tests/html_linenos_expected_output/inline_nocls_step_1_start_8_special_0_anchor.html
index 2692b73d..d38e2bab 100644
--- a/tests/html_linenos_expected_output/inline_nocls_step_1_start_8_special_0_anchor.html
+++ b/tests/html_linenos_expected_output/inline_nocls_step_1_start_8_special_0_anchor.html
@@ -1,8 +1,6 @@
<div class="highlight" style="background: #f8f8f8">
- <pre style="line-height: 125%; margin: 0;">
- <span></span>
- <span style="color: #000000; background-color: #f0f0f0; padding: 0 5px 0 5px;"> 8</span><span style="color: #408080; font-style: italic"># a</span>
- <span style="color: #000000; background-color: #f0f0f0; padding: 0 5px 0 5px;"> 9</span><span style="color: #408080; font-style: italic"># b</span>
- <span style="color: #000000; background-color: #f0f0f0; padding: 0 5px 0 5px;">10</span><span style="color: #408080; font-style: italic"># c</span>
- </pre>
+ <pre style="line-height: 125%; margin: 0;"><span></span><span style="color: #000000; background-color: #f0f0f0; padding: 0 5px 0 5px;"> 8</span><span style="color: #408080; font-style: italic"># a</span>
+<span style="color: #000000; background-color: #f0f0f0; padding: 0 5px 0 5px;"> 9</span><span style="color: #408080; font-style: italic"># b</span>
+<span style="color: #000000; background-color: #f0f0f0; padding: 0 5px 0 5px;">10</span><span style="color: #408080; font-style: italic"># c</span>
+</pre>
</div>
diff --git a/tests/html_linenos_expected_output/inline_nocls_step_1_start_8_special_0_noanchor.html b/tests/html_linenos_expected_output/inline_nocls_step_1_start_8_special_0_noanchor.html
index 2692b73d..d38e2bab 100644
--- a/tests/html_linenos_expected_output/inline_nocls_step_1_start_8_special_0_noanchor.html
+++ b/tests/html_linenos_expected_output/inline_nocls_step_1_start_8_special_0_noanchor.html
@@ -1,8 +1,6 @@
<div class="highlight" style="background: #f8f8f8">
- <pre style="line-height: 125%; margin: 0;">
- <span></span>
- <span style="color: #000000; background-color: #f0f0f0; padding: 0 5px 0 5px;"> 8</span><span style="color: #408080; font-style: italic"># a</span>
- <span style="color: #000000; background-color: #f0f0f0; padding: 0 5px 0 5px;"> 9</span><span style="color: #408080; font-style: italic"># b</span>
- <span style="color: #000000; background-color: #f0f0f0; padding: 0 5px 0 5px;">10</span><span style="color: #408080; font-style: italic"># c</span>
- </pre>
+ <pre style="line-height: 125%; margin: 0;"><span></span><span style="color: #000000; background-color: #f0f0f0; padding: 0 5px 0 5px;"> 8</span><span style="color: #408080; font-style: italic"># a</span>
+<span style="color: #000000; background-color: #f0f0f0; padding: 0 5px 0 5px;"> 9</span><span style="color: #408080; font-style: italic"># b</span>
+<span style="color: #000000; background-color: #f0f0f0; padding: 0 5px 0 5px;">10</span><span style="color: #408080; font-style: italic"># c</span>
+</pre>
</div>
diff --git a/tests/html_linenos_expected_output/inline_nocls_step_1_start_8_special_3_anchor.html b/tests/html_linenos_expected_output/inline_nocls_step_1_start_8_special_3_anchor.html
index c1a67b5c..332f729f 100644
--- a/tests/html_linenos_expected_output/inline_nocls_step_1_start_8_special_3_anchor.html
+++ b/tests/html_linenos_expected_output/inline_nocls_step_1_start_8_special_3_anchor.html
@@ -1,8 +1,6 @@
<div class="highlight" style="background: #f8f8f8">
- <pre style="line-height: 125%; margin: 0;">
- <span></span>
- <span style="color: #000000; background-color: #f0f0f0; padding: 0 5px 0 5px;"> 8</span><span style="color: #408080; font-style: italic"># a</span>
- <span style="color: #000000; background-color: #ffffc0; padding: 0 5px 0 5px;"> 9</span><span style="color: #408080; font-style: italic"># b</span>
- <span style="color: #000000; background-color: #f0f0f0; padding: 0 5px 0 5px;">10</span><span style="color: #408080; font-style: italic"># c</span>
- </pre>
+ <pre style="line-height: 125%; margin: 0;"><span></span><span style="color: #000000; background-color: #f0f0f0; padding: 0 5px 0 5px;"> 8</span><span style="color: #408080; font-style: italic"># a</span>
+<span style="color: #000000; background-color: #ffffc0; padding: 0 5px 0 5px;"> 9</span><span style="color: #408080; font-style: italic"># b</span>
+<span style="color: #000000; background-color: #f0f0f0; padding: 0 5px 0 5px;">10</span><span style="color: #408080; font-style: italic"># c</span>
+</pre>
</div>
diff --git a/tests/html_linenos_expected_output/inline_nocls_step_1_start_8_special_3_noanchor.html b/tests/html_linenos_expected_output/inline_nocls_step_1_start_8_special_3_noanchor.html
index c1a67b5c..332f729f 100644
--- a/tests/html_linenos_expected_output/inline_nocls_step_1_start_8_special_3_noanchor.html
+++ b/tests/html_linenos_expected_output/inline_nocls_step_1_start_8_special_3_noanchor.html
@@ -1,8 +1,6 @@
<div class="highlight" style="background: #f8f8f8">
- <pre style="line-height: 125%; margin: 0;">
- <span></span>
- <span style="color: #000000; background-color: #f0f0f0; padding: 0 5px 0 5px;"> 8</span><span style="color: #408080; font-style: italic"># a</span>
- <span style="color: #000000; background-color: #ffffc0; padding: 0 5px 0 5px;"> 9</span><span style="color: #408080; font-style: italic"># b</span>
- <span style="color: #000000; background-color: #f0f0f0; padding: 0 5px 0 5px;">10</span><span style="color: #408080; font-style: italic"># c</span>
- </pre>
+ <pre style="line-height: 125%; margin: 0;"><span></span><span style="color: #000000; background-color: #f0f0f0; padding: 0 5px 0 5px;"> 8</span><span style="color: #408080; font-style: italic"># a</span>
+<span style="color: #000000; background-color: #ffffc0; padding: 0 5px 0 5px;"> 9</span><span style="color: #408080; font-style: italic"># b</span>
+<span style="color: #000000; background-color: #f0f0f0; padding: 0 5px 0 5px;">10</span><span style="color: #408080; font-style: italic"># c</span>
+</pre>
</div>
diff --git a/tests/html_linenos_expected_output/inline_nocls_step_2_start_1_special_0_anchor.html b/tests/html_linenos_expected_output/inline_nocls_step_2_start_1_special_0_anchor.html
index 1a9e833a..9367b9c0 100644
--- a/tests/html_linenos_expected_output/inline_nocls_step_2_start_1_special_0_anchor.html
+++ b/tests/html_linenos_expected_output/inline_nocls_step_2_start_1_special_0_anchor.html
@@ -1,8 +1,6 @@
<div class="highlight" style="background: #f8f8f8">
- <pre style="line-height: 125%; margin: 0;">
- <span></span>
- <span style="color: #000000; background-color: #f0f0f0; padding: 0 5px 0 5px;"> </span><span style="color: #408080; font-style: italic"># a</span>
- <span style="color: #000000; background-color: #f0f0f0; padding: 0 5px 0 5px;">2</span><span style="color: #408080; font-style: italic"># b</span>
- <span style="color: #000000; background-color: #f0f0f0; padding: 0 5px 0 5px;"> </span><span style="color: #408080; font-style: italic"># c</span>
- </pre>
+ <pre style="line-height: 125%; margin: 0;"><span></span><span style="color: #000000; background-color: #f0f0f0; padding: 0 5px 0 5px;"> </span><span style="color: #408080; font-style: italic"># a</span>
+<span style="color: #000000; background-color: #f0f0f0; padding: 0 5px 0 5px;">2</span><span style="color: #408080; font-style: italic"># b</span>
+<span style="color: #000000; background-color: #f0f0f0; padding: 0 5px 0 5px;"> </span><span style="color: #408080; font-style: italic"># c</span>
+</pre>
</div>
diff --git a/tests/html_linenos_expected_output/inline_nocls_step_2_start_1_special_0_noanchor.html b/tests/html_linenos_expected_output/inline_nocls_step_2_start_1_special_0_noanchor.html
index 1a9e833a..9367b9c0 100644
--- a/tests/html_linenos_expected_output/inline_nocls_step_2_start_1_special_0_noanchor.html
+++ b/tests/html_linenos_expected_output/inline_nocls_step_2_start_1_special_0_noanchor.html
@@ -1,8 +1,6 @@
<div class="highlight" style="background: #f8f8f8">
- <pre style="line-height: 125%; margin: 0;">
- <span></span>
- <span style="color: #000000; background-color: #f0f0f0; padding: 0 5px 0 5px;"> </span><span style="color: #408080; font-style: italic"># a</span>
- <span style="color: #000000; background-color: #f0f0f0; padding: 0 5px 0 5px;">2</span><span style="color: #408080; font-style: italic"># b</span>
- <span style="color: #000000; background-color: #f0f0f0; padding: 0 5px 0 5px;"> </span><span style="color: #408080; font-style: italic"># c</span>
- </pre>
+ <pre style="line-height: 125%; margin: 0;"><span></span><span style="color: #000000; background-color: #f0f0f0; padding: 0 5px 0 5px;"> </span><span style="color: #408080; font-style: italic"># a</span>
+<span style="color: #000000; background-color: #f0f0f0; padding: 0 5px 0 5px;">2</span><span style="color: #408080; font-style: italic"># b</span>
+<span style="color: #000000; background-color: #f0f0f0; padding: 0 5px 0 5px;"> </span><span style="color: #408080; font-style: italic"># c</span>
+</pre>
</div>
diff --git a/tests/html_linenos_expected_output/inline_nocls_step_2_start_1_special_3_anchor.html b/tests/html_linenos_expected_output/inline_nocls_step_2_start_1_special_3_anchor.html
index cd89d205..6843e0f7 100644
--- a/tests/html_linenos_expected_output/inline_nocls_step_2_start_1_special_3_anchor.html
+++ b/tests/html_linenos_expected_output/inline_nocls_step_2_start_1_special_3_anchor.html
@@ -1,8 +1,6 @@
<div class="highlight" style="background: #f8f8f8">
- <pre style="line-height: 125%; margin: 0;">
- <span></span>
- <span style="color: #000000; background-color: #f0f0f0; padding: 0 5px 0 5px;"> </span><span style="color: #408080; font-style: italic"># a</span>
- <span style="color: #000000; background-color: #f0f0f0; padding: 0 5px 0 5px;">2</span><span style="color: #408080; font-style: italic"># b</span>
- <span style="color: #000000; background-color: #ffffc0; padding: 0 5px 0 5px;"> </span><span style="color: #408080; font-style: italic"># c</span>
- </pre>
+ <pre style="line-height: 125%; margin: 0;"><span></span><span style="color: #000000; background-color: #f0f0f0; padding: 0 5px 0 5px;"> </span><span style="color: #408080; font-style: italic"># a</span>
+<span style="color: #000000; background-color: #f0f0f0; padding: 0 5px 0 5px;">2</span><span style="color: #408080; font-style: italic"># b</span>
+<span style="color: #000000; background-color: #ffffc0; padding: 0 5px 0 5px;"> </span><span style="color: #408080; font-style: italic"># c</span>
+</pre>
</div>
diff --git a/tests/html_linenos_expected_output/inline_nocls_step_2_start_1_special_3_noanchor.html b/tests/html_linenos_expected_output/inline_nocls_step_2_start_1_special_3_noanchor.html
index cd89d205..6843e0f7 100644
--- a/tests/html_linenos_expected_output/inline_nocls_step_2_start_1_special_3_noanchor.html
+++ b/tests/html_linenos_expected_output/inline_nocls_step_2_start_1_special_3_noanchor.html
@@ -1,8 +1,6 @@
<div class="highlight" style="background: #f8f8f8">
- <pre style="line-height: 125%; margin: 0;">
- <span></span>
- <span style="color: #000000; background-color: #f0f0f0; padding: 0 5px 0 5px;"> </span><span style="color: #408080; font-style: italic"># a</span>
- <span style="color: #000000; background-color: #f0f0f0; padding: 0 5px 0 5px;">2</span><span style="color: #408080; font-style: italic"># b</span>
- <span style="color: #000000; background-color: #ffffc0; padding: 0 5px 0 5px;"> </span><span style="color: #408080; font-style: italic"># c</span>
- </pre>
+ <pre style="line-height: 125%; margin: 0;"><span></span><span style="color: #000000; background-color: #f0f0f0; padding: 0 5px 0 5px;"> </span><span style="color: #408080; font-style: italic"># a</span>
+<span style="color: #000000; background-color: #f0f0f0; padding: 0 5px 0 5px;">2</span><span style="color: #408080; font-style: italic"># b</span>
+<span style="color: #000000; background-color: #ffffc0; padding: 0 5px 0 5px;"> </span><span style="color: #408080; font-style: italic"># c</span>
+</pre>
</div>
diff --git a/tests/html_linenos_expected_output/inline_nocls_step_2_start_8_special_0_anchor.html b/tests/html_linenos_expected_output/inline_nocls_step_2_start_8_special_0_anchor.html
index 8f078e42..09bf305b 100644
--- a/tests/html_linenos_expected_output/inline_nocls_step_2_start_8_special_0_anchor.html
+++ b/tests/html_linenos_expected_output/inline_nocls_step_2_start_8_special_0_anchor.html
@@ -1,8 +1,6 @@
<div class="highlight" style="background: #f8f8f8">
- <pre style="line-height: 125%; margin: 0;">
- <span></span>
- <span style="color: #000000; background-color: #f0f0f0; padding: 0 5px 0 5px;"> 8</span><span style="color: #408080; font-style: italic"># a</span>
- <span style="color: #000000; background-color: #f0f0f0; padding: 0 5px 0 5px;"> </span><span style="color: #408080; font-style: italic"># b</span>
- <span style="color: #000000; background-color: #f0f0f0; padding: 0 5px 0 5px;">10</span><span style="color: #408080; font-style: italic"># c</span>
- </pre>
+ <pre style="line-height: 125%; margin: 0;"><span></span><span style="color: #000000; background-color: #f0f0f0; padding: 0 5px 0 5px;"> 8</span><span style="color: #408080; font-style: italic"># a</span>
+<span style="color: #000000; background-color: #f0f0f0; padding: 0 5px 0 5px;"> </span><span style="color: #408080; font-style: italic"># b</span>
+<span style="color: #000000; background-color: #f0f0f0; padding: 0 5px 0 5px;">10</span><span style="color: #408080; font-style: italic"># c</span>
+</pre>
</div>
diff --git a/tests/html_linenos_expected_output/inline_nocls_step_2_start_8_special_0_noanchor.html b/tests/html_linenos_expected_output/inline_nocls_step_2_start_8_special_0_noanchor.html
index 8f078e42..09bf305b 100644
--- a/tests/html_linenos_expected_output/inline_nocls_step_2_start_8_special_0_noanchor.html
+++ b/tests/html_linenos_expected_output/inline_nocls_step_2_start_8_special_0_noanchor.html
@@ -1,8 +1,6 @@
<div class="highlight" style="background: #f8f8f8">
- <pre style="line-height: 125%; margin: 0;">
- <span></span>
- <span style="color: #000000; background-color: #f0f0f0; padding: 0 5px 0 5px;"> 8</span><span style="color: #408080; font-style: italic"># a</span>
- <span style="color: #000000; background-color: #f0f0f0; padding: 0 5px 0 5px;"> </span><span style="color: #408080; font-style: italic"># b</span>
- <span style="color: #000000; background-color: #f0f0f0; padding: 0 5px 0 5px;">10</span><span style="color: #408080; font-style: italic"># c</span>
- </pre>
+ <pre style="line-height: 125%; margin: 0;"><span></span><span style="color: #000000; background-color: #f0f0f0; padding: 0 5px 0 5px;"> 8</span><span style="color: #408080; font-style: italic"># a</span>
+<span style="color: #000000; background-color: #f0f0f0; padding: 0 5px 0 5px;"> </span><span style="color: #408080; font-style: italic"># b</span>
+<span style="color: #000000; background-color: #f0f0f0; padding: 0 5px 0 5px;">10</span><span style="color: #408080; font-style: italic"># c</span>
+</pre>
</div>
diff --git a/tests/html_linenos_expected_output/inline_nocls_step_2_start_8_special_3_anchor.html b/tests/html_linenos_expected_output/inline_nocls_step_2_start_8_special_3_anchor.html
index 7d0ecdd8..39d7fc06 100644
--- a/tests/html_linenos_expected_output/inline_nocls_step_2_start_8_special_3_anchor.html
+++ b/tests/html_linenos_expected_output/inline_nocls_step_2_start_8_special_3_anchor.html
@@ -1,8 +1,6 @@
<div class="highlight" style="background: #f8f8f8">
- <pre style="line-height: 125%; margin: 0;">
- <span></span>
- <span style="color: #000000; background-color: #f0f0f0; padding: 0 5px 0 5px;"> 8</span><span style="color: #408080; font-style: italic"># a</span>
- <span style="color: #000000; background-color: #ffffc0; padding: 0 5px 0 5px;"> </span><span style="color: #408080; font-style: italic"># b</span>
- <span style="color: #000000; background-color: #f0f0f0; padding: 0 5px 0 5px;">10</span><span style="color: #408080; font-style: italic"># c</span>
- </pre>
+ <pre style="line-height: 125%; margin: 0;"><span></span><span style="color: #000000; background-color: #f0f0f0; padding: 0 5px 0 5px;"> 8</span><span style="color: #408080; font-style: italic"># a</span>
+<span style="color: #000000; background-color: #ffffc0; padding: 0 5px 0 5px;"> </span><span style="color: #408080; font-style: italic"># b</span>
+<span style="color: #000000; background-color: #f0f0f0; padding: 0 5px 0 5px;">10</span><span style="color: #408080; font-style: italic"># c</span>
+</pre>
</div>
diff --git a/tests/html_linenos_expected_output/inline_nocls_step_2_start_8_special_3_noanchor.html b/tests/html_linenos_expected_output/inline_nocls_step_2_start_8_special_3_noanchor.html
index 7d0ecdd8..39d7fc06 100644
--- a/tests/html_linenos_expected_output/inline_nocls_step_2_start_8_special_3_noanchor.html
+++ b/tests/html_linenos_expected_output/inline_nocls_step_2_start_8_special_3_noanchor.html
@@ -1,8 +1,6 @@
<div class="highlight" style="background: #f8f8f8">
- <pre style="line-height: 125%; margin: 0;">
- <span></span>
- <span style="color: #000000; background-color: #f0f0f0; padding: 0 5px 0 5px;"> 8</span><span style="color: #408080; font-style: italic"># a</span>
- <span style="color: #000000; background-color: #ffffc0; padding: 0 5px 0 5px;"> </span><span style="color: #408080; font-style: italic"># b</span>
- <span style="color: #000000; background-color: #f0f0f0; padding: 0 5px 0 5px;">10</span><span style="color: #408080; font-style: italic"># c</span>
- </pre>
+ <pre style="line-height: 125%; margin: 0;"><span></span><span style="color: #000000; background-color: #f0f0f0; padding: 0 5px 0 5px;"> 8</span><span style="color: #408080; font-style: italic"># a</span>
+<span style="color: #000000; background-color: #ffffc0; padding: 0 5px 0 5px;"> </span><span style="color: #408080; font-style: italic"># b</span>
+<span style="color: #000000; background-color: #f0f0f0; padding: 0 5px 0 5px;">10</span><span style="color: #408080; font-style: italic"># c</span>
+</pre>
</div>
diff --git a/tests/html_linenos_expected_output/table_cls_step_1_start_1_special_0_anchor.html b/tests/html_linenos_expected_output/table_cls_step_1_start_1_special_0_anchor.html
index a34780d8..36653531 100644
--- a/tests/html_linenos_expected_output/table_cls_step_1_start_1_special_0_anchor.html
+++ b/tests/html_linenos_expected_output/table_cls_step_1_start_1_special_0_anchor.html
@@ -1,21 +1,19 @@
<table class="highlighttable">
- <tr>
- <td class="linenos">
- <div class="linenodiv">
- <pre><a href="#-1">1</a></pre>
- <pre><a href="#-2">2</a></pre>
- <pre><a href="#-3">3</a></pre>
- </div>
- </td>
- <td class="code">
- <div class="highlight">
- <pre>
- <span></span>
- <span class="c1"># a</span>
- <span class="c1"># b</span>
- <span class="c1"># c</span>
- </pre>
- </div>
- </td>
- </tr>
-</table>
+ <tr>
+ <td class="linenos">
+ <div class="linenodiv">
+ <pre><a href="#-1">1</a>
+<a href="#-2">2</a>
+<a href="#-3">3</a></pre>
+ </div>
+ </td>
+ <td class="code">
+ <div class="highlight">
+ <pre><span></span><span class="c1"># a</span>
+<span class="c1"># b</span>
+<span class="c1"># c</span>
+</pre>
+ </div>
+ </td>
+ </tr>
+</table> \ No newline at end of file
diff --git a/tests/html_linenos_expected_output/table_cls_step_1_start_1_special_0_noanchor.html b/tests/html_linenos_expected_output/table_cls_step_1_start_1_special_0_noanchor.html
index 04037b2c..ddc7594e 100644
--- a/tests/html_linenos_expected_output/table_cls_step_1_start_1_special_0_noanchor.html
+++ b/tests/html_linenos_expected_output/table_cls_step_1_start_1_special_0_noanchor.html
@@ -1,21 +1,19 @@
<table class="highlighttable">
- <tr>
- <td class="linenos">
- <div class="linenodiv">
- <pre>1</pre>
- <pre>2</pre>
- <pre>3</pre>
- </div>
- </td>
- <td class="code">
- <div class="highlight">
- <pre>
- <span></span>
- <span class="c1"># a</span>
- <span class="c1"># b</span>
- <span class="c1"># c</span>
- </pre>
- </div>
- </td>
- </tr>
-</table>
+ <tr>
+ <td class="linenos">
+ <div class="linenodiv">
+ <pre>1
+2
+3</pre>
+ </div>
+ </td>
+ <td class="code">
+ <div class="highlight">
+ <pre><span></span><span class="c1"># a</span>
+<span class="c1"># b</span>
+<span class="c1"># c</span>
+</pre>
+ </div>
+ </td>
+ </tr>
+</table> \ No newline at end of file
diff --git a/tests/html_linenos_expected_output/table_cls_step_1_start_1_special_3_anchor.html b/tests/html_linenos_expected_output/table_cls_step_1_start_1_special_3_anchor.html
index c6f89066..ab0fdfb3 100644
--- a/tests/html_linenos_expected_output/table_cls_step_1_start_1_special_3_anchor.html
+++ b/tests/html_linenos_expected_output/table_cls_step_1_start_1_special_3_anchor.html
@@ -1,21 +1,19 @@
<table class="highlighttable">
- <tr>
- <td class="linenos">
- <div class="linenodiv">
- <pre><a href="#-1">1</a></pre>
- <pre><a href="#-2">2</a></pre>
- <pre class="special"><a href="#-3">3</a></pre>
- </div>
- </td>
- <td class="code">
- <div class="highlight">
- <pre>
- <span></span>
- <span class="c1"># a</span>
- <span class="c1"># b</span>
- <span class="c1"># c</span>
- </pre>
- </div>
- </td>
- </tr>
-</table>
+ <tr>
+ <td class="linenos">
+ <div class="linenodiv">
+ <pre><a href="#-1">1</a>
+<a href="#-2">2</a>
+<span class="special"><a href="#-3">3</a></span></pre>
+ </div>
+ </td>
+ <td class="code">
+ <div class="highlight">
+ <pre><span></span><span class="c1"># a</span>
+<span class="c1"># b</span>
+<span class="c1"># c</span>
+</pre>
+ </div>
+ </td>
+ </tr>
+</table> \ No newline at end of file
diff --git a/tests/html_linenos_expected_output/table_cls_step_1_start_1_special_3_noanchor.html b/tests/html_linenos_expected_output/table_cls_step_1_start_1_special_3_noanchor.html
index 464b1b32..1f3c424b 100644
--- a/tests/html_linenos_expected_output/table_cls_step_1_start_1_special_3_noanchor.html
+++ b/tests/html_linenos_expected_output/table_cls_step_1_start_1_special_3_noanchor.html
@@ -1,21 +1,19 @@
<table class="highlighttable">
- <tr>
- <td class="linenos">
- <div class="linenodiv">
- <pre>1</pre>
- <pre>2</pre>
- <pre class="special">3</pre>
- </div>
- </td>
- <td class="code">
- <div class="highlight">
- <pre>
- <span></span>
- <span class="c1"># a</span>
- <span class="c1"># b</span>
- <span class="c1"># c</span>
- </pre>
- </div>
- </td>
- </tr>
-</table>
+ <tr>
+ <td class="linenos">
+ <div class="linenodiv">
+ <pre>1
+2
+<span class="special">3</span></pre>
+ </div>
+ </td>
+ <td class="code">
+ <div class="highlight">
+ <pre><span></span><span class="c1"># a</span>
+<span class="c1"># b</span>
+<span class="c1"># c</span>
+</pre>
+ </div>
+ </td>
+ </tr>
+</table> \ No newline at end of file
diff --git a/tests/html_linenos_expected_output/table_cls_step_1_start_8_special_0_anchor.html b/tests/html_linenos_expected_output/table_cls_step_1_start_8_special_0_anchor.html
index 0eacbbaf..03ff1442 100644
--- a/tests/html_linenos_expected_output/table_cls_step_1_start_8_special_0_anchor.html
+++ b/tests/html_linenos_expected_output/table_cls_step_1_start_8_special_0_anchor.html
@@ -1,21 +1,19 @@
<table class="highlighttable">
- <tr>
- <td class="linenos">
- <div class="linenodiv">
- <pre><a href="#-8"> 8</a></pre>
- <pre><a href="#-9"> 9</a></pre>
- <pre><a href="#-10">10</a></pre>
- </div>
- </td>
- <td class="code">
- <div class="highlight">
- <pre>
- <span></span>
- <span class="c1"># a</span>
- <span class="c1"># b</span>
- <span class="c1"># c</span>
- </pre>
- </div>
- </td>
- </tr>
-</table>
+ <tr>
+ <td class="linenos">
+ <div class="linenodiv">
+ <pre><a href="#-8"> 8</a>
+<a href="#-9"> 9</a>
+<a href="#-10">10</a></pre>
+ </div>
+ </td>
+ <td class="code">
+ <div class="highlight">
+ <pre><span></span><span class="c1"># a</span>
+<span class="c1"># b</span>
+<span class="c1"># c</span>
+</pre>
+ </div>
+ </td>
+ </tr>
+</table> \ No newline at end of file
diff --git a/tests/html_linenos_expected_output/table_cls_step_1_start_8_special_0_noanchor.html b/tests/html_linenos_expected_output/table_cls_step_1_start_8_special_0_noanchor.html
index c274351c..b8bf29ee 100644
--- a/tests/html_linenos_expected_output/table_cls_step_1_start_8_special_0_noanchor.html
+++ b/tests/html_linenos_expected_output/table_cls_step_1_start_8_special_0_noanchor.html
@@ -1,21 +1,19 @@
<table class="highlighttable">
- <tr>
- <td class="linenos">
- <div class="linenodiv">
- <pre> 8</pre>
- <pre> 9</pre>
- <pre>10</pre>
- </div>
- </td>
- <td class="code">
- <div class="highlight">
- <pre>
- <span></span>
- <span class="c1"># a</span>
- <span class="c1"># b</span>
- <span class="c1"># c</span>
- </pre>
- </div>
- </td>
- </tr>
-</table>
+ <tr>
+ <td class="linenos">
+ <div class="linenodiv">
+ <pre> 8
+ 9
+10</pre>
+ </div>
+ </td>
+ <td class="code">
+ <div class="highlight">
+ <pre><span></span><span class="c1"># a</span>
+<span class="c1"># b</span>
+<span class="c1"># c</span>
+</pre>
+ </div>
+ </td>
+ </tr>
+</table> \ No newline at end of file
diff --git a/tests/html_linenos_expected_output/table_cls_step_1_start_8_special_3_anchor.html b/tests/html_linenos_expected_output/table_cls_step_1_start_8_special_3_anchor.html
index 95521c15..98d1abbb 100644
--- a/tests/html_linenos_expected_output/table_cls_step_1_start_8_special_3_anchor.html
+++ b/tests/html_linenos_expected_output/table_cls_step_1_start_8_special_3_anchor.html
@@ -1,21 +1,19 @@
<table class="highlighttable">
- <tr>
- <td class="linenos">
- <div class="linenodiv">
- <pre><a href="#-8"> 8</a></pre>
- <pre class="special"><a href="#-9"> 9</a></pre>
- <pre><a href="#-10">10</a></pre>
- </div>
- </td>
- <td class="code">
- <div class="highlight">
- <pre>
- <span></span>
- <span class="c1"># a</span>
- <span class="c1"># b</span>
- <span class="c1"># c</span>
- </pre>
- </div>
- </td>
- </tr>
-</table>
+ <tr>
+ <td class="linenos">
+ <div class="linenodiv">
+ <pre><a href="#-8"> 8</a>
+<span class="special"><a href="#-9"> 9</a></span>
+<a href="#-10">10</a></pre>
+ </div>
+ </td>
+ <td class="code">
+ <div class="highlight">
+ <pre><span></span><span class="c1"># a</span>
+<span class="c1"># b</span>
+<span class="c1"># c</span>
+</pre>
+ </div>
+ </td>
+ </tr>
+</table> \ No newline at end of file
diff --git a/tests/html_linenos_expected_output/table_cls_step_1_start_8_special_3_noanchor.html b/tests/html_linenos_expected_output/table_cls_step_1_start_8_special_3_noanchor.html
index 07601811..67642acd 100644
--- a/tests/html_linenos_expected_output/table_cls_step_1_start_8_special_3_noanchor.html
+++ b/tests/html_linenos_expected_output/table_cls_step_1_start_8_special_3_noanchor.html
@@ -1,21 +1,19 @@
<table class="highlighttable">
- <tr>
- <td class="linenos">
- <div class="linenodiv">
- <pre> 8</pre>
- <pre class="special"> 9</pre>
- <pre>10</pre>
- </div>
- </td>
- <td class="code">
- <div class="highlight">
- <pre>
- <span></span>
- <span class="c1"># a</span>
- <span class="c1"># b</span>
- <span class="c1"># c</span>
- </pre>
- </div>
- </td>
- </tr>
-</table>
+ <tr>
+ <td class="linenos">
+ <div class="linenodiv">
+ <pre> 8
+<span class="special"> 9</span>
+10</pre>
+ </div>
+ </td>
+ <td class="code">
+ <div class="highlight">
+ <pre><span></span><span class="c1"># a</span>
+<span class="c1"># b</span>
+<span class="c1"># c</span>
+</pre>
+ </div>
+ </td>
+ </tr>
+</table> \ No newline at end of file
diff --git a/tests/html_linenos_expected_output/table_cls_step_2_start_1_special_0_anchor.html b/tests/html_linenos_expected_output/table_cls_step_2_start_1_special_0_anchor.html
index 566d4bcc..80e5bdb1 100644
--- a/tests/html_linenos_expected_output/table_cls_step_2_start_1_special_0_anchor.html
+++ b/tests/html_linenos_expected_output/table_cls_step_2_start_1_special_0_anchor.html
@@ -1,21 +1,19 @@
<table class="highlighttable">
- <tr>
- <td class="linenos">
- <div class="linenodiv">
- <pre> </pre>
- <pre><a href="#-2">2</a></pre>
- <pre> </pre>
- </div>
- </td>
- <td class="code">
- <div class="highlight">
- <pre>
- <span></span>
- <span class="c1"># a</span>
- <span class="c1"># b</span>
- <span class="c1"># c</span>
- </pre>
- </div>
- </td>
- </tr>
-</table>
+ <tr>
+ <td class="linenos">
+ <div class="linenodiv">
+ <pre>
+<a href="#-2">2</a>
+ </pre>
+ </div>
+ </td>
+ <td class="code">
+ <div class="highlight">
+ <pre><span></span><span class="c1"># a</span>
+<span class="c1"># b</span>
+<span class="c1"># c</span>
+</pre>
+ </div>
+ </td>
+ </tr>
+</table> \ No newline at end of file
diff --git a/tests/html_linenos_expected_output/table_cls_step_2_start_1_special_0_noanchor.html b/tests/html_linenos_expected_output/table_cls_step_2_start_1_special_0_noanchor.html
index b19d9af6..a95ad95f 100644
--- a/tests/html_linenos_expected_output/table_cls_step_2_start_1_special_0_noanchor.html
+++ b/tests/html_linenos_expected_output/table_cls_step_2_start_1_special_0_noanchor.html
@@ -1,21 +1,19 @@
<table class="highlighttable">
- <tr>
- <td class="linenos">
- <div class="linenodiv">
- <pre> </pre>
- <pre>2</pre>
- <pre> </pre>
- </div>
- </td>
- <td class="code">
- <div class="highlight">
- <pre>
- <span></span>
- <span class="c1"># a</span>
- <span class="c1"># b</span>
- <span class="c1"># c</span>
- </pre>
- </div>
- </td>
- </tr>
-</table>
+ <tr>
+ <td class="linenos">
+ <div class="linenodiv">
+ <pre>
+2
+ </pre>
+ </div>
+ </td>
+ <td class="code">
+ <div class="highlight">
+ <pre><span></span><span class="c1"># a</span>
+<span class="c1"># b</span>
+<span class="c1"># c</span>
+</pre>
+ </div>
+ </td>
+ </tr>
+</table> \ No newline at end of file
diff --git a/tests/html_linenos_expected_output/table_cls_step_2_start_1_special_3_anchor.html b/tests/html_linenos_expected_output/table_cls_step_2_start_1_special_3_anchor.html
index 921c13c9..1a4cc59f 100644
--- a/tests/html_linenos_expected_output/table_cls_step_2_start_1_special_3_anchor.html
+++ b/tests/html_linenos_expected_output/table_cls_step_2_start_1_special_3_anchor.html
@@ -1,21 +1,19 @@
<table class="highlighttable">
- <tr>
- <td class="linenos">
- <div class="linenodiv">
- <pre> </pre>
- <pre><a href="#-2">2</a></pre>
- <pre class="special"> </pre>
- </div>
- </td>
- <td class="code">
- <div class="highlight">
- <pre>
- <span></span>
- <span class="c1"># a</span>
- <span class="c1"># b</span>
- <span class="c1"># c</span>
- </pre>
- </div>
- </td>
- </tr>
-</table>
+ <tr>
+ <td class="linenos">
+ <div class="linenodiv">
+ <pre>
+<a href="#-2">2</a>
+<span class="special"> </span></pre>
+ </div>
+ </td>
+ <td class="code">
+ <div class="highlight">
+ <pre><span></span><span class="c1"># a</span>
+<span class="c1"># b</span>
+<span class="c1"># c</span>
+</pre>
+ </div>
+ </td>
+ </tr>
+</table> \ No newline at end of file
diff --git a/tests/html_linenos_expected_output/table_cls_step_2_start_1_special_3_noanchor.html b/tests/html_linenos_expected_output/table_cls_step_2_start_1_special_3_noanchor.html
index dd177c14..34b469e9 100644
--- a/tests/html_linenos_expected_output/table_cls_step_2_start_1_special_3_noanchor.html
+++ b/tests/html_linenos_expected_output/table_cls_step_2_start_1_special_3_noanchor.html
@@ -1,21 +1,19 @@
<table class="highlighttable">
- <tr>
- <td class="linenos">
- <div class="linenodiv">
- <pre> </pre>
- <pre>2</pre>
- <pre class="special"> </pre>
- </div>
- </td>
- <td class="code">
- <div class="highlight">
- <pre>
- <span></span>
- <span class="c1"># a</span>
- <span class="c1"># b</span>
- <span class="c1"># c</span>
- </pre>
- </div>
- </td>
- </tr>
-</table>
+ <tr>
+ <td class="linenos">
+ <div class="linenodiv">
+ <pre>
+2
+<span class="special"> </span></pre>
+ </div>
+ </td>
+ <td class="code">
+ <div class="highlight">
+ <pre><span></span><span class="c1"># a</span>
+<span class="c1"># b</span>
+<span class="c1"># c</span>
+</pre>
+ </div>
+ </td>
+ </tr>
+</table> \ No newline at end of file
diff --git a/tests/html_linenos_expected_output/table_cls_step_2_start_8_special_0_anchor.html b/tests/html_linenos_expected_output/table_cls_step_2_start_8_special_0_anchor.html
index a3091766..3bbb52a5 100644
--- a/tests/html_linenos_expected_output/table_cls_step_2_start_8_special_0_anchor.html
+++ b/tests/html_linenos_expected_output/table_cls_step_2_start_8_special_0_anchor.html
@@ -1,21 +1,19 @@
<table class="highlighttable">
- <tr>
- <td class="linenos">
- <div class="linenodiv">
- <pre><a href="#-8"> 8</a></pre>
- <pre> </pre>
- <pre><a href="#-10">10</a></pre>
- </div>
- </td>
- <td class="code">
- <div class="highlight">
- <pre>
- <span></span>
- <span class="c1"># a</span>
- <span class="c1"># b</span>
- <span class="c1"># c</span>
- </pre>
- </div>
- </td>
- </tr>
-</table>
+ <tr>
+ <td class="linenos">
+ <div class="linenodiv">
+ <pre><a href="#-8"> 8</a>
+
+<a href="#-10">10</a></pre>
+ </div>
+ </td>
+ <td class="code">
+ <div class="highlight">
+ <pre><span></span><span class="c1"># a</span>
+<span class="c1"># b</span>
+<span class="c1"># c</span>
+</pre>
+ </div>
+ </td>
+ </tr>
+</table> \ No newline at end of file
diff --git a/tests/html_linenos_expected_output/table_cls_step_2_start_8_special_0_noanchor.html b/tests/html_linenos_expected_output/table_cls_step_2_start_8_special_0_noanchor.html
index cfc968ff..907c06fc 100644
--- a/tests/html_linenos_expected_output/table_cls_step_2_start_8_special_0_noanchor.html
+++ b/tests/html_linenos_expected_output/table_cls_step_2_start_8_special_0_noanchor.html
@@ -1,21 +1,19 @@
<table class="highlighttable">
- <tr>
- <td class="linenos">
- <div class="linenodiv">
- <pre> 8</pre>
- <pre> </pre>
- <pre>10</pre>
- </div>
- </td>
- <td class="code">
- <div class="highlight">
- <pre>
- <span></span>
- <span class="c1"># a</span>
- <span class="c1"># b</span>
- <span class="c1"># c</span>
- </pre>
- </div>
- </td>
- </tr>
-</table>
+ <tr>
+ <td class="linenos">
+ <div class="linenodiv">
+ <pre> 8
+
+10</pre>
+ </div>
+ </td>
+ <td class="code">
+ <div class="highlight">
+ <pre><span></span><span class="c1"># a</span>
+<span class="c1"># b</span>
+<span class="c1"># c</span>
+</pre>
+ </div>
+ </td>
+ </tr>
+</table> \ No newline at end of file
diff --git a/tests/html_linenos_expected_output/table_cls_step_2_start_8_special_3_anchor.html b/tests/html_linenos_expected_output/table_cls_step_2_start_8_special_3_anchor.html
index 1b30e6d7..a39b486d 100644
--- a/tests/html_linenos_expected_output/table_cls_step_2_start_8_special_3_anchor.html
+++ b/tests/html_linenos_expected_output/table_cls_step_2_start_8_special_3_anchor.html
@@ -1,21 +1,19 @@
<table class="highlighttable">
- <tr>
- <td class="linenos">
- <div class="linenodiv">
- <pre><a href="#-8"> 8</a></pre>
- <pre class="special"> </pre>
- <pre><a href="#-10">10</a></pre>
- </div>
- </td>
- <td class="code">
- <div class="highlight">
- <pre>
- <span></span>
- <span class="c1"># a</span>
- <span class="c1"># b</span>
- <span class="c1"># c</span>
- </pre>
- </div>
- </td>
- </tr>
-</table>
+ <tr>
+ <td class="linenos">
+ <div class="linenodiv">
+ <pre><a href="#-8"> 8</a>
+<span class="special"> </span>
+<a href="#-10">10</a></pre>
+ </div>
+ </td>
+ <td class="code">
+ <div class="highlight">
+ <pre><span></span><span class="c1"># a</span>
+<span class="c1"># b</span>
+<span class="c1"># c</span>
+</pre>
+ </div>
+ </td>
+ </tr>
+</table> \ No newline at end of file
diff --git a/tests/html_linenos_expected_output/table_cls_step_2_start_8_special_3_noanchor.html b/tests/html_linenos_expected_output/table_cls_step_2_start_8_special_3_noanchor.html
index 2a5ebde0..25bde602 100644
--- a/tests/html_linenos_expected_output/table_cls_step_2_start_8_special_3_noanchor.html
+++ b/tests/html_linenos_expected_output/table_cls_step_2_start_8_special_3_noanchor.html
@@ -1,21 +1,19 @@
<table class="highlighttable">
- <tr>
- <td class="linenos">
- <div class="linenodiv">
- <pre> 8</pre>
- <pre class="special"> </pre>
- <pre>10</pre>
- </div>
- </td>
- <td class="code">
- <div class="highlight">
- <pre>
- <span></span>
- <span class="c1"># a</span>
- <span class="c1"># b</span>
- <span class="c1"># c</span>
- </pre>
- </div>
- </td>
- </tr>
-</table>
+ <tr>
+ <td class="linenos">
+ <div class="linenodiv">
+ <pre> 8
+<span class="special"> </span>
+10</pre>
+ </div>
+ </td>
+ <td class="code">
+ <div class="highlight">
+ <pre><span></span><span class="c1"># a</span>
+<span class="c1"># b</span>
+<span class="c1"># c</span>
+</pre>
+ </div>
+ </td>
+ </tr>
+</table> \ No newline at end of file
diff --git a/tests/html_linenos_expected_output/table_nocls_step_1_start_1_special_0_anchor.html b/tests/html_linenos_expected_output/table_nocls_step_1_start_1_special_0_anchor.html
index 86ce85ea..92e4e413 100644
--- a/tests/html_linenos_expected_output/table_nocls_step_1_start_1_special_0_anchor.html
+++ b/tests/html_linenos_expected_output/table_nocls_step_1_start_1_special_0_anchor.html
@@ -1,21 +1,19 @@
<table class="highlighttable">
- <tr>
- <td class="linenos">
- <div class="linenodiv">
- <pre style="color: #000000; background-color: #f0f0f0; padding: 0 5px 0 5px;"><a href="#-1">1</a></pre>
- <pre style="color: #000000; background-color: #f0f0f0; padding: 0 5px 0 5px;"><a href="#-2">2</a></pre>
- <pre style="color: #000000; background-color: #f0f0f0; padding: 0 5px 0 5px;"><a href="#-3">3</a></pre>
- </div>
- </td>
- <td class="code">
- <div class="highlight" style="background: #f8f8f8">
- <pre style="line-height: 125%; margin: 0;">
- <span></span>
- <span style="color: #408080; font-style: italic"># a</span>
- <span style="color: #408080; font-style: italic"># b</span>
- <span style="color: #408080; font-style: italic"># c</span>
- </pre>
- </div>
- </td>
- </tr>
-</table>
+ <tr>
+ <td class="linenos">
+ <div class="linenodiv">
+ <pre><span style="color: #000000; background-color: #f0f0f0; padding: 0 5px 0 5px;"><a href="#-1">1</a></span>
+<span style="color: #000000; background-color: #f0f0f0; padding: 0 5px 0 5px;"><a href="#-2">2</a></span>
+<span style="color: #000000; background-color: #f0f0f0; padding: 0 5px 0 5px;"><a href="#-3">3</a></span></pre>
+ </div>
+ </td>
+ <td class="code">
+ <div class="highlight" style="background: #f8f8f8">
+ <pre style="line-height: 125%; margin: 0;"><span></span><span style="color: #408080; font-style: italic"># a</span>
+<span style="color: #408080; font-style: italic"># b</span>
+<span style="color: #408080; font-style: italic"># c</span>
+</pre>
+ </div>
+ </td>
+ </tr>
+</table> \ No newline at end of file
diff --git a/tests/html_linenos_expected_output/table_nocls_step_1_start_1_special_0_noanchor.html b/tests/html_linenos_expected_output/table_nocls_step_1_start_1_special_0_noanchor.html
index c32f2745..196033a1 100644
--- a/tests/html_linenos_expected_output/table_nocls_step_1_start_1_special_0_noanchor.html
+++ b/tests/html_linenos_expected_output/table_nocls_step_1_start_1_special_0_noanchor.html
@@ -1,21 +1,19 @@
<table class="highlighttable">
- <tr>
- <td class="linenos">
- <div class="linenodiv">
- <pre style="color: #000000; background-color: #f0f0f0; padding: 0 5px 0 5px;">1</pre>
- <pre style="color: #000000; background-color: #f0f0f0; padding: 0 5px 0 5px;">2</pre>
- <pre style="color: #000000; background-color: #f0f0f0; padding: 0 5px 0 5px;">3</pre>
- </div>
- </td>
- <td class="code">
- <div class="highlight" style="background: #f8f8f8">
- <pre style="line-height: 125%; margin: 0;">
- <span></span>
- <span style="color: #408080; font-style: italic"># a</span>
- <span style="color: #408080; font-style: italic"># b</span>
- <span style="color: #408080; font-style: italic"># c</span>
- </pre>
- </div>
- </td>
- </tr>
-</table>
+ <tr>
+ <td class="linenos">
+ <div class="linenodiv">
+ <pre><span style="color: #000000; background-color: #f0f0f0; padding: 0 5px 0 5px;">1</span>
+<span style="color: #000000; background-color: #f0f0f0; padding: 0 5px 0 5px;">2</span>
+<span style="color: #000000; background-color: #f0f0f0; padding: 0 5px 0 5px;">3</span></pre>
+ </div>
+ </td>
+ <td class="code">
+ <div class="highlight" style="background: #f8f8f8">
+ <pre style="line-height: 125%; margin: 0;"><span></span><span style="color: #408080; font-style: italic"># a</span>
+<span style="color: #408080; font-style: italic"># b</span>
+<span style="color: #408080; font-style: italic"># c</span>
+</pre>
+ </div>
+ </td>
+ </tr>
+</table> \ No newline at end of file
diff --git a/tests/html_linenos_expected_output/table_nocls_step_1_start_1_special_3_anchor.html b/tests/html_linenos_expected_output/table_nocls_step_1_start_1_special_3_anchor.html
index 1d17cad9..afa6152a 100644
--- a/tests/html_linenos_expected_output/table_nocls_step_1_start_1_special_3_anchor.html
+++ b/tests/html_linenos_expected_output/table_nocls_step_1_start_1_special_3_anchor.html
@@ -1,21 +1,19 @@
<table class="highlighttable">
- <tr>
- <td class="linenos">
- <div class="linenodiv">
- <pre style="color: #000000; background-color: #f0f0f0; padding: 0 5px 0 5px;"><a href="#-1">1</a></pre>
- <pre style="color: #000000; background-color: #f0f0f0; padding: 0 5px 0 5px;"><a href="#-2">2</a></pre>
- <pre style="color: #000000; background-color: #ffffc0; padding: 0 5px 0 5px;"><a href="#-3">3</a></pre>
- </div>
- </td>
- <td class="code">
- <div class="highlight" style="background: #f8f8f8">
- <pre style="line-height: 125%; margin: 0;">
- <span></span>
- <span style="color: #408080; font-style: italic"># a</span>
- <span style="color: #408080; font-style: italic"># b</span>
- <span style="color: #408080; font-style: italic"># c</span>
- </pre>
- </div>
- </td>
- </tr>
-</table>
+ <tr>
+ <td class="linenos">
+ <div class="linenodiv">
+ <pre><span style="color: #000000; background-color: #f0f0f0; padding: 0 5px 0 5px;"><a href="#-1">1</a></span>
+<span style="color: #000000; background-color: #f0f0f0; padding: 0 5px 0 5px;"><a href="#-2">2</a></span>
+<span style="color: #000000; background-color: #ffffc0; padding: 0 5px 0 5px;"><a href="#-3">3</a></span></pre>
+ </div>
+ </td>
+ <td class="code">
+ <div class="highlight" style="background: #f8f8f8">
+ <pre style="line-height: 125%; margin: 0;"><span></span><span style="color: #408080; font-style: italic"># a</span>
+<span style="color: #408080; font-style: italic"># b</span>
+<span style="color: #408080; font-style: italic"># c</span>
+</pre>
+ </div>
+ </td>
+ </tr>
+</table> \ No newline at end of file
diff --git a/tests/html_linenos_expected_output/table_nocls_step_1_start_1_special_3_noanchor.html b/tests/html_linenos_expected_output/table_nocls_step_1_start_1_special_3_noanchor.html
index 7b3bc29c..a72788df 100644
--- a/tests/html_linenos_expected_output/table_nocls_step_1_start_1_special_3_noanchor.html
+++ b/tests/html_linenos_expected_output/table_nocls_step_1_start_1_special_3_noanchor.html
@@ -1,21 +1,19 @@
<table class="highlighttable">
- <tr>
- <td class="linenos">
- <div class="linenodiv">
- <pre style="color: #000000; background-color: #f0f0f0; padding: 0 5px 0 5px;">1</pre>
- <pre style="color: #000000; background-color: #f0f0f0; padding: 0 5px 0 5px;">2</pre>
- <pre style="color: #000000; background-color: #ffffc0; padding: 0 5px 0 5px;">3</pre>
- </div>
- </td>
- <td class="code">
- <div class="highlight" style="background: #f8f8f8">
- <pre style="line-height: 125%; margin: 0;">
- <span></span>
- <span style="color: #408080; font-style: italic"># a</span>
- <span style="color: #408080; font-style: italic"># b</span>
- <span style="color: #408080; font-style: italic"># c</span>
- </pre>
- </div>
- </td>
- </tr>
-</table>
+ <tr>
+ <td class="linenos">
+ <div class="linenodiv">
+ <pre><span style="color: #000000; background-color: #f0f0f0; padding: 0 5px 0 5px;">1</span>
+<span style="color: #000000; background-color: #f0f0f0; padding: 0 5px 0 5px;">2</span>
+<span style="color: #000000; background-color: #ffffc0; padding: 0 5px 0 5px;">3</span></pre>
+ </div>
+ </td>
+ <td class="code">
+ <div class="highlight" style="background: #f8f8f8">
+ <pre style="line-height: 125%; margin: 0;"><span></span><span style="color: #408080; font-style: italic"># a</span>
+<span style="color: #408080; font-style: italic"># b</span>
+<span style="color: #408080; font-style: italic"># c</span>
+</pre>
+ </div>
+ </td>
+ </tr>
+</table> \ No newline at end of file
diff --git a/tests/html_linenos_expected_output/table_nocls_step_1_start_8_special_0_anchor.html b/tests/html_linenos_expected_output/table_nocls_step_1_start_8_special_0_anchor.html
index c61cb280..23ed43af 100644
--- a/tests/html_linenos_expected_output/table_nocls_step_1_start_8_special_0_anchor.html
+++ b/tests/html_linenos_expected_output/table_nocls_step_1_start_8_special_0_anchor.html
@@ -1,21 +1,19 @@
<table class="highlighttable">
- <tr>
- <td class="linenos">
- <div class="linenodiv">
- <pre style="color: #000000; background-color: #f0f0f0; padding: 0 5px 0 5px;"><a href="#-8"> 8</a></pre>
- <pre style="color: #000000; background-color: #f0f0f0; padding: 0 5px 0 5px;"><a href="#-9"> 9</a></pre>
- <pre style="color: #000000; background-color: #f0f0f0; padding: 0 5px 0 5px;"><a href="#-10">10</a></pre>
- </div>
- </td>
- <td class="code">
- <div class="highlight" style="background: #f8f8f8">
- <pre style="line-height: 125%; margin: 0;">
- <span></span>
- <span style="color: #408080; font-style: italic"># a</span>
- <span style="color: #408080; font-style: italic"># b</span>
- <span style="color: #408080; font-style: italic"># c</span>
- </pre>
- </div>
- </td>
- </tr>
-</table>
+ <tr>
+ <td class="linenos">
+ <div class="linenodiv">
+ <pre><span style="color: #000000; background-color: #f0f0f0; padding: 0 5px 0 5px;"><a href="#-8"> 8</a></span>
+<span style="color: #000000; background-color: #f0f0f0; padding: 0 5px 0 5px;"><a href="#-9"> 9</a></span>
+<span style="color: #000000; background-color: #f0f0f0; padding: 0 5px 0 5px;"><a href="#-10">10</a></span></pre>
+ </div>
+ </td>
+ <td class="code">
+ <div class="highlight" style="background: #f8f8f8">
+ <pre style="line-height: 125%; margin: 0;"><span></span><span style="color: #408080; font-style: italic"># a</span>
+<span style="color: #408080; font-style: italic"># b</span>
+<span style="color: #408080; font-style: italic"># c</span>
+</pre>
+ </div>
+ </td>
+ </tr>
+</table> \ No newline at end of file
diff --git a/tests/html_linenos_expected_output/table_nocls_step_1_start_8_special_0_noanchor.html b/tests/html_linenos_expected_output/table_nocls_step_1_start_8_special_0_noanchor.html
index af07ebb1..3248ba3a 100644
--- a/tests/html_linenos_expected_output/table_nocls_step_1_start_8_special_0_noanchor.html
+++ b/tests/html_linenos_expected_output/table_nocls_step_1_start_8_special_0_noanchor.html
@@ -1,21 +1,19 @@
<table class="highlighttable">
- <tr>
- <td class="linenos">
- <div class="linenodiv">
- <pre style="color: #000000; background-color: #f0f0f0; padding: 0 5px 0 5px;"> 8</pre>
- <pre style="color: #000000; background-color: #f0f0f0; padding: 0 5px 0 5px;"> 9</pre>
- <pre style="color: #000000; background-color: #f0f0f0; padding: 0 5px 0 5px;">10</pre>
- </div>
- </td>
- <td class="code">
- <div class="highlight" style="background: #f8f8f8">
- <pre style="line-height: 125%; margin: 0;">
- <span></span>
- <span style="color: #408080; font-style: italic"># a</span>
- <span style="color: #408080; font-style: italic"># b</span>
- <span style="color: #408080; font-style: italic"># c</span>
- </pre>
- </div>
- </td>
- </tr>
-</table>
+ <tr>
+ <td class="linenos">
+ <div class="linenodiv">
+ <pre><span style="color: #000000; background-color: #f0f0f0; padding: 0 5px 0 5px;"> 8</span>
+<span style="color: #000000; background-color: #f0f0f0; padding: 0 5px 0 5px;"> 9</span>
+<span style="color: #000000; background-color: #f0f0f0; padding: 0 5px 0 5px;">10</span></pre>
+ </div>
+ </td>
+ <td class="code">
+ <div class="highlight" style="background: #f8f8f8">
+ <pre style="line-height: 125%; margin: 0;"><span></span><span style="color: #408080; font-style: italic"># a</span>
+<span style="color: #408080; font-style: italic"># b</span>
+<span style="color: #408080; font-style: italic"># c</span>
+</pre>
+ </div>
+ </td>
+ </tr>
+</table> \ No newline at end of file
diff --git a/tests/html_linenos_expected_output/table_nocls_step_1_start_8_special_3_anchor.html b/tests/html_linenos_expected_output/table_nocls_step_1_start_8_special_3_anchor.html
index 122283b6..276b76d5 100644
--- a/tests/html_linenos_expected_output/table_nocls_step_1_start_8_special_3_anchor.html
+++ b/tests/html_linenos_expected_output/table_nocls_step_1_start_8_special_3_anchor.html
@@ -1,21 +1,19 @@
<table class="highlighttable">
- <tr>
- <td class="linenos">
- <div class="linenodiv">
- <pre style="color: #000000; background-color: #f0f0f0; padding: 0 5px 0 5px;"><a href="#-8"> 8</a></pre>
- <pre style="color: #000000; background-color: #ffffc0; padding: 0 5px 0 5px;"><a href="#-9"> 9</a></pre>
- <pre style="color: #000000; background-color: #f0f0f0; padding: 0 5px 0 5px;"><a href="#-10">10</a></pre>
- </div>
- </td>
- <td class="code">
- <div class="highlight" style="background: #f8f8f8">
- <pre style="line-height: 125%; margin: 0;">
- <span></span>
- <span style="color: #408080; font-style: italic"># a</span>
- <span style="color: #408080; font-style: italic"># b</span>
- <span style="color: #408080; font-style: italic"># c</span>
- </pre>
- </div>
- </td>
- </tr>
-</table>
+ <tr>
+ <td class="linenos">
+ <div class="linenodiv">
+ <pre><span style="color: #000000; background-color: #f0f0f0; padding: 0 5px 0 5px;"><a href="#-8"> 8</a></span>
+<span style="color: #000000; background-color: #ffffc0; padding: 0 5px 0 5px;"><a href="#-9"> 9</a></span>
+<span style="color: #000000; background-color: #f0f0f0; padding: 0 5px 0 5px;"><a href="#-10">10</a></span></pre>
+ </div>
+ </td>
+ <td class="code">
+ <div class="highlight" style="background: #f8f8f8">
+ <pre style="line-height: 125%; margin: 0;"><span></span><span style="color: #408080; font-style: italic"># a</span>
+<span style="color: #408080; font-style: italic"># b</span>
+<span style="color: #408080; font-style: italic"># c</span>
+</pre>
+ </div>
+ </td>
+ </tr>
+</table> \ No newline at end of file
diff --git a/tests/html_linenos_expected_output/table_nocls_step_1_start_8_special_3_noanchor.html b/tests/html_linenos_expected_output/table_nocls_step_1_start_8_special_3_noanchor.html
index f50cdcdd..4298ac69 100644
--- a/tests/html_linenos_expected_output/table_nocls_step_1_start_8_special_3_noanchor.html
+++ b/tests/html_linenos_expected_output/table_nocls_step_1_start_8_special_3_noanchor.html
@@ -1,21 +1,19 @@
<table class="highlighttable">
- <tr>
- <td class="linenos">
- <div class="linenodiv">
- <pre style="color: #000000; background-color: #f0f0f0; padding: 0 5px 0 5px;"> 8</pre>
- <pre style="color: #000000; background-color: #ffffc0; padding: 0 5px 0 5px;"> 9</pre>
- <pre style="color: #000000; background-color: #f0f0f0; padding: 0 5px 0 5px;">10</pre>
- </div>
- </td>
- <td class="code">
- <div class="highlight" style="background: #f8f8f8">
- <pre style="line-height: 125%; margin: 0;">
- <span></span>
- <span style="color: #408080; font-style: italic"># a</span>
- <span style="color: #408080; font-style: italic"># b</span>
- <span style="color: #408080; font-style: italic"># c</span>
- </pre>
- </div>
- </td>
- </tr>
-</table>
+ <tr>
+ <td class="linenos">
+ <div class="linenodiv">
+ <pre><span style="color: #000000; background-color: #f0f0f0; padding: 0 5px 0 5px;"> 8</span>
+<span style="color: #000000; background-color: #ffffc0; padding: 0 5px 0 5px;"> 9</span>
+<span style="color: #000000; background-color: #f0f0f0; padding: 0 5px 0 5px;">10</span></pre>
+ </div>
+ </td>
+ <td class="code">
+ <div class="highlight" style="background: #f8f8f8">
+ <pre style="line-height: 125%; margin: 0;"><span></span><span style="color: #408080; font-style: italic"># a</span>
+<span style="color: #408080; font-style: italic"># b</span>
+<span style="color: #408080; font-style: italic"># c</span>
+</pre>
+ </div>
+ </td>
+ </tr>
+</table> \ No newline at end of file
diff --git a/tests/html_linenos_expected_output/table_nocls_step_2_start_1_special_0_anchor.html b/tests/html_linenos_expected_output/table_nocls_step_2_start_1_special_0_anchor.html
index 6ed8c005..15c4975e 100644
--- a/tests/html_linenos_expected_output/table_nocls_step_2_start_1_special_0_anchor.html
+++ b/tests/html_linenos_expected_output/table_nocls_step_2_start_1_special_0_anchor.html
@@ -1,21 +1,19 @@
<table class="highlighttable">
- <tr>
- <td class="linenos">
- <div class="linenodiv">
- <pre style="color: #000000; background-color: #f0f0f0; padding: 0 5px 0 5px;"> </pre>
- <pre style="color: #000000; background-color: #f0f0f0; padding: 0 5px 0 5px;"><a href="#-2">2</a></pre>
- <pre style="color: #000000; background-color: #f0f0f0; padding: 0 5px 0 5px;"> </pre>
- </div>
- </td>
- <td class="code">
- <div class="highlight" style="background: #f8f8f8">
- <pre style="line-height: 125%; margin: 0;">
- <span></span>
- <span style="color: #408080; font-style: italic"># a</span>
- <span style="color: #408080; font-style: italic"># b</span>
- <span style="color: #408080; font-style: italic"># c</span>
- </pre>
- </div>
- </td>
- </tr>
-</table>
+ <tr>
+ <td class="linenos">
+ <div class="linenodiv">
+ <pre><span style="color: #000000; background-color: #f0f0f0; padding: 0 5px 0 5px;"> </span>
+<span style="color: #000000; background-color: #f0f0f0; padding: 0 5px 0 5px;"><a href="#-2">2</a></span>
+<span style="color: #000000; background-color: #f0f0f0; padding: 0 5px 0 5px;"> </span></pre>
+ </div>
+ </td>
+ <td class="code">
+ <div class="highlight" style="background: #f8f8f8">
+ <pre style="line-height: 125%; margin: 0;"><span></span><span style="color: #408080; font-style: italic"># a</span>
+<span style="color: #408080; font-style: italic"># b</span>
+<span style="color: #408080; font-style: italic"># c</span>
+</pre>
+ </div>
+ </td>
+ </tr>
+</table> \ No newline at end of file
diff --git a/tests/html_linenos_expected_output/table_nocls_step_2_start_1_special_0_noanchor.html b/tests/html_linenos_expected_output/table_nocls_step_2_start_1_special_0_noanchor.html
index dba1249b..987bd12d 100644
--- a/tests/html_linenos_expected_output/table_nocls_step_2_start_1_special_0_noanchor.html
+++ b/tests/html_linenos_expected_output/table_nocls_step_2_start_1_special_0_noanchor.html
@@ -1,21 +1,19 @@
<table class="highlighttable">
- <tr>
- <td class="linenos">
- <div class="linenodiv">
- <pre style="color: #000000; background-color: #f0f0f0; padding: 0 5px 0 5px;"> </pre>
- <pre style="color: #000000; background-color: #f0f0f0; padding: 0 5px 0 5px;">2</pre>
- <pre style="color: #000000; background-color: #f0f0f0; padding: 0 5px 0 5px;"> </pre>
- </div>
- </td>
- <td class="code">
- <div class="highlight" style="background: #f8f8f8">
- <pre style="line-height: 125%; margin: 0;">
- <span></span>
- <span style="color: #408080; font-style: italic"># a</span>
- <span style="color: #408080; font-style: italic"># b</span>
- <span style="color: #408080; font-style: italic"># c</span>
- </pre>
- </div>
- </td>
- </tr>
-</table>
+ <tr>
+ <td class="linenos">
+ <div class="linenodiv">
+ <pre><span style="color: #000000; background-color: #f0f0f0; padding: 0 5px 0 5px;"> </span>
+<span style="color: #000000; background-color: #f0f0f0; padding: 0 5px 0 5px;">2</span>
+<span style="color: #000000; background-color: #f0f0f0; padding: 0 5px 0 5px;"> </span></pre>
+ </div>
+ </td>
+ <td class="code">
+ <div class="highlight" style="background: #f8f8f8">
+ <pre style="line-height: 125%; margin: 0;"><span></span><span style="color: #408080; font-style: italic"># a</span>
+<span style="color: #408080; font-style: italic"># b</span>
+<span style="color: #408080; font-style: italic"># c</span>
+</pre>
+ </div>
+ </td>
+ </tr>
+</table> \ No newline at end of file
diff --git a/tests/html_linenos_expected_output/table_nocls_step_2_start_1_special_3_anchor.html b/tests/html_linenos_expected_output/table_nocls_step_2_start_1_special_3_anchor.html
index 69bf7428..da570378 100644
--- a/tests/html_linenos_expected_output/table_nocls_step_2_start_1_special_3_anchor.html
+++ b/tests/html_linenos_expected_output/table_nocls_step_2_start_1_special_3_anchor.html
@@ -1,21 +1,19 @@
<table class="highlighttable">
- <tr>
- <td class="linenos">
- <div class="linenodiv">
- <pre style="color: #000000; background-color: #f0f0f0; padding: 0 5px 0 5px;"> </pre>
- <pre style="color: #000000; background-color: #f0f0f0; padding: 0 5px 0 5px;"><a href="#-2">2</a></pre>
- <pre style="color: #000000; background-color: #ffffc0; padding: 0 5px 0 5px;"> </pre>
- </div>
- </td>
- <td class="code">
- <div class="highlight" style="background: #f8f8f8">
- <pre style="line-height: 125%; margin: 0;">
- <span></span>
- <span style="color: #408080; font-style: italic"># a</span>
- <span style="color: #408080; font-style: italic"># b</span>
- <span style="color: #408080; font-style: italic"># c</span>
- </pre>
- </div>
- </td>
- </tr>
-</table>
+ <tr>
+ <td class="linenos">
+ <div class="linenodiv">
+ <pre><span style="color: #000000; background-color: #f0f0f0; padding: 0 5px 0 5px;"> </span>
+<span style="color: #000000; background-color: #f0f0f0; padding: 0 5px 0 5px;"><a href="#-2">2</a></span>
+<span style="color: #000000; background-color: #ffffc0; padding: 0 5px 0 5px;"> </span></pre>
+ </div>
+ </td>
+ <td class="code">
+ <div class="highlight" style="background: #f8f8f8">
+ <pre style="line-height: 125%; margin: 0;"><span></span><span style="color: #408080; font-style: italic"># a</span>
+<span style="color: #408080; font-style: italic"># b</span>
+<span style="color: #408080; font-style: italic"># c</span>
+</pre>
+ </div>
+ </td>
+ </tr>
+</table> \ No newline at end of file
diff --git a/tests/html_linenos_expected_output/table_nocls_step_2_start_1_special_3_noanchor.html b/tests/html_linenos_expected_output/table_nocls_step_2_start_1_special_3_noanchor.html
index 87660afb..0f502688 100644
--- a/tests/html_linenos_expected_output/table_nocls_step_2_start_1_special_3_noanchor.html
+++ b/tests/html_linenos_expected_output/table_nocls_step_2_start_1_special_3_noanchor.html
@@ -1,21 +1,19 @@
<table class="highlighttable">
- <tr>
- <td class="linenos">
- <div class="linenodiv">
- <pre style="color: #000000; background-color: #f0f0f0; padding: 0 5px 0 5px;"> </pre>
- <pre style="color: #000000; background-color: #f0f0f0; padding: 0 5px 0 5px;">2</pre>
- <pre style="color: #000000; background-color: #ffffc0; padding: 0 5px 0 5px;"> </pre>
- </div>
- </td>
- <td class="code">
- <div class="highlight" style="background: #f8f8f8">
- <pre style="line-height: 125%; margin: 0;">
- <span></span>
- <span style="color: #408080; font-style: italic"># a</span>
- <span style="color: #408080; font-style: italic"># b</span>
- <span style="color: #408080; font-style: italic"># c</span>
- </pre>
- </div>
- </td>
- </tr>
-</table>
+ <tr>
+ <td class="linenos">
+ <div class="linenodiv">
+ <pre><span style="color: #000000; background-color: #f0f0f0; padding: 0 5px 0 5px;"> </span>
+<span style="color: #000000; background-color: #f0f0f0; padding: 0 5px 0 5px;">2</span>
+<span style="color: #000000; background-color: #ffffc0; padding: 0 5px 0 5px;"> </span></pre>
+ </div>
+ </td>
+ <td class="code">
+ <div class="highlight" style="background: #f8f8f8">
+ <pre style="line-height: 125%; margin: 0;"><span></span><span style="color: #408080; font-style: italic"># a</span>
+<span style="color: #408080; font-style: italic"># b</span>
+<span style="color: #408080; font-style: italic"># c</span>
+</pre>
+ </div>
+ </td>
+ </tr>
+</table> \ No newline at end of file
diff --git a/tests/html_linenos_expected_output/table_nocls_step_2_start_8_special_0_anchor.html b/tests/html_linenos_expected_output/table_nocls_step_2_start_8_special_0_anchor.html
index ac7570f3..d0bef32d 100644
--- a/tests/html_linenos_expected_output/table_nocls_step_2_start_8_special_0_anchor.html
+++ b/tests/html_linenos_expected_output/table_nocls_step_2_start_8_special_0_anchor.html
@@ -1,21 +1,19 @@
<table class="highlighttable">
- <tr>
- <td class="linenos">
- <div class="linenodiv">
- <pre style="color: #000000; background-color: #f0f0f0; padding: 0 5px 0 5px;"><a href="#-8"> 8</a></pre>
- <pre style="color: #000000; background-color: #f0f0f0; padding: 0 5px 0 5px;"> </pre>
- <pre style="color: #000000; background-color: #f0f0f0; padding: 0 5px 0 5px;"><a href="#-10">10</a></pre>
- </div>
- </td>
- <td class="code">
- <div class="highlight" style="background: #f8f8f8">
- <pre style="line-height: 125%; margin: 0;">
- <span></span>
- <span style="color: #408080; font-style: italic"># a</span>
- <span style="color: #408080; font-style: italic"># b</span>
- <span style="color: #408080; font-style: italic"># c</span>
- </pre>
- </div>
- </td>
- </tr>
-</table>
+ <tr>
+ <td class="linenos">
+ <div class="linenodiv">
+ <pre><span style="color: #000000; background-color: #f0f0f0; padding: 0 5px 0 5px;"><a href="#-8"> 8</a></span>
+<span style="color: #000000; background-color: #f0f0f0; padding: 0 5px 0 5px;"> </span>
+<span style="color: #000000; background-color: #f0f0f0; padding: 0 5px 0 5px;"><a href="#-10">10</a></span></pre>
+ </div>
+ </td>
+ <td class="code">
+ <div class="highlight" style="background: #f8f8f8">
+ <pre style="line-height: 125%; margin: 0;"><span></span><span style="color: #408080; font-style: italic"># a</span>
+<span style="color: #408080; font-style: italic"># b</span>
+<span style="color: #408080; font-style: italic"># c</span>
+</pre>
+ </div>
+ </td>
+ </tr>
+</table> \ No newline at end of file
diff --git a/tests/html_linenos_expected_output/table_nocls_step_2_start_8_special_0_noanchor.html b/tests/html_linenos_expected_output/table_nocls_step_2_start_8_special_0_noanchor.html
index 035aa795..98b94471 100644
--- a/tests/html_linenos_expected_output/table_nocls_step_2_start_8_special_0_noanchor.html
+++ b/tests/html_linenos_expected_output/table_nocls_step_2_start_8_special_0_noanchor.html
@@ -1,21 +1,19 @@
<table class="highlighttable">
- <tr>
- <td class="linenos">
- <div class="linenodiv">
- <pre style="color: #000000; background-color: #f0f0f0; padding: 0 5px 0 5px;"> 8</pre>
- <pre style="color: #000000; background-color: #f0f0f0; padding: 0 5px 0 5px;"> </pre>
- <pre style="color: #000000; background-color: #f0f0f0; padding: 0 5px 0 5px;">10</pre>
- </div>
- </td>
- <td class="code">
- <div class="highlight" style="background: #f8f8f8">
- <pre style="line-height: 125%; margin: 0;">
- <span></span>
- <span style="color: #408080; font-style: italic"># a</span>
- <span style="color: #408080; font-style: italic"># b</span>
- <span style="color: #408080; font-style: italic"># c</span>
- </pre>
- </div>
- </td>
- </tr>
-</table>
+ <tr>
+ <td class="linenos">
+ <div class="linenodiv">
+ <pre><span style="color: #000000; background-color: #f0f0f0; padding: 0 5px 0 5px;"> 8</span>
+<span style="color: #000000; background-color: #f0f0f0; padding: 0 5px 0 5px;"> </span>
+<span style="color: #000000; background-color: #f0f0f0; padding: 0 5px 0 5px;">10</span></pre>
+ </div>
+ </td>
+ <td class="code">
+ <div class="highlight" style="background: #f8f8f8">
+ <pre style="line-height: 125%; margin: 0;"><span></span><span style="color: #408080; font-style: italic"># a</span>
+<span style="color: #408080; font-style: italic"># b</span>
+<span style="color: #408080; font-style: italic"># c</span>
+</pre>
+ </div>
+ </td>
+ </tr>
+</table> \ No newline at end of file
diff --git a/tests/html_linenos_expected_output/table_nocls_step_2_start_8_special_3_anchor.html b/tests/html_linenos_expected_output/table_nocls_step_2_start_8_special_3_anchor.html
index 0ed43dbd..baa70130 100644
--- a/tests/html_linenos_expected_output/table_nocls_step_2_start_8_special_3_anchor.html
+++ b/tests/html_linenos_expected_output/table_nocls_step_2_start_8_special_3_anchor.html
@@ -1,21 +1,19 @@
<table class="highlighttable">
- <tr>
- <td class="linenos">
- <div class="linenodiv">
- <pre style="color: #000000; background-color: #f0f0f0; padding: 0 5px 0 5px;"><a href="#-8"> 8</a></pre>
- <pre style="color: #000000; background-color: #ffffc0; padding: 0 5px 0 5px;"> </pre>
- <pre style="color: #000000; background-color: #f0f0f0; padding: 0 5px 0 5px;"><a href="#-10">10</a></pre>
- </div>
- </td>
- <td class="code">
- <div class="highlight" style="background: #f8f8f8">
- <pre style="line-height: 125%; margin: 0;">
- <span></span>
- <span style="color: #408080; font-style: italic"># a</span>
- <span style="color: #408080; font-style: italic"># b</span>
- <span style="color: #408080; font-style: italic"># c</span>
- </pre>
- </div>
- </td>
- </tr>
-</table>
+ <tr>
+ <td class="linenos">
+ <div class="linenodiv">
+ <pre><span style="color: #000000; background-color: #f0f0f0; padding: 0 5px 0 5px;"><a href="#-8"> 8</a></span>
+<span style="color: #000000; background-color: #ffffc0; padding: 0 5px 0 5px;"> </span>
+<span style="color: #000000; background-color: #f0f0f0; padding: 0 5px 0 5px;"><a href="#-10">10</a></span></pre>
+ </div>
+ </td>
+ <td class="code">
+ <div class="highlight" style="background: #f8f8f8">
+ <pre style="line-height: 125%; margin: 0;"><span></span><span style="color: #408080; font-style: italic"># a</span>
+<span style="color: #408080; font-style: italic"># b</span>
+<span style="color: #408080; font-style: italic"># c</span>
+</pre>
+ </div>
+ </td>
+ </tr>
+</table> \ No newline at end of file
diff --git a/tests/html_linenos_expected_output/table_nocls_step_2_start_8_special_3_noanchor.html b/tests/html_linenos_expected_output/table_nocls_step_2_start_8_special_3_noanchor.html
index 13e38705..6ba03c4f 100644
--- a/tests/html_linenos_expected_output/table_nocls_step_2_start_8_special_3_noanchor.html
+++ b/tests/html_linenos_expected_output/table_nocls_step_2_start_8_special_3_noanchor.html
@@ -1,21 +1,19 @@
<table class="highlighttable">
- <tr>
- <td class="linenos">
- <div class="linenodiv">
- <pre style="color: #000000; background-color: #f0f0f0; padding: 0 5px 0 5px;"> 8</pre>
- <pre style="color: #000000; background-color: #ffffc0; padding: 0 5px 0 5px;"> </pre>
- <pre style="color: #000000; background-color: #f0f0f0; padding: 0 5px 0 5px;">10</pre>
- </div>
- </td>
- <td class="code">
- <div class="highlight" style="background: #f8f8f8">
- <pre style="line-height: 125%; margin: 0;">
- <span></span>
- <span style="color: #408080; font-style: italic"># a</span>
- <span style="color: #408080; font-style: italic"># b</span>
- <span style="color: #408080; font-style: italic"># c</span>
- </pre>
- </div>
- </td>
- </tr>
-</table>
+ <tr>
+ <td class="linenos">
+ <div class="linenodiv">
+ <pre><span style="color: #000000; background-color: #f0f0f0; padding: 0 5px 0 5px;"> 8</span>
+<span style="color: #000000; background-color: #ffffc0; padding: 0 5px 0 5px;"> </span>
+<span style="color: #000000; background-color: #f0f0f0; padding: 0 5px 0 5px;">10</span></pre>
+ </div>
+ </td>
+ <td class="code">
+ <div class="highlight" style="background: #f8f8f8">
+ <pre style="line-height: 125%; margin: 0;"><span></span><span style="color: #408080; font-style: italic"># a</span>
+<span style="color: #408080; font-style: italic"># b</span>
+<span style="color: #408080; font-style: italic"># c</span>
+</pre>
+ </div>
+ </td>
+ </tr>
+</table> \ No newline at end of file
diff --git a/tests/test_apache_conf.py b/tests/test_apache_conf.py
index 63f4f542..cdb8bc33 100644
--- a/tests/test_apache_conf.py
+++ b/tests/test_apache_conf.py
@@ -3,7 +3,7 @@
Basic Apache Configuration Test
~~~~~~~~~~~~~~~~~--------------
- :copyright: Copyright 2006-2019 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2020 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
diff --git a/tests/test_asm.py b/tests/test_asm.py
index 5bf69e38..d351ce30 100644
--- a/tests/test_asm.py
+++ b/tests/test_asm.py
@@ -3,7 +3,7 @@
Basic GasLexer/NasmLexer Test
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
- :copyright: Copyright 2006-2019 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2020 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
diff --git a/tests/test_basic.py b/tests/test_basic.py
index e2255f5b..e3826b28 100644
--- a/tests/test_basic.py
+++ b/tests/test_basic.py
@@ -3,7 +3,7 @@
Pygments Basic lexers tests
~~~~~~~~~~~~~~~~~~~~~~~~~~~
- :copyright: Copyright 2006-2019 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2020 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
diff --git a/tests/test_basic_api.py b/tests/test_basic_api.py
index ea555513..378ea5e4 100644
--- a/tests/test_basic_api.py
+++ b/tests/test_basic_api.py
@@ -3,7 +3,7 @@
Pygments basic API tests
~~~~~~~~~~~~~~~~~~~~~~~~
- :copyright: Copyright 2006-2019 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2020 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
diff --git a/tests/test_bibtex.py b/tests/test_bibtex.py
index 756a6589..dfa668f2 100644
--- a/tests/test_bibtex.py
+++ b/tests/test_bibtex.py
@@ -3,7 +3,7 @@
BibTeX Test
~~~~~~~~~~~
- :copyright: Copyright 2006-2019 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2020 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
diff --git a/tests/test_cfm.py b/tests/test_cfm.py
index e3175215..a1600944 100644
--- a/tests/test_cfm.py
+++ b/tests/test_cfm.py
@@ -3,7 +3,7 @@
Basic ColdfusionHtmlLexer Test
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
- :copyright: Copyright 2006-2019 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2020 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
diff --git a/tests/test_clexer.py b/tests/test_clexer.py
index 69f39b24..d40ec491 100644
--- a/tests/test_clexer.py
+++ b/tests/test_clexer.py
@@ -3,7 +3,7 @@
Basic CLexer Test
~~~~~~~~~~~~~~~~~
- :copyright: Copyright 2006-2019 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2020 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
diff --git a/tests/test_cmdline.py b/tests/test_cmdline.py
index e11c666d..fe27df7f 100644
--- a/tests/test_cmdline.py
+++ b/tests/test_cmdline.py
@@ -3,7 +3,7 @@
Command line test
~~~~~~~~~~~~~~~~~
- :copyright: Copyright 2006-2019 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2020 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
diff --git a/tests/test_coffeescript.py b/tests/test_coffeescript.py
new file mode 100644
index 00000000..41ca8e0d
--- /dev/null
+++ b/tests/test_coffeescript.py
@@ -0,0 +1,85 @@
+# -*- coding: utf-8 -*-
+"""
+ CoffeeScript tests
+ ~~~~~~~~~~~~~~~~~~
+
+ :copyright: Copyright 2006-2020 by the Pygments team, see AUTHORS.
+ :license: BSD, see LICENSE for details.
+"""
+
+import pytest
+
+from pygments.lexers import CoffeeScriptLexer
+from pygments.token import Token
+
+COFFEE_SLASH_GOLDEN = [
+ # input_str, slashes_are_regex_here
+ (r'/\\/', True),
+ (r'/\\/i', True),
+ (r'/\//', True),
+ (r'/(\s)/', True),
+ ('/a{2,8}/', True),
+ ('/b*c?d+/', True),
+ ('/(capture-match)/', True),
+ ('/(?:do-not-capture-match)/', True),
+ ('/this|or|that/', True),
+ ('/[char-set]/', True),
+ ('/[^neg-char_st]/', True),
+ ('/^.*$/', True),
+ (r'/\n(\f)\0\1\d\b\cm\u1234/', True),
+ (r'/^.?([^/\\\n\w]*)a\1+$/.something(or_other) # something more complex', True),
+ ("foo = (str) ->\n /'|\"/.test str", True),
+ ('a = a / b / c', False),
+ ('a = a/b/c', False),
+ ('a = a/b/ c', False),
+ ('a = a /b/c', False),
+ ('a = 1 + /d/.test(a)', True),
+]
+
+
+@pytest.fixture(scope='module')
+def lexer():
+ yield CoffeeScriptLexer()
+
+
+@pytest.mark.parametrize('golden', COFFEE_SLASH_GOLDEN)
+def test_coffee_slashes(lexer, golden):
+ input_str, slashes_are_regex_here = golden
+ output = list(lexer.get_tokens(input_str))
+ print(output)
+ for t, s in output:
+ if '/' in s:
+ is_regex = t is Token.String.Regex
+ assert is_regex == slashes_are_regex_here, (t, s)
+
+
+def test_mixed_slashes(lexer):
+ fragment = u'a?/foo/:1/2;\n'
+ tokens = [
+ (Token.Name.Other, u'a'),
+ (Token.Operator, u'?'),
+ (Token.Literal.String.Regex, u'/foo/'),
+ (Token.Operator, u':'),
+ (Token.Literal.Number.Integer, u'1'),
+ (Token.Operator, u'/'),
+ (Token.Literal.Number.Integer, u'2'),
+ (Token.Punctuation, u';'),
+ (Token.Text, u'\n'),
+ ]
+ assert list(lexer.get_tokens(fragment)) == tokens
+
+
+def test_beware_infinite_loop(lexer):
+ # This demonstrates the case that "This isn't really guarding" comment
+ # refers to.
+ fragment = '/a/x;\n'
+ tokens = [
+ (Token.Text, ''),
+ (Token.Operator, '/'),
+ (Token.Name.Other, 'a'),
+ (Token.Operator, '/'),
+ (Token.Name.Other, 'x'),
+ (Token.Punctuation, ';'),
+ (Token.Text, '\n'),
+ ]
+ assert list(lexer.get_tokens(fragment)) == tokens
diff --git a/tests/test_crystal.py b/tests/test_crystal.py
index 30177e41..08aa7741 100644
--- a/tests/test_crystal.py
+++ b/tests/test_crystal.py
@@ -3,7 +3,7 @@
Basic CrystalLexer Test
~~~~~~~~~~~~~~~~~~~~~~~
- :copyright: Copyright 2006-2019 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2020 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
diff --git a/tests/test_csound.py b/tests/test_csound.py
index 7259cf5b..0186da7a 100644
--- a/tests/test_csound.py
+++ b/tests/test_csound.py
@@ -3,7 +3,7 @@
Csound lexer tests
~~~~~~~~~~~~~~~~~~
- :copyright: Copyright 2006-2019 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2020 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
diff --git a/tests/test_data.py b/tests/test_data.py
index 23f1d4a0..9724d235 100644
--- a/tests/test_data.py
+++ b/tests/test_data.py
@@ -56,6 +56,34 @@ def test_basic_json(lexer_json):
assert list(lexer_json.get_tokens(fragment)) == tokens
+def test_json_escape_backtracking(lexer_json):
+ # This tests that an (invalid) sequence of escapes doesn't cause the lexer
+ # to fall into catastrophic backtracking. unfortunately, if it's broken
+ # this test will hang and that's how we know it's broken :(
+ fragment = r'{"\u00D0000\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\63CD'
+ tokens = (
+ [(Token.Punctuation, u'{'),
+ (Token.Error, r'"'),
+ (Token.Error, '\\'),
+ (Token.Error, r'u'),
+ (Token.Error, r'0'),
+ (Token.Error, r'0'),
+ (Token.Error, r'D'),
+ (Token.Error, r'0'),
+ (Token.Error, r'0'),
+ (Token.Error, r'0'),
+ (Token.Error, r'0')]
+ + [(Token.Error, '\\')] * 178
+ + [(Token.Error, r'6'),
+ (Token.Error, r'3'),
+ (Token.Error, r'C'),
+ (Token.Error, r'D'),
+ (Token.Text, '\n')]
+ )
+
+ assert list(lexer_json.get_tokens(fragment)) == tokens
+
+
def test_basic_bare(lexer_bare):
# This is the same as testBasic for JsonLexer above, except the
# enclosing curly braces are removed.
diff --git a/tests/test_examplefiles.py b/tests/test_examplefiles.py
index 2486f876..22b5ced0 100644
--- a/tests/test_examplefiles.py
+++ b/tests/test_examplefiles.py
@@ -3,7 +3,7 @@
Pygments tests with example files
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
- :copyright: Copyright 2006-2019 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2020 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
diff --git a/tests/test_grammar_notation.py b/tests/test_grammar_notation.py
index b20ca975..0d7e6865 100644
--- a/tests/test_grammar_notation.py
+++ b/tests/test_grammar_notation.py
@@ -3,7 +3,7 @@
Basic Grammar Notation Tests
~~~~~~~~~~~~~~~~~~~~~~~~~~~~
- :copyright: Copyright 2006-2019 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2020 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
diff --git a/tests/test_haskell.py b/tests/test_haskell.py
index 515c19e9..f67db1fb 100644
--- a/tests/test_haskell.py
+++ b/tests/test_haskell.py
@@ -3,7 +3,7 @@
Haskell Tests
~~~~~~~~~~~~~
- :copyright: Copyright 2006-2019 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2020 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
diff --git a/tests/test_hdl.py b/tests/test_hdl.py
index 9d981202..f3975124 100644
--- a/tests/test_hdl.py
+++ b/tests/test_hdl.py
@@ -3,7 +3,7 @@
HDL Tests
~~~~~~~~~
- :copyright: Copyright 2006-2019 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2020 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
diff --git a/tests/test_html_formatter.py b/tests/test_html_formatter.py
index 75986db5..0a1b7a9a 100644
--- a/tests/test_html_formatter.py
+++ b/tests/test_html_formatter.py
@@ -3,7 +3,7 @@
Pygments HTML formatter tests
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
- :copyright: Copyright 2006-2019 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2020 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
diff --git a/tests/test_inherit.py b/tests/test_inherit.py
index 03527724..997748a6 100644
--- a/tests/test_inherit.py
+++ b/tests/test_inherit.py
@@ -3,7 +3,7 @@
Tests for inheritance in RegexLexer
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
- :copyright: Copyright 2006-2019 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2020 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
diff --git a/tests/test_irc_formatter.py b/tests/test_irc_formatter.py
index af90731e..6dc43b7d 100644
--- a/tests/test_irc_formatter.py
+++ b/tests/test_irc_formatter.py
@@ -3,7 +3,7 @@
Pygments IRC formatter tests
~~~~~~~~~~~~~~~~~~~~~~~~~~~~
- :copyright: Copyright 2006-2019 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2020 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
diff --git a/tests/test_java.py b/tests/test_java.py
index 94463246..467a3b72 100644
--- a/tests/test_java.py
+++ b/tests/test_java.py
@@ -3,7 +3,7 @@
Basic JavaLexer Test
~~~~~~~~~~~~~~~~~~~~
- :copyright: Copyright 2006-2019 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2020 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
diff --git a/tests/test_javascript.py b/tests/test_javascript.py
index 25e06fdc..78350612 100644
--- a/tests/test_javascript.py
+++ b/tests/test_javascript.py
@@ -3,83 +3,82 @@
Javascript tests
~~~~~~~~~~~~~~~~
- :copyright: Copyright 2006-2019 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2020 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
import pytest
-from pygments.lexers import CoffeeScriptLexer
-from pygments.token import Token
-
-COFFEE_SLASH_GOLDEN = [
- # input_str, slashes_are_regex_here
- (r'/\\/', True),
- (r'/\\/i', True),
- (r'/\//', True),
- (r'/(\s)/', True),
- ('/a{2,8}/', True),
- ('/b*c?d+/', True),
- ('/(capture-match)/', True),
- ('/(?:do-not-capture-match)/', True),
- ('/this|or|that/', True),
- ('/[char-set]/', True),
- ('/[^neg-char_st]/', True),
- ('/^.*$/', True),
- (r'/\n(\f)\0\1\d\b\cm\u1234/', True),
- (r'/^.?([^/\\\n\w]*)a\1+$/.something(or_other) # something more complex', True),
- ("foo = (str) ->\n /'|\"/.test str", True),
- ('a = a / b / c', False),
- ('a = a/b/c', False),
- ('a = a/b/ c', False),
- ('a = a /b/c', False),
- ('a = 1 + /d/.test(a)', True),
-]
+from pygments.lexers.javascript import JavascriptLexer
+from pygments.token import Number
@pytest.fixture(scope='module')
def lexer():
- yield CoffeeScriptLexer()
-
-
-@pytest.mark.parametrize('golden', COFFEE_SLASH_GOLDEN)
-def test_coffee_slashes(lexer, golden):
- input_str, slashes_are_regex_here = golden
- output = list(lexer.get_tokens(input_str))
- print(output)
- for t, s in output:
- if '/' in s:
- is_regex = t is Token.String.Regex
- assert is_regex == slashes_are_regex_here, (t, s)
-
-
-def test_mixed_slashes(lexer):
- fragment = u'a?/foo/:1/2;\n'
- tokens = [
- (Token.Name.Other, u'a'),
- (Token.Operator, u'?'),
- (Token.Literal.String.Regex, u'/foo/'),
- (Token.Operator, u':'),
- (Token.Literal.Number.Integer, u'1'),
- (Token.Operator, u'/'),
- (Token.Literal.Number.Integer, u'2'),
- (Token.Punctuation, u';'),
- (Token.Text, u'\n'),
- ]
- assert list(lexer.get_tokens(fragment)) == tokens
-
-
-def test_beware_infinite_loop(lexer):
- # This demonstrates the case that "This isn't really guarding" comment
- # refers to.
- fragment = '/a/x;\n'
- tokens = [
- (Token.Text, ''),
- (Token.Operator, '/'),
- (Token.Name.Other, 'a'),
- (Token.Operator, '/'),
- (Token.Name.Other, 'x'),
- (Token.Punctuation, ';'),
- (Token.Text, '\n'),
- ]
- assert list(lexer.get_tokens(fragment)) == tokens
+ yield JavascriptLexer()
+
+
+@pytest.mark.parametrize(
+ 'text',
+ (
+ '1', '1.', '.1', '1.1', '1e1', '1E1', '1e+1', '1E-1', '1.e1', '.1e1',
+ '0888', # octal prefix with non-octal numbers
+ )
+)
+def test_float_literal_positive_matches(lexer, text):
+ """Test literals that should be tokenized as float literals."""
+ assert list(lexer.get_tokens(text))[0] == (Number.Float, text)
+
+
+@pytest.mark.parametrize('text', ('.\u0b6a', '.', '1..', '1n', '1ee', '1e', '1e-', '1e--1', '1e++1', '1e1.0'))
+def test_float_literals_negative_matches(lexer, text):
+ """Test text that should **not** be tokenized as float literals."""
+ assert list(lexer.get_tokens(text))[0] != (Number.Float, text)
+
+
+@pytest.mark.parametrize('text', ('0n', '123n'))
+def test_integer_literal_positive_matches(lexer, text):
+ """Test literals that should be tokenized as integer literals."""
+ assert list(lexer.get_tokens(text))[0] == (Number.Integer, text)
+
+
+@pytest.mark.parametrize('text', ('1N', '1', '1.0'))
+def test_integer_literals_negative_matches(lexer, text):
+ """Test text that should **not** be tokenized as integer literals."""
+ assert list(lexer.get_tokens(text))[0] != (Number.Integer, text)
+
+
+@pytest.mark.parametrize('text', ('0b01', '0B10n'))
+def test_binary_literal_positive_matches(lexer, text):
+ """Test literals that should be tokenized as binary literals."""
+ assert list(lexer.get_tokens(text))[0] == (Number.Bin, text)
+
+
+@pytest.mark.parametrize('text', ('0b0N', '0b', '0bb', '0b2'))
+def test_binary_literals_negative_matches(lexer, text):
+ """Test text that should **not** be tokenized as binary literals."""
+ assert list(lexer.get_tokens(text))[0] != (Number.Bin, text)
+
+
+@pytest.mark.parametrize('text', ('017', '071n', '0o11', '0O77n'))
+def test_octal_literal_positive_matches(lexer, text):
+ """Test literals that should be tokenized as octal literals."""
+ assert list(lexer.get_tokens(text))[0] == (Number.Oct, text)
+
+
+@pytest.mark.parametrize('text', ('01N', '089', '098n', '0o', '0OO', '0o88', '0O88n'))
+def test_octal_literals_negative_matches(lexer, text):
+ """Test text that should **not** be tokenized as octal literals."""
+ assert list(lexer.get_tokens(text))[0] != (Number.Oct, text)
+
+
+@pytest.mark.parametrize('text', ('0x01', '0Xefn', '0x0EF'))
+def test_hexadecimal_literal_positive_matches(lexer, text):
+ """Test literals that should be tokenized as hexadecimal literals."""
+ assert list(lexer.get_tokens(text))[0] == (Number.Hex, text)
+
+
+@pytest.mark.parametrize('text', ('0x0N', '0x', '0Xx', '0xg', '0xhn'))
+def test_hexadecimal_literals_negative_matches(lexer, text):
+ """Test text that should **not** be tokenized as hexadecimal literals."""
+ assert list(lexer.get_tokens(text))[0] != (Number.Hex, text)
diff --git a/tests/test_julia.py b/tests/test_julia.py
index 3f115931..e041377b 100644
--- a/tests/test_julia.py
+++ b/tests/test_julia.py
@@ -3,7 +3,7 @@
Julia Tests
~~~~~~~~~~~
- :copyright: Copyright 2006-2019 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2020 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
diff --git a/tests/test_kotlin.py b/tests/test_kotlin.py
index 69b19c17..2f0eb376 100644
--- a/tests/test_kotlin.py
+++ b/tests/test_kotlin.py
@@ -3,7 +3,7 @@
Basic JavaLexer Test
~~~~~~~~~~~~~~~~~~~~
- :copyright: Copyright 2006-2019 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2020 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
diff --git a/tests/test_latex_formatter.py b/tests/test_latex_formatter.py
index 2a1c911e..64d99c6a 100644
--- a/tests/test_latex_formatter.py
+++ b/tests/test_latex_formatter.py
@@ -3,7 +3,7 @@
Pygments LaTeX formatter tests
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
- :copyright: Copyright 2006-2019 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2020 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
diff --git a/tests/test_lexers_other.py b/tests/test_lexers_other.py
index 70ffba17..1afefc98 100644
--- a/tests/test_lexers_other.py
+++ b/tests/test_lexers_other.py
@@ -3,7 +3,7 @@
Tests for other lexers
~~~~~~~~~~~~~~~~~~~~~~
- :copyright: Copyright 2006-2019 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2020 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
diff --git a/tests/test_make.py b/tests/test_make.py
new file mode 100644
index 00000000..b311850c
--- /dev/null
+++ b/tests/test_make.py
@@ -0,0 +1,29 @@
+# -*- coding: utf-8 -*-
+"""
+ CMake Tests
+ ~~~~~~~~~~~
+
+ :copyright: Copyright 2006-2020 by the Pygments team, see AUTHORS.
+ :license: BSD, see LICENSE for details.
+"""
+
+from pygments.lexers import CMakeLexer, guess_lexer
+
+
+def test_guess_cmake_lexer_from_header():
+ headers = [
+ "CMAKE_MINIMUM_REQUIRED(VERSION 2.6 FATAL_ERROR)",
+ "cmake_minimum_required(version 3.13) # CMake version check",
+ " CMAKE_MINIMUM_REQUIRED\t( VERSION 2.6 FATAL_ERROR ) ",
+ ]
+ for header in headers:
+ code = '\n'.join([
+ header,
+ 'project(example)',
+ 'set(CMAKE_CXX_STANDARD 14)',
+ 'set(SOURCE_FILES main.cpp)',
+ 'add_executable(example ${SOURCE_FILES})',
+ ])
+ lexer = guess_lexer(code)
+ assert isinstance(lexer, CMakeLexer), \
+ "header must be detected as CMake: %r" % header
diff --git a/tests/test_markdown_lexer.py b/tests/test_markdown_lexer.py
index 524becd7..362e6da4 100644
--- a/tests/test_markdown_lexer.py
+++ b/tests/test_markdown_lexer.py
@@ -3,7 +3,7 @@
Pygments Markdown lexer tests
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
- :copyright: Copyright 2006-2019 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2020 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
diff --git a/tests/test_matlab.py b/tests/test_matlab.py
index 0ac1df95..4a94f351 100644
--- a/tests/test_matlab.py
+++ b/tests/test_matlab.py
@@ -198,7 +198,6 @@ def test_command_mode(lexer):
(Token.Name, 'help'),
(Token.Text, ' '),
(Token.Literal.String, 'sin'),
- (Token.Punctuation, ''),
(Token.Text, '\n'),
]
assert list(lexer.get_tokens(fragment)) == tokens
diff --git a/tests/test_modeline.py b/tests/test_modeline.py
index 4d85a436..f7dbe8c1 100644
--- a/tests/test_modeline.py
+++ b/tests/test_modeline.py
@@ -3,7 +3,7 @@
Tests for the vim modeline feature
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
- :copyright: Copyright 2006-2019 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2020 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
diff --git a/tests/test_mysql.py b/tests/test_mysql.py
new file mode 100644
index 00000000..9b5e2b8c
--- /dev/null
+++ b/tests/test_mysql.py
@@ -0,0 +1,249 @@
+# -*- coding: utf-8 -*-
+"""
+ Pygments MySQL lexer tests
+ ~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+ :copyright: Copyright 2006-2020 by the Pygments team, see AUTHORS.
+ :license: BSD, see LICENSE for details.
+"""
+
+import pytest
+
+from pygments.lexers.sql import MySqlLexer
+
+from pygments.token import \
+ Comment, \
+ Keyword, \
+ Literal, \
+ Name, \
+ Number, \
+ Operator, \
+ Punctuation, \
+ String, \
+ Text
+
+
+@pytest.fixture(scope='module')
+def lexer():
+ yield MySqlLexer()
+
+
+@pytest.mark.parametrize('text', ('123',))
+def test_integer_literals(lexer, text):
+ assert list(lexer.get_tokens(text))[0] == (Number.Integer, text)
+
+
+@pytest.mark.parametrize(
+ 'text',
+ (
+ '.123', '1.23', '123.',
+ '1e10', '1.0e10', '1.e-10', '.1e+10',
+ ),
+)
+def test_float_literals(lexer, text):
+ assert list(lexer.get_tokens(text))[0] == (Number.Float, text)
+
+
+@pytest.mark.parametrize('text', ("X'0af019'", "x'0AF019'", "0xaf019"))
+def test_hexadecimal_literals(lexer, text):
+ assert list(lexer.get_tokens(text))[0] == (Number.Hex, text)
+
+
+@pytest.mark.parametrize('text', ("B'010'", "b'010'", "0b010"))
+def test_binary_literals(lexer, text):
+ assert list(lexer.get_tokens(text))[0] == (Number.Bin, text)
+
+
+@pytest.mark.parametrize(
+ 'text',
+ (
+ "{d'2020-01-01'}", "{ d ' 2020^01@01 ' }",
+ "{t'8 9:10:11'}", "{ t ' 09:10:11.12 ' }", "{ t ' 091011 ' }",
+ '{ts"2020-01-01 09:10:11"}', "{ ts ' 2020@01/01 09:10:11 ' }",
+ ),
+)
+def test_temporal_literals(lexer, text):
+ assert list(lexer.get_tokens(text))[0] == (Literal.Date, text)
+
+
+@pytest.mark.parametrize(
+ 'text, expected_types',
+ (
+ (r"'a'", (String.Single,) * 3),
+ (r"""'""'""", (String.Single,) * 3),
+ (r"''''", (String.Single, String.Escape, String.Single)),
+ (r"'\''", (String.Single, String.Escape, String.Single)),
+ (r'"a"', (String.Double,) * 3),
+ (r'''"''"''', (String.Double,) * 3),
+ (r'""""', (String.Double, String.Escape, String.Double)),
+ (r'"\""', (String.Double, String.Escape, String.Double)),
+ ),
+)
+def test_string_literals(lexer, text, expected_types):
+ tokens = list(lexer.get_tokens(text))[:len(expected_types)]
+ assert all(t[0] == e for t, e in zip(tokens, expected_types))
+
+
+@pytest.mark.parametrize(
+ 'text',
+ (
+ "@a", "@1", "@._.$",
+ "@'?'", """@'abc''def"`ghi'""",
+ '@"#"', '''@"abc""def'`ghi"''',
+ '@`^`', """@`abc``def'"ghi`""",
+ "@@timestamp",
+ "@@session.auto_increment_offset",
+ "@@global.auto_increment_offset",
+ "@@persist.auto_increment_offset",
+ "@@persist_only.auto_increment_offset",
+ '?',
+ ),
+)
+def test_variables(lexer, text):
+ tokens = list(lexer.get_tokens(text))
+ assert all(t[0] == Name.Variable for t in tokens[:-1])
+ assert ''.join([t[1] for t in tokens]).strip() == text.strip()
+
+
+@pytest.mark.parametrize('text', ('true', 'false', 'null', 'unknown'))
+def test_constants(lexer, text):
+ assert list(lexer.get_tokens(text))[0] == (Name.Constant, text)
+
+
+@pytest.mark.parametrize('text', ('-- abc', '--\tabc', '#abc'))
+def test_comments_single_line(lexer, text):
+ # Test the standalone comment.
+ tokens = list(lexer.get_tokens(text))
+ assert tokens[0] == (Comment.Single, text)
+
+ # Test the comment with mixed tokens.
+ tokens = list(lexer.get_tokens('select' + text + '\nselect'))
+ assert tokens[0] == (Keyword, 'select')
+ assert tokens[1] == (Comment.Single, text)
+ assert tokens[-2] == (Keyword, 'select')
+
+
+@pytest.mark.parametrize(
+ 'text',
+ (
+ '/**/a', '/*a*b/c*/a', '/*\nabc\n*/a',
+ '/* /* */a'
+ )
+)
+def test_comments_multi_line(lexer, text):
+ tokens = list(lexer.get_tokens(text))
+ assert all(token[0] == Comment.Multiline for token in tokens[:-2])
+ assert ''.join(token[1] for token in tokens).strip() == text.strip()
+
+ # Validate nested comments are not supported.
+ assert tokens[-2][0] != Comment.Multiline
+
+
+@pytest.mark.parametrize(
+ 'text', ('BKA', 'SEMIJOIN'))
+def test_optimizer_hints(lexer, text):
+ good = '/*+ ' + text + '(), */'
+ ignore = '/* ' + text + ' */'
+ bad1 = '/*+ a' + text + '() */'
+ bad2 = '/*+ ' + text + 'a */'
+ assert (Comment.Preproc, text) in lexer.get_tokens(good)
+ assert (Comment.Preproc, text) not in lexer.get_tokens(ignore)
+ assert (Comment.Preproc, text) not in lexer.get_tokens(bad1)
+ assert (Comment.Preproc, text) not in lexer.get_tokens(bad2)
+
+
+@pytest.mark.parametrize(
+ 'text, expected_types',
+ (
+ # SET exceptions
+ ('SET', (Keyword,)),
+ ('SET abc = 1;', (Keyword,)),
+ ('SET @abc = 1;', (Keyword,)),
+ ('CHARACTER SET latin1', (Keyword, Text, Keyword)),
+ ('SET("r", "g", "b")', (Keyword.Type, Punctuation)),
+ ('SET ("r", "g", "b")', (Keyword.Type, Text, Punctuation)),
+ ),
+)
+def test_exceptions(lexer, text, expected_types):
+ tokens = list(lexer.get_tokens(text))[:len(expected_types)]
+ assert all(t[0] == e for t, e in zip(tokens, expected_types))
+
+
+@pytest.mark.parametrize(
+ 'text',
+ (
+ 'SHOW', 'CREATE', 'ALTER', 'DROP',
+ 'SELECT', 'INSERT', 'UPDATE', 'DELETE',
+ 'WHERE', 'GROUP', 'ORDER', 'BY', 'AS',
+ 'DISTINCT', 'JOIN', 'WITH', 'RECURSIVE',
+ 'PARTITION', 'NTILE', 'MASTER_PASSWORD', 'XA',
+ 'REQUIRE_TABLE_PRIMARY_KEY_CHECK', 'STREAM',
+ ),
+)
+def test_keywords(lexer, text):
+ assert list(lexer.get_tokens(text))[0] == (Keyword, text)
+
+
+@pytest.mark.parametrize(
+ 'text',
+ (
+ # Standard
+ 'INT(', 'VARCHAR(', 'ENUM(', 'DATETIME', 'GEOMETRY', 'POINT', 'JSON',
+ # Aliases and compatibility
+ 'FIXED', 'MEDIUMINT', 'INT3', 'REAL', 'SERIAL',
+ 'LONG', 'NATIONAL', 'PRECISION', 'VARYING',
+ ),
+)
+def test_data_types(lexer, text):
+ assert list(lexer.get_tokens(text))[0] == (Keyword.Type, text.strip('('))
+
+
+@pytest.mark.parametrize(
+ 'text',
+ (
+ # Common
+ 'CAST', 'CONCAT_WS', 'DAYNAME', 'IFNULL', 'NOW', 'SUBSTR',
+ # Less common
+ 'CAN_ACCESS_COLUMN', 'JSON_CONTAINS_PATH', 'ST_GEOMFROMGEOJSON',
+ ),
+)
+def test_functions(lexer, text):
+ assert list(lexer.get_tokens(text + '('))[0] == (Name.Function, text)
+ assert list(lexer.get_tokens(text + ' ('))[0] == (Name.Function, text)
+
+
+@pytest.mark.parametrize(
+ 'text',
+ (
+ 'abc_$123', '上市年限', 'ひらがな',
+ '`a`', '`上市年限`', '`ひらがな`', '`select`', '`concat(`',
+ '````', r'`\``', r'`\\`',
+ '`-- `', '`/*`', '`#`',
+ ),
+)
+def test_schema_object_names(lexer, text):
+ tokens = list(lexer.get_tokens(text))[:-1]
+ assert all(token[0] == Name for token in tokens)
+ assert ''.join(token[1] for token in tokens) == text
+
+
+@pytest.mark.parametrize(
+ 'text',
+ ('+', '*', '/', '%', '&&', ':=', '!', '<', '->>', '^', '|', '~'),
+)
+def test_operators(lexer, text):
+ assert list(lexer.get_tokens(text))[0] == (Operator, text)
+
+
+@pytest.mark.parametrize(
+ 'text, expected_types',
+ (
+ ('abc.efg', (Name, Punctuation, Name)),
+ ('abc,efg', (Name, Punctuation, Name)),
+ ('MAX(abc)', (Name.Function, Punctuation, Name, Punctuation)),
+ ('efg;', (Name, Punctuation)),
+ ),
+)
+def test_punctuation(lexer, text, expected_types):
+ tokens = list(lexer.get_tokens(text))[:len(expected_types)]
+ assert all(t[0] == e for t, e in zip(tokens, expected_types))
diff --git a/tests/test_objectiveclexer.py b/tests/test_objectiveclexer.py
index 54f31db4..31f833cf 100644
--- a/tests/test_objectiveclexer.py
+++ b/tests/test_objectiveclexer.py
@@ -3,7 +3,7 @@
Basic CLexer Test
~~~~~~~~~~~~~~~~~
- :copyright: Copyright 2006-2019 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2020 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
diff --git a/tests/test_perllexer.py b/tests/test_perllexer.py
index 8849bacf..6ec29993 100644
--- a/tests/test_perllexer.py
+++ b/tests/test_perllexer.py
@@ -3,7 +3,7 @@
Pygments regex lexer tests
~~~~~~~~~~~~~~~~~~~~~~~~~~
- :copyright: Copyright 2006-2019 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2020 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
diff --git a/tests/test_php.py b/tests/test_php.py
index 1660183a..890f88c0 100644
--- a/tests/test_php.py
+++ b/tests/test_php.py
@@ -3,7 +3,7 @@
PHP Tests
~~~~~~~~~
- :copyright: Copyright 2006-2019 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2020 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
diff --git a/tests/test_praat.py b/tests/test_praat.py
index 61ddfd57..e3997671 100644
--- a/tests/test_praat.py
+++ b/tests/test_praat.py
@@ -3,7 +3,7 @@
Praat lexer tests
~~~~~~~~~~~~~~~~~
- :copyright: Copyright 2006-2019 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2020 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
diff --git a/tests/test_promql.py b/tests/test_promql.py
new file mode 100644
index 00000000..cd02a57a
--- /dev/null
+++ b/tests/test_promql.py
@@ -0,0 +1,310 @@
+# -*- coding: utf-8 -*-
+"""
+ Basic PromQLLexer Tests
+ ~~~~~~~~~~~~~~~~~~~~~~~
+
+ :copyright: Copyright 2006-2020 by the Pygments team, see AUTHORS.
+ :license: BSD, see LICENSE for details.
+"""
+
+import pytest
+
+from pygments.lexers import PromQLLexer
+from pygments.token import Token
+
+
+@pytest.fixture(scope="module")
+def lexer():
+ yield PromQLLexer()
+
+
+def test_metric(lexer):
+ fragment = u"go_gc_duration_seconds"
+ tokens = [
+ (Token.Name.Variable, "go_gc_duration_seconds"),
+ (Token.Text.Whitespace, "\n"),
+ ]
+ assert list(lexer.get_tokens(fragment)) == tokens
+
+
+def test_metric_one_label(lexer):
+ fragment = u'go_gc_duration_seconds{instance="localhost:9090"}'
+ tokens = [
+ (Token.Name.Variable, "go_gc_duration_seconds"),
+ (Token.Punctuation, "{"),
+ (Token.Name.Label, "instance"),
+ (Token.Operator, "="),
+ (Token.Punctuation, '"'),
+ (Token.Literal.String, "localhost:9090"),
+ (Token.Punctuation, '"'),
+ (Token.Punctuation, "}"),
+ (Token.Text.Whitespace, "\n"),
+ ]
+ assert list(lexer.get_tokens(fragment)) == tokens
+
+
+def test_metric_multiple_labels(lexer):
+ fragment = u'go_gc_duration_seconds{instance="localhost:9090",job="alertmanager"}'
+ tokens = [
+ (Token.Name.Variable, "go_gc_duration_seconds"),
+ (Token.Punctuation, "{"),
+ (Token.Name.Label, "instance"),
+ (Token.Operator, "="),
+ (Token.Punctuation, '"'),
+ (Token.Literal.String, "localhost:9090"),
+ (Token.Punctuation, '"'),
+ (Token.Punctuation, ","),
+ (Token.Name.Label, "job"),
+ (Token.Operator, "="),
+ (Token.Punctuation, '"'),
+ (Token.Literal.String, "alertmanager"),
+ (Token.Punctuation, '"'),
+ (Token.Punctuation, "}"),
+ (Token.Text.Whitespace, "\n"),
+ ]
+ assert list(lexer.get_tokens(fragment)) == tokens
+
+
+def test_metric_multiple_labels_with_spaces(lexer):
+ fragment = u'go_gc_duration_seconds{ instance="localhost:9090", job="alertmanager" }'
+ tokens = [
+ (Token.Name.Variable, "go_gc_duration_seconds"),
+ (Token.Punctuation, "{"),
+ (Token.Text.Whitespace, " "),
+ (Token.Name.Label, "instance"),
+ (Token.Operator, "="),
+ (Token.Punctuation, '"'),
+ (Token.Literal.String, "localhost:9090"),
+ (Token.Punctuation, '"'),
+ (Token.Punctuation, ","),
+ (Token.Text.Whitespace, " "),
+ (Token.Name.Label, "job"),
+ (Token.Operator, "="),
+ (Token.Punctuation, '"'),
+ (Token.Literal.String, "alertmanager"),
+ (Token.Punctuation, '"'),
+ (Token.Text.Whitespace, " "),
+ (Token.Punctuation, "}"),
+ (Token.Text.Whitespace, "\n"),
+ ]
+ assert list(lexer.get_tokens(fragment)) == tokens
+
+
+def test_expression_and_comment(lexer):
+ fragment = u'go_gc_duration_seconds{instance="localhost:9090"} # single comment\n'
+ tokens = [
+ (Token.Name.Variable, "go_gc_duration_seconds"),
+ (Token.Punctuation, "{"),
+ (Token.Name.Label, "instance"),
+ (Token.Operator, "="),
+ (Token.Punctuation, '"'),
+ (Token.Literal.String, "localhost:9090"),
+ (Token.Punctuation, '"'),
+ (Token.Punctuation, "}"),
+ (Token.Text.Whitespace, " "),
+ (Token.Comment.Single, "# single comment"),
+ (Token.Text.Whitespace, "\n"),
+ ]
+ assert list(lexer.get_tokens(fragment)) == tokens
+
+
+def test_function_delta(lexer):
+ fragment = u'delta(cpu_temp_celsius{host="zeus"}[2h])'
+ tokens = [
+ (Token.Keyword.Reserved, "delta"),
+ (Token.Operator, "("),
+ (Token.Name.Variable, "cpu_temp_celsius"),
+ (Token.Punctuation, "{"),
+ (Token.Name.Label, "host"),
+ (Token.Operator, "="),
+ (Token.Punctuation, '"'),
+ (Token.Literal.String, "zeus"),
+ (Token.Punctuation, '"'),
+ (Token.Punctuation, "}"),
+ (Token.Punctuation, "["),
+ (Token.Literal.String, "2h"),
+ (Token.Punctuation, "]"),
+ (Token.Operator, ")"),
+ (Token.Text.Whitespace, "\n"),
+ ]
+ assert list(lexer.get_tokens(fragment)) == tokens
+
+
+def test_function_sum_with_args(lexer):
+ fragment = u"sum by (app, proc) (instance_memory_usage_bytes)\n"
+ tokens = [
+ (Token.Keyword, "sum"),
+ (Token.Text.Whitespace, " "),
+ (Token.Keyword, "by"),
+ (Token.Text.Whitespace, " "),
+ (Token.Operator, "("),
+ (Token.Name.Variable, "app"),
+ (Token.Punctuation, ","),
+ (Token.Text.Whitespace, " "),
+ (Token.Name.Variable, "proc"),
+ (Token.Operator, ")"),
+ (Token.Text.Whitespace, " "),
+ (Token.Operator, "("),
+ (Token.Name.Variable, "instance_memory_usage_bytes"),
+ (Token.Operator, ")"),
+ (Token.Text.Whitespace, "\n"),
+ ]
+ assert list(lexer.get_tokens(fragment)) == tokens
+
+
+def test_function_multi_line(lexer):
+ fragment = u"""label_replace(
+ sum by (instance) (
+ irate(node_disk_read_bytes_total[2m])
+ ) / 1024 / 1024,
+ "device",
+ 'disk',
+ "instance",
+ ".*"
+)
+"""
+ tokens = [
+ (Token.Keyword.Reserved, "label_replace"),
+ (Token.Operator, "("),
+ (Token.Text.Whitespace, "\n"),
+ (Token.Text.Whitespace, " "),
+ (Token.Keyword, "sum"),
+ (Token.Text.Whitespace, " "),
+ (Token.Keyword, "by"),
+ (Token.Text.Whitespace, " "),
+ (Token.Operator, "("),
+ (Token.Name.Variable, "instance"),
+ (Token.Operator, ")"),
+ (Token.Text.Whitespace, " "),
+ (Token.Operator, "("),
+ (Token.Text.Whitespace, "\n"),
+ (Token.Text.Whitespace, " "),
+ (Token.Keyword.Reserved, "irate"),
+ (Token.Operator, "("),
+ (Token.Name.Variable, "node_disk_read_bytes_total"),
+ (Token.Punctuation, "["),
+ (Token.Literal.String, "2m"),
+ (Token.Punctuation, "]"),
+ (Token.Operator, ")"),
+ (Token.Text.Whitespace, "\n"),
+ (Token.Text.Whitespace, " "),
+ (Token.Operator, ")"),
+ (Token.Text.Whitespace, " "),
+ (Token.Operator, "/"),
+ (Token.Text.Whitespace, " "),
+ (Token.Literal.Number.Integer, "1024"),
+ (Token.Text.Whitespace, " "),
+ (Token.Operator, "/"),
+ (Token.Text.Whitespace, " "),
+ (Token.Literal.Number.Integer, "1024"),
+ (Token.Punctuation, ","),
+ (Token.Text.Whitespace, "\n"),
+ (Token.Text.Whitespace, " "),
+ (Token.Punctuation, '"'),
+ (Token.Literal.String, "device"),
+ (Token.Punctuation, '"'),
+ (Token.Punctuation, ","),
+ (Token.Text.Whitespace, "\n"),
+ (Token.Text.Whitespace, " "),
+ (Token.Punctuation, "'"),
+ (Token.Literal.String, "disk"),
+ (Token.Punctuation, "'"),
+ (Token.Punctuation, ","),
+ (Token.Text.Whitespace, "\n"),
+ (Token.Text.Whitespace, " "),
+ (Token.Punctuation, '"'),
+ (Token.Literal.String, "instance"),
+ (Token.Punctuation, '"'),
+ (Token.Punctuation, ","),
+ (Token.Text.Whitespace, "\n"),
+ (Token.Text.Whitespace, " "),
+ (Token.Punctuation, '"'),
+ (Token.Literal.String, ".*"),
+ (Token.Punctuation, '"'),
+ (Token.Text.Whitespace, "\n"),
+ (Token.Operator, ")"),
+ (Token.Text.Whitespace, "\n"),
+ ]
+ assert list(lexer.get_tokens(fragment)) == tokens
+
+
+def test_function_multi_line_with_offset(lexer):
+ fragment = u"""label_replace(
+ avg by(instance)
+ (irate(node_cpu_seconds_total{mode = "idle"}[5m] offset 3s)
+ ) * 100,
+ "device",
+ "cpu",
+ "instance",
+ ".*"
+)"""
+ tokens = [
+ (Token.Keyword.Reserved, "label_replace"),
+ (Token.Operator, "("),
+ (Token.Text.Whitespace, "\n"),
+ (Token.Text.Whitespace, " "),
+ (Token.Keyword, "avg"),
+ (Token.Text.Whitespace, " "),
+ (Token.Keyword, "by"),
+ (Token.Operator, "("),
+ (Token.Name.Variable, "instance"),
+ (Token.Operator, ")"),
+ (Token.Text.Whitespace, "\n"),
+ (Token.Text.Whitespace, " "),
+ (Token.Operator, "("),
+ (Token.Keyword.Reserved, "irate"),
+ (Token.Operator, "("),
+ (Token.Name.Variable, "node_cpu_seconds_total"),
+ (Token.Punctuation, "{"),
+ (Token.Name.Label, "mode"),
+ (Token.Text.Whitespace, " "),
+ (Token.Operator, "="),
+ (Token.Text.Whitespace, " "),
+ (Token.Punctuation, '"'),
+ (Token.Literal.String, "idle"),
+ (Token.Punctuation, '"'),
+ (Token.Punctuation, "}"),
+ (Token.Punctuation, "["),
+ (Token.Literal.String, "5m"),
+ (Token.Punctuation, "]"),
+ (Token.Text.Whitespace, " "),
+ (Token.Keyword, "offset"),
+ (Token.Text.Whitespace, " "),
+ (Token.Literal.String, "3s"),
+ (Token.Operator, ")"),
+ (Token.Text.Whitespace, "\n"),
+ (Token.Text.Whitespace, " "),
+ (Token.Operator, ")"),
+ (Token.Text.Whitespace, " "),
+ (Token.Operator, "*"),
+ (Token.Text.Whitespace, " "),
+ (Token.Literal.Number.Integer, "100"),
+ (Token.Punctuation, ","),
+ (Token.Text.Whitespace, "\n"),
+ (Token.Text.Whitespace, " "),
+ (Token.Punctuation, '"'),
+ (Token.Literal.String, "device"),
+ (Token.Punctuation, '"'),
+ (Token.Punctuation, ","),
+ (Token.Text.Whitespace, "\n"),
+ (Token.Text.Whitespace, " "),
+ (Token.Punctuation, '"'),
+ (Token.Literal.String, "cpu"),
+ (Token.Punctuation, '"'),
+ (Token.Punctuation, ","),
+ (Token.Text.Whitespace, "\n"),
+ (Token.Text.Whitespace, " "),
+ (Token.Punctuation, '"'),
+ (Token.Literal.String, "instance"),
+ (Token.Punctuation, '"'),
+ (Token.Punctuation, ","),
+ (Token.Text.Whitespace, "\n"),
+ (Token.Text.Whitespace, " "),
+ (Token.Punctuation, '"'),
+ (Token.Literal.String, ".*"),
+ (Token.Punctuation, '"'),
+ (Token.Text.Whitespace, "\n"),
+ (Token.Operator, ")"),
+ (Token.Text.Whitespace, "\n"),
+ ]
+ assert list(lexer.get_tokens(fragment)) == tokens
diff --git a/tests/test_properties.py b/tests/test_properties.py
index 25368d93..0cdd0e41 100644
--- a/tests/test_properties.py
+++ b/tests/test_properties.py
@@ -3,7 +3,7 @@
Properties Tests
~~~~~~~~~~~~~~~~
- :copyright: Copyright 2006-2019 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2020 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
diff --git a/tests/test_python.py b/tests/test_python.py
index 23bb6ed1..8e53677b 100644
--- a/tests/test_python.py
+++ b/tests/test_python.py
@@ -3,7 +3,7 @@
Python Tests
~~~~~~~~~~~~
- :copyright: Copyright 2006-2019 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2020 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
diff --git a/tests/test_qbasiclexer.py b/tests/test_qbasiclexer.py
index 3c64d69e..fb721437 100644
--- a/tests/test_qbasiclexer.py
+++ b/tests/test_qbasiclexer.py
@@ -3,7 +3,7 @@
Tests for QBasic
~~~~~~~~~~~~~~~~
- :copyright: Copyright 2006-2019 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2020 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
diff --git a/tests/test_r.py b/tests/test_r.py
index 6dcc78e3..2814acd7 100644
--- a/tests/test_r.py
+++ b/tests/test_r.py
@@ -3,7 +3,7 @@
R Tests
~~~~~~~
- :copyright: Copyright 2006-2019 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2020 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
diff --git a/tests/test_regexlexer.py b/tests/test_regexlexer.py
index 4e832361..8e55696c 100644
--- a/tests/test_regexlexer.py
+++ b/tests/test_regexlexer.py
@@ -3,7 +3,7 @@
Pygments regex lexer tests
~~~~~~~~~~~~~~~~~~~~~~~~~~
- :copyright: Copyright 2006-2019 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2020 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
diff --git a/tests/test_regexopt.py b/tests/test_regexopt.py
index 20d48dda..2d210c9b 100644
--- a/tests/test_regexopt.py
+++ b/tests/test_regexopt.py
@@ -3,7 +3,7 @@
Tests for pygments.regexopt
~~~~~~~~~~~~~~~~~~~~~~~~~~~
- :copyright: Copyright 2006-2019 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2020 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
diff --git a/tests/test_rtf_formatter.py b/tests/test_rtf_formatter.py
index 23d6695f..6f60d06e 100644
--- a/tests/test_rtf_formatter.py
+++ b/tests/test_rtf_formatter.py
@@ -3,7 +3,7 @@
Pygments RTF formatter tests
~~~~~~~~~~~~~~~~~~~~~~~~~~~~
- :copyright: Copyright 2006-2019 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2020 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
diff --git a/tests/test_ruby.py b/tests/test_ruby.py
index a6da4bf9..86a9ee77 100644
--- a/tests/test_ruby.py
+++ b/tests/test_ruby.py
@@ -3,7 +3,7 @@
Basic RubyLexer Test
~~~~~~~~~~~~~~~~~~~~
- :copyright: Copyright 2006-2019 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2020 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
diff --git a/tests/test_shell.py b/tests/test_shell.py
index 753a37e3..79f78b3a 100644
--- a/tests/test_shell.py
+++ b/tests/test_shell.py
@@ -3,7 +3,7 @@
Basic Shell Tests
~~~~~~~~~~~~~~~~~
- :copyright: Copyright 2006-2019 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2020 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
@@ -179,6 +179,24 @@ def test_powershell_session(lexer_powershell_session):
]
assert list(lexer_powershell_session.get_tokens(fragment)) == tokens
+ fragment = u'PS> Get-ChildItem\n'
+ tokens = [
+ (Token.Name.Builtin, u''),
+ (Token.Generic.Prompt, u'PS> '),
+ (Token.Name.Builtin, u'Get-ChildItem'),
+ (Token.Text, u'\n')
+ ]
+ assert list(lexer_powershell_session.get_tokens(fragment)) == tokens
+
+ fragment = u'PS > Get-ChildItem\n'
+ tokens = [
+ (Token.Name.Builtin, u''),
+ (Token.Generic.Prompt, u'PS > '),
+ (Token.Name.Builtin, u'Get-ChildItem'),
+ (Token.Text, u'\n')
+ ]
+ assert list(lexer_powershell_session.get_tokens(fragment)) == tokens
+
def test_powershell_remoting_session(lexer_powershell_session):
fragment = u'[Long-NetBIOS-Hostname]: PS C:\\> Get-ChildItem\n'
diff --git a/tests/test_smarty.py b/tests/test_smarty.py
index 2d172559..2b45abee 100644
--- a/tests/test_smarty.py
+++ b/tests/test_smarty.py
@@ -3,7 +3,7 @@
Basic SmartyLexer Test
~~~~~~~~~~~~~~~~~~~~~~
- :copyright: Copyright 2006-2019 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2020 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
diff --git a/tests/test_sql.py b/tests/test_sql.py
index efd63be6..ead06de7 100644
--- a/tests/test_sql.py
+++ b/tests/test_sql.py
@@ -3,7 +3,7 @@
Pygments SQL lexers tests
~~~~~~~~~~~~~~~~~~~~~~~~~
- :copyright: Copyright 2006-2019 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2020 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
diff --git a/tests/test_terminal_formatter.py b/tests/test_terminal_formatter.py
index 02076f19..e08a3b21 100644
--- a/tests/test_terminal_formatter.py
+++ b/tests/test_terminal_formatter.py
@@ -3,7 +3,7 @@
Pygments terminal formatter tests
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
- :copyright: Copyright 2006-2019 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2020 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
diff --git a/tests/test_textfmts.py b/tests/test_textfmts.py
index 5a4f56c4..f4ce9b33 100644
--- a/tests/test_textfmts.py
+++ b/tests/test_textfmts.py
@@ -3,7 +3,7 @@
Basic Tests for textfmts
~~~~~~~~~~~~~~~~~~~~~~~~
- :copyright: Copyright 2006-2019 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2020 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
diff --git a/tests/test_token.py b/tests/test_token.py
index 11e4d375..6fe99014 100644
--- a/tests/test_token.py
+++ b/tests/test_token.py
@@ -3,7 +3,7 @@
Test suite for the token module
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
- :copyright: Copyright 2006-2019 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2020 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
diff --git a/tests/test_unistring.py b/tests/test_unistring.py
index 3a0acec9..5df61e69 100644
--- a/tests/test_unistring.py
+++ b/tests/test_unistring.py
@@ -3,7 +3,7 @@
Test suite for the unistring module
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
- :copyright: Copyright 2006-2019 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2020 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
diff --git a/tests/test_using_api.py b/tests/test_using_api.py
index b5310aa8..e7932a7b 100644
--- a/tests/test_using_api.py
+++ b/tests/test_using_api.py
@@ -3,7 +3,7 @@
Pygments tests for using()
~~~~~~~~~~~~~~~~~~~~~~~~~~
- :copyright: Copyright 2006-2019 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2020 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
diff --git a/tests/test_util.py b/tests/test_util.py
index 94985a25..81b3b054 100644
--- a/tests/test_util.py
+++ b/tests/test_util.py
@@ -3,7 +3,7 @@
Test suite for the util module
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
- :copyright: Copyright 2006-2019 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2020 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
diff --git a/tests/test_whiley.py b/tests/test_whiley.py
index 84fef25b..e844dafb 100644
--- a/tests/test_whiley.py
+++ b/tests/test_whiley.py
@@ -3,7 +3,7 @@
Whiley Test
~~~~~~~~~~~
- :copyright: Copyright 2006-2019 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2020 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
diff --git a/tox.ini b/tox.ini
index b7c91e7d..fc15a81d 100644
--- a/tox.ini
+++ b/tox.ini
@@ -1,5 +1,5 @@
[tox]
-envlist = py27, py35, py36, py37, py38, pypy, pypy3
+envlist = py35, py36, py37, py38, pypy3
[testenv]
deps =