summaryrefslogtreecommitdiff
diff options
context:
space:
mode:
-rw-r--r--AUTHORS1
-rw-r--r--pygments/lexers/_mapping.py1
-rw-r--r--pygments/lexers/promql.py189
-rw-r--r--tests/examplefiles/example.promql8
-rw-r--r--tests/test_promql.py310
5 files changed, 509 insertions, 0 deletions
diff --git a/AUTHORS b/AUTHORS
index 5f234f66..1afd7ffc 100644
--- a/AUTHORS
+++ b/AUTHORS
@@ -191,6 +191,7 @@ Other contributors, listed alphabetically, are:
* René Schwaiger -- Rainbow Dash style
* Sebastian Schweizer -- Whiley lexer
* Tassilo Schweyer -- Io, MOOCode lexers
+* Pablo Seminario -- PromQL lexer
* Ted Shaw -- AutoIt lexer
* Joerg Sieker -- ABAP lexer
* Robert Simmons -- Standard ML lexer
diff --git a/pygments/lexers/_mapping.py b/pygments/lexers/_mapping.py
index dad78679..1c346c37 100644
--- a/pygments/lexers/_mapping.py
+++ b/pygments/lexers/_mapping.py
@@ -347,6 +347,7 @@ LEXERS = {
'PowerShellSessionLexer': ('pygments.lexers.shell', 'PowerShell Session', ('ps1con',), (), ()),
'PraatLexer': ('pygments.lexers.praat', 'Praat', ('praat',), ('*.praat', '*.proc', '*.psc'), ()),
'PrologLexer': ('pygments.lexers.prolog', 'Prolog', ('prolog',), ('*.ecl', '*.prolog', '*.pro', '*.pl'), ('text/x-prolog',)),
+ 'PromQLLexer': ('pygments.lexers.promql', 'PromQL', ('promql',), ('*.promql',), ()),
'PropertiesLexer': ('pygments.lexers.configs', 'Properties', ('properties', 'jproperties'), ('*.properties',), ('text/x-java-properties',)),
'ProtoBufLexer': ('pygments.lexers.dsls', 'Protocol Buffer', ('protobuf', 'proto'), ('*.proto',), ()),
'PugLexer': ('pygments.lexers.html', 'Pug', ('pug', 'jade'), ('*.pug', '*.jade'), ('text/x-pug', 'text/x-jade')),
diff --git a/pygments/lexers/promql.py b/pygments/lexers/promql.py
new file mode 100644
index 00000000..6359fb09
--- /dev/null
+++ b/pygments/lexers/promql.py
@@ -0,0 +1,189 @@
+# -*- coding: utf-8 -*-
+"""
+ pygments.lexers.promql
+ ~~~~~~~~~~~~~~~~~~~~~~
+
+ Lexer for Prometheus Query Language.
+
+ :copyright: Copyright 2006-2020 by the Pygments team, see AUTHORS.
+ :license: BSD, see LICENSE for details.
+"""
+
+from pygments.lexer import RegexLexer, bygroups, default, words
+from pygments.token import (
+ Comment,
+ Keyword,
+ Name,
+ Number,
+ Operator,
+ Punctuation,
+ String,
+ Whitespace,
+)
+
+__all__ = ["PromQLLexer"]
+
+
+class PromQLLexer(RegexLexer):
+ """
+ For `PromQL <https://prometheus.io/docs/prometheus/latest/querying/basics/>`_ queries.
+
+ For details about the grammar see:
+ https://github.com/prometheus/prometheus/tree/master/promql/parser
+
+    .. versionadded:: 2.7
+ """
+
+ name = "PromQL"
+ aliases = ["promql"]
+ filenames = ["*.promql"]
+
+ base_keywords = (
+ words(
+ (
+ "bool",
+ "by",
+ "group_left",
+ "group_right",
+ "ignoring",
+ "offset",
+ "on",
+ "without",
+ ),
+ suffix=r"\b",
+ ),
+ Keyword,
+ )
+
+ aggregator_keywords = (
+ words(
+ (
+ "sum",
+ "min",
+ "max",
+ "avg",
+ "group",
+ "stddev",
+ "stdvar",
+ "count",
+ "count_values",
+ "bottomk",
+ "topk",
+ "quantile",
+ ),
+ suffix=r"\b",
+ ),
+ Keyword,
+ )
+
+ function_keywords = (
+ words(
+ (
+ "abs",
+ "absent",
+ "absent_over_time",
+ "avg_over_time",
+ "ceil",
+ "changes",
+ "clamp_max",
+ "clamp_min",
+ "count_over_time",
+ "day_of_month",
+ "day_of_week",
+ "days_in_month",
+ "delta",
+ "deriv",
+ "exp",
+ "floor",
+ "histogram_quantile",
+ "holt_winters",
+ "hour",
+ "idelta",
+ "increase",
+ "irate",
+ "label_join",
+ "label_replace",
+ "ln",
+ "log10",
+ "log2",
+ "max_over_time",
+ "min_over_time",
+ "minute",
+ "month",
+ "predict_linear",
+ "quantile_over_time",
+ "rate",
+ "resets",
+ "round",
+ "scalar",
+ "sort",
+ "sort_desc",
+ "sqrt",
+ "stddev_over_time",
+ "stdvar_over_time",
+ "sum_over_time",
+ "time",
+ "timestamp",
+ "vector",
+ "year",
+ ),
+ suffix=r"\b",
+ ),
+ Keyword.Reserved,
+ )
+
+ tokens = {
+ "root": [
+ (r"\n", Whitespace),
+ (r"\s+", Whitespace),
+ (r",", Punctuation),
+ # Keywords
+ base_keywords,
+ aggregator_keywords,
+ function_keywords,
+ # Offsets
+            (r"[1-9][0-9]*[smhdwy]", String),
+ # Numbers
+ (r"-?[0-9]+\.[0-9]+", Number.Float),
+ (r"-?[0-9]+", Number.Integer),
+ # Comments
+ (r"#.*?$", Comment.Single),
+ # Operators
+ (r"(\+|\-|\*|\/|\%|\^)", Operator),
+ (r"==|!=|>=|<=|<|>", Operator),
+            (r"(?:and|or|unless)\b", Operator.Word),
+ # Metrics
+            (r"[_a-zA-Z][_a-zA-Z0-9]*", Name.Variable),
+ # Params
+ (r'(["\'])(.*?)(["\'])', bygroups(Punctuation, String, Punctuation)),
+ # Other states
+ (r"\(", Operator, "function"),
+ (r"\)", Operator),
+ (r"{", Punctuation, "labels"),
+ (r"\[", Punctuation, "range"),
+ ],
+ "labels": [
+ (r"}", Punctuation, "#pop"),
+ (r"\n", Whitespace),
+ (r"\s+", Whitespace),
+ (r",", Punctuation),
+ (
+            r'([_a-zA-Z][_a-zA-Z0-9]*?)(\s*?)(=~|!~|!=|=)(\s*?)(")(.*?)(")',
+ bygroups(
+ Name.Label,
+ Whitespace,
+ Operator,
+ Whitespace,
+ Punctuation,
+ String,
+ Punctuation,
+ ),
+ ),
+ ],
+        "range": [(r"\]", Punctuation, "#pop"), (r"[1-9][0-9]*[smhdwy]", String)],
+ "function": [
+ (r"\)", Operator, "#pop"),
+ (r"\(", Operator, "#push"),
+ default("#pop"),
+ ],
+ }
diff --git a/tests/examplefiles/example.promql b/tests/examplefiles/example.promql
new file mode 100644
index 00000000..e2fee087
--- /dev/null
+++ b/tests/examplefiles/example.promql
@@ -0,0 +1,8 @@
+# A metric with label filtering
+go_gc_duration_seconds{instance="localhost:9090", job="alertmanager"}
+
+# Aggregation operators
+sum by (app, proc) (
+ instance_memory_limit_bytes - instance_memory_usage_bytes
+) / 1024 / 1024
+
diff --git a/tests/test_promql.py b/tests/test_promql.py
new file mode 100644
index 00000000..cd02a57a
--- /dev/null
+++ b/tests/test_promql.py
@@ -0,0 +1,310 @@
+# -*- coding: utf-8 -*-
+"""
+ Basic PromQLLexer Tests
+ ~~~~~~~~~~~~~~~~~~~~~~~
+
+ :copyright: Copyright 2006-2020 by the Pygments team, see AUTHORS.
+ :license: BSD, see LICENSE for details.
+"""
+
+import pytest
+
+from pygments.lexers import PromQLLexer
+from pygments.token import Token
+
+
+@pytest.fixture(scope="module")
+def lexer():
+ yield PromQLLexer()
+
+
+def test_metric(lexer):
+ fragment = u"go_gc_duration_seconds"
+ tokens = [
+ (Token.Name.Variable, "go_gc_duration_seconds"),
+ (Token.Text.Whitespace, "\n"),
+ ]
+ assert list(lexer.get_tokens(fragment)) == tokens
+
+
+def test_metric_one_label(lexer):
+ fragment = u'go_gc_duration_seconds{instance="localhost:9090"}'
+ tokens = [
+ (Token.Name.Variable, "go_gc_duration_seconds"),
+ (Token.Punctuation, "{"),
+ (Token.Name.Label, "instance"),
+ (Token.Operator, "="),
+ (Token.Punctuation, '"'),
+ (Token.Literal.String, "localhost:9090"),
+ (Token.Punctuation, '"'),
+ (Token.Punctuation, "}"),
+ (Token.Text.Whitespace, "\n"),
+ ]
+ assert list(lexer.get_tokens(fragment)) == tokens
+
+
+def test_metric_multiple_labels(lexer):
+ fragment = u'go_gc_duration_seconds{instance="localhost:9090",job="alertmanager"}'
+ tokens = [
+ (Token.Name.Variable, "go_gc_duration_seconds"),
+ (Token.Punctuation, "{"),
+ (Token.Name.Label, "instance"),
+ (Token.Operator, "="),
+ (Token.Punctuation, '"'),
+ (Token.Literal.String, "localhost:9090"),
+ (Token.Punctuation, '"'),
+ (Token.Punctuation, ","),
+ (Token.Name.Label, "job"),
+ (Token.Operator, "="),
+ (Token.Punctuation, '"'),
+ (Token.Literal.String, "alertmanager"),
+ (Token.Punctuation, '"'),
+ (Token.Punctuation, "}"),
+ (Token.Text.Whitespace, "\n"),
+ ]
+ assert list(lexer.get_tokens(fragment)) == tokens
+
+
+def test_metric_multiple_labels_with_spaces(lexer):
+ fragment = u'go_gc_duration_seconds{ instance="localhost:9090", job="alertmanager" }'
+ tokens = [
+ (Token.Name.Variable, "go_gc_duration_seconds"),
+ (Token.Punctuation, "{"),
+ (Token.Text.Whitespace, " "),
+ (Token.Name.Label, "instance"),
+ (Token.Operator, "="),
+ (Token.Punctuation, '"'),
+ (Token.Literal.String, "localhost:9090"),
+ (Token.Punctuation, '"'),
+ (Token.Punctuation, ","),
+ (Token.Text.Whitespace, " "),
+ (Token.Name.Label, "job"),
+ (Token.Operator, "="),
+ (Token.Punctuation, '"'),
+ (Token.Literal.String, "alertmanager"),
+ (Token.Punctuation, '"'),
+ (Token.Text.Whitespace, " "),
+ (Token.Punctuation, "}"),
+ (Token.Text.Whitespace, "\n"),
+ ]
+ assert list(lexer.get_tokens(fragment)) == tokens
+
+
+def test_expression_and_comment(lexer):
+ fragment = u'go_gc_duration_seconds{instance="localhost:9090"} # single comment\n'
+ tokens = [
+ (Token.Name.Variable, "go_gc_duration_seconds"),
+ (Token.Punctuation, "{"),
+ (Token.Name.Label, "instance"),
+ (Token.Operator, "="),
+ (Token.Punctuation, '"'),
+ (Token.Literal.String, "localhost:9090"),
+ (Token.Punctuation, '"'),
+ (Token.Punctuation, "}"),
+ (Token.Text.Whitespace, " "),
+ (Token.Comment.Single, "# single comment"),
+ (Token.Text.Whitespace, "\n"),
+ ]
+ assert list(lexer.get_tokens(fragment)) == tokens
+
+
+def test_function_delta(lexer):
+ fragment = u'delta(cpu_temp_celsius{host="zeus"}[2h])'
+ tokens = [
+ (Token.Keyword.Reserved, "delta"),
+ (Token.Operator, "("),
+ (Token.Name.Variable, "cpu_temp_celsius"),
+ (Token.Punctuation, "{"),
+ (Token.Name.Label, "host"),
+ (Token.Operator, "="),
+ (Token.Punctuation, '"'),
+ (Token.Literal.String, "zeus"),
+ (Token.Punctuation, '"'),
+ (Token.Punctuation, "}"),
+ (Token.Punctuation, "["),
+ (Token.Literal.String, "2h"),
+ (Token.Punctuation, "]"),
+ (Token.Operator, ")"),
+ (Token.Text.Whitespace, "\n"),
+ ]
+ assert list(lexer.get_tokens(fragment)) == tokens
+
+
+def test_function_sum_with_args(lexer):
+ fragment = u"sum by (app, proc) (instance_memory_usage_bytes)\n"
+ tokens = [
+ (Token.Keyword, "sum"),
+ (Token.Text.Whitespace, " "),
+ (Token.Keyword, "by"),
+ (Token.Text.Whitespace, " "),
+ (Token.Operator, "("),
+ (Token.Name.Variable, "app"),
+ (Token.Punctuation, ","),
+ (Token.Text.Whitespace, " "),
+ (Token.Name.Variable, "proc"),
+ (Token.Operator, ")"),
+ (Token.Text.Whitespace, " "),
+ (Token.Operator, "("),
+ (Token.Name.Variable, "instance_memory_usage_bytes"),
+ (Token.Operator, ")"),
+ (Token.Text.Whitespace, "\n"),
+ ]
+ assert list(lexer.get_tokens(fragment)) == tokens
+
+
+def test_function_multi_line(lexer):
+ fragment = u"""label_replace(
+ sum by (instance) (
+ irate(node_disk_read_bytes_total[2m])
+ ) / 1024 / 1024,
+ "device",
+ 'disk',
+ "instance",
+ ".*"
+)
+"""
+ tokens = [
+ (Token.Keyword.Reserved, "label_replace"),
+ (Token.Operator, "("),
+ (Token.Text.Whitespace, "\n"),
+ (Token.Text.Whitespace, " "),
+ (Token.Keyword, "sum"),
+ (Token.Text.Whitespace, " "),
+ (Token.Keyword, "by"),
+ (Token.Text.Whitespace, " "),
+ (Token.Operator, "("),
+ (Token.Name.Variable, "instance"),
+ (Token.Operator, ")"),
+ (Token.Text.Whitespace, " "),
+ (Token.Operator, "("),
+ (Token.Text.Whitespace, "\n"),
+ (Token.Text.Whitespace, " "),
+ (Token.Keyword.Reserved, "irate"),
+ (Token.Operator, "("),
+ (Token.Name.Variable, "node_disk_read_bytes_total"),
+ (Token.Punctuation, "["),
+ (Token.Literal.String, "2m"),
+ (Token.Punctuation, "]"),
+ (Token.Operator, ")"),
+ (Token.Text.Whitespace, "\n"),
+ (Token.Text.Whitespace, " "),
+ (Token.Operator, ")"),
+ (Token.Text.Whitespace, " "),
+ (Token.Operator, "/"),
+ (Token.Text.Whitespace, " "),
+ (Token.Literal.Number.Integer, "1024"),
+ (Token.Text.Whitespace, " "),
+ (Token.Operator, "/"),
+ (Token.Text.Whitespace, " "),
+ (Token.Literal.Number.Integer, "1024"),
+ (Token.Punctuation, ","),
+ (Token.Text.Whitespace, "\n"),
+ (Token.Text.Whitespace, " "),
+ (Token.Punctuation, '"'),
+ (Token.Literal.String, "device"),
+ (Token.Punctuation, '"'),
+ (Token.Punctuation, ","),
+ (Token.Text.Whitespace, "\n"),
+ (Token.Text.Whitespace, " "),
+ (Token.Punctuation, "'"),
+ (Token.Literal.String, "disk"),
+ (Token.Punctuation, "'"),
+ (Token.Punctuation, ","),
+ (Token.Text.Whitespace, "\n"),
+ (Token.Text.Whitespace, " "),
+ (Token.Punctuation, '"'),
+ (Token.Literal.String, "instance"),
+ (Token.Punctuation, '"'),
+ (Token.Punctuation, ","),
+ (Token.Text.Whitespace, "\n"),
+ (Token.Text.Whitespace, " "),
+ (Token.Punctuation, '"'),
+ (Token.Literal.String, ".*"),
+ (Token.Punctuation, '"'),
+ (Token.Text.Whitespace, "\n"),
+ (Token.Operator, ")"),
+ (Token.Text.Whitespace, "\n"),
+ ]
+ assert list(lexer.get_tokens(fragment)) == tokens
+
+
+def test_function_multi_line_with_offset(lexer):
+ fragment = u"""label_replace(
+ avg by(instance)
+ (irate(node_cpu_seconds_total{mode = "idle"}[5m] offset 3s)
+ ) * 100,
+ "device",
+ "cpu",
+ "instance",
+ ".*"
+)"""
+ tokens = [
+ (Token.Keyword.Reserved, "label_replace"),
+ (Token.Operator, "("),
+ (Token.Text.Whitespace, "\n"),
+ (Token.Text.Whitespace, " "),
+ (Token.Keyword, "avg"),
+ (Token.Text.Whitespace, " "),
+ (Token.Keyword, "by"),
+ (Token.Operator, "("),
+ (Token.Name.Variable, "instance"),
+ (Token.Operator, ")"),
+ (Token.Text.Whitespace, "\n"),
+ (Token.Text.Whitespace, " "),
+ (Token.Operator, "("),
+ (Token.Keyword.Reserved, "irate"),
+ (Token.Operator, "("),
+ (Token.Name.Variable, "node_cpu_seconds_total"),
+ (Token.Punctuation, "{"),
+ (Token.Name.Label, "mode"),
+ (Token.Text.Whitespace, " "),
+ (Token.Operator, "="),
+ (Token.Text.Whitespace, " "),
+ (Token.Punctuation, '"'),
+ (Token.Literal.String, "idle"),
+ (Token.Punctuation, '"'),
+ (Token.Punctuation, "}"),
+ (Token.Punctuation, "["),
+ (Token.Literal.String, "5m"),
+ (Token.Punctuation, "]"),
+ (Token.Text.Whitespace, " "),
+ (Token.Keyword, "offset"),
+ (Token.Text.Whitespace, " "),
+ (Token.Literal.String, "3s"),
+ (Token.Operator, ")"),
+ (Token.Text.Whitespace, "\n"),
+ (Token.Text.Whitespace, " "),
+ (Token.Operator, ")"),
+ (Token.Text.Whitespace, " "),
+ (Token.Operator, "*"),
+ (Token.Text.Whitespace, " "),
+ (Token.Literal.Number.Integer, "100"),
+ (Token.Punctuation, ","),
+ (Token.Text.Whitespace, "\n"),
+ (Token.Text.Whitespace, " "),
+ (Token.Punctuation, '"'),
+ (Token.Literal.String, "device"),
+ (Token.Punctuation, '"'),
+ (Token.Punctuation, ","),
+ (Token.Text.Whitespace, "\n"),
+ (Token.Text.Whitespace, " "),
+ (Token.Punctuation, '"'),
+ (Token.Literal.String, "cpu"),
+ (Token.Punctuation, '"'),
+ (Token.Punctuation, ","),
+ (Token.Text.Whitespace, "\n"),
+ (Token.Text.Whitespace, " "),
+ (Token.Punctuation, '"'),
+ (Token.Literal.String, "instance"),
+ (Token.Punctuation, '"'),
+ (Token.Punctuation, ","),
+ (Token.Text.Whitespace, "\n"),
+ (Token.Text.Whitespace, " "),
+ (Token.Punctuation, '"'),
+ (Token.Literal.String, ".*"),
+ (Token.Punctuation, '"'),
+ (Token.Text.Whitespace, "\n"),
+ (Token.Operator, ")"),
+ (Token.Text.Whitespace, "\n"),
+ ]
+ assert list(lexer.get_tokens(fragment)) == tokens