Skip to content

Instantly share code, notes, and snippets.

@thomasdarimont
Last active January 14, 2022 11:31
Show Gist options
  • Save thomasdarimont/e3e58b9c54817ca0c7bcb204f92c0c8c to your computer and use it in GitHub Desktop.
PoC for custom Highlighting for the RuleLang of the Graylog Pipeline Plugin
define(function(require, exports, module) {
"use strict";

var oop = require("../lib/oop");
var TextHighlightRules = require("./text_highlight_rules").TextHighlightRules;
var DocCommentHighlightRules = require("./doc_comment_highlight_rules").DocCommentHighlightRules;

// Identifier: letter / "$" / "_" (incl. non-ASCII letters), followed by
// letters / digits / "$" / "_".
var identifierRe = "[a-zA-Z\\$_\u00a1-\uffff][a-zA-Z\\d\\$_\u00a1-\uffff]*";

/**
 * Ace highlight rules for the rule language of the Graylog Pipeline Plugin.
 *
 * PoC: the keyword and builtin-function lists below are maintained by hand;
 * they should eventually be generated from the grammar / FunctionRegistry.
 */
var GraylogRuleLangHighlightRules = function() {

    // TODO generate from grammar
    var keywords = (
        "all|either|and|or|not|pipeline|rule|during|" +
        "stage|when|then|end|let|match"
    );

    // TODO generate from FunctionRegistry
    var builtinFunctions = (
        "to_bool|to_double|to_long|to_string|to_url|is_null|" +
        "is_not_null|abbreviate|capitalize|uncapitalize|uppercase|" +
        "lowercase|swapcase|contains|substring|concat|split|regex|" +
        "grok|key_value|crc32|crc32c|md5|murmur3_32|murmur3_128|sha1|" +
        "sha256|sha512|parse_json|select_jsonpath|to_ip|cidr_match|" +
        "from_input|route_to_stream|create_message|clone_message|" +
        "drop_message|has_field|remove_field|set_field|set_fields|" +
        "rename_field|syslog_facility|syslog_level|expand_syslog_priority|" +
        "expand_syslog_priority_as_string|now|parse_date|flex_parse_date|" +
        "format_date|to_date|years|months|weeks|days|hours|minutes|seconds|" +
        "millis|period"
    );

    var keywordMapper = this.createKeywordMapper({
        "variable.language": "$message",
        "keyword": keywords,
        "constant.language": "null",
        "support.function": builtinFunctions
    }, "identifier", true); // true = case insensitive

    // regexps must not have capturing parentheses (use (?:) instead) and
    // are ordered -> the first match wins.

    // String literals: backtick-, double- and single-quoted, single line only.
    var stringRules = function() {
        return [{
            token: "string", // single line with backticks
            regex: "[`](?:(?:\\\\.)|(?:[^`\\\\]))*?[`]"
        }, {
            token: "string", // single line double quotes
            regex: '["](?:(?:\\\\.)|(?:[^"\\\\]))*?["]'
        }, {
            token: "string", // single line single quotes
            regex: "['](?:(?:\\\\.)|(?:[^'\\\\]))*?[']"
        }];
    };

    // Comments and numeric/boolean literals. blockCommentRules is the name of
    // the state to enter when a multi-line comment opens.
    var basicPreRules = function(blockCommentRules) {
        return [{
            token: "comment",
            regex: "\\/\\/.*$"
        },
        DocCommentHighlightRules.getStartRule("doc-start"),
        {
            token: "comment", // multi line comment
            regex: "\\/\\*",
            next: blockCommentRules
        }, {
            token: "constant.numeric", // hex
            regex: "0[xX][0-9a-fA-F]+\\b"
        }, {
            token: "constant.numeric", // float
            regex: "[+-]?\\d+(?:(?:\\.\\d*)?(?:[eE][+-]?\\d+)?)?\\b"
        }, {
            token: "constant.language.boolean",
            regex: "(?:true|false)\\b"
        }];
    };

    // Rules for the inside of a /* ... */ comment; returnRule names the state
    // to return to once the comment is closed.
    var blockCommentRules = function(returnRule) {
        return [{
            token: "comment.block", // closing comment
            regex: ".*?\\*\\/",
            next: returnRule
        }, {
            token: "comment.block", // comment spanning whole line
            regex: ".+"
        }];
    };

    // Identifiers, operators, parens and whitespace -- lowest-priority rules.
    var basicPostRules = function() {
        return [{
            token: keywordMapper,
            // TODO: Unicode escape sequences
            // TODO: Unicode identifiers
            regex: "[a-zA-Z_$][a-zA-Z0-9_$]*\\b"
        }, {
            token: "keyword.operator",
            // Longest operators first so e.g. "!==" is matched as one token
            // instead of being split into "!" + "==".
            regex: "===|==|=|!==|!=|<<=|>>>=|>>=|<=|>=|<>|<|>|&&|&=|&|\\|\\||\\*=|\\*|%=|%|\\+\\+|\\+=|\\+|\\-\\-|\\-=|\\-|\\^=|\\?\\:|!|~|\\$"
        }, {
            token: "lparen",
            regex: "[[({]"
        }, {
            token: "rparen",
            regex: "[\\])}]"
        }, {
            token: "text",
            regex: "\\s+"
        }];
    };

    this.$rules = {
        "start": [].concat(basicPreRules("block.comment"), [
            {
                // declare trait DeclaredType
                token: ["keyword", "text", "keyword", "text", "entity.name.type"],
                regex: "(declare)(\\s+)(trait)(\\s+)(" + identifierRe + ")"
            }, {
                // declare DeclaredType
                token: ["keyword", "text", "entity.name.type"],
                regex: "(declare)(\\s+)(" + identifierRe + ")"
            }, {
                // rule ...
                token: ["keyword", "text"],
                regex: "(rule)(\\s+)",
                // FIXME: the original targeted the state "asset.name", which is
                // never defined in $rules; Ace logs an error and falls back to
                // "start" in that case, so "start" preserves the behavior.
                next: "start"
            }],
            stringRules(),
            [{
                // variable :
                token: ["variable.other", "text", "text"],
                regex: "(" + identifierRe + ")(\\s*)(:)"
            }, {
                // when ...
                token: ["keyword", "text"],
                regex: "(when)(\\s*)"
            }, {
                // then
                token: ["keyword", "text"],
                regex: "(then)(\\s*)"
            }, {
                token: "paren.lparen",
                regex: /[\[({]/
            }, {
                token: "paren.rparen",
                regex: /[\])}]/
            }], basicPostRules()),
        "block.comment": blockCommentRules("start")
    };

    this.embedRules(DocCommentHighlightRules, "doc-",
        [DocCommentHighlightRules.getEndRule("start")]);
};

oop.inherits(GraylogRuleLangHighlightRules, TextHighlightRules);

exports.GraylogRuleLangHighlightRules = GraylogRuleLangHighlightRules;
});
//arithmetic.txt
rule "arithmetic operators"
when
1.0 + 1.0 == 2.0 &&
8 * 2 > 15 &&
double_valued_func() / 20.0 == 0.0 &&
21 % 20 == 1 &&
10.0 / 20.0 == 0.5 &&
+10.0 / -5.0 == -2.0 &&
-double_valued_func() == -0.0 &&
double_valued_func() + 1.0 > 0.0
then
trigger_test();
end
//basicRule.txt
rule "something"
when double_valued_func() > 1.0d AND false == true
then
double_valued_func();
end
//booleanNot.txt
rule "booleanNot"
when
!false == false
then
trigger_test();
end
//booleanValuedFunctionAsCondition.txt
rule "bool function as top level"
when doch()
then
trigger_test();
end
//dateArithmetic.txt
// now() is fixed to "2010-07-30T18:03:25+02:00" to provide a better testing experience
rule "date math"
when
now() + years(1) > now() &&
now() + months(1) > now() &&
now() + weeks(1) > now() &&
now() + days(1) > now() &&
now() + hours(1) > now() &&
now() + minutes(1) > now() &&
now() + seconds(1) > now() &&
now() + millis(1) > now() &&
now() + period("P1YT1M") > now() &&
now() - years(1) < now() &&
now() - months(1) < now() &&
now() - weeks(1) < now() &&
now() - days(1) < now() &&
now() - hours(1) < now() &&
now() - minutes(1) < now() &&
now() - seconds(1) < now() &&
now() - millis(1) < now() &&
now() - period("P1YT1M") < now()
then
set_field("interval", now() - (now() - days(1))); // is a duration of 1 day
trigger_test();
end
//declaredFunction.txt
rule "using declared function 'nein'"
when true == nein()
then
end
//indexedAccess.txt
rule "indexed array and map access"
when
["first","second"][0] == "first" and {third: "a value"}["third"] == "a value"
then
trigger_test();
end
//indexedAccessWrongIndexType.txt
rule "indexed array and map access"
when
["first"][true] == "first"
then
trigger_test();
end
//indexedAccessWrongType.txt
rule "indexed array and map access"
when
one_arg("not an array")[0] == "first"
then
trigger_test();
end
//inferVariableType.txt
rule "infer"
when true
then
let x = one_arg("string");
one_arg(x);
end
//invalidArgType.txt
rule "invalid arg type"
when one_arg(0d) == "0" // one_arg needs a String argument, but 0d is Double
then
let x = double_valued_func();
one_arg(x); // this needs a String argument, but x resolves to Double
end
//invalidArgumentValue.txt
rule "invalid arg"
when now_in_tz("123") // this isn't a valid tz
then
end
//invalidDateAddition.txt
rule "cannot add dates"
when
now() + now() == now()
end
//mapArrayLiteral.txt
rule "mapliteral"
when sort(keys({some_identifier: 1, `something with spaces`: "some expression"})) == ["some_identifier", "something with spaces"]
then
trigger_test();
end
//messageRefQuotedField.txt
rule "test"
when to_string($message.`@specialfieldname`, "empty") == "string"
then
trigger_test();
end
//messageRef.txt
rule "message field ref"
when to_long(value: $message.responseCode, default: 200) >= 500
then
set_field(field: "response_category", value: "server_error");
end
//mismatchedNumericTypes.txt
rule "incompatible numeric types inference"
when
1.0 + 10 == 11 // error: no automatic long -> double conversion!
then
trigger_test();
end
//optionalArguments.txt
rule "optional function arguments"
when
optional(d: 3, a: true, b: "string")
then
trigger_test();
end
//optionalParamsMustBeNamed.txt
rule "optionalParamsMustBeNamed"
when
optional(false, "string", 3)
then
end
//pipelineDeclaration.txt
pipeline "cisco"
stage 1 match all
rule "check_ip_whitelist"
rule "cisco_device"
stage 2 match either
rule "parse_cisco_time"
rule "extract_src_dest"
rule "normalize_src_dest"
rule "lookup_ips"
rule "resolve_ips"
end
//positionalArguments.txt
rule "positional args"
when concat("a", 1, true) == concat(one: "a", two: 1, three: true)
then
trigger_test();
end
//singleArgFunction.txt
rule "single arg"
when one_arg("arg") == one_arg(one: "arg")
then
trigger_test();
end
//typedFieldAccess.txt
rule "typed field access"
when
to_long(customObject("1").id, 0) < 2
then
trigger_test();
end
//undeclaredFunction.txt
rule "undeclared function"
when false == unknown()
then
end
//undeclaredIdentifier.txt
rule "undeclared variable"
when true
then
one_arg(one: x);
end
rule "Generate berater_mandant field"
when
has_field("svc_berater") && has_field("svc_mandant")
then
let berater_mandant = concat(concat(to_string($message.svc_berater),"/"), to_string($message.svc_mandant));
set_field("svc_berater_mandant", berater_mandant);
end
//clonedMessage.txt
rule "operate on cloned message"
when true
then
let x = clone_message();
let new = create_message("foo", "source");
let cloned = clone_message(new);
set_field(field: "removed_again", value: "foo", message: x);
set_field(field: "only_in", value: "new message", message: x);
set_fields(fields: { multi: "new message" }, message: x);
set_field(field: "has_source", value: has_field("source", x), message: x);
route_to_stream(name: "some stream", message: x);
remove_field("removed_again", x);
end
//conversions.txt
rule "conversions"
when true
then
set_fields({
string_1: to_string("1"), // "1"
string_2: to_string("2", "default"), // "2"
string_3: to_string($message.not_there), // "" -> not being set in message!
string_4: to_string($message.not_there, "default"), // "default"
long_1: to_long(1), // 1L
long_2: to_long(2, 1), // 2L
long_3: to_long($message.not_there), // 0L
long_4: to_long($message.not_there, 1), // 1L
double_1: to_double(1d), // 1d
double_2: to_double(2d, 1d), // 2d
double_3: to_double($message.not_there), // 0d
double_4: to_double($message.not_there, 1d), // 1d
bool_1: to_bool("true"), // true
bool_2: to_bool("false", true), // false
bool_3: to_bool($message.not_there), // false
bool_4: to_bool($message.not_there, true), // true
ip_1: to_ip("127.0.0.1"), // 127.0.0.1
ip_2: to_ip("127.0.0.1", "2001:db8::1"), // 127.0.0.1
ip_3: to_ip($message.not_there), // 0.0.0.0
ip_4: to_ip($message.not_there, "::1") // ::1 (v6)
});
end
//dateArithmetic.txt
// now() is fixed to "2010-07-30T18:03:25+02:00" to provide a better testing experience
rule "date math"
when
now() + years(1) > now() &&
now() + months(1) > now() &&
now() + weeks(1) > now() &&
now() + days(1) > now() &&
now() + hours(1) > now() &&
now() + minutes(1) > now() &&
now() + seconds(1) > now() &&
now() + millis(1) > now() &&
now() + period("P1YT1M") > now() &&
now() - years(1) < now() &&
now() - months(1) < now() &&
now() - weeks(1) < now() &&
now() - days(1) < now() &&
now() - hours(1) < now() &&
now() - minutes(1) < now() &&
now() - seconds(1) < now() &&
now() - millis(1) < now() &&
now() - period("P1YT1M") < now()
then
set_field("interval", now() - (now() - days(1))); // is a duration of 1 day
set_field("long_time_ago", now() - years(10000));
set_fields({
years: years(2),
months: months(2),
weeks: weeks(2),
days: days(2),
hours: hours(2),
minutes: minutes(2),
seconds: seconds(2),
millis: millis(2),
period: period("P1YT1M")
});
set_field("timestamp", to_date($message.timestamp) + hours(1));
trigger_test();
end
//dates.txt
// now() is fixed, test uses different millisprovider!
rule "dates"
when
parse_date("2010-07-30T18:03:25+02:00", "yyyy-MM-dd'T'HH:mm:ssZZ") == parse_date("2010-07-30T16:03:25Z", "yyyy-MM-dd'T'HH:mm:ssZZ") &&
now("CET") == now("UTC") &&
now("CET") == now() &&
flex_parse_date(value: "30th July 2010 18:03:25 ", timezone: "CET") == parse_date("2010-07-30T18:03:25+02:00", "yyyy-MM-dd'T'HH:mm:ssZZ") &&
format_date(flex_parse_date("30th July 2010 18:03:25"), "yyyy-MM-dd") == "2010-07-30" &&
parse_date("2010-07-30T18:03:24+02:00", "yyyy-MM-dd'T'HH:mm:ssZZ") < parse_date("2010-07-30T16:03:25Z", "yyyy-MM-dd'T'HH:mm:ssZZ") &&
!(parse_date("2010-07-30T18:03:24+02:00", "yyyy-MM-dd'T'HH:mm:ssZZ") >= parse_date("2010-07-30T16:03:25Z", "yyyy-MM-dd'T'HH:mm:ssZZ")) &&
parse_date("2010-07-30T18:03:25+02:00", "yyyy-MM-dd'T'HH:mm:ssZZ") > parse_date("2010-07-30T16:03:24Z", "yyyy-MM-dd'T'HH:mm:ssZZ") &&
!(parse_date("2010-07-30T18:03:25+02:00", "yyyy-MM-dd'T'HH:mm:ssZZ") <= parse_date("2010-07-30T16:03:24Z", "yyyy-MM-dd'T'HH:mm:ssZZ")) &&
parse_date("2010-07-30T18:03:25+02:00", "yyyy-MM-dd'T'HH:mm:ssZZ") <= parse_date("2010-07-30T16:03:25Z", "yyyy-MM-dd'T'HH:mm:ssZZ") &&
!(parse_date("2010-07-30T18:03:25+02:00", "yyyy-MM-dd'T'HH:mm:ssZZ") > parse_date("2010-07-30T16:03:25Z", "yyyy-MM-dd'T'HH:mm:ssZZ")) &&
parse_date("2010-07-30T18:03:25+02:00", "yyyy-MM-dd'T'HH:mm:ssZZ") >= parse_date("2010-07-30T16:03:25Z", "yyyy-MM-dd'T'HH:mm:ssZZ") &&
!(parse_date("2010-07-30T18:03:25+02:00", "yyyy-MM-dd'T'HH:mm:ssZZ") < parse_date("2010-07-30T16:03:25Z", "yyyy-MM-dd'T'HH:mm:ssZZ"))
then
trigger_test();
let date = parse_date("2010-07-30T18:03:25+02:00", "yyyy-MM-dd'T'HH:mm:ssZZ");
set_field("year", date.year);
set_field("timezone", to_string(date.zone));
end
//digests.txt
rule "digests"
when
crc32("graylog") == "e3018c57" &&
crc32c("graylog") == "82390e89" &&
md5("graylog") == "6f9efb466e043b9f3635827ce446e13c" &&
murmur3_32("graylog") == "67285534" &&
murmur3_128("graylog") == "945d5b1aaa8fdfe9b880b31e814972b3" &&
sha1("graylog") == "6d88bccf40bf65b911fe79d78c7af98e382f0c1a" &&
sha256("graylog") == "4bbdd5a829dba09d7a7ff4c1367be7d36a017b4267d728d31bd264f63debeaa6" &&
sha512("graylog") == "f6cb3a96450fb9c9174299a651333c926cd67b6f5c25d8daeede1589ffa006f4dd31da4f0625b7f281051a34c8352b3a9c1a9babf90020360e911a380b5c3f4f"
then
trigger_test();
end
//evalErrorSuppressed.txt
rule "suppressing exceptions/nulls"
when
is_null(to_ip($message.does_not_exist, "d.f.f.f")) && is_not_null($message.this_field_was_set)
then
trigger_test();
end
//fieldPrefixSuffix.txt
rule "prefixsuffix"
when true
then
// plain set field
set_field("field", "1");
// both prefix and suffix, doesn't touch the above
set_field("field", "2", "prae_", "_sueff");
// combinations of optional prefix, suffix
set_field(field: "field", value: "3", suffix: "_sueff");
set_field(field: "field", value: "4", prefix: "prae_");
// set multiple fields with the same prefix
set_fields(
fields: {
field1: "5",
field2: "6"
},
prefix: "pre_",
suffix: "_suff"
);
// set multiple fields with the same prefix, suffix optional
set_fields(
fields: {
field1: "7",
field2: "8"
},
prefix: "pre_"
);
// set multiple fields with the same suffix, prefix optional
set_fields(
fields: {
field1: "9",
field2: "10"
},
suffix: "_suff"
);
end
//fieldRenaming.txt
rule "fieldRenaming"
when true
then
rename_field("no_such_field", "field_1");
rename_field("field_a", "field_2");
rename_field("field_b", "field_b");
end
//grok.txt
rule "grok"
when true
then
let matches = grok(pattern: "%{GREEDY:timestamp;date;yyyy-MM-dd'T'HH:mm:ss.SSSX}", value: "2015-07-31T10:05:36.773Z");
set_fields(matches);
// only named captures
let matches1 = grok("%{NUM:num}", "10", true);
set_fields(matches1);
end
//ipMatchingIssue28.txt
rule "IP subnet"
when
cidr_match("10.20.30.0/24", to_ip($message.source))
then
trigger_test();
end
//ipMatching.txt
rule "ip handling"
when
cidr_match("192.0.0.0/8", to_ip("192.168.1.50")) &&
! cidr_match("191.0.0.0/8", to_ip("192.168.1.50"))
then
set_field("ip_anon", to_string(to_ip($message.ip).anonymized));
set_field("ipv6_anon", to_string(to_ip("2001:db8::1").anonymized));
trigger_test();
end
//jsonpath.txt
rule "jsonpath"
when true
then
let x = parse_json(to_string($message.message));
let new_fields = select_jsonpath(x,
{ author_first: "$['store']['book'][0]['author']",
author_last: "$['store']['book'][-1:]['author']"
});
set_fields(new_fields);
end
//keyValueFailure.txt
rule "kv"
when true
then
set_fields(key_value(
value: "dup_first=1 dup_first=2",
allow_dup_keys: false
));
set_fields(key_value(
value: "dup_last=",
ignore_empty_values: false
));
end
//keyValue.txt
rule "kv"
when true
then
set_fields(key_value(
value: "a='1' <b>=2 \n 'c'=3 [d]=44 a=4 \"e\"=4 [f=1][[g]:3] h=",
delimiters: " \t\n\r[",
kv_delimiters: "=:",
ignore_empty_values: true,
trim_key_chars: "\"[]<>'",
trim_value_chars: "']",
allow_dup_keys: true, // the default
handle_dup_keys: "," // meaning concat, default "take_first"
));
set_fields(key_value(
value: "dup_first=1 dup_first=2",
handle_dup_keys: "take_first"
));
set_fields(key_value(
value: "dup_last=1 dup_last=2",
handle_dup_keys: "take_last"
));
end
//newlyCreatedMessage.txt
rule "operate on newly created message"
when true
then
let x = create_message("new", "synthetic", now());
set_field(field: "removed_again", value: "foo", message: x);
set_field(field: "only_in", value: "new message", message: x);
set_fields(fields: { multi: "new message" }, message: x);
set_field(field: "has_source", value: has_field("source", x), message: x);
route_to_stream(name: "some stream", message: x);
remove_field("removed_again", x);
end
//regexMatch.txt
rule "regexMatch"
when
regex("^.*(cde\\.)(:(\\d+))?.*$", "abcde.fg").matches == true &&
regex(".*(cde\\.)(:(\\d+))?.*", "abcde.fg").matches == true &&
regex("(cde\\.)(:(\\d+))?", "abcde.fg").matches == true &&
regex("^(cde\\.)(:(\\d+))?$", "abcde.fg").matches == false
then
let result = regex("(cd\\.e)", "abcd.efg");
set_field("group_1", result["0"]);
let result = regex("(cd\\.e)", "abcd.efg", ["name"]);
set_field("named_group", result["name"]);
set_field("matched_regex", result.matches);
end
//routeToStream.txt
rule "stream routing"
when true
then
route_to_stream(name: "some name");
end
//split.txt
rule "split"
when
true
then
set_field("limit_0", split("_", "foo_bar_baz"));
set_field("limit_1", split(":", "foo:bar:baz", 1));
set_field("limit_2", split("\\|", "foo|bar|baz", 2));
trigger_test();
end
//strings.txt
// various string functions
rule "string tests"
when
contains("abcdef", "bc") &&
lowercase("a MIXED bag of chArs") == "a mixed bag of chars" &&
uppercase("a MIXED bag of chArs") == "A MIXED BAG OF CHARS" &&
swapcase("Capitalized") == "cAPITALIZED" &&
capitalize("hello") == "Hello" &&
capitalize("hEllo") == "HEllo" &&
uncapitalize("Hello") == "hello" &&
uncapitalize("HEllo") == "hEllo" &&
abbreviate("", 4) == "" &&
abbreviate("abcdefg", 6) == "abc..." &&
abbreviate("abcdefg", 7) == "abcdefg" &&
abbreviate("abcdefg", 8) == "abcdefg" &&
abbreviate("abcdefg", 4) == "a..." &&
concat("foo", "bar") == "foobar"
then
set_field("has_xyz", contains("abcdef", "xyz"));
set_field("string_literal", "abcd\\.e\tfg\u03a9\363");
trigger_test();
end
//substring.txt
rule "substrings"
when
substring("abc", 0, 2) == "ab" &&
substring("abc", 0, 2) == "ab" &&
substring("abc", 2, 0) == "" &&
substring("abc", 2, 4) == "c" &&
substring("abc", 4, 6) == "" &&
substring("abc", 2, 2) == "" &&
substring("abc", -2, -1) == "b" &&
substring("abc", -4, 2) == "ab" &&
substring("abc", 1) == "bc" &&
substring("abc", 0, -1) == "ab"
then
trigger_test();
end
//syslog.txt
rule "syslog tests"
when
true
then
set_field("level0", syslog_level(0));
set_field("level1", syslog_level(1));
set_field("level2", syslog_level(2));
set_field("level3", syslog_level(3));
set_field("level4", syslog_level(4));
set_field("level5", syslog_level(5));
set_field("level6", syslog_level(6));
set_field("level7", syslog_level(7));
set_field("facility0", syslog_facility(0));
set_field("facility1", syslog_facility(1));
set_field("facility2", syslog_facility(2));
set_field("facility3", syslog_facility(3));
set_field("facility4", syslog_facility(4));
set_field("facility5", syslog_facility(5));
set_field("facility6", syslog_facility(6));
set_field("facility7", syslog_facility(7));
set_field("facility8", syslog_facility(8));
set_field("facility9", syslog_facility(9));
set_field("facility10", syslog_facility(10));
set_field("facility11", syslog_facility(11));
set_field("facility12", syslog_facility(12));
set_field("facility13", syslog_facility(13));
set_field("facility14", syslog_facility(14));
set_field("facility15", syslog_facility(15));
set_field("facility16", syslog_facility(16));
set_field("facility17", syslog_facility(17));
set_field("facility18", syslog_facility(18));
set_field("facility19", syslog_facility(19));
set_field("facility20", syslog_facility(20));
set_field("facility21", syslog_facility(21));
set_field("facility22", syslog_facility(22));
set_field("facility23", syslog_facility(23));
let priority1 = expand_syslog_priority(0);
set_fields({prio1_facility: priority1.facility, prio1_level: priority1.level });
let priority2 = expand_syslog_priority(165);
set_fields({prio2_facility: priority2.facility, prio2_level: priority2.level });
let priority3 = expand_syslog_priority_as_string(0);
set_fields({prio3_facility: priority3.facility, prio3_level: priority3.level });
let priority4 = expand_syslog_priority_as_string(165);
set_fields({prio4_facility: priority4.facility, prio4_level: priority4.level });
trigger_test();
end
//timezones.txt
// now() is fixed, test uses different millisprovider!
rule "timezones"
when
now("CET") == now("UTC") &&
now("utc") == now("UTC") &&
now("Europe/Moscow") == now("europe/moscow") &&
now("europe/MoSCOw") == now("msk")
then
trigger_test();
end
//urls.txt
rule "urls"
when
true
then
let url = to_url("https://admin:s3cr31@some.host.with.lots.of.subdomains.com:9999/path1/path2/three?q1=something&with_spaces=hello%20graylog&equal=can=containanotherone#anchorstuff");
set_fields({
protocol: url.protocol,
authority: url.authority,
user_info: url.userInfo,
host: url.host,
port: url.port,
path: url.path,
file: url.file,
fragment: url.fragment,
query: url.query,
q1: url.queryParams.q1,
with_spaces: url.queryParams.with_spaces,
equal: url.queryParams.equal
});
trigger_test();
end
Sign up for free to join this conversation on GitHub. Already have an account? Sign in to comment