Commit c913861

Fix cre report bug (#38)
Co-authored-by: Tony Meehan <[email protected]>
1 parent 3d8581b commit c913861

4 files changed: +68 −23 lines changed
internal/pkg/engine/engine.go

Lines changed: 17 additions & 0 deletions

@@ -158,6 +158,16 @@ func compileRule(cf compiler.RuntimeI, data []byte) (compiler.ObjsT, *parser.Rul
         log.Info().Str("id", rule.Metadata.Id).Str("cre", rule.Cre.Id).Msg("Rule")
     }

+    for i := range rules.Rules {
+        var r = &rules.Rules[i]
+        if r.Metadata.Id == "" {
+            r.Metadata.Id = tree.Nodes[i].Metadata.RuleId
+        }
+        if r.Metadata.Hash == "" {
+            r.Metadata.Hash = tree.Nodes[i].Metadata.RuleHash
+        }
+    }
+
     nodeObjs, err = compileRuleTree(cf, tree)
     if err != nil {
         log.Error().Err(err).Msg("Failed to compile rule tree")

@@ -292,6 +302,13 @@ func (r *RuntimeT) AddRules(rules *parser.RulesT) error {

     var ok bool
     for _, rule := range rules.Rules {
+
+        log.Info().
+            Str("id", rule.Metadata.Id).
+            Str("hash", rule.Metadata.Hash).
+            Str("cre", rule.Cre.Id).
+            Msg("Adding rule")
+
         if _, ok = r.Rules[rule.Metadata.Hash]; !ok {
             r.Rules[rule.Metadata.Hash] = rule.Cre
         } else {
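
The new loop in compileRule backfills empty rule metadata from the compiled rule tree, so a rule that omits metadata.id or metadata.hash still ends up with the generated identifiers carried by the tree nodes; AddRules then logs each rule's id, hash, and CRE id as it is registered. A minimal, self-contained sketch of the same backfill pattern (the Rule and Node types below are simplified stand-ins for illustration, not the actual parser types):

package main

import "fmt"

// Metadata holds a rule's declared or generated identifiers.
type Metadata struct {
    Id   string
    Hash string
}

// Rule is a simplified stand-in for a parsed rule.
type Rule struct {
    Metadata Metadata
}

// Node is a simplified stand-in for a compiled rule-tree node.
type Node struct {
    RuleId   string
    RuleHash string
}

// backfillMetadata copies generated identifiers from the compiled tree
// into any rule that did not declare its own id or hash.
func backfillMetadata(rules []Rule, nodes []Node) {
    for i := range rules {
        r := &rules[i] // take a pointer so the assignment is not lost on a copy
        if r.Metadata.Id == "" {
            r.Metadata.Id = nodes[i].RuleId
        }
        if r.Metadata.Hash == "" {
            r.Metadata.Hash = nodes[i].RuleHash
        }
    }
}

func main() {
    rules := []Rule{{}, {Metadata: Metadata{Id: "MY-RULE"}}}
    nodes := []Node{
        {RuleId: "gen-1", RuleHash: "abc123"},
        {RuleId: "gen-2", RuleHash: "def456"},
    }
    backfillMetadata(rules, nodes)
    fmt.Printf("%+v\n", rules) // the first rule now carries gen-1/abc123
}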

test/missing-ids.log

Lines changed: 15 additions & 10 deletions

@@ -1,10 +1,15 @@
-2019/02/05 12:07:37 [notice] 1629#1629: signal process started
-2019/02/05 12:07:37 [error] 1629#1629: open() "/run/nginx.pid" failed (2: No such file or directory)
-2019/02/05 12:07:38 [emerg] 1655#1655: bind() to 0.0.0.0:80 failed (98: Address already in use)
-2019/02/05 12:07:38 [emerg] 1655#1655: bind() to foo bar
-2019/02/05 12:07:38 [emerg] 1655#1655: bind() to test
-2019/02/05 12:07:38 [emerg] 1655#1655: bind() to 0.0.0.0:444 failed (98: Address already in use)
-2019/02/05 12:07:38 [emerg] 1655#1655: bind() just kidding false alarm this is normal
-2019/02/05 12:07:38 [emerg] 1655#1655: bind() to [::]:444 failed (98: Address already in use)
-2019/02/05 12:07:38 [emerg] 1655#1655: still could not bind() to baaaz
-2019/02/05 12:07:41 [alert] 1631#1631: unlink() "/run/nginx.pid" failed (2: No such file or directory)
+2025-03-11 14:00:19.421865+00:00 [erro] <0.229.0> Discarding message {'$gen_cast',{force_event_refresh,#Ref<0.449530684.1179910147.46753>}} from <0.229.0> to <0.3159.0> in an old incarnation (1741605434) of this node (1741701615) <A>
+2025-03-11 14:00:19.421872+00:00 [erro] <0.229.0> Discarding message {'$gen_cast',{force_event_refresh,#Ref<0.449530684.1179910147.46753>}} from <0.229.0> to <0.3156.0> in an old incarnation (1741605434) of this node (1741701615)
+2025-03-11 14:00:19.718868+00:00 [info] <0.519.0> Started message store of type persistent for vhost '/'
+2025-03-11 14:00:20.144956+00:00 [warn] <0.247.0> Mnesia('[email protected]'): ** WARNING ** Mnesia is overloaded: {dump_log,write_threshold}
+2025-03-11 14:00:20.177194+00:00 [warn] <0.247.0> Mnesia('[email protected]'): ** WARNING ** Mnesia is overloaded: {dump_log,write_threshold}
+2025-03-11 14:00:20.217811+00:00 [warn] <0.247.0> Mnesia('[email protected]'): ** WARNING ** Mnesia is overloaded: {dump_log,write_threshold}
+2025-03-11 14:00:20.245309+00:00 [warn] <0.247.0> Mnesia('[email protected]'): ** WARNING ** Mnesia is overloaded: {dump_log,write_threshold}
+2025-03-11 14:00:20.269326+00:00 [warn] <0.247.0> Mnesia('[email protected]'): ** WARNING ** Mnesia is overloaded: {dump_log,write_threshold}
+2025-03-11 14:00:20.295177+00:00 [warn] <0.247.0> Mnesia('[email protected]'): ** WARNING ** Mnesia is overloaded: {dump_log,write_threshold}
+2025-03-11 14:00:20.318874+00:00 [warn] <0.247.0> Mnesia('[email protected]'): ** WARNING ** Mnesia is overloaded: {dump_log,write_threshold}
+2025-03-11 14:00:20.341291+00:00 [warn] <0.247.0> Mnesia('[email protected]'): ** WARNING ** Mnesia is overloaded: {dump_log,write_threshold}
+2025-03-11 14:00:21.337821+00:00 [info] <0.519.0> Recovering 4540 queues of type rabbit_classic_queue took 1851ms
+2025-03-11 14:00:21.337940+00:00 [info] <0.519.0> Recovering 0 queues of type rabbit_quorum_queue took 0ms
+2025-03-11 14:00:21.338046+00:00 [info] <0.519.0> Recovering 0 queues of type rabbit_stream_queue took 0ms
+2025-03-11 14:00:21.505654+00:00 [info] <0.229.0> Running boot step empty_db_check defined by app rabbit

test/missing-ids.yaml

Lines changed: 32 additions & 9 deletions

@@ -1,15 +1,38 @@
 rules:
   - cre:
-      id: CRE-2025-2222
-      severity: 2
-      title: job failure example
+      id: CRE-2024-7777
+      severity: 0
+      title: RabbitMQ Mnesia overloaded recovering persistent queues
+      category: message-queue-problems
+      author: Prequel
+      description: |
+        - The RabbitMQ cluster is processing a large number of persistent mirrored queues at boot.
+      cause: |
+        - The Erlang process, Mnesia, is overloaded while recovering persistent queues on boot.
+      impact: |
+        - RabbitMQ is unable to process any new messages and can cause outages in consumers and producers.
+      tags:
+        - known-problem
+        - rabbitmq
+      mitigation: |
+        - Adjusting mirroring policies to limit the number of mirrored queues
+        - Remove high-availability policies from queues
+        - Add additional CPU resources and restart the RabbitMQ cluster
+        - Use [lazy queues](https://www.rabbitmq.com/docs/lazy-queues) to avoid incurring the costs of writing data to disk
+      references:
+        - https://groups.google.com/g/rabbitmq-users/c/ekV9tTBRZms/m/1EXw-ruuBQAJ
+      applications:
+        - name: "rabbitmq"
+          version: "3.9.x"
     metadata:
       gen: 1
     rule:
-      set:
-        window: 62s
+      sequence:
+        window: 30s
         event:
-          source: cre.log.app
-        match:
-          - value: "Saw completed job"
-        count: 2
+          source: rabbitmq
+        order:
+          - regex: "Discarding message(.+)in an old incarnation(.+)of this node"
+          - "Mnesia is overloaded"
+        negate:
+          - "SIGTERM received - shutting down"

test/preq_test.go

Lines changed: 4 additions & 4 deletions

@@ -116,6 +116,10 @@ func TestSuccessExamples(t *testing.T) {
             rulePath: "../examples/29-negate-slide-anchor-1-window.yaml",
             dataPath: "../examples/29-example-fp-moved.log",
         },
+        "Missing-IDs": {
+            rulePath: "missing-ids.yaml",
+            dataPath: "missing-ids.log",
+        },
     }

     ctx := context.Background()

@@ -204,10 +208,6 @@ func TestMissExamples(t *testing.T) {
             rulePath: "../examples/30-negate-absolute.yaml",
             dataPath: "../examples/30-example.log",
         },
-        "Missing-IDs": {
-            rulePath: "missing-ids.yaml",
-            dataPath: "missing-ids.log",
-        },
     }

     ctx := context.Background()
