mirror of
https://github.com/VictoriaMetrics/VictoriaMetrics.git
synced 2026-05-17 08:36:55 +03:00
Compare commits
197 Commits
debug/erro
...
v1.102.11
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
46bb2d4308 | ||
|
|
8fa8101677 | ||
|
|
204cec7913 | ||
|
|
17988942ab | ||
|
|
ed05ae12c4 | ||
|
|
256924e2d6 | ||
|
|
b5392337bf | ||
|
|
fa4e3607c3 | ||
|
|
043d066133 | ||
|
|
900159a2d3 | ||
|
|
4464c5a254 | ||
|
|
90fed18b83 | ||
|
|
1480ecc129 | ||
|
|
c67f4d4d86 | ||
|
|
c2f5088adc | ||
|
|
87739bbbef | ||
|
|
dce4dc0a33 | ||
|
|
ad6c587494 | ||
|
|
467cdd8a3d | ||
|
|
026894054b | ||
|
|
db9107acef | ||
|
|
f8a0f2fe44 | ||
|
|
bfd83e3cca | ||
|
|
6b20ec9c7d | ||
|
|
f0d55a1c25 | ||
|
|
f31dece58d | ||
|
|
a6951b8b14 | ||
|
|
d56e3df770 | ||
|
|
d88c1fbdbb | ||
|
|
a947ccf228 | ||
|
|
5747e8b5d0 | ||
|
|
aab0174c94 | ||
|
|
2c271aa9b2 | ||
|
|
7cdeb3a32c | ||
|
|
8c4ac815cb | ||
|
|
dcb6dd5dcb | ||
|
|
be24fbe8ae | ||
|
|
fc1a89f51c | ||
|
|
eddeccfcfb | ||
|
|
b620b5cff5 | ||
|
|
42c21ff671 | ||
|
|
b9eb9fe72d | ||
|
|
9ae49b405c | ||
|
|
f932deb47a | ||
|
|
77f446d095 | ||
|
|
5f8810fc8d | ||
|
|
80ead7cfa4 | ||
|
|
8772288bd6 | ||
|
|
338095fdd3 | ||
|
|
299d66fd98 | ||
|
|
77218c5848 | ||
|
|
9e6fc9269d | ||
|
|
661f9fc3e2 | ||
|
|
2adb5fe014 | ||
|
|
ce917a4cc3 | ||
|
|
b3de1c029c | ||
|
|
461c7a5ad7 | ||
|
|
489631b227 | ||
|
|
e78ff0dc2a | ||
|
|
ab4d9f6213 | ||
|
|
81c313fd89 | ||
|
|
e9de665289 | ||
|
|
bfbe06e912 | ||
|
|
71a7d0db4a | ||
|
|
e8748e4747 | ||
|
|
ad3a5be097 | ||
|
|
17b3f24a37 | ||
|
|
1f0b03aebe | ||
|
|
fc8710c071 | ||
|
|
a7f36eef0e | ||
|
|
54ab08d839 | ||
|
|
a3ea6d9e61 | ||
|
|
f19c760f4f | ||
|
|
86e74de9db | ||
|
|
4d4253ee17 | ||
|
|
8c7b5d22c9 | ||
|
|
513f5da5de | ||
|
|
fb4d545555 | ||
|
|
abaf8574a8 | ||
|
|
f346b5aaaa | ||
|
|
31398cc739 | ||
|
|
4574958e2e | ||
|
|
d623105ef4 | ||
|
|
aac5cd8574 | ||
|
|
d3c02b8f5d | ||
|
|
7f252c1800 | ||
|
|
f73b40619a | ||
|
|
0f7b853a88 | ||
|
|
70f0a974b8 | ||
|
|
2eb15cf30c | ||
|
|
499f0b9588 | ||
|
|
43d615ae87 | ||
|
|
82e1c6fc3f | ||
|
|
45bfe1f44c | ||
|
|
58d2c18423 | ||
|
|
feeda42560 | ||
|
|
7d2a6764e7 | ||
|
|
1645542a8a | ||
|
|
151eb1e4b6 | ||
|
|
5e4de8e860 | ||
|
|
6312d3bbba | ||
|
|
d2bede6b51 | ||
|
|
5ca5069fc4 | ||
|
|
8a3c460f63 | ||
|
|
ca653a515c | ||
|
|
e5b4cf33bf | ||
|
|
e24a8f2088 | ||
|
|
f27e120aeb | ||
|
|
ee1ce90501 | ||
|
|
47fe8cf3be | ||
|
|
5813aa6602 | ||
|
|
b4f4ece162 | ||
|
|
bb00f7529f | ||
|
|
ad3bd11334 | ||
|
|
875c6663ef | ||
|
|
b48b7c454a | ||
|
|
f523348b3f | ||
|
|
63bf1e008f | ||
|
|
419ac10c60 | ||
|
|
d631d2c100 | ||
|
|
89431458bf | ||
|
|
d8d0c0ac01 | ||
|
|
c0f5699bad | ||
|
|
277fdd1070 | ||
|
|
d290efb849 | ||
|
|
b26a68641c | ||
|
|
b88cda5c41 | ||
|
|
d2a791bef3 | ||
|
|
99516a5730 | ||
|
|
aecc86c390 | ||
|
|
500b54f5aa | ||
|
|
cc29692e27 | ||
|
|
f018aa33cb | ||
|
|
92b6475fa6 | ||
|
|
bda3546cfd | ||
|
|
2691cdefe3 | ||
|
|
93b8aa5c9d | ||
|
|
7a7f188133 | ||
|
|
3e00fae3f4 | ||
|
|
ee3c0c6a87 | ||
|
|
cf7ea78588 | ||
|
|
186aa3bb0e | ||
|
|
e368f687a7 | ||
|
|
0214aa328e | ||
|
|
dd919eeee6 | ||
|
|
3f22d06b0c | ||
|
|
b812de236b | ||
|
|
40f56fa93b | ||
|
|
e610edf045 | ||
|
|
764955b61c | ||
|
|
e3d31a371a | ||
|
|
df723a4870 | ||
|
|
bd00e3a735 | ||
|
|
e794582f31 | ||
|
|
7cab4fd30d | ||
|
|
3333135bc0 | ||
|
|
1db1841b20 | ||
|
|
f7ce191482 | ||
|
|
96ea222780 | ||
|
|
03c0d9a672 | ||
|
|
e9f86af7f5 | ||
|
|
9ada784983 | ||
|
|
a83ee2b3f1 | ||
|
|
2564f10d98 | ||
|
|
0871770634 | ||
|
|
51b21dfd57 | ||
|
|
276989716f | ||
|
|
6cb3c0cac8 | ||
|
|
d064e14933 | ||
|
|
77b0fcfdd9 | ||
|
|
ee7fe11fd2 | ||
|
|
4c26fb6fe5 | ||
|
|
fc135094b3 | ||
|
|
5d42f21abd | ||
|
|
28eeabded1 | ||
|
|
b6910cfff7 | ||
|
|
8938ef398c | ||
|
|
df2b75fa81 | ||
|
|
857734c66c | ||
|
|
bedc0c0f8f | ||
|
|
5a41bdf329 | ||
|
|
bf5d0dd245 | ||
|
|
1cec37b0f5 | ||
|
|
c40c25b03c | ||
|
|
82badc3dd5 | ||
|
|
43ded688f7 | ||
|
|
661420fe85 | ||
|
|
7aab967447 | ||
|
|
afb07034ed | ||
|
|
44d2205136 | ||
|
|
b226318f9e | ||
|
|
30999204c9 | ||
|
|
ffddfa1f94 | ||
|
|
fc336bbf20 | ||
|
|
e0b2c1c4f5 | ||
|
|
5afbee5f6f | ||
|
|
51459196f9 |
2
.github/workflows/main.yml
vendored
2
.github/workflows/main.yml
vendored
@@ -85,7 +85,7 @@ jobs:
|
||||
restore-keys: go-artifacts-${{ runner.os }}-${{ matrix.scenario }}-
|
||||
|
||||
- name: Run tests
|
||||
run: make ${{ matrix.scenario}}
|
||||
run: GOGC=10 make ${{ matrix.scenario}}
|
||||
|
||||
- name: Publish coverage
|
||||
uses: codecov/codecov-action@v5
|
||||
|
||||
2
LICENSE
2
LICENSE
@@ -175,7 +175,7 @@
|
||||
|
||||
END OF TERMS AND CONDITIONS
|
||||
|
||||
Copyright 2019-2024 VictoriaMetrics, Inc.
|
||||
Copyright 2019-2025 VictoriaMetrics, Inc.
|
||||
|
||||
Licensed under the Apache License, Version 2.0 (the "License");
|
||||
you may not use this file except in compliance with the License.
|
||||
|
||||
12
Makefile
12
Makefile
@@ -513,19 +513,19 @@ check-all: fmt vet golangci-lint govulncheck
|
||||
clean-checkers: remove-golangci-lint remove-govulncheck
|
||||
|
||||
test:
|
||||
DISABLE_FSYNC_FOR_TESTING=1 go test ./lib/... ./app/...
|
||||
go test ./lib/... ./app/...
|
||||
|
||||
test-race:
|
||||
DISABLE_FSYNC_FOR_TESTING=1 go test -race ./lib/... ./app/...
|
||||
go test -race ./lib/... ./app/...
|
||||
|
||||
test-pure:
|
||||
DISABLE_FSYNC_FOR_TESTING=1 CGO_ENABLED=0 go test ./lib/... ./app/...
|
||||
CGO_ENABLED=0 go test ./lib/... ./app/...
|
||||
|
||||
test-full:
|
||||
DISABLE_FSYNC_FOR_TESTING=1 go test -coverprofile=coverage.txt -covermode=atomic ./lib/... ./app/...
|
||||
go test -coverprofile=coverage.txt -covermode=atomic ./lib/... ./app/...
|
||||
|
||||
test-full-386:
|
||||
DISABLE_FSYNC_FOR_TESTING=1 GOARCH=386 go test -coverprofile=coverage.txt -covermode=atomic ./lib/... ./app/...
|
||||
GOARCH=386 go test -coverprofile=coverage.txt -covermode=atomic ./lib/... ./app/...
|
||||
|
||||
integration-test: victoria-metrics vmagent vmalert vmauth
|
||||
go test ./apptest/... -skip="^TestCluster.*"
|
||||
@@ -567,7 +567,7 @@ golangci-lint: install-golangci-lint
|
||||
golangci-lint run
|
||||
|
||||
install-golangci-lint:
|
||||
which golangci-lint || curl -sSfL https://raw.githubusercontent.com/golangci/golangci-lint/master/install.sh | sh -s -- -b $(shell go env GOPATH)/bin v1.60.3
|
||||
which golangci-lint || curl -sSfL https://raw.githubusercontent.com/golangci/golangci-lint/master/install.sh | sh -s -- -b $(shell go env GOPATH)/bin v1.63.4
|
||||
|
||||
remove-golangci-lint:
|
||||
rm -rf `which golangci-lint`
|
||||
|
||||
16
README.md
16
README.md
@@ -1,12 +1,14 @@
|
||||
# VictoriaMetrics
|
||||
|
||||
[](https://github.com/VictoriaMetrics/VictoriaMetrics/releases/latest)
|
||||
[](https://hub.docker.com/r/victoriametrics/victoria-metrics)
|
||||
[](https://slack.victoriametrics.com/)
|
||||
[](https://github.com/VictoriaMetrics/VictoriaMetrics/blob/master/LICENSE)
|
||||
[](https://goreportcard.com/report/github.com/VictoriaMetrics/VictoriaMetrics)
|
||||
[](https://github.com/VictoriaMetrics/VictoriaMetrics/actions)
|
||||
[](https://codecov.io/gh/VictoriaMetrics/VictoriaMetrics)
|
||||

|
||||

|
||||

|
||||

|
||||

|
||||

|
||||

|
||||

|
||||

|
||||
|
||||
<picture>
|
||||
<source srcset="docs/logo_white.webp" media="(prefers-color-scheme: dark)">
|
||||
|
||||
@@ -42,6 +42,10 @@ var (
|
||||
"Smaller intervals increase disk IO load. Minimum supported value is 1s")
|
||||
maxIngestionRate = flag.Int("maxIngestionRate", 0, "The maximum number of samples vmsingle can receive per second. Data ingestion is paused when the limit is exceeded. "+
|
||||
"By default there are no limits on samples ingestion rate.")
|
||||
finalDedupScheduleInterval = flag.Duration("storage.finalDedupScheduleCheckInterval", time.Hour, "The interval for checking when final deduplication process should be started."+
|
||||
"Storage unconditionally adds 25% jitter to the interval value on each check evaluation."+
|
||||
" Changing the interval to the bigger values may delay downsampling, deduplication for historical data."+
|
||||
" See also https://docs.victoriametrics.com/#deduplication")
|
||||
)
|
||||
|
||||
func main() {
|
||||
@@ -86,6 +90,10 @@ func main() {
|
||||
startTime := time.Now()
|
||||
storage.SetDedupInterval(*minScrapeInterval)
|
||||
storage.SetDataFlushInterval(*inmemoryDataFlushInterval)
|
||||
if *finalDedupScheduleInterval < time.Hour {
|
||||
logger.Fatalf("-dedup.finalDedupScheduleCheckInterval cannot be smaller than 1 hour; got %s", *finalDedupScheduleInterval)
|
||||
}
|
||||
storage.SetFinalDedupScheduleInterval(*finalDedupScheduleInterval)
|
||||
vmstorage.Init(promql.ResetRollupResultCacheIfNeeded)
|
||||
vmselect.Init()
|
||||
vminsertcommon.StartIngestionRateLimiter(*maxIngestionRate)
|
||||
|
||||
@@ -15,6 +15,8 @@ import (
|
||||
// LineReader reads newline-delimited lines from the underlying reader
|
||||
type LineReader struct {
|
||||
// Line contains the next line read after the call to NextLine
|
||||
//
|
||||
// The Line contents is valid until the next call to NextLine.
|
||||
Line []byte
|
||||
|
||||
// name is the LineReader name
|
||||
@@ -26,6 +28,9 @@ type LineReader struct {
|
||||
// buf is a buffer for reading the next line
|
||||
buf []byte
|
||||
|
||||
// bufOffset is the offset at buf to read the next line from
|
||||
bufOffset int
|
||||
|
||||
// err is the last error when reading data from r
|
||||
err error
|
||||
|
||||
@@ -51,26 +56,27 @@ func NewLineReader(name string, r io.Reader) *LineReader {
|
||||
// Check for Err in this case.
|
||||
func (lr *LineReader) NextLine() bool {
|
||||
for {
|
||||
if len(lr.buf) == 0 {
|
||||
if lr.bufOffset >= len(lr.buf) {
|
||||
if lr.err != nil || lr.eofReached {
|
||||
return false
|
||||
}
|
||||
if !lr.readMoreData() {
|
||||
return false
|
||||
}
|
||||
if len(lr.buf) == 0 && lr.eofReached {
|
||||
if lr.bufOffset >= len(lr.buf) && lr.eofReached {
|
||||
return false
|
||||
}
|
||||
}
|
||||
|
||||
if n := bytes.IndexByte(lr.buf, '\n'); n >= 0 {
|
||||
lr.Line = append(lr.Line[:0], lr.buf[:n]...)
|
||||
lr.buf = append(lr.buf[:0], lr.buf[n+1:]...)
|
||||
buf := lr.buf[lr.bufOffset:]
|
||||
if n := bytes.IndexByte(buf, '\n'); n >= 0 {
|
||||
lr.Line = buf[:n]
|
||||
lr.bufOffset += n + 1
|
||||
return true
|
||||
}
|
||||
if lr.eofReached {
|
||||
lr.Line = append(lr.Line[:0], lr.buf...)
|
||||
lr.buf = lr.buf[:0]
|
||||
lr.Line = buf
|
||||
lr.bufOffset += len(buf)
|
||||
return true
|
||||
}
|
||||
if !lr.readMoreData() {
|
||||
@@ -88,6 +94,11 @@ func (lr *LineReader) Err() error {
|
||||
}
|
||||
|
||||
func (lr *LineReader) readMoreData() bool {
|
||||
if lr.bufOffset > 0 {
|
||||
lr.buf = append(lr.buf[:0], lr.buf[lr.bufOffset:]...)
|
||||
lr.bufOffset = 0
|
||||
}
|
||||
|
||||
bufLen := len(lr.buf)
|
||||
if bufLen >= MaxLineSizeBytes.IntN() {
|
||||
logger.Warnf("%s: the line length exceeds -insert.maxLineSizeBytes=%d; skipping it; line contents=%q", lr.name, MaxLineSizeBytes.IntN(), lr.buf)
|
||||
|
||||
@@ -176,7 +176,7 @@ func writeCompactObject(w io.Writer, fields []logstorage.Field) error {
|
||||
_, err := fmt.Fprintf(w, "%s\n", fields[0].Value)
|
||||
return err
|
||||
}
|
||||
if len(fields) == 2 && fields[0].Name == "_time" || fields[1].Name == "_time" {
|
||||
if len(fields) == 2 && (fields[0].Name == "_time" || fields[1].Name == "_time") {
|
||||
// Write _time\tfieldValue as is
|
||||
if fields[0].Name == "_time" {
|
||||
_, err := fmt.Fprintf(w, "%s\t%s\n", fields[0].Value, fields[1].Value)
|
||||
|
||||
@@ -45,6 +45,8 @@ var (
|
||||
"see https://docs.victoriametrics.com/victorialogs/keyconcepts/#data-model")
|
||||
u64FieldsPerLog = flag.Int("u64FieldsPerLog", 1, "The number of fields with uint64 values to generate per each log entry; "+
|
||||
"see https://docs.victoriametrics.com/victorialogs/keyconcepts/#data-model")
|
||||
i64FieldsPerLog = flag.Int("i64FieldsPerLog", 1, "The number of fields with int64 values to generate per each log entry; "+
|
||||
"see https://docs.victoriametrics.com/victorialogs/keyconcepts/#data-model")
|
||||
floatFieldsPerLog = flag.Int("floatFieldsPerLog", 1, "The number of fields with float64 values to generate per each log entry; "+
|
||||
"see https://docs.victoriametrics.com/victorialogs/keyconcepts/#data-model")
|
||||
ipFieldsPerLog = flag.Int("ipFieldsPerLog", 1, "The number of fields with IPv4 values to generate per each log entry; "+
|
||||
@@ -254,6 +256,9 @@ func generateLogsAtTimestamp(bw *bufio.Writer, workerID int, ts int64, firstStre
|
||||
for j := 0; j < *u64FieldsPerLog; j++ {
|
||||
fmt.Fprintf(bw, `,"u64_%d":"%d"`, j, rand.Uint64())
|
||||
}
|
||||
for j := 0; j < *i64FieldsPerLog; j++ {
|
||||
fmt.Fprintf(bw, `,"i64_%d":"%d"`, j, int64(rand.Uint64()))
|
||||
}
|
||||
for j := 0; j < *floatFieldsPerLog; j++ {
|
||||
fmt.Fprintf(bw, `,"float_%d":"%v"`, j, math.Round(10_000*rand.Float64())/1000)
|
||||
}
|
||||
|
||||
@@ -688,13 +688,13 @@ func ProcessStatsQueryRangeRequest(ctx context.Context, w http.ResponseWriter, r
|
||||
m := make(map[string]*statsSeries)
|
||||
var mLock sync.Mutex
|
||||
|
||||
timestamp := q.GetTimestamp()
|
||||
writeBlock := func(_ uint, timestamps []int64, columns []logstorage.BlockColumn) {
|
||||
clonedColumnNames := make([]string, len(columns))
|
||||
for i, c := range columns {
|
||||
clonedColumnNames[i] = strings.Clone(c.Name)
|
||||
}
|
||||
for i := range timestamps {
|
||||
timestamp := q.GetTimestamp()
|
||||
labels := make([]logstorage.Field, 0, len(byFields))
|
||||
for j, c := range columns {
|
||||
if c.Name == "_time" {
|
||||
|
||||
@@ -28,7 +28,7 @@ func TestParseExtraFilters_Success(t *testing.T) {
|
||||
// LogsQL filter
|
||||
f(`foobar`, `foobar`)
|
||||
f(`foo:bar`, `foo:bar`)
|
||||
f(`foo:(bar or baz) error _time:5m {"foo"=bar,baz="z"}`, `(foo:bar or foo:baz) error _time:5m {foo="bar",baz="z"}`)
|
||||
f(`foo:(bar or baz) error _time:5m {"foo"=bar,baz="z"}`, `{foo="bar",baz="z"} (foo:bar or foo:baz) error _time:5m`)
|
||||
}
|
||||
|
||||
func TestParseExtraFilters_Failure(t *testing.T) {
|
||||
@@ -77,7 +77,7 @@ func TestParseExtraStreamFilters_Success(t *testing.T) {
|
||||
// LogsQL filter
|
||||
f(`foobar`, `foobar`)
|
||||
f(`foo:bar`, `foo:bar`)
|
||||
f(`foo:(bar or baz) error _time:5m {"foo"=bar,baz="z"}`, `(foo:bar or foo:baz) error _time:5m {foo="bar",baz="z"}`)
|
||||
f(`foo:(bar or baz) error _time:5m {"foo"=bar,baz="z"}`, `{foo="bar",baz="z"} (foo:bar or foo:baz) error _time:5m`)
|
||||
}
|
||||
|
||||
func TestParseExtraStreamFilters_Failure(t *testing.T) {
|
||||
|
||||
@@ -1,13 +1,12 @@
|
||||
{
|
||||
"files": {
|
||||
"main.css": "./static/css/main.fa83344e.css",
|
||||
"main.js": "./static/js/main.8ad2bc1f.js",
|
||||
"main.css": "./static/css/main.02a1c6cb.css",
|
||||
"main.js": "./static/js/main.55c8060b.js",
|
||||
"static/js/685.f772060c.chunk.js": "./static/js/685.f772060c.chunk.js",
|
||||
"static/media/MetricsQL.md": "./static/media/MetricsQL.a00044c91d9781cf8557.md",
|
||||
"index.html": "./index.html"
|
||||
},
|
||||
"entrypoints": [
|
||||
"static/css/main.fa83344e.css",
|
||||
"static/js/main.8ad2bc1f.js"
|
||||
"static/css/main.02a1c6cb.css",
|
||||
"static/js/main.55c8060b.js"
|
||||
]
|
||||
}
|
||||
@@ -1 +1 @@
|
||||
<!doctype html><html lang="en"><head><meta charset="utf-8"/><link rel="icon" href="./favicon.svg"/><link rel="apple-touch-icon" href="./favicon.svg"/><link rel="mask-icon" href="./favicon.svg" color="#000000"><meta name="viewport" content="width=device-width,initial-scale=1,maximum-scale=5"/><meta name="theme-color" content="#000000"/><meta name="description" content="Explore your log data with VictoriaLogs UI"/><link rel="manifest" href="./manifest.json"/><title>UI for VictoriaLogs</title><meta name="twitter:card" content="summary"><meta name="twitter:title" content="UI for VictoriaLogs"><meta name="twitter:site" content="@https://victoriametrics.com/products/victorialogs/"><meta name="twitter:description" content="Explore your log data with VictoriaLogs UI"><meta name="twitter:image" content="./preview.jpg"><meta property="og:type" content="website"><meta property="og:title" content="UI for VictoriaLogs"><meta property="og:url" content="https://victoriametrics.com/products/victorialogs/"><meta property="og:description" content="Explore your log data with VictoriaLogs UI"><script defer="defer" src="./static/js/main.8ad2bc1f.js"></script><link href="./static/css/main.fa83344e.css" rel="stylesheet"></head><body><noscript>You need to enable JavaScript to run this app.</noscript><div id="root"></div></body></html>
|
||||
<!doctype html><html lang="en"><head><meta charset="utf-8"/><link rel="icon" href="./favicon.svg"/><link rel="apple-touch-icon" href="./favicon.svg"/><link rel="mask-icon" href="./favicon.svg" color="#000000"><meta name="viewport" content="width=device-width,initial-scale=1,maximum-scale=5"/><meta name="theme-color" content="#000000"/><meta name="description" content="Explore your log data with VictoriaLogs UI"/><link rel="manifest" href="./manifest.json"/><title>UI for VictoriaLogs</title><meta name="twitter:card" content="summary"><meta name="twitter:title" content="UI for VictoriaLogs"><meta name="twitter:site" content="@https://victoriametrics.com/products/victorialogs/"><meta name="twitter:description" content="Explore your log data with VictoriaLogs UI"><meta name="twitter:image" content="./preview.jpg"><meta property="og:type" content="website"><meta property="og:title" content="UI for VictoriaLogs"><meta property="og:url" content="https://victoriametrics.com/products/victorialogs/"><meta property="og:description" content="Explore your log data with VictoriaLogs UI"><script defer="defer" src="./static/js/main.55c8060b.js"></script><link href="./static/css/main.02a1c6cb.css" rel="stylesheet"></head><body><noscript>You need to enable JavaScript to run this app.</noscript><div id="root"></div></body></html>
|
||||
1
app/vlselect/vmui/static/css/main.02a1c6cb.css
Normal file
1
app/vlselect/vmui/static/css/main.02a1c6cb.css
Normal file
File diff suppressed because one or more lines are too long
File diff suppressed because one or more lines are too long
2
app/vlselect/vmui/static/js/main.55c8060b.js
Normal file
2
app/vlselect/vmui/static/js/main.55c8060b.js
Normal file
File diff suppressed because one or more lines are too long
File diff suppressed because one or more lines are too long
File diff suppressed because it is too large
Load Diff
@@ -160,8 +160,8 @@ func (m *manager) update(ctx context.Context, groupsCfg []config.Group, restore
|
||||
// it is important to call InterruptEval before the update, because cancel fn
|
||||
// can be re-assigned during the update.
|
||||
item.old.InterruptEval()
|
||||
go func(old *rule.Group, new *rule.Group) {
|
||||
old.UpdateWith(new)
|
||||
go func(oldGroup *rule.Group, newGroup *rule.Group) {
|
||||
oldGroup.UpdateWith(newGroup)
|
||||
wg.Done()
|
||||
}(item.old, item.new)
|
||||
}
|
||||
|
||||
@@ -13,6 +13,7 @@ import (
|
||||
"github.com/VictoriaMetrics/VictoriaMetrics/app/vmalert/utils"
|
||||
"github.com/VictoriaMetrics/VictoriaMetrics/lib/httputils"
|
||||
"github.com/VictoriaMetrics/VictoriaMetrics/lib/promauth"
|
||||
"github.com/VictoriaMetrics/VictoriaMetrics/lib/prompbmarshal"
|
||||
"github.com/VictoriaMetrics/VictoriaMetrics/lib/promrelabel"
|
||||
)
|
||||
|
||||
@@ -69,7 +70,17 @@ func (am *AlertManager) Send(ctx context.Context, alerts []Alert, headers map[st
|
||||
|
||||
func (am *AlertManager) send(ctx context.Context, alerts []Alert, headers map[string]string) error {
|
||||
b := &bytes.Buffer{}
|
||||
writeamRequest(b, alerts, am.argFunc, am.relabelConfigs)
|
||||
alertsToSend := alerts[:0]
|
||||
lblss := make([][]prompbmarshal.Label, 0, len(alerts))
|
||||
for _, a := range alerts {
|
||||
lbls := a.applyRelabelingIfNeeded(am.relabelConfigs)
|
||||
if len(lbls) == 0 {
|
||||
continue
|
||||
}
|
||||
alertsToSend = append(alertsToSend, a)
|
||||
lblss = append(lblss, lbls)
|
||||
}
|
||||
writeamRequest(b, alertsToSend, am.argFunc, lblss)
|
||||
|
||||
req, err := http.NewRequest(http.MethodPost, am.addr.String(), b)
|
||||
if err != nil {
|
||||
|
||||
@@ -1,15 +1,14 @@
|
||||
{% import (
|
||||
"time"
|
||||
|
||||
"github.com/VictoriaMetrics/VictoriaMetrics/lib/promrelabel"
|
||||
"github.com/VictoriaMetrics/VictoriaMetrics/lib/prompbmarshal"
|
||||
) %}
|
||||
{% stripspace %}
|
||||
|
||||
{% func amRequest(alerts []Alert, generatorURL func(Alert) string, relabelCfg *promrelabel.ParsedConfigs) %}
|
||||
{% func amRequest(alerts []Alert, generatorURL func(Alert) string, lblss [][]prompbmarshal.Label) %}
|
||||
[
|
||||
{% for i, alert := range alerts %}
|
||||
{% code lbls := alert.applyRelabelingIfNeeded(relabelCfg) %}
|
||||
{% if len(lbls) == 0 %} {% continue %} {% endif %}
|
||||
{% code lbls := lblss[i] %}
|
||||
{
|
||||
"startsAt":{%q= alert.Start.Format(time.RFC3339Nano) %},
|
||||
"generatorURL": {%q= generatorURL(alert) %},
|
||||
|
||||
@@ -8,7 +8,7 @@ package notifier
|
||||
import (
|
||||
"time"
|
||||
|
||||
"github.com/VictoriaMetrics/VictoriaMetrics/lib/promrelabel"
|
||||
"github.com/VictoriaMetrics/VictoriaMetrics/lib/prompbmarshal"
|
||||
)
|
||||
|
||||
//line app/vmalert/notifier/alertmanager_request.qtpl:8
|
||||
@@ -25,122 +25,116 @@ var (
|
||||
)
|
||||
|
||||
//line app/vmalert/notifier/alertmanager_request.qtpl:8
|
||||
func streamamRequest(qw422016 *qt422016.Writer, alerts []Alert, generatorURL func(Alert) string, relabelCfg *promrelabel.ParsedConfigs) {
|
||||
func streamamRequest(qw422016 *qt422016.Writer, alerts []Alert, generatorURL func(Alert) string, lblss [][]prompbmarshal.Label) {
|
||||
//line app/vmalert/notifier/alertmanager_request.qtpl:8
|
||||
qw422016.N().S(`[`)
|
||||
//line app/vmalert/notifier/alertmanager_request.qtpl:10
|
||||
for i, alert := range alerts {
|
||||
//line app/vmalert/notifier/alertmanager_request.qtpl:11
|
||||
lbls := alert.applyRelabelingIfNeeded(relabelCfg)
|
||||
lbls := lblss[i]
|
||||
|
||||
//line app/vmalert/notifier/alertmanager_request.qtpl:12
|
||||
if len(lbls) == 0 {
|
||||
//line app/vmalert/notifier/alertmanager_request.qtpl:12
|
||||
continue
|
||||
//line app/vmalert/notifier/alertmanager_request.qtpl:12
|
||||
}
|
||||
//line app/vmalert/notifier/alertmanager_request.qtpl:12
|
||||
//line app/vmalert/notifier/alertmanager_request.qtpl:11
|
||||
qw422016.N().S(`{"startsAt":`)
|
||||
//line app/vmalert/notifier/alertmanager_request.qtpl:14
|
||||
//line app/vmalert/notifier/alertmanager_request.qtpl:13
|
||||
qw422016.N().Q(alert.Start.Format(time.RFC3339Nano))
|
||||
//line app/vmalert/notifier/alertmanager_request.qtpl:14
|
||||
//line app/vmalert/notifier/alertmanager_request.qtpl:13
|
||||
qw422016.N().S(`,"generatorURL":`)
|
||||
//line app/vmalert/notifier/alertmanager_request.qtpl:15
|
||||
//line app/vmalert/notifier/alertmanager_request.qtpl:14
|
||||
qw422016.N().Q(generatorURL(alert))
|
||||
//line app/vmalert/notifier/alertmanager_request.qtpl:15
|
||||
//line app/vmalert/notifier/alertmanager_request.qtpl:14
|
||||
qw422016.N().S(`,`)
|
||||
//line app/vmalert/notifier/alertmanager_request.qtpl:16
|
||||
//line app/vmalert/notifier/alertmanager_request.qtpl:15
|
||||
if !alert.End.IsZero() {
|
||||
//line app/vmalert/notifier/alertmanager_request.qtpl:16
|
||||
//line app/vmalert/notifier/alertmanager_request.qtpl:15
|
||||
qw422016.N().S(`"endsAt":`)
|
||||
//line app/vmalert/notifier/alertmanager_request.qtpl:17
|
||||
//line app/vmalert/notifier/alertmanager_request.qtpl:16
|
||||
qw422016.N().Q(alert.End.Format(time.RFC3339Nano))
|
||||
//line app/vmalert/notifier/alertmanager_request.qtpl:17
|
||||
//line app/vmalert/notifier/alertmanager_request.qtpl:16
|
||||
qw422016.N().S(`,`)
|
||||
//line app/vmalert/notifier/alertmanager_request.qtpl:18
|
||||
//line app/vmalert/notifier/alertmanager_request.qtpl:17
|
||||
}
|
||||
//line app/vmalert/notifier/alertmanager_request.qtpl:18
|
||||
//line app/vmalert/notifier/alertmanager_request.qtpl:17
|
||||
qw422016.N().S(`"labels": {`)
|
||||
//line app/vmalert/notifier/alertmanager_request.qtpl:20
|
||||
//line app/vmalert/notifier/alertmanager_request.qtpl:19
|
||||
ll := len(lbls)
|
||||
|
||||
//line app/vmalert/notifier/alertmanager_request.qtpl:21
|
||||
//line app/vmalert/notifier/alertmanager_request.qtpl:20
|
||||
for idx, l := range lbls {
|
||||
//line app/vmalert/notifier/alertmanager_request.qtpl:22
|
||||
//line app/vmalert/notifier/alertmanager_request.qtpl:21
|
||||
qw422016.N().Q(l.Name)
|
||||
//line app/vmalert/notifier/alertmanager_request.qtpl:22
|
||||
//line app/vmalert/notifier/alertmanager_request.qtpl:21
|
||||
qw422016.N().S(`:`)
|
||||
//line app/vmalert/notifier/alertmanager_request.qtpl:22
|
||||
//line app/vmalert/notifier/alertmanager_request.qtpl:21
|
||||
qw422016.N().Q(l.Value)
|
||||
//line app/vmalert/notifier/alertmanager_request.qtpl:22
|
||||
//line app/vmalert/notifier/alertmanager_request.qtpl:21
|
||||
if idx != ll-1 {
|
||||
//line app/vmalert/notifier/alertmanager_request.qtpl:22
|
||||
//line app/vmalert/notifier/alertmanager_request.qtpl:21
|
||||
qw422016.N().S(`,`)
|
||||
//line app/vmalert/notifier/alertmanager_request.qtpl:22
|
||||
//line app/vmalert/notifier/alertmanager_request.qtpl:21
|
||||
}
|
||||
//line app/vmalert/notifier/alertmanager_request.qtpl:23
|
||||
//line app/vmalert/notifier/alertmanager_request.qtpl:22
|
||||
}
|
||||
//line app/vmalert/notifier/alertmanager_request.qtpl:23
|
||||
//line app/vmalert/notifier/alertmanager_request.qtpl:22
|
||||
qw422016.N().S(`},"annotations": {`)
|
||||
//line app/vmalert/notifier/alertmanager_request.qtpl:26
|
||||
//line app/vmalert/notifier/alertmanager_request.qtpl:25
|
||||
c := len(alert.Annotations)
|
||||
|
||||
//line app/vmalert/notifier/alertmanager_request.qtpl:27
|
||||
//line app/vmalert/notifier/alertmanager_request.qtpl:26
|
||||
for k, v := range alert.Annotations {
|
||||
//line app/vmalert/notifier/alertmanager_request.qtpl:28
|
||||
//line app/vmalert/notifier/alertmanager_request.qtpl:27
|
||||
c = c - 1
|
||||
|
||||
//line app/vmalert/notifier/alertmanager_request.qtpl:29
|
||||
//line app/vmalert/notifier/alertmanager_request.qtpl:28
|
||||
qw422016.N().Q(k)
|
||||
//line app/vmalert/notifier/alertmanager_request.qtpl:29
|
||||
//line app/vmalert/notifier/alertmanager_request.qtpl:28
|
||||
qw422016.N().S(`:`)
|
||||
//line app/vmalert/notifier/alertmanager_request.qtpl:29
|
||||
//line app/vmalert/notifier/alertmanager_request.qtpl:28
|
||||
qw422016.N().Q(v)
|
||||
//line app/vmalert/notifier/alertmanager_request.qtpl:29
|
||||
//line app/vmalert/notifier/alertmanager_request.qtpl:28
|
||||
if c > 0 {
|
||||
//line app/vmalert/notifier/alertmanager_request.qtpl:29
|
||||
//line app/vmalert/notifier/alertmanager_request.qtpl:28
|
||||
qw422016.N().S(`,`)
|
||||
//line app/vmalert/notifier/alertmanager_request.qtpl:29
|
||||
//line app/vmalert/notifier/alertmanager_request.qtpl:28
|
||||
}
|
||||
//line app/vmalert/notifier/alertmanager_request.qtpl:30
|
||||
//line app/vmalert/notifier/alertmanager_request.qtpl:29
|
||||
}
|
||||
//line app/vmalert/notifier/alertmanager_request.qtpl:30
|
||||
//line app/vmalert/notifier/alertmanager_request.qtpl:29
|
||||
qw422016.N().S(`}}`)
|
||||
//line app/vmalert/notifier/alertmanager_request.qtpl:33
|
||||
//line app/vmalert/notifier/alertmanager_request.qtpl:32
|
||||
if i != len(alerts)-1 {
|
||||
//line app/vmalert/notifier/alertmanager_request.qtpl:33
|
||||
//line app/vmalert/notifier/alertmanager_request.qtpl:32
|
||||
qw422016.N().S(`,`)
|
||||
//line app/vmalert/notifier/alertmanager_request.qtpl:33
|
||||
//line app/vmalert/notifier/alertmanager_request.qtpl:32
|
||||
}
|
||||
//line app/vmalert/notifier/alertmanager_request.qtpl:34
|
||||
//line app/vmalert/notifier/alertmanager_request.qtpl:33
|
||||
}
|
||||
//line app/vmalert/notifier/alertmanager_request.qtpl:34
|
||||
//line app/vmalert/notifier/alertmanager_request.qtpl:33
|
||||
qw422016.N().S(`]`)
|
||||
//line app/vmalert/notifier/alertmanager_request.qtpl:36
|
||||
//line app/vmalert/notifier/alertmanager_request.qtpl:35
|
||||
}
|
||||
|
||||
//line app/vmalert/notifier/alertmanager_request.qtpl:36
|
||||
func writeamRequest(qq422016 qtio422016.Writer, alerts []Alert, generatorURL func(Alert) string, relabelCfg *promrelabel.ParsedConfigs) {
|
||||
//line app/vmalert/notifier/alertmanager_request.qtpl:36
|
||||
//line app/vmalert/notifier/alertmanager_request.qtpl:35
|
||||
func writeamRequest(qq422016 qtio422016.Writer, alerts []Alert, generatorURL func(Alert) string, lblss [][]prompbmarshal.Label) {
|
||||
//line app/vmalert/notifier/alertmanager_request.qtpl:35
|
||||
qw422016 := qt422016.AcquireWriter(qq422016)
|
||||
//line app/vmalert/notifier/alertmanager_request.qtpl:36
|
||||
streamamRequest(qw422016, alerts, generatorURL, relabelCfg)
|
||||
//line app/vmalert/notifier/alertmanager_request.qtpl:36
|
||||
//line app/vmalert/notifier/alertmanager_request.qtpl:35
|
||||
streamamRequest(qw422016, alerts, generatorURL, lblss)
|
||||
//line app/vmalert/notifier/alertmanager_request.qtpl:35
|
||||
qt422016.ReleaseWriter(qw422016)
|
||||
//line app/vmalert/notifier/alertmanager_request.qtpl:36
|
||||
//line app/vmalert/notifier/alertmanager_request.qtpl:35
|
||||
}
|
||||
|
||||
//line app/vmalert/notifier/alertmanager_request.qtpl:36
|
||||
func amRequest(alerts []Alert, generatorURL func(Alert) string, relabelCfg *promrelabel.ParsedConfigs) string {
|
||||
//line app/vmalert/notifier/alertmanager_request.qtpl:36
|
||||
//line app/vmalert/notifier/alertmanager_request.qtpl:35
|
||||
func amRequest(alerts []Alert, generatorURL func(Alert) string, lblss [][]prompbmarshal.Label) string {
|
||||
//line app/vmalert/notifier/alertmanager_request.qtpl:35
|
||||
qb422016 := qt422016.AcquireByteBuffer()
|
||||
//line app/vmalert/notifier/alertmanager_request.qtpl:36
|
||||
writeamRequest(qb422016, alerts, generatorURL, relabelCfg)
|
||||
//line app/vmalert/notifier/alertmanager_request.qtpl:36
|
||||
//line app/vmalert/notifier/alertmanager_request.qtpl:35
|
||||
writeamRequest(qb422016, alerts, generatorURL, lblss)
|
||||
//line app/vmalert/notifier/alertmanager_request.qtpl:35
|
||||
qs422016 := string(qb422016.B)
|
||||
//line app/vmalert/notifier/alertmanager_request.qtpl:36
|
||||
//line app/vmalert/notifier/alertmanager_request.qtpl:35
|
||||
qt422016.ReleaseByteBuffer(qb422016)
|
||||
//line app/vmalert/notifier/alertmanager_request.qtpl:36
|
||||
//line app/vmalert/notifier/alertmanager_request.qtpl:35
|
||||
return qs422016
|
||||
//line app/vmalert/notifier/alertmanager_request.qtpl:36
|
||||
//line app/vmalert/notifier/alertmanager_request.qtpl:35
|
||||
}
|
||||
|
||||
@@ -105,6 +105,16 @@ func TestAlertManager_Send(t *testing.T) {
|
||||
if r.Header.Get(headerKey) != "bar" {
|
||||
t.Fatalf("expected header %q to be set to %q; got %q instead", headerKey, "bar", r.Header.Get(headerKey))
|
||||
}
|
||||
case 4:
|
||||
var a []struct {
|
||||
Labels map[string]string `json:"labels"`
|
||||
}
|
||||
if err := json.NewDecoder(r.Body).Decode(&a); err != nil {
|
||||
t.Fatalf("can not unmarshal data into alert %s", err)
|
||||
}
|
||||
if len(a) != 1 {
|
||||
t.Fatalf("expected 1 alert in array got %d", len(a))
|
||||
}
|
||||
}
|
||||
})
|
||||
srv := httptest.NewServer(mux)
|
||||
@@ -168,7 +178,20 @@ func TestAlertManager_Send(t *testing.T) {
|
||||
t.Fatalf("unexpected error %s", err)
|
||||
}
|
||||
|
||||
if c != 3 {
|
||||
t.Fatalf("expected 3 calls(count from zero) to server got %d", c)
|
||||
if err := am.Send(context.Background(), []Alert{
|
||||
{
|
||||
Name: "alert1",
|
||||
Labels: map[string]string{"rule": "test"},
|
||||
},
|
||||
{
|
||||
Name: "alert2",
|
||||
Labels: map[string]string{},
|
||||
},
|
||||
}, map[string]string{}); err != nil {
|
||||
t.Fatalf("unexpected error %s", err)
|
||||
}
|
||||
|
||||
if c != 4 {
|
||||
t.Fatalf("expected 4 calls(count from zero) to server got %d", c)
|
||||
}
|
||||
}
|
||||
|
||||
@@ -614,7 +614,7 @@ func (ar *AlertingRule) alertToTimeSeries(a *notifier.Alert, timestamp int64) []
|
||||
}
|
||||
|
||||
func alertToTimeSeries(a *notifier.Alert, timestamp int64) prompbmarshal.TimeSeries {
|
||||
var labels []prompbmarshal.Label
|
||||
labels := make([]prompbmarshal.Label, 0, len(a.Labels)+2)
|
||||
for k, v := range a.Labels {
|
||||
labels = append(labels, prompbmarshal.Label{
|
||||
Name: k,
|
||||
@@ -634,7 +634,7 @@ func alertToTimeSeries(a *notifier.Alert, timestamp int64) prompbmarshal.TimeSer
|
||||
// alertForToTimeSeries returns a time series that represents
|
||||
// state of active alerts, where value is time when alert become active
|
||||
func alertForToTimeSeries(a *notifier.Alert, timestamp int64) prompbmarshal.TimeSeries {
|
||||
var labels []prompbmarshal.Label
|
||||
labels := make([]prompbmarshal.Label, 0, len(a.Labels)+1)
|
||||
for k, v := range a.Labels {
|
||||
labels = append(labels, prompbmarshal.Label{
|
||||
Name: k,
|
||||
@@ -650,21 +650,24 @@ func alertForToTimeSeries(a *notifier.Alert, timestamp int64) prompbmarshal.Time
|
||||
// for alerts which changed their state from Pending to Inactive or Firing.
|
||||
func pendingAlertStaleTimeSeries(ls map[string]string, timestamp int64, includeAlertForState bool) []prompbmarshal.TimeSeries {
|
||||
var result []prompbmarshal.TimeSeries
|
||||
var baseLabels []prompbmarshal.Label
|
||||
baseLabels := make([]prompbmarshal.Label, 0, len(ls)+1)
|
||||
for k, v := range ls {
|
||||
baseLabels = append(baseLabels, prompbmarshal.Label{
|
||||
Name: k,
|
||||
Value: v,
|
||||
})
|
||||
}
|
||||
|
||||
alertsLabels := make([]prompbmarshal.Label, 0, len(ls)+2)
|
||||
alertsLabels = append(alertsLabels, baseLabels...)
|
||||
// __name__ already been dropped, no need to check duplication
|
||||
alertsLabels := append(baseLabels, prompbmarshal.Label{Name: "__name__", Value: alertMetricName})
|
||||
alertsLabels = append(alertsLabels, prompbmarshal.Label{Name: "__name__", Value: alertMetricName})
|
||||
alertsLabels = append(alertsLabels, prompbmarshal.Label{Name: alertStateLabel, Value: notifier.StatePending.String()})
|
||||
result = append(result, newTimeSeries([]float64{decimal.StaleNaN}, []int64{timestamp}, alertsLabels))
|
||||
|
||||
if includeAlertForState {
|
||||
alertsForStateLabels := append(baseLabels, prompbmarshal.Label{Name: "__name__", Value: alertForStateMetricName})
|
||||
result = append(result, newTimeSeries([]float64{decimal.StaleNaN}, []int64{timestamp}, alertsForStateLabels))
|
||||
baseLabels = append(baseLabels, prompbmarshal.Label{Name: "__name__", Value: alertForStateMetricName})
|
||||
result = append(result, newTimeSeries([]float64{decimal.StaleNaN}, []int64{timestamp}, baseLabels))
|
||||
}
|
||||
return result
|
||||
}
|
||||
@@ -672,22 +675,25 @@ func pendingAlertStaleTimeSeries(ls map[string]string, timestamp int64, includeA
|
||||
// firingAlertStaleTimeSeries returns stale `ALERTS` and `ALERTS_FOR_STATE` time series
|
||||
// for alerts which changed their state from Firing to Inactive.
|
||||
func firingAlertStaleTimeSeries(ls map[string]string, timestamp int64) []prompbmarshal.TimeSeries {
|
||||
var baseLabels []prompbmarshal.Label
|
||||
baseLabels := make([]prompbmarshal.Label, 0, len(ls)+1)
|
||||
for k, v := range ls {
|
||||
baseLabels = append(baseLabels, prompbmarshal.Label{
|
||||
Name: k,
|
||||
Value: v,
|
||||
})
|
||||
}
|
||||
|
||||
alertsLabels := make([]prompbmarshal.Label, 0, len(ls)+2)
|
||||
alertsLabels = append(alertsLabels, baseLabels...)
|
||||
// __name__ already been dropped, no need to check duplication
|
||||
alertsLabels := append(baseLabels, prompbmarshal.Label{Name: "__name__", Value: alertMetricName})
|
||||
alertsLabels = append(alertsLabels, prompbmarshal.Label{Name: "__name__", Value: alertMetricName})
|
||||
alertsLabels = append(alertsLabels, prompbmarshal.Label{Name: alertStateLabel, Value: notifier.StateFiring.String()})
|
||||
|
||||
alertsForStateLabels := append(baseLabels, prompbmarshal.Label{Name: "__name__", Value: alertForStateMetricName})
|
||||
baseLabels = append(baseLabels, prompbmarshal.Label{Name: "__name__", Value: alertForStateMetricName})
|
||||
|
||||
return []prompbmarshal.TimeSeries{
|
||||
newTimeSeries([]float64{decimal.StaleNaN}, []int64{timestamp}, alertsLabels),
|
||||
newTimeSeries([]float64{decimal.StaleNaN}, []int64{timestamp}, alertsForStateLabels),
|
||||
newTimeSeries([]float64{decimal.StaleNaN}, []int64{timestamp}, baseLabels),
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
@@ -252,10 +252,14 @@ func TestAlertingRule_Exec(t *testing.T) {
|
||||
},
|
||||
map[int][]prompbmarshal.TimeSeries{
|
||||
0: {
|
||||
{Labels: []prompbmarshal.Label{{Name: "__name__", Value: alertMetricName}, {Name: "alertname", Value: "empty_labels"}, {Name: "alertstate", Value: "firing"}},
|
||||
Samples: []prompbmarshal.Sample{{Value: 1, Timestamp: ts.UnixNano() / 1e6}}},
|
||||
{Labels: []prompbmarshal.Label{{Name: "__name__", Value: alertForStateMetricName}, {Name: "alertname", Value: "empty_labels"}},
|
||||
Samples: []prompbmarshal.Sample{{Value: float64(ts.Unix()), Timestamp: ts.UnixNano() / 1e6}}},
|
||||
{
|
||||
Labels: []prompbmarshal.Label{{Name: "__name__", Value: alertMetricName}, {Name: "alertname", Value: "empty_labels"}, {Name: "alertstate", Value: "firing"}},
|
||||
Samples: []prompbmarshal.Sample{{Value: 1, Timestamp: ts.UnixNano() / 1e6}},
|
||||
},
|
||||
{
|
||||
Labels: []prompbmarshal.Label{{Name: "__name__", Value: alertForStateMetricName}, {Name: "alertname", Value: "empty_labels"}},
|
||||
Samples: []prompbmarshal.Sample{{Value: float64(ts.Unix()), Timestamp: ts.UnixNano() / 1e6}},
|
||||
},
|
||||
},
|
||||
})
|
||||
|
||||
@@ -273,22 +277,34 @@ func TestAlertingRule_Exec(t *testing.T) {
|
||||
4: {{labels: []string{"name", "foo"}, alert: ¬ifier.Alert{State: notifier.StateInactive}}},
|
||||
}, map[int][]prompbmarshal.TimeSeries{
|
||||
0: {
|
||||
{Labels: []prompbmarshal.Label{{Name: "__name__", Value: alertMetricName}, {Name: "alertname", Value: "single-firing=>inactive=>firing=>inactive=>inactive"}, {Name: "alertstate", Value: "firing"}, {Name: "name", Value: "foo"}},
|
||||
Samples: []prompbmarshal.Sample{{Value: 1, Timestamp: ts.UnixNano() / 1e6}}},
|
||||
{Labels: []prompbmarshal.Label{{Name: "__name__", Value: alertForStateMetricName}, {Name: "alertname", Value: "single-firing=>inactive=>firing=>inactive=>inactive"}, {Name: "name", Value: "foo"}},
|
||||
Samples: []prompbmarshal.Sample{{Value: float64(ts.Unix()), Timestamp: ts.UnixNano() / 1e6}}},
|
||||
{
|
||||
Labels: []prompbmarshal.Label{{Name: "__name__", Value: alertMetricName}, {Name: "alertname", Value: "single-firing=>inactive=>firing=>inactive=>inactive"}, {Name: "alertstate", Value: "firing"}, {Name: "name", Value: "foo"}},
|
||||
Samples: []prompbmarshal.Sample{{Value: 1, Timestamp: ts.UnixNano() / 1e6}},
|
||||
},
|
||||
{
|
||||
Labels: []prompbmarshal.Label{{Name: "__name__", Value: alertForStateMetricName}, {Name: "alertname", Value: "single-firing=>inactive=>firing=>inactive=>inactive"}, {Name: "name", Value: "foo"}},
|
||||
Samples: []prompbmarshal.Sample{{Value: float64(ts.Unix()), Timestamp: ts.UnixNano() / 1e6}},
|
||||
},
|
||||
},
|
||||
1: {
|
||||
{Labels: []prompbmarshal.Label{{Name: "__name__", Value: alertMetricName}, {Name: "alertname", Value: "single-firing=>inactive=>firing=>inactive=>inactive"}, {Name: "alertstate", Value: "firing"}, {Name: "name", Value: "foo"}},
|
||||
Samples: []prompbmarshal.Sample{{Value: decimal.StaleNaN, Timestamp: ts.Add(defaultStep).UnixNano() / 1e6}}},
|
||||
{Labels: []prompbmarshal.Label{{Name: "__name__", Value: alertForStateMetricName}, {Name: "alertname", Value: "single-firing=>inactive=>firing=>inactive=>inactive"}, {Name: "name", Value: "foo"}},
|
||||
Samples: []prompbmarshal.Sample{{Value: decimal.StaleNaN, Timestamp: ts.Add(defaultStep).UnixNano() / 1e6}}},
|
||||
{
|
||||
Labels: []prompbmarshal.Label{{Name: "__name__", Value: alertMetricName}, {Name: "alertname", Value: "single-firing=>inactive=>firing=>inactive=>inactive"}, {Name: "alertstate", Value: "firing"}, {Name: "name", Value: "foo"}},
|
||||
Samples: []prompbmarshal.Sample{{Value: decimal.StaleNaN, Timestamp: ts.Add(defaultStep).UnixNano() / 1e6}},
|
||||
},
|
||||
{
|
||||
Labels: []prompbmarshal.Label{{Name: "__name__", Value: alertForStateMetricName}, {Name: "alertname", Value: "single-firing=>inactive=>firing=>inactive=>inactive"}, {Name: "name", Value: "foo"}},
|
||||
Samples: []prompbmarshal.Sample{{Value: decimal.StaleNaN, Timestamp: ts.Add(defaultStep).UnixNano() / 1e6}},
|
||||
},
|
||||
},
|
||||
2: {
|
||||
{Labels: []prompbmarshal.Label{{Name: "__name__", Value: alertMetricName}, {Name: "alertname", Value: "single-firing=>inactive=>firing=>inactive=>inactive"}, {Name: "alertstate", Value: "firing"}, {Name: "name", Value: "foo"}},
|
||||
Samples: []prompbmarshal.Sample{{Value: 1, Timestamp: ts.Add(2*defaultStep).UnixNano() / 1e6}}},
|
||||
{Labels: []prompbmarshal.Label{{Name: "__name__", Value: alertForStateMetricName}, {Name: "alertname", Value: "single-firing=>inactive=>firing=>inactive=>inactive"}, {Name: "name", Value: "foo"}},
|
||||
Samples: []prompbmarshal.Sample{{Value: float64(ts.Add(2 * defaultStep).Unix()), Timestamp: ts.Add(2*defaultStep).UnixNano() / 1e6}}},
|
||||
{
|
||||
Labels: []prompbmarshal.Label{{Name: "__name__", Value: alertMetricName}, {Name: "alertname", Value: "single-firing=>inactive=>firing=>inactive=>inactive"}, {Name: "alertstate", Value: "firing"}, {Name: "name", Value: "foo"}},
|
||||
Samples: []prompbmarshal.Sample{{Value: 1, Timestamp: ts.Add(2*defaultStep).UnixNano() / 1e6}},
|
||||
},
|
||||
{
|
||||
Labels: []prompbmarshal.Label{{Name: "__name__", Value: alertForStateMetricName}, {Name: "alertname", Value: "single-firing=>inactive=>firing=>inactive=>inactive"}, {Name: "name", Value: "foo"}},
|
||||
Samples: []prompbmarshal.Sample{{Value: float64(ts.Add(2 * defaultStep).Unix()), Timestamp: ts.Add(2*defaultStep).UnixNano() / 1e6}},
|
||||
},
|
||||
},
|
||||
})
|
||||
|
||||
@@ -344,34 +360,54 @@ func TestAlertingRule_Exec(t *testing.T) {
|
||||
},
|
||||
}, map[int][]prompbmarshal.TimeSeries{
|
||||
0: {
|
||||
{Labels: []prompbmarshal.Label{{Name: "__name__", Value: alertMetricName}, {Name: "alertname", Value: "multiple-steps-firing"}, {Name: "alertstate", Value: "firing"}, {Name: "name", Value: "foo"}},
|
||||
Samples: []prompbmarshal.Sample{{Value: 1, Timestamp: ts.UnixNano() / 1e6}}},
|
||||
{Labels: []prompbmarshal.Label{{Name: "__name__", Value: alertForStateMetricName}, {Name: "alertname", Value: "multiple-steps-firing"}, {Name: "name", Value: "foo"}},
|
||||
Samples: []prompbmarshal.Sample{{Value: float64(ts.Unix()), Timestamp: ts.UnixNano() / 1e6}}},
|
||||
{
|
||||
Labels: []prompbmarshal.Label{{Name: "__name__", Value: alertMetricName}, {Name: "alertname", Value: "multiple-steps-firing"}, {Name: "alertstate", Value: "firing"}, {Name: "name", Value: "foo"}},
|
||||
Samples: []prompbmarshal.Sample{{Value: 1, Timestamp: ts.UnixNano() / 1e6}},
|
||||
},
|
||||
{
|
||||
Labels: []prompbmarshal.Label{{Name: "__name__", Value: alertForStateMetricName}, {Name: "alertname", Value: "multiple-steps-firing"}, {Name: "name", Value: "foo"}},
|
||||
Samples: []prompbmarshal.Sample{{Value: float64(ts.Unix()), Timestamp: ts.UnixNano() / 1e6}},
|
||||
},
|
||||
},
|
||||
1: {
|
||||
// stale time series for foo, `firing -> inactive`
|
||||
{Labels: []prompbmarshal.Label{{Name: "__name__", Value: alertMetricName}, {Name: "alertname", Value: "multiple-steps-firing"}, {Name: "alertstate", Value: "firing"}, {Name: "name", Value: "foo"}},
|
||||
Samples: []prompbmarshal.Sample{{Value: decimal.StaleNaN, Timestamp: ts.Add(defaultStep).UnixNano() / 1e6}}},
|
||||
{Labels: []prompbmarshal.Label{{Name: "__name__", Value: alertForStateMetricName}, {Name: "alertname", Value: "multiple-steps-firing"}, {Name: "name", Value: "foo"}},
|
||||
Samples: []prompbmarshal.Sample{{Value: decimal.StaleNaN, Timestamp: ts.Add(defaultStep).UnixNano() / 1e6}}},
|
||||
{
|
||||
Labels: []prompbmarshal.Label{{Name: "__name__", Value: alertMetricName}, {Name: "alertname", Value: "multiple-steps-firing"}, {Name: "alertstate", Value: "firing"}, {Name: "name", Value: "foo"}},
|
||||
Samples: []prompbmarshal.Sample{{Value: decimal.StaleNaN, Timestamp: ts.Add(defaultStep).UnixNano() / 1e6}},
|
||||
},
|
||||
{
|
||||
Labels: []prompbmarshal.Label{{Name: "__name__", Value: alertForStateMetricName}, {Name: "alertname", Value: "multiple-steps-firing"}, {Name: "name", Value: "foo"}},
|
||||
Samples: []prompbmarshal.Sample{{Value: decimal.StaleNaN, Timestamp: ts.Add(defaultStep).UnixNano() / 1e6}},
|
||||
},
|
||||
// new time series for foo1
|
||||
{Labels: []prompbmarshal.Label{{Name: "__name__", Value: alertMetricName}, {Name: "alertname", Value: "multiple-steps-firing"}, {Name: "alertstate", Value: "firing"}, {Name: "name", Value: "foo1"}},
|
||||
Samples: []prompbmarshal.Sample{{Value: 1, Timestamp: ts.Add(defaultStep).UnixNano() / 1e6}}},
|
||||
{Labels: []prompbmarshal.Label{{Name: "__name__", Value: alertForStateMetricName}, {Name: "alertname", Value: "multiple-steps-firing"}, {Name: "name", Value: "foo1"}},
|
||||
Samples: []prompbmarshal.Sample{{Value: float64(ts.Add(defaultStep).Unix()), Timestamp: ts.Add(defaultStep).UnixNano() / 1e6}}},
|
||||
{
|
||||
Labels: []prompbmarshal.Label{{Name: "__name__", Value: alertMetricName}, {Name: "alertname", Value: "multiple-steps-firing"}, {Name: "alertstate", Value: "firing"}, {Name: "name", Value: "foo1"}},
|
||||
Samples: []prompbmarshal.Sample{{Value: 1, Timestamp: ts.Add(defaultStep).UnixNano() / 1e6}},
|
||||
},
|
||||
{
|
||||
Labels: []prompbmarshal.Label{{Name: "__name__", Value: alertForStateMetricName}, {Name: "alertname", Value: "multiple-steps-firing"}, {Name: "name", Value: "foo1"}},
|
||||
Samples: []prompbmarshal.Sample{{Value: float64(ts.Add(defaultStep).Unix()), Timestamp: ts.Add(defaultStep).UnixNano() / 1e6}},
|
||||
},
|
||||
},
|
||||
2: {
|
||||
// stale time series for foo1
|
||||
{Labels: []prompbmarshal.Label{{Name: "__name__", Value: alertMetricName}, {Name: "alertname", Value: "multiple-steps-firing"}, {Name: "alertstate", Value: "firing"}, {Name: "name", Value: "foo1"}},
|
||||
Samples: []prompbmarshal.Sample{{Value: decimal.StaleNaN, Timestamp: ts.Add(2*defaultStep).UnixNano() / 1e6}}},
|
||||
{Labels: []prompbmarshal.Label{{Name: "__name__", Value: alertForStateMetricName}, {Name: "alertname", Value: "multiple-steps-firing"}, {Name: "name", Value: "foo1"}},
|
||||
Samples: []prompbmarshal.Sample{{Value: decimal.StaleNaN, Timestamp: ts.Add(2*defaultStep).UnixNano() / 1e6}}},
|
||||
{
|
||||
Labels: []prompbmarshal.Label{{Name: "__name__", Value: alertMetricName}, {Name: "alertname", Value: "multiple-steps-firing"}, {Name: "alertstate", Value: "firing"}, {Name: "name", Value: "foo1"}},
|
||||
Samples: []prompbmarshal.Sample{{Value: decimal.StaleNaN, Timestamp: ts.Add(2*defaultStep).UnixNano() / 1e6}},
|
||||
},
|
||||
{
|
||||
Labels: []prompbmarshal.Label{{Name: "__name__", Value: alertForStateMetricName}, {Name: "alertname", Value: "multiple-steps-firing"}, {Name: "name", Value: "foo1"}},
|
||||
Samples: []prompbmarshal.Sample{{Value: decimal.StaleNaN, Timestamp: ts.Add(2*defaultStep).UnixNano() / 1e6}},
|
||||
},
|
||||
// new time series for foo2
|
||||
{Labels: []prompbmarshal.Label{{Name: "__name__", Value: alertMetricName}, {Name: "alertname", Value: "multiple-steps-firing"}, {Name: "alertstate", Value: "firing"}, {Name: "name", Value: "foo2"}},
|
||||
Samples: []prompbmarshal.Sample{{Value: 1, Timestamp: ts.Add(2*defaultStep).UnixNano() / 1e6}}},
|
||||
{Labels: []prompbmarshal.Label{{Name: "__name__", Value: alertForStateMetricName}, {Name: "alertname", Value: "multiple-steps-firing"}, {Name: "name", Value: "foo2"}},
|
||||
Samples: []prompbmarshal.Sample{{Value: float64(ts.Add(2 * defaultStep).Unix()), Timestamp: ts.Add(2*defaultStep).UnixNano() / 1e6}}},
|
||||
{
|
||||
Labels: []prompbmarshal.Label{{Name: "__name__", Value: alertMetricName}, {Name: "alertname", Value: "multiple-steps-firing"}, {Name: "alertstate", Value: "firing"}, {Name: "name", Value: "foo2"}},
|
||||
Samples: []prompbmarshal.Sample{{Value: 1, Timestamp: ts.Add(2*defaultStep).UnixNano() / 1e6}},
|
||||
},
|
||||
{
|
||||
Labels: []prompbmarshal.Label{{Name: "__name__", Value: alertForStateMetricName}, {Name: "alertname", Value: "multiple-steps-firing"}, {Name: "name", Value: "foo2"}},
|
||||
Samples: []prompbmarshal.Sample{{Value: float64(ts.Add(2 * defaultStep).Unix()), Timestamp: ts.Add(2*defaultStep).UnixNano() / 1e6}},
|
||||
},
|
||||
},
|
||||
})
|
||||
|
||||
@@ -389,50 +425,72 @@ func TestAlertingRule_Exec(t *testing.T) {
|
||||
1: {{labels: []string{"name", "foo"}, alert: ¬ifier.Alert{State: notifier.StateFiring}}},
|
||||
}, map[int][]prompbmarshal.TimeSeries{
|
||||
0: {
|
||||
{Labels: []prompbmarshal.Label{{Name: "__name__", Value: alertMetricName}, {Name: "alertname", Value: "for-fired"}, {Name: "alertstate", Value: "pending"}, {Name: "name", Value: "foo"}},
|
||||
Samples: []prompbmarshal.Sample{{Value: 1, Timestamp: ts.UnixNano() / 1e6}}},
|
||||
{Labels: []prompbmarshal.Label{{Name: "__name__", Value: alertForStateMetricName}, {Name: "alertname", Value: "for-fired"}, {Name: "name", Value: "foo"}},
|
||||
Samples: []prompbmarshal.Sample{{Value: float64(ts.Unix()), Timestamp: ts.UnixNano() / 1e6}}},
|
||||
{
|
||||
Labels: []prompbmarshal.Label{{Name: "__name__", Value: alertMetricName}, {Name: "alertname", Value: "for-fired"}, {Name: "alertstate", Value: "pending"}, {Name: "name", Value: "foo"}},
|
||||
Samples: []prompbmarshal.Sample{{Value: 1, Timestamp: ts.UnixNano() / 1e6}},
|
||||
},
|
||||
{
|
||||
Labels: []prompbmarshal.Label{{Name: "__name__", Value: alertForStateMetricName}, {Name: "alertname", Value: "for-fired"}, {Name: "name", Value: "foo"}},
|
||||
Samples: []prompbmarshal.Sample{{Value: float64(ts.Unix()), Timestamp: ts.UnixNano() / 1e6}},
|
||||
},
|
||||
},
|
||||
1: {
|
||||
// stale time series for `pending -> firing`
|
||||
{Labels: []prompbmarshal.Label{{Name: "__name__", Value: alertMetricName}, {Name: "alertname", Value: "for-fired"}, {Name: "alertstate", Value: "pending"}, {Name: "name", Value: "foo"}},
|
||||
Samples: []prompbmarshal.Sample{{Value: decimal.StaleNaN, Timestamp: ts.Add(defaultStep).UnixNano() / 1e6}}},
|
||||
{Labels: []prompbmarshal.Label{{Name: "__name__", Value: alertMetricName}, {Name: "alertname", Value: "for-fired"}, {Name: "alertstate", Value: "firing"}, {Name: "name", Value: "foo"}},
|
||||
Samples: []prompbmarshal.Sample{{Value: 1, Timestamp: ts.Add(defaultStep).UnixNano() / 1e6}}},
|
||||
{Labels: []prompbmarshal.Label{{Name: "__name__", Value: alertForStateMetricName}, {Name: "alertname", Value: "for-fired"}, {Name: "name", Value: "foo"}},
|
||||
Samples: []prompbmarshal.Sample{{Value: float64(ts.Add(defaultStep).Unix()), Timestamp: ts.Add(defaultStep).UnixNano() / 1e6}}},
|
||||
{
|
||||
Labels: []prompbmarshal.Label{{Name: "__name__", Value: alertMetricName}, {Name: "alertname", Value: "for-fired"}, {Name: "alertstate", Value: "pending"}, {Name: "name", Value: "foo"}},
|
||||
Samples: []prompbmarshal.Sample{{Value: decimal.StaleNaN, Timestamp: ts.Add(defaultStep).UnixNano() / 1e6}},
|
||||
},
|
||||
{
|
||||
Labels: []prompbmarshal.Label{{Name: "__name__", Value: alertMetricName}, {Name: "alertname", Value: "for-fired"}, {Name: "alertstate", Value: "firing"}, {Name: "name", Value: "foo"}},
|
||||
Samples: []prompbmarshal.Sample{{Value: 1, Timestamp: ts.Add(defaultStep).UnixNano() / 1e6}},
|
||||
},
|
||||
{
|
||||
Labels: []prompbmarshal.Label{{Name: "__name__", Value: alertForStateMetricName}, {Name: "alertname", Value: "for-fired"}, {Name: "name", Value: "foo"}},
|
||||
Samples: []prompbmarshal.Sample{{Value: float64(ts.Add(defaultStep).Unix()), Timestamp: ts.Add(defaultStep).UnixNano() / 1e6}},
|
||||
},
|
||||
},
|
||||
})
|
||||
|
||||
f(newTestAlertingRule("for-pending=>empty", time.Second), [][]datasource.Metric{
|
||||
{metricWithLabels(t, "name", "foo")},
|
||||
{metricWithLabels(t, "name", "foo")},
|
||||
{metricWithLabels(t, "name", "foo", "a1", "b1", "a2", "b2", "a3", "b3")},
|
||||
{metricWithLabels(t, "name", "foo", "a1", "b1", "a2", "b2", "a3", "b3")},
|
||||
// empty step to delete pending alerts
|
||||
{},
|
||||
}, map[int][]testAlert{
|
||||
0: {{labels: []string{"name", "foo"}, alert: ¬ifier.Alert{State: notifier.StatePending}}},
|
||||
1: {{labels: []string{"name", "foo"}, alert: ¬ifier.Alert{State: notifier.StatePending}}},
|
||||
0: {{labels: []string{"name", "foo", "a1", "b1", "a2", "b2", "a3", "b3"}, alert: ¬ifier.Alert{State: notifier.StatePending}}},
|
||||
1: {{labels: []string{"name", "foo", "a1", "b1", "a2", "b2", "a3", "b3"}, alert: ¬ifier.Alert{State: notifier.StatePending}}},
|
||||
2: {},
|
||||
}, map[int][]prompbmarshal.TimeSeries{
|
||||
0: {
|
||||
{Labels: []prompbmarshal.Label{{Name: "__name__", Value: alertMetricName}, {Name: "alertname", Value: "for-pending=>empty"}, {Name: "alertstate", Value: "pending"}, {Name: "name", Value: "foo"}},
|
||||
Samples: []prompbmarshal.Sample{{Value: 1, Timestamp: ts.UnixNano() / 1e6}}},
|
||||
{Labels: []prompbmarshal.Label{{Name: "__name__", Value: alertForStateMetricName}, {Name: "alertname", Value: "for-pending=>empty"}, {Name: "name", Value: "foo"}},
|
||||
Samples: []prompbmarshal.Sample{{Value: float64(ts.Unix()), Timestamp: ts.UnixNano() / 1e6}}},
|
||||
{
|
||||
Labels: []prompbmarshal.Label{{Name: "__name__", Value: alertMetricName}, {Name: "a1", Value: "b1"}, {Name: "a2", Value: "b2"}, {Name: "a3", Value: "b3"}, {Name: "alertname", Value: "for-pending=>empty"}, {Name: "alertstate", Value: "pending"}, {Name: "name", Value: "foo"}},
|
||||
Samples: []prompbmarshal.Sample{{Value: 1, Timestamp: ts.UnixNano() / 1e6}},
|
||||
},
|
||||
{
|
||||
Labels: []prompbmarshal.Label{{Name: "__name__", Value: alertForStateMetricName}, {Name: "a1", Value: "b1"}, {Name: "a2", Value: "b2"}, {Name: "a3", Value: "b3"}, {Name: "alertname", Value: "for-pending=>empty"}, {Name: "name", Value: "foo"}},
|
||||
Samples: []prompbmarshal.Sample{{Value: float64(ts.Unix()), Timestamp: ts.UnixNano() / 1e6}},
|
||||
},
|
||||
},
|
||||
1: {
|
||||
{Labels: []prompbmarshal.Label{{Name: "__name__", Value: alertMetricName}, {Name: "alertname", Value: "for-pending=>empty"}, {Name: "alertstate", Value: "pending"}, {Name: "name", Value: "foo"}},
|
||||
Samples: []prompbmarshal.Sample{{Value: 1, Timestamp: ts.Add(defaultStep).UnixNano() / 1e6}}},
|
||||
{Labels: []prompbmarshal.Label{{Name: "__name__", Value: alertForStateMetricName}, {Name: "alertname", Value: "for-pending=>empty"}, {Name: "name", Value: "foo"}},
|
||||
Samples: []prompbmarshal.Sample{{Value: float64(ts.Unix()), Timestamp: ts.Add(defaultStep).UnixNano() / 1e6}}},
|
||||
{
|
||||
Labels: []prompbmarshal.Label{{Name: "__name__", Value: alertMetricName}, {Name: "a1", Value: "b1"}, {Name: "a2", Value: "b2"}, {Name: "a3", Value: "b3"}, {Name: "alertname", Value: "for-pending=>empty"}, {Name: "alertstate", Value: "pending"}, {Name: "name", Value: "foo"}},
|
||||
Samples: []prompbmarshal.Sample{{Value: 1, Timestamp: ts.Add(defaultStep).UnixNano() / 1e6}},
|
||||
},
|
||||
{
|
||||
Labels: []prompbmarshal.Label{{Name: "__name__", Value: alertForStateMetricName}, {Name: "a1", Value: "b1"}, {Name: "a2", Value: "b2"}, {Name: "a3", Value: "b3"}, {Name: "alertname", Value: "for-pending=>empty"}, {Name: "name", Value: "foo"}},
|
||||
Samples: []prompbmarshal.Sample{{Value: float64(ts.Unix()), Timestamp: ts.Add(defaultStep).UnixNano() / 1e6}},
|
||||
},
|
||||
},
|
||||
// stale time series for `pending -> inactive`
|
||||
2: {
|
||||
{Labels: []prompbmarshal.Label{{Name: "__name__", Value: alertMetricName}, {Name: "alertname", Value: "for-pending=>empty"}, {Name: "alertstate", Value: "pending"}, {Name: "name", Value: "foo"}},
|
||||
Samples: []prompbmarshal.Sample{{Value: decimal.StaleNaN, Timestamp: ts.Add(2*defaultStep).UnixNano() / 1e6}}},
|
||||
{Labels: []prompbmarshal.Label{{Name: "__name__", Value: alertForStateMetricName}, {Name: "alertname", Value: "for-pending=>empty"}, {Name: "name", Value: "foo"}},
|
||||
Samples: []prompbmarshal.Sample{{Value: decimal.StaleNaN, Timestamp: ts.Add(2*defaultStep).UnixNano() / 1e6}}},
|
||||
{
|
||||
Labels: []prompbmarshal.Label{{Name: "__name__", Value: alertMetricName}, {Name: "a1", Value: "b1"}, {Name: "a2", Value: "b2"}, {Name: "a3", Value: "b3"}, {Name: "alertname", Value: "for-pending=>empty"}, {Name: "alertstate", Value: "pending"}, {Name: "name", Value: "foo"}},
|
||||
Samples: []prompbmarshal.Sample{{Value: decimal.StaleNaN, Timestamp: ts.Add(2*defaultStep).UnixNano() / 1e6}},
|
||||
},
|
||||
{
|
||||
Labels: []prompbmarshal.Label{{Name: "__name__", Value: alertForStateMetricName}, {Name: "a1", Value: "b1"}, {Name: "a2", Value: "b2"}, {Name: "a3", Value: "b3"}, {Name: "alertname", Value: "for-pending=>empty"}, {Name: "name", Value: "foo"}},
|
||||
Samples: []prompbmarshal.Sample{{Value: decimal.StaleNaN, Timestamp: ts.Add(2*defaultStep).UnixNano() / 1e6}},
|
||||
},
|
||||
},
|
||||
})
|
||||
|
||||
|
||||
@@ -443,8 +443,8 @@ func (g *Group) Start(ctx context.Context, nts func() []notifier.Notifier, rw re
|
||||
}
|
||||
|
||||
// UpdateWith inserts new group to updateCh
|
||||
func (g *Group) UpdateWith(new *Group) {
|
||||
g.updateCh <- new
|
||||
func (g *Group) UpdateWith(newGroup *Group) {
|
||||
g.updateCh <- newGroup
|
||||
}
|
||||
|
||||
// DeepCopy returns a deep copy of group
|
||||
|
||||
@@ -7,11 +7,13 @@ import (
|
||||
"flag"
|
||||
"fmt"
|
||||
"math"
|
||||
"net"
|
||||
"net/http"
|
||||
"net/url"
|
||||
"os"
|
||||
"regexp"
|
||||
"sort"
|
||||
"strconv"
|
||||
"strings"
|
||||
"sync"
|
||||
"sync/atomic"
|
||||
@@ -348,6 +350,7 @@ func (up *URLPrefix) discoverBackendAddrsIfNeeded() {
|
||||
hostToAddrs := make(map[string][]string)
|
||||
for _, bu := range up.busOriginal {
|
||||
host := bu.Hostname()
|
||||
port := bu.Port()
|
||||
if hostToAddrs[host] != nil {
|
||||
// ips for the given host have been already discovered
|
||||
continue
|
||||
@@ -364,7 +367,11 @@ func (up *URLPrefix) discoverBackendAddrsIfNeeded() {
|
||||
} else {
|
||||
resolvedAddrs = make([]string, len(addrs))
|
||||
for i, addr := range addrs {
|
||||
resolvedAddrs[i] = fmt.Sprintf("%s:%d", addr.Target, addr.Port)
|
||||
hostPort := port
|
||||
if hostPort == "" && addr.Port > 0 {
|
||||
hostPort = strconv.FormatUint(uint64(addr.Port), 10)
|
||||
}
|
||||
resolvedAddrs[i] = net.JoinHostPort(addr.Target, hostPort)
|
||||
}
|
||||
}
|
||||
} else {
|
||||
@@ -375,7 +382,7 @@ func (up *URLPrefix) discoverBackendAddrsIfNeeded() {
|
||||
} else {
|
||||
resolvedAddrs = make([]string, len(addrs))
|
||||
for i, addr := range addrs {
|
||||
resolvedAddrs[i] = addr.String()
|
||||
resolvedAddrs[i] = net.JoinHostPort(addr.String(), port)
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -389,17 +396,9 @@ func (up *URLPrefix) discoverBackendAddrsIfNeeded() {
|
||||
var busNew []*backendURL
|
||||
for _, bu := range up.busOriginal {
|
||||
host := bu.Hostname()
|
||||
port := bu.Port()
|
||||
for _, addr := range hostToAddrs[host] {
|
||||
buCopy := *bu
|
||||
buCopy.Host = addr
|
||||
if port != "" {
|
||||
if n := strings.IndexByte(buCopy.Host, ':'); n >= 0 {
|
||||
// Drop the discovered port and substitute it the port specified in bu.
|
||||
buCopy.Host = buCopy.Host[:n]
|
||||
}
|
||||
buCopy.Host += ":" + port
|
||||
}
|
||||
busNew = append(busNew, &backendURL{
|
||||
url: &buCopy,
|
||||
})
|
||||
|
||||
@@ -3,12 +3,14 @@ package main
|
||||
import (
|
||||
"bytes"
|
||||
"fmt"
|
||||
"net"
|
||||
"net/url"
|
||||
"testing"
|
||||
|
||||
"gopkg.in/yaml.v2"
|
||||
|
||||
"github.com/VictoriaMetrics/VictoriaMetrics/lib/logger"
|
||||
"github.com/VictoriaMetrics/VictoriaMetrics/lib/netutil"
|
||||
)
|
||||
|
||||
func TestParseAuthConfigFailure(t *testing.T) {
|
||||
@@ -799,6 +801,75 @@ func TestBrokenBackend(t *testing.T) {
|
||||
}
|
||||
}
|
||||
|
||||
func TestDiscoverBackendIPsWithIPV6(t *testing.T) {
|
||||
f := func(actualUrl, expectedUrl string) {
|
||||
t.Helper()
|
||||
up := mustParseURL(actualUrl)
|
||||
up.discoverBackendIPs = true
|
||||
up.loadBalancingPolicy = "least_loaded"
|
||||
|
||||
up.discoverBackendAddrsIfNeeded()
|
||||
pbus := up.bus.Load()
|
||||
bus := *pbus
|
||||
|
||||
if len(bus) != 1 {
|
||||
t.Fatalf("expected url list to be of size 1; got %d instead", len(bus))
|
||||
}
|
||||
|
||||
got := bus[0].url.Host
|
||||
if got != expectedUrl {
|
||||
t.Fatalf(`expected url to be %q; got %q instead`, expectedUrl, bus[0].url.Host)
|
||||
}
|
||||
}
|
||||
|
||||
// Discover backendURL with SRV hostnames
|
||||
customResolver := &fakeResolver{
|
||||
Resolver: &net.Resolver{},
|
||||
// SRV records must return hostname
|
||||
// not an IP address
|
||||
lookupSRVResults: map[string][]*net.SRV{
|
||||
"_vmselect._tcp.selectwithport.": {
|
||||
{
|
||||
Target: "vmselect.local",
|
||||
Port: 8481,
|
||||
},
|
||||
},
|
||||
"_vmselect._tcp.selectwoport.": {
|
||||
{
|
||||
Target: "vmselect.local",
|
||||
},
|
||||
},
|
||||
},
|
||||
lookupIPAddrResults: map[string][]net.IPAddr{
|
||||
"vminsert.local": {
|
||||
{
|
||||
IP: net.ParseIP("10.0.10.13"),
|
||||
},
|
||||
},
|
||||
"ipv6.vminsert.local": {
|
||||
{
|
||||
IP: net.ParseIP("2607:f8b0:400a:80b::200e"),
|
||||
},
|
||||
},
|
||||
},
|
||||
}
|
||||
origResolver := netutil.Resolver
|
||||
netutil.Resolver = customResolver
|
||||
defer func() {
|
||||
netutil.Resolver = origResolver
|
||||
}()
|
||||
f("http://srv+_vmselect._tcp.selectwithport.:8080", "vmselect.local:8080")
|
||||
f("http://srv+_vmselect._tcp.selectwithport.:", "vmselect.local:8481")
|
||||
f("http://srv+_vmselect._tcp.selectwoport.:8080", "vmselect.local:8080")
|
||||
f("http://srv+_vmselect._tcp.selectwoport.", "vmselect.local:")
|
||||
|
||||
f("http://vminsert.local:8080", "10.0.10.13:8080")
|
||||
f("http://vminsert.local", "10.0.10.13:")
|
||||
f("http://ipv6.vminsert.local:8080", "[2607:f8b0:400a:80b::200e]:8080")
|
||||
f("http://ipv6.vminsert.local", "[2607:f8b0:400a:80b::200e]:")
|
||||
|
||||
}
|
||||
|
||||
func getRegexs(paths []string) []*Regex {
|
||||
var sps []*Regex
|
||||
for _, path := range paths {
|
||||
|
||||
@@ -222,8 +222,7 @@ func processRequest(w http.ResponseWriter, r *http.Request, ui *UserInfo) {
|
||||
isDefault = true
|
||||
}
|
||||
|
||||
rtb := getReadTrackingBody(r.Body, maxRequestBodySizeToRetry.IntN())
|
||||
defer putReadTrackingBody(rtb)
|
||||
rtb := newReadTrackingBody(r.Body, maxRequestBodySizeToRetry.IntN())
|
||||
r.Body = rtb
|
||||
|
||||
maxAttempts := up.getBackendsCount()
|
||||
@@ -559,22 +558,11 @@ type readTrackingBody struct {
|
||||
bufComplete bool
|
||||
}
|
||||
|
||||
func (rtb *readTrackingBody) reset() {
|
||||
rtb.maxBodySize = 0
|
||||
rtb.r = nil
|
||||
rtb.buf = rtb.buf[:0]
|
||||
rtb.readBuf = nil
|
||||
rtb.cannotRetry = false
|
||||
rtb.bufComplete = false
|
||||
}
|
||||
|
||||
func getReadTrackingBody(r io.ReadCloser, maxBodySize int) *readTrackingBody {
|
||||
v := readTrackingBodyPool.Get()
|
||||
if v == nil {
|
||||
v = &readTrackingBody{}
|
||||
}
|
||||
rtb := v.(*readTrackingBody)
|
||||
|
||||
func newReadTrackingBody(r io.ReadCloser, maxBodySize int) *readTrackingBody {
|
||||
// do not use sync.Pool there
|
||||
// since http.RoundTrip may still use request body after return
|
||||
// See this issue for details https://github.com/VictoriaMetrics/VictoriaMetrics/issues/8051
|
||||
rtb := &readTrackingBody{}
|
||||
if maxBodySize < 0 {
|
||||
maxBodySize = 0
|
||||
}
|
||||
@@ -597,13 +585,6 @@ func (r *zeroReader) Close() error {
|
||||
return nil
|
||||
}
|
||||
|
||||
func putReadTrackingBody(rtb *readTrackingBody) {
|
||||
rtb.reset()
|
||||
readTrackingBodyPool.Put(rtb)
|
||||
}
|
||||
|
||||
var readTrackingBodyPool sync.Pool
|
||||
|
||||
// Read implements io.Reader interface.
|
||||
func (rtb *readTrackingBody) Read(p []byte) (int, error) {
|
||||
if len(rtb.readBuf) > 0 {
|
||||
|
||||
@@ -195,7 +195,7 @@ unauthorized_user:
|
||||
}
|
||||
responseExpected = `
|
||||
statusCode=401
|
||||
The provided authKey doesn't match -reloadAuthKey`
|
||||
Expected to receive non-empty authKey when -reloadAuthKey is set`
|
||||
f(cfgStr, requestURL, backendHandler, responseExpected)
|
||||
if err := reloadAuthKey.Set(origAuthKey); err != nil {
|
||||
t.Fatalf("unexpected error: %s", err)
|
||||
@@ -545,8 +545,7 @@ func TestReadTrackingBody_RetrySuccess(t *testing.T) {
|
||||
f := func(s string, maxBodySize int) {
|
||||
t.Helper()
|
||||
|
||||
rtb := getReadTrackingBody(io.NopCloser(bytes.NewBufferString(s)), maxBodySize)
|
||||
defer putReadTrackingBody(rtb)
|
||||
rtb := newReadTrackingBody(io.NopCloser(bytes.NewBufferString(s)), maxBodySize)
|
||||
|
||||
if !rtb.canRetry() {
|
||||
t.Fatalf("canRetry() must return true before reading anything")
|
||||
@@ -581,8 +580,7 @@ func TestReadTrackingBody_RetrySuccessPartialRead(t *testing.T) {
|
||||
t.Helper()
|
||||
|
||||
// Check the case with partial read
|
||||
rtb := getReadTrackingBody(io.NopCloser(bytes.NewBufferString(s)), maxBodySize)
|
||||
defer putReadTrackingBody(rtb)
|
||||
rtb := newReadTrackingBody(io.NopCloser(bytes.NewBufferString(s)), maxBodySize)
|
||||
|
||||
for i := 0; i < len(s); i++ {
|
||||
buf := make([]byte, i)
|
||||
@@ -631,8 +629,7 @@ func TestReadTrackingBody_RetryFailureTooBigBody(t *testing.T) {
|
||||
f := func(s string, maxBodySize int) {
|
||||
t.Helper()
|
||||
|
||||
rtb := getReadTrackingBody(io.NopCloser(bytes.NewBufferString(s)), maxBodySize)
|
||||
defer putReadTrackingBody(rtb)
|
||||
rtb := newReadTrackingBody(io.NopCloser(bytes.NewBufferString(s)), maxBodySize)
|
||||
|
||||
if !rtb.canRetry() {
|
||||
t.Fatalf("canRetry() must return true before reading anything")
|
||||
@@ -681,8 +678,7 @@ func TestReadTrackingBody_RetryFailureZeroOrNegativeMaxBodySize(t *testing.T) {
|
||||
f := func(s string, maxBodySize int) {
|
||||
t.Helper()
|
||||
|
||||
rtb := getReadTrackingBody(io.NopCloser(bytes.NewBufferString(s)), maxBodySize)
|
||||
defer putReadTrackingBody(rtb)
|
||||
rtb := newReadTrackingBody(io.NopCloser(bytes.NewBufferString(s)), maxBodySize)
|
||||
|
||||
if !rtb.canRetry() {
|
||||
t.Fatalf("canRetry() must return true before reading anything")
|
||||
|
||||
@@ -180,11 +180,7 @@ func (c *Client) Explore() ([]*Series, error) {
|
||||
log.Printf("skip measurement %q since it has no fields", s.Measurement)
|
||||
continue
|
||||
}
|
||||
tags, ok := measurementTags[s.Measurement]
|
||||
if !ok {
|
||||
return nil, fmt.Errorf("failed to find tags of measurement %s", s.Measurement)
|
||||
}
|
||||
emptyTags := getEmptyTags(tags, s.LabelPairs)
|
||||
emptyTags := getEmptyTags(measurementTags[s.Measurement], s.LabelPairs)
|
||||
for _, field := range fields {
|
||||
is := &Series{
|
||||
Measurement: s.Measurement,
|
||||
@@ -201,11 +197,16 @@ func (c *Client) Explore() ([]*Series, error) {
|
||||
// getEmptyTags returns tags of a measurement that are missing in a specific series.
|
||||
// Tags represent all tags of a measurement. LabelPairs represent tags of a specific series.
|
||||
func getEmptyTags(tags map[string]struct{}, LabelPairs []LabelPair) []string {
|
||||
if len(tags) == 0 {
|
||||
// fast path: the measurement does not contain any tag
|
||||
return nil
|
||||
}
|
||||
|
||||
labelMap := make(map[string]struct{})
|
||||
for _, pair := range LabelPairs {
|
||||
labelMap[pair.Name] = struct{}{}
|
||||
}
|
||||
result := make([]string, 0, len(labelMap)-len(LabelPairs))
|
||||
var result []string
|
||||
for tag := range tags {
|
||||
if _, ok := labelMap[tag]; !ok {
|
||||
result = append(result, tag)
|
||||
|
||||
@@ -40,15 +40,15 @@ type filter struct {
|
||||
labelValue string
|
||||
}
|
||||
|
||||
func (f filter) inRange(min, max int64) bool {
|
||||
func (f filter) inRange(minV, maxV int64) bool {
|
||||
fmin, fmax := f.min, f.max
|
||||
if min == 0 {
|
||||
fmin = min
|
||||
if minV == 0 {
|
||||
fmin = minV
|
||||
}
|
||||
if fmax == 0 {
|
||||
fmax = max
|
||||
fmax = maxV
|
||||
}
|
||||
return min <= fmax && fmin <= max
|
||||
return minV <= fmax && fmin <= maxV
|
||||
}
|
||||
|
||||
// NewClient creates and validates new Client
|
||||
@@ -59,13 +59,13 @@ func NewClient(cfg Config) (*Client, error) {
|
||||
return nil, fmt.Errorf("failed to open snapshot %q: %s", cfg.Snapshot, err)
|
||||
}
|
||||
c := &Client{DBReadOnly: db}
|
||||
min, max, err := parseTime(cfg.Filter.TimeMin, cfg.Filter.TimeMax)
|
||||
minTime, maxTime, err := parseTime(cfg.Filter.TimeMin, cfg.Filter.TimeMax)
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("failed to parse time in filter: %s", err)
|
||||
}
|
||||
c.filter = filter{
|
||||
min: min,
|
||||
max: max,
|
||||
min: minTime,
|
||||
max: maxTime,
|
||||
label: cfg.Filter.Label,
|
||||
labelValue: cfg.Filter.LabelValue,
|
||||
}
|
||||
|
||||
@@ -105,7 +105,7 @@ func (ctx *InsertCtx) TryPrepareLabels(hasRelabeling bool) bool {
|
||||
if timeserieslimits.Enabled() && timeserieslimits.IsExceeding(ctx.Labels) {
|
||||
return false
|
||||
}
|
||||
ctx.sortLabelsIfNeeded()
|
||||
ctx.SortLabelsIfNeeded()
|
||||
|
||||
return true
|
||||
}
|
||||
|
||||
@@ -12,8 +12,8 @@ var sortLabels = flag.Bool("sortLabels", false, `Whether to sort labels for inco
|
||||
`For example, if m{k1="v1",k2="v2"} may be sent as m{k2="v2",k1="v1"}. `+
|
||||
`Enabled sorting for labels can slow down ingestion performance a bit`)
|
||||
|
||||
// sortLabelsIfNeeded sorts labels if -sortLabels command-line flag is set
|
||||
func (ctx *InsertCtx) sortLabelsIfNeeded() {
|
||||
// SortLabelsIfNeeded sorts labels if -sortLabels command-line flag is set
|
||||
func (ctx *InsertCtx) SortLabelsIfNeeded() {
|
||||
if *sortLabels {
|
||||
sort.Sort(&ctx.Labels)
|
||||
}
|
||||
|
||||
@@ -118,8 +118,14 @@ func insertRows(db string, rows []parser.Row, extraLabels []prompbmarshal.Label)
|
||||
}
|
||||
}
|
||||
} else {
|
||||
if !ic.TryPrepareLabels(false) {
|
||||
continue
|
||||
// special case for optimisations below
|
||||
// do not call TryPrepareLabels
|
||||
// manually apply sort and limits on demand
|
||||
ic.SortLabelsIfNeeded()
|
||||
if hasLimitsEnabled {
|
||||
if timeserieslimits.IsExceeding(ic.Labels) {
|
||||
continue
|
||||
}
|
||||
}
|
||||
ctx.metricNameBuf = storage.MarshalMetricNameRaw(ctx.metricNameBuf[:0], ic.Labels)
|
||||
labelsLen := len(ic.Labels)
|
||||
@@ -132,8 +138,6 @@ func insertRows(db string, rows []parser.Row, extraLabels []prompbmarshal.Label)
|
||||
ic.Labels = ic.Labels[:labelsLen]
|
||||
ic.AddLabel("", metricGroup)
|
||||
if hasLimitsEnabled {
|
||||
// special case for optimisation above
|
||||
// check only __name__ label value limits
|
||||
if timeserieslimits.IsExceeding(ic.Labels[len(ic.Labels)-1:]) {
|
||||
continue
|
||||
}
|
||||
|
||||
@@ -98,13 +98,13 @@ func aggrMin(values []float64) float64 {
|
||||
if pos < 0 {
|
||||
return nan
|
||||
}
|
||||
min := values[pos]
|
||||
minV := values[pos]
|
||||
for _, v := range values[pos+1:] {
|
||||
if !math.IsNaN(v) && v < min {
|
||||
min = v
|
||||
if !math.IsNaN(v) && v < minV {
|
||||
minV = v
|
||||
}
|
||||
}
|
||||
return min
|
||||
return minV
|
||||
}
|
||||
|
||||
func aggrMax(values []float64) float64 {
|
||||
@@ -112,13 +112,13 @@ func aggrMax(values []float64) float64 {
|
||||
if pos < 0 {
|
||||
return nan
|
||||
}
|
||||
max := values[pos]
|
||||
maxV := values[pos]
|
||||
for _, v := range values[pos+1:] {
|
||||
if !math.IsNaN(v) && v > max {
|
||||
max = v
|
||||
if !math.IsNaN(v) && v > maxV {
|
||||
maxV = v
|
||||
}
|
||||
}
|
||||
return max
|
||||
return maxV
|
||||
}
|
||||
|
||||
func aggrDiff(values []float64) float64 {
|
||||
@@ -177,12 +177,12 @@ func aggrCount(values []float64) float64 {
|
||||
}
|
||||
|
||||
func aggrRange(values []float64) float64 {
|
||||
min := aggrMin(values)
|
||||
if math.IsNaN(min) {
|
||||
minV := aggrMin(values)
|
||||
if math.IsNaN(minV) {
|
||||
return nan
|
||||
}
|
||||
max := aggrMax(values)
|
||||
return max - min
|
||||
maxV := aggrMax(values)
|
||||
return maxV - minV
|
||||
}
|
||||
|
||||
func aggrMultiply(values []float64) float64 {
|
||||
|
||||
@@ -2594,17 +2594,17 @@ func transformMinMax(ec *evalConfig, fe *graphiteql.FuncExpr) (nextSeriesFunc, e
|
||||
}
|
||||
f := nextSeriesConcurrentWrapper(nextSeries, func(s *series) (*series, error) {
|
||||
values := s.Values
|
||||
min := aggrMin(values)
|
||||
if math.IsNaN(min) {
|
||||
min = 0
|
||||
minV := aggrMin(values)
|
||||
if math.IsNaN(minV) {
|
||||
minV = 0
|
||||
}
|
||||
max := aggrMax(values)
|
||||
if math.IsNaN(max) {
|
||||
max = 0
|
||||
maxV := aggrMax(values)
|
||||
if math.IsNaN(maxV) {
|
||||
maxV = 0
|
||||
}
|
||||
vRange := max - min
|
||||
vRange := maxV - minV
|
||||
for i, v := range values {
|
||||
v = (v - min) / vRange
|
||||
v = (v - minV) / vRange
|
||||
if math.IsInf(v, 0) {
|
||||
v = 0
|
||||
}
|
||||
@@ -2975,9 +2975,9 @@ func transformRemoveAbovePercentile(ec *evalConfig, fe *graphiteql.FuncExpr) (ne
|
||||
}
|
||||
f := nextSeriesConcurrentWrapper(nextSeries, func(s *series) (*series, error) {
|
||||
values := s.Values
|
||||
max := aggrFunc(values)
|
||||
maxV := aggrFunc(values)
|
||||
for i, v := range values {
|
||||
if v > max {
|
||||
if v > maxV {
|
||||
values[i] = nan
|
||||
}
|
||||
}
|
||||
@@ -3035,9 +3035,9 @@ func transformRemoveBelowPercentile(ec *evalConfig, fe *graphiteql.FuncExpr) (ne
|
||||
}
|
||||
f := nextSeriesConcurrentWrapper(nextSeries, func(s *series) (*series, error) {
|
||||
values := s.Values
|
||||
min := aggrFunc(values)
|
||||
minV := aggrFunc(values)
|
||||
for i, v := range values {
|
||||
if v < min {
|
||||
if v < minV {
|
||||
values[i] = nan
|
||||
}
|
||||
}
|
||||
@@ -4514,11 +4514,11 @@ func transformOffsetToZero(ec *evalConfig, fe *graphiteql.FuncExpr) (nextSeriesF
|
||||
}
|
||||
f := nextSeriesConcurrentWrapper(nextSeries, func(s *series) (*series, error) {
|
||||
values := s.Values
|
||||
min := aggrMin(values)
|
||||
minV := aggrMin(values)
|
||||
for i, v := range values {
|
||||
values[i] = v - min
|
||||
values[i] = v - minV
|
||||
}
|
||||
s.Tags["offsetToZero"] = fmt.Sprintf("%g", min)
|
||||
s.Tags["offsetToZero"] = fmt.Sprintf("%g", minV)
|
||||
s.Name = fmt.Sprintf("offsetToZero(%s)", s.Name)
|
||||
s.expr = fe
|
||||
s.pathExpression = s.Name
|
||||
@@ -4567,29 +4567,29 @@ func transformPerSecond(ec *evalConfig, fe *graphiteql.FuncExpr) (nextSeriesFunc
|
||||
return f, nil
|
||||
}
|
||||
|
||||
func nonNegativeDelta(curr, prev, max, min float64) (float64, float64) {
|
||||
if !math.IsNaN(max) && curr > max {
|
||||
func nonNegativeDelta(currV, prevV, maxV, minV float64) (float64, float64) {
|
||||
if !math.IsNaN(maxV) && currV > maxV {
|
||||
return nan, nan
|
||||
}
|
||||
if !math.IsNaN(min) && curr < min {
|
||||
if !math.IsNaN(minV) && currV < minV {
|
||||
return nan, nan
|
||||
}
|
||||
if math.IsNaN(curr) || math.IsNaN(prev) {
|
||||
return nan, curr
|
||||
if math.IsNaN(currV) || math.IsNaN(prevV) {
|
||||
return nan, currV
|
||||
}
|
||||
if curr >= prev {
|
||||
return curr - prev, curr
|
||||
if currV >= prevV {
|
||||
return currV - prevV, currV
|
||||
}
|
||||
if !math.IsNaN(max) {
|
||||
if math.IsNaN(min) {
|
||||
min = float64(0)
|
||||
if !math.IsNaN(maxV) {
|
||||
if math.IsNaN(minV) {
|
||||
minV = float64(0)
|
||||
}
|
||||
return max + 1 + curr - prev - min, curr
|
||||
return maxV + 1 + currV - prevV - minV, currV
|
||||
}
|
||||
if !math.IsNaN(min) {
|
||||
return curr - min, curr
|
||||
if !math.IsNaN(minV) {
|
||||
return currV - minV, currV
|
||||
}
|
||||
return nan, curr
|
||||
return nan, currV
|
||||
}
|
||||
|
||||
// See https://graphite.readthedocs.io/en/stable/functions.html#graphite.render.functions.threshold
|
||||
@@ -4941,8 +4941,8 @@ func transformSortByMinima(ec *evalConfig, fe *graphiteql.FuncExpr) (nextSeriesF
|
||||
}
|
||||
// Filter out series with all the values smaller than 0
|
||||
f := nextSeriesConcurrentWrapper(nextSeries, func(s *series) (*series, error) {
|
||||
max := aggrMax(s.Values)
|
||||
if math.IsNaN(max) || max <= 0 {
|
||||
maxV := aggrMax(s.Values)
|
||||
if math.IsNaN(maxV) || maxV <= 0 {
|
||||
return nil, nil
|
||||
}
|
||||
return s, nil
|
||||
|
||||
@@ -29,13 +29,13 @@ import (
|
||||
)
|
||||
|
||||
var (
|
||||
deleteAuthKey = flagutil.NewPassword("deleteAuthKey", "authKey for metrics' deletion via /api/v1/admin/tsdb/delete_series and /tags/delSeries. It overrides -httpAuth.*")
|
||||
deleteAuthKey = flagutil.NewPassword("deleteAuthKey", "authKey for metrics' deletion via /api/v1/admin/tsdb/delete_series and /tags/delSeries. It could be passed via authKey query arg. It overrides -httpAuth.*")
|
||||
maxConcurrentRequests = flag.Int("search.maxConcurrentRequests", getDefaultMaxConcurrentRequests(), "The maximum number of concurrent search requests. "+
|
||||
"It shouldn't be high, since a single request can saturate all the CPU cores, while many concurrently executed requests may require high amounts of memory. "+
|
||||
"See also -search.maxQueueDuration and -search.maxMemoryPerQuery")
|
||||
maxQueueDuration = flag.Duration("search.maxQueueDuration", 10*time.Second, "The maximum time the request waits for execution when -search.maxConcurrentRequests "+
|
||||
"limit is reached; see also -search.maxQueryDuration")
|
||||
resetCacheAuthKey = flagutil.NewPassword("search.resetCacheAuthKey", "Optional authKey for resetting rollup cache via /internal/resetRollupResultCache call. It overrides -httpAuth.*")
|
||||
resetCacheAuthKey = flagutil.NewPassword("search.resetCacheAuthKey", "Optional authKey for resetting rollup cache via /internal/resetRollupResultCache call. It could be passed via authKey query arg. It overrides -httpAuth.*")
|
||||
logSlowQueryDuration = flag.Duration("search.logSlowQueryDuration", 5*time.Second, "Log queries with execution time exceeding this value. Zero disables slow query logging. "+
|
||||
"See also -search.logQueryMemoryUsage")
|
||||
vmalertProxyURL = flag.String("vmalert.proxyURL", "", "Optional URL for proxying requests to vmalert. For example, if -vmalert.proxyURL=http://vmalert:8880 , then alerting API requests such as /api/v1/rules from Grafana will be proxied to http://vmalert:8880/api/v1/rules")
|
||||
|
||||
@@ -481,6 +481,8 @@ func DeleteHandler(startTime time.Time, r *http.Request) error {
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
cp.deadline = searchutils.GetDeadlineForDelete(r, startTime)
|
||||
|
||||
if !cp.IsDefaultTimeRange() {
|
||||
return fmt.Errorf("start=%d and end=%d args aren't supported. Remove these args from the query in order to delete all the matching metrics", cp.start, cp.end)
|
||||
}
|
||||
|
||||
@@ -295,13 +295,13 @@ func aggrFuncMin(tss []*timeseries) []*timeseries {
|
||||
}
|
||||
dst := tss[0]
|
||||
for i := range dst.Values {
|
||||
min := dst.Values[i]
|
||||
minV := dst.Values[i]
|
||||
for _, ts := range tss {
|
||||
if math.IsNaN(min) || ts.Values[i] < min {
|
||||
min = ts.Values[i]
|
||||
if math.IsNaN(minV) || ts.Values[i] < minV {
|
||||
minV = ts.Values[i]
|
||||
}
|
||||
}
|
||||
dst.Values[i] = min
|
||||
dst.Values[i] = minV
|
||||
}
|
||||
return tss[:1]
|
||||
}
|
||||
@@ -313,13 +313,13 @@ func aggrFuncMax(tss []*timeseries) []*timeseries {
|
||||
}
|
||||
dst := tss[0]
|
||||
for i := range dst.Values {
|
||||
max := dst.Values[i]
|
||||
maxV := dst.Values[i]
|
||||
for _, ts := range tss {
|
||||
if math.IsNaN(max) || ts.Values[i] > max {
|
||||
max = ts.Values[i]
|
||||
if math.IsNaN(maxV) || ts.Values[i] > maxV {
|
||||
maxV = ts.Values[i]
|
||||
}
|
||||
}
|
||||
dst.Values[i] = max
|
||||
dst.Values[i] = maxV
|
||||
}
|
||||
return tss[:1]
|
||||
}
|
||||
@@ -793,7 +793,7 @@ func fillNaNsAtIdx(idx int, k float64, tss []*timeseries) {
|
||||
}
|
||||
}
|
||||
|
||||
func getIntK(k float64, max int) int {
|
||||
func getIntK(k float64, maxV int) int {
|
||||
if math.IsNaN(k) {
|
||||
return 0
|
||||
}
|
||||
@@ -801,38 +801,38 @@ func getIntK(k float64, max int) int {
|
||||
if kn < 0 {
|
||||
return 0
|
||||
}
|
||||
if kn > max {
|
||||
return max
|
||||
if kn > maxV {
|
||||
return maxV
|
||||
}
|
||||
return kn
|
||||
}
|
||||
|
||||
func minValue(values []float64) float64 {
|
||||
min := nan
|
||||
for len(values) > 0 && math.IsNaN(min) {
|
||||
min = values[0]
|
||||
minV := nan
|
||||
for len(values) > 0 && math.IsNaN(minV) {
|
||||
minV = values[0]
|
||||
values = values[1:]
|
||||
}
|
||||
for _, v := range values {
|
||||
if !math.IsNaN(v) && v < min {
|
||||
min = v
|
||||
if !math.IsNaN(v) && v < minV {
|
||||
minV = v
|
||||
}
|
||||
}
|
||||
return min
|
||||
return minV
|
||||
}
|
||||
|
||||
func maxValue(values []float64) float64 {
|
||||
max := nan
|
||||
for len(values) > 0 && math.IsNaN(max) {
|
||||
max = values[0]
|
||||
maxV := nan
|
||||
for len(values) > 0 && math.IsNaN(maxV) {
|
||||
maxV = values[0]
|
||||
values = values[1:]
|
||||
}
|
||||
for _, v := range values {
|
||||
if !math.IsNaN(v) && v > max {
|
||||
max = v
|
||||
if !math.IsNaN(v) && v > maxV {
|
||||
maxV = v
|
||||
}
|
||||
}
|
||||
return max
|
||||
return maxV
|
||||
}
|
||||
|
||||
func avgValue(values []float64) float64 {
|
||||
|
||||
@@ -46,6 +46,8 @@ var (
|
||||
"so there is no need in spending additional CPU time on its handling. Staleness markers may exist only in data obtained from Prometheus scrape targets")
|
||||
minWindowForInstantRollupOptimization = flag.Duration("search.minWindowForInstantRollupOptimization", time.Hour*3, "Enable cache-based optimization for repeated queries "+
|
||||
"to /api/v1/query (aka instant queries), which contain rollup functions with lookbehind window exceeding the given value")
|
||||
maxBinaryOpPushdownLabelValues = flag.Int("search.maxBinaryOpPushdownLabelValues", 100, "The maximum number of values for a label in the first expression that can be extracted as a common label filter and pushed down to the second expression in a binary operation. "+
|
||||
"A larger value makes the pushed-down filter more complex but fewer time series will be returned. This flag is useful when selective label contains numerous values, for example `instance`, and storage resources are abundant.")
|
||||
)
|
||||
|
||||
// The minimum number of points per timeseries for enabling time rounding.
|
||||
@@ -582,7 +584,7 @@ func getCommonLabelFilters(tss []*timeseries) []metricsql.LabelFilter {
|
||||
}
|
||||
continue
|
||||
}
|
||||
if len(vc.values) > 100 {
|
||||
if len(vc.values) > *maxBinaryOpPushdownLabelValues {
|
||||
// Too many unique values found for the given tag.
|
||||
// Do not make a filter on such values, since it may slow down
|
||||
// search for matching time series.
|
||||
|
||||
@@ -6,8 +6,6 @@ import (
|
||||
"math"
|
||||
"sort"
|
||||
"strings"
|
||||
"sync"
|
||||
"sync/atomic"
|
||||
"time"
|
||||
|
||||
"github.com/VictoriaMetrics/VictoriaMetrics/app/vmselect/netstorage"
|
||||
@@ -16,7 +14,6 @@ import (
|
||||
"github.com/VictoriaMetrics/VictoriaMetrics/lib/logger"
|
||||
"github.com/VictoriaMetrics/VictoriaMetrics/lib/querytracer"
|
||||
"github.com/VictoriaMetrics/VictoriaMetrics/lib/storage"
|
||||
"github.com/VictoriaMetrics/metrics"
|
||||
"github.com/VictoriaMetrics/metricsql"
|
||||
)
|
||||
|
||||
@@ -270,7 +267,7 @@ func getReverseCmpOp(op string) string {
|
||||
}
|
||||
|
||||
func parsePromQLWithCache(q string) (metricsql.Expr, error) {
|
||||
pcv := parseCacheV.Get(q)
|
||||
pcv := parseCacheV.get(q)
|
||||
if pcv == nil {
|
||||
e, err := metricsql.Parse(q)
|
||||
if err == nil {
|
||||
@@ -284,7 +281,7 @@ func parsePromQLWithCache(q string) (metricsql.Expr, error) {
|
||||
e: e,
|
||||
err: err,
|
||||
}
|
||||
parseCacheV.Put(q, pcv)
|
||||
parseCacheV.put(q, pcv)
|
||||
}
|
||||
if pcv.err != nil {
|
||||
return nil, pcv.err
|
||||
@@ -328,80 +325,3 @@ func escapeDots(s string) string {
|
||||
}
|
||||
return string(result)
|
||||
}
|
||||
|
||||
var parseCacheV = func() *parseCache {
|
||||
pc := &parseCache{
|
||||
m: make(map[string]*parseCacheValue),
|
||||
}
|
||||
metrics.NewGauge(`vm_cache_requests_total{type="promql/parse"}`, func() float64 {
|
||||
return float64(pc.Requests())
|
||||
})
|
||||
metrics.NewGauge(`vm_cache_misses_total{type="promql/parse"}`, func() float64 {
|
||||
return float64(pc.Misses())
|
||||
})
|
||||
metrics.NewGauge(`vm_cache_entries{type="promql/parse"}`, func() float64 {
|
||||
return float64(pc.Len())
|
||||
})
|
||||
return pc
|
||||
}()
|
||||
|
||||
const parseCacheMaxLen = 10e3
|
||||
|
||||
type parseCacheValue struct {
|
||||
e metricsql.Expr
|
||||
err error
|
||||
}
|
||||
|
||||
type parseCache struct {
|
||||
requests atomic.Uint64
|
||||
misses atomic.Uint64
|
||||
|
||||
m map[string]*parseCacheValue
|
||||
mu sync.RWMutex
|
||||
}
|
||||
|
||||
func (pc *parseCache) Requests() uint64 {
|
||||
return pc.requests.Load()
|
||||
}
|
||||
|
||||
func (pc *parseCache) Misses() uint64 {
|
||||
return pc.misses.Load()
|
||||
}
|
||||
|
||||
func (pc *parseCache) Len() uint64 {
|
||||
pc.mu.RLock()
|
||||
n := len(pc.m)
|
||||
pc.mu.RUnlock()
|
||||
return uint64(n)
|
||||
}
|
||||
|
||||
func (pc *parseCache) Get(q string) *parseCacheValue {
|
||||
pc.requests.Add(1)
|
||||
|
||||
pc.mu.RLock()
|
||||
pcv := pc.m[q]
|
||||
pc.mu.RUnlock()
|
||||
|
||||
if pcv == nil {
|
||||
pc.misses.Add(1)
|
||||
}
|
||||
return pcv
|
||||
}
|
||||
|
||||
func (pc *parseCache) Put(q string, pcv *parseCacheValue) {
|
||||
pc.mu.Lock()
|
||||
overflow := len(pc.m) - parseCacheMaxLen
|
||||
if overflow > 0 {
|
||||
// Remove 10% of items from the cache.
|
||||
overflow = int(float64(len(pc.m)) * 0.1)
|
||||
for k := range pc.m {
|
||||
delete(pc.m, k)
|
||||
overflow--
|
||||
if overflow <= 0 {
|
||||
break
|
||||
}
|
||||
}
|
||||
}
|
||||
pc.m[q] = pcv
|
||||
pc.mu.Unlock()
|
||||
}
|
||||
|
||||
142
app/vmselect/promql/parse_cache.go
Normal file
142
app/vmselect/promql/parse_cache.go
Normal file
@@ -0,0 +1,142 @@
|
||||
// Cache for metricsql expressions
|
||||
// Based on the fastcache idea of locking buckets in order to avoid whole cache locks.
|
||||
// See: https://github.com/VictoriaMetrics/fastcache
|
||||
package promql
|
||||
|
||||
import (
|
||||
"sync"
|
||||
"sync/atomic"
|
||||
|
||||
"github.com/VictoriaMetrics/metrics"
|
||||
"github.com/VictoriaMetrics/metricsql"
|
||||
|
||||
xxhash "github.com/cespare/xxhash/v2"
|
||||
)
|
||||
|
||||
var parseCacheV = func() *parseCache {
|
||||
pc := newParseCache()
|
||||
metrics.NewGauge(`vm_cache_requests_total{type="promql/parse"}`, func() float64 {
|
||||
return float64(pc.requests())
|
||||
})
|
||||
metrics.NewGauge(`vm_cache_misses_total{type="promql/parse"}`, func() float64 {
|
||||
return float64(pc.misses())
|
||||
})
|
||||
metrics.NewGauge(`vm_cache_entries{type="promql/parse"}`, func() float64 {
|
||||
return float64(pc.len())
|
||||
})
|
||||
return pc
|
||||
}()
|
||||
|
||||
const (
|
||||
parseBucketCount = 128
|
||||
|
||||
parseCacheMaxLen int = 10e3
|
||||
|
||||
parseBucketMaxLen int = parseCacheMaxLen / parseBucketCount
|
||||
|
||||
parseBucketFreePercent float64 = 0.1
|
||||
)
|
||||
|
||||
type parseCacheValue struct {
|
||||
e metricsql.Expr
|
||||
err error
|
||||
}
|
||||
|
||||
type parseBucket struct {
|
||||
m map[string]*parseCacheValue
|
||||
mu sync.RWMutex
|
||||
requests atomic.Uint64
|
||||
misses atomic.Uint64
|
||||
}
|
||||
|
||||
type parseCache struct {
|
||||
buckets [parseBucketCount]parseBucket
|
||||
}
|
||||
|
||||
func newParseCache() *parseCache {
|
||||
pc := new(parseCache)
|
||||
for i := 0; i < parseBucketCount; i++ {
|
||||
pc.buckets[i] = newParseBucket()
|
||||
}
|
||||
return pc
|
||||
}
|
||||
|
||||
func (pc *parseCache) put(q string, pcv *parseCacheValue) {
|
||||
h := xxhash.Sum64String(q)
|
||||
idx := h % parseBucketCount
|
||||
pc.buckets[idx].put(q, pcv)
|
||||
}
|
||||
|
||||
func (pc *parseCache) get(q string) *parseCacheValue {
|
||||
h := xxhash.Sum64String(q)
|
||||
idx := h % parseBucketCount
|
||||
return pc.buckets[idx].get(q)
|
||||
}
|
||||
|
||||
func (pc *parseCache) requests() uint64 {
|
||||
var n uint64
|
||||
for i := 0; i < parseBucketCount; i++ {
|
||||
n += pc.buckets[i].requests.Load()
|
||||
}
|
||||
return n
|
||||
}
|
||||
|
||||
func (pc *parseCache) misses() uint64 {
|
||||
var n uint64
|
||||
for i := 0; i < parseBucketCount; i++ {
|
||||
n += pc.buckets[i].misses.Load()
|
||||
}
|
||||
return n
|
||||
}
|
||||
|
||||
func (pc *parseCache) len() uint64 {
|
||||
var n uint64
|
||||
for i := 0; i < parseBucketCount; i++ {
|
||||
n += pc.buckets[i].len()
|
||||
}
|
||||
return n
|
||||
}
|
||||
|
||||
func newParseBucket() parseBucket {
|
||||
return parseBucket{
|
||||
m: make(map[string]*parseCacheValue, parseBucketMaxLen),
|
||||
}
|
||||
}
|
||||
|
||||
func (pb *parseBucket) len() uint64 {
|
||||
pb.mu.RLock()
|
||||
n := len(pb.m)
|
||||
pb.mu.RUnlock()
|
||||
return uint64(n)
|
||||
}
|
||||
|
||||
func (pb *parseBucket) get(q string) *parseCacheValue {
|
||||
pb.requests.Add(1)
|
||||
|
||||
pb.mu.RLock()
|
||||
pcv := pb.m[q]
|
||||
pb.mu.RUnlock()
|
||||
|
||||
if pcv == nil {
|
||||
pb.misses.Add(1)
|
||||
}
|
||||
return pcv
|
||||
}
|
||||
|
||||
func (pb *parseBucket) put(q string, pcv *parseCacheValue) {
|
||||
pb.mu.Lock()
|
||||
overflow := len(pb.m) - parseBucketMaxLen
|
||||
if overflow > 0 {
|
||||
// Remove parseBucketDeletePercent*100 % of items from the bucket.
|
||||
overflow = int(float64(len(pb.m)) * parseBucketFreePercent)
|
||||
for k := range pb.m {
|
||||
delete(pb.m, k)
|
||||
overflow--
|
||||
if overflow <= 0 {
|
||||
break
|
||||
}
|
||||
}
|
||||
}
|
||||
pb.m[q] = pcv
|
||||
pb.mu.Unlock()
|
||||
}
|
||||
129
app/vmselect/promql/parse_cache_test.go
Normal file
129
app/vmselect/promql/parse_cache_test.go
Normal file
@@ -0,0 +1,129 @@
|
||||
package promql
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"testing"
|
||||
|
||||
"github.com/VictoriaMetrics/metricsql"
|
||||
)
|
||||
|
||||
func testGetParseCacheValue(q string) *parseCacheValue {
|
||||
e, err := metricsql.Parse(q)
|
||||
return &parseCacheValue{
|
||||
e: e,
|
||||
err: err,
|
||||
}
|
||||
}
|
||||
|
||||
func testGenerateQueries(items int) []string {
|
||||
queries := make([]string, items)
|
||||
for i := 0; i < items; i++ {
|
||||
queries[i] = fmt.Sprintf(`node_time_seconds{instance="node%d", job="job%d"}`, i, i)
|
||||
}
|
||||
return queries
|
||||
}
|
||||
|
||||
func TestParseCache(t *testing.T) {
|
||||
pc := newParseCache()
|
||||
if pc.len() != 0 || pc.misses() != 0 || pc.requests() != 0 {
|
||||
t.Errorf("unexpected pc.Len()=%d, pc.Misses()=%d, pc.Requests()=%d; expected all to be zero.", pc.len(), pc.misses(), pc.requests())
|
||||
}
|
||||
|
||||
q1 := `foo{bar="baz"}`
|
||||
v1 := testGetParseCacheValue(q1)
|
||||
|
||||
q2 := `foo1{bar1="baz1"}`
|
||||
v2 := testGetParseCacheValue(q2)
|
||||
|
||||
pc.put(q1, v1)
|
||||
if pc.len() != 1 {
|
||||
t.Errorf("unexpected value obtained; got %d; want %d", pc.len(), 1)
|
||||
}
|
||||
|
||||
if res := pc.get(q2); res != nil {
|
||||
t.Errorf("unexpected non-empty value obtained from cache: %d ", res)
|
||||
}
|
||||
if pc.len() != 1 {
|
||||
t.Errorf("unexpected value obtained; got %d; want %d", pc.len(), 1)
|
||||
}
|
||||
if miss := pc.misses(); miss != 1 {
|
||||
t.Errorf("unexpected value obtained; got %d; want %d", miss, 1)
|
||||
}
|
||||
if req := pc.requests(); req != 1 {
|
||||
t.Errorf("unexpected value obtained; got %d; want %d", req, 1)
|
||||
}
|
||||
|
||||
pc.put(q2, v2)
|
||||
if pc.len() != 2 {
|
||||
t.Errorf("unexpected value obtained; got %d; want %d", pc.len(), 2)
|
||||
}
|
||||
|
||||
if res := pc.get(q1); res != v1 {
|
||||
t.Errorf("unexpected value obtained; got %v; want %v", res, v1)
|
||||
}
|
||||
|
||||
if res := pc.get(q2); res != v2 {
|
||||
t.Errorf("unexpected value obtained; got %v; want %v", res, v2)
|
||||
}
|
||||
|
||||
pc.put(q2, v2)
|
||||
if pc.len() != 2 {
|
||||
t.Errorf("unexpected value obtained; got %d; want %d", pc.len(), 2)
|
||||
}
|
||||
if miss := pc.misses(); miss != 1 {
|
||||
t.Errorf("unexpected value obtained; got %d; want %d", miss, 1)
|
||||
}
|
||||
if req := pc.requests(); req != 3 {
|
||||
t.Errorf("unexpected value obtained; got %d; want %d", req, 3)
|
||||
}
|
||||
|
||||
if res := pc.get(q2); res != v2 {
|
||||
t.Errorf("unexpected value obtained; got %v; want %v", res, v2)
|
||||
}
|
||||
if pc.len() != 2 {
|
||||
t.Errorf("unexpected value obtained; got %d; want %d", pc.len(), 2)
|
||||
}
|
||||
if miss := pc.misses(); miss != 1 {
|
||||
t.Errorf("unexpected value obtained; got %d; want %d", miss, 1)
|
||||
}
|
||||
if req := pc.requests(); req != 4 {
|
||||
t.Errorf("unexpected value obtained; got %d; want %d", req, 4)
|
||||
}
|
||||
}
|
||||
|
||||
func TestParseCacheBucketOverflow(t *testing.T) {
|
||||
b := newParseBucket()
|
||||
var expectedLen uint64
|
||||
|
||||
// +2 for overflow and clean up
|
||||
queries := testGenerateQueries(parseBucketMaxLen + 2)
|
||||
|
||||
// Same value for all keys
|
||||
v := testGetParseCacheValue(queries[0])
|
||||
|
||||
// Fill bucket
|
||||
for i := 0; i < parseBucketMaxLen; i++ {
|
||||
b.put(queries[i], v)
|
||||
}
|
||||
expectedLen = uint64(parseBucketMaxLen)
|
||||
if b.len() != expectedLen {
|
||||
t.Errorf("unexpected value obtained; got %v; want %v", b.len(), expectedLen)
|
||||
}
|
||||
|
||||
// Overflow bucket
|
||||
expectedLen = uint64(parseBucketMaxLen + 1)
|
||||
b.put(queries[parseBucketMaxLen], v)
|
||||
if b.len() != uint64(expectedLen) {
|
||||
t.Errorf("unexpected value obtained; got %v; want %v", b.len(), expectedLen)
|
||||
}
|
||||
|
||||
// Clean up;
|
||||
oldLen := b.len()
|
||||
overflow := int(float64(oldLen) * parseBucketFreePercent)
|
||||
expectedLen = oldLen - uint64(overflow) + 1 // +1 for new entry
|
||||
|
||||
b.put(queries[parseBucketMaxLen+1], v)
|
||||
if b.len() != expectedLen {
|
||||
t.Errorf("unexpected value obtained; got %v; want %v", b.len(), expectedLen)
|
||||
}
|
||||
}
|
||||
235
app/vmselect/promql/parse_cache_timing_test.go
Normal file
235
app/vmselect/promql/parse_cache_timing_test.go
Normal file
@@ -0,0 +1,235 @@
|
||||
package promql
|
||||
|
||||
import (
|
||||
"testing"
|
||||
)
|
||||
|
||||
func BenchmarkCachePutNoOverFlow(b *testing.B) {
|
||||
const items int = (parseCacheMaxLen / 2)
|
||||
pc := newParseCache()
|
||||
|
||||
queries := testGenerateQueries(items)
|
||||
v := testGetParseCacheValue(queries[0])
|
||||
|
||||
b.ResetTimer()
|
||||
b.ReportAllocs()
|
||||
b.RunParallel(func(pb *testing.PB) {
|
||||
for pb.Next() {
|
||||
for i := 0; i < items; i++ {
|
||||
pc.put(queries[i], v)
|
||||
}
|
||||
}
|
||||
})
|
||||
if pc.len() != uint64(items) {
|
||||
b.Errorf("unexpected value obtained; got %d; want %d", pc.len(), items)
|
||||
}
|
||||
}
|
||||
|
||||
func BenchmarkCacheGetNoOverflow(b *testing.B) {
|
||||
const items int = parseCacheMaxLen / 2
|
||||
pc := newParseCache()
|
||||
|
||||
queries := testGenerateQueries(items)
|
||||
v := testGetParseCacheValue(queries[0])
|
||||
|
||||
for i := 0; i < len(queries); i++ {
|
||||
pc.put(queries[i], v)
|
||||
}
|
||||
b.ResetTimer()
|
||||
b.ReportAllocs()
|
||||
b.RunParallel(func(pb *testing.PB) {
|
||||
for pb.Next() {
|
||||
for i := 0; i < items; i++ {
|
||||
if v := pc.get(queries[i]); v == nil {
|
||||
b.Errorf("unexpected nil value obtained from cache for query: %s ", queries[i])
|
||||
}
|
||||
}
|
||||
}
|
||||
})
|
||||
}
|
||||
|
||||
func BenchmarkCachePutGetNoOverflow(b *testing.B) {
|
||||
const items int = parseCacheMaxLen / 2
|
||||
pc := newParseCache()
|
||||
|
||||
queries := testGenerateQueries(items)
|
||||
v := testGetParseCacheValue(queries[0])
|
||||
|
||||
b.ResetTimer()
|
||||
b.ReportAllocs()
|
||||
b.RunParallel(func(pb *testing.PB) {
|
||||
for pb.Next() {
|
||||
for i := 0; i < items; i++ {
|
||||
pc.put(queries[i], v)
|
||||
if res := pc.get(queries[i]); res == nil {
|
||||
b.Errorf("unexpected nil value obtained from cache for query: %s ", queries[i])
|
||||
}
|
||||
}
|
||||
}
|
||||
})
|
||||
if pc.len() != uint64(items) {
|
||||
b.Errorf("unexpected value obtained; got %d; want %d", pc.len(), items)
|
||||
}
|
||||
}
|
||||
|
||||
func BenchmarkCachePutOverflow(b *testing.B) {
|
||||
const items int = parseCacheMaxLen + (parseCacheMaxLen / 2)
|
||||
c := newParseCache()
|
||||
|
||||
queries := testGenerateQueries(items)
|
||||
v := testGetParseCacheValue(queries[0])
|
||||
|
||||
for i := 0; i < parseCacheMaxLen; i++ {
|
||||
c.put(queries[i], v)
|
||||
}
|
||||
|
||||
b.ReportAllocs()
|
||||
b.ResetTimer()
|
||||
b.RunParallel(func(pb *testing.PB) {
|
||||
for pb.Next() {
|
||||
for i := parseCacheMaxLen; i < items; i++ {
|
||||
c.put(queries[i], v)
|
||||
}
|
||||
}
|
||||
})
|
||||
maxElemnts := uint64(parseCacheMaxLen + parseBucketCount)
|
||||
if c.len() > maxElemnts {
|
||||
b.Errorf("cache length is more than expected; got %d, expected %d", c.len(), maxElemnts)
|
||||
}
|
||||
}
|
||||
|
||||
func BenchmarkCachePutGetOverflow(b *testing.B) {
|
||||
const items int = parseCacheMaxLen + (parseCacheMaxLen / 2)
|
||||
c := newParseCache()
|
||||
|
||||
queries := testGenerateQueries(items)
|
||||
v := testGetParseCacheValue(queries[0])
|
||||
|
||||
for i := 0; i < parseCacheMaxLen; i++ {
|
||||
c.put(queries[i], v)
|
||||
}
|
||||
|
||||
b.ReportAllocs()
|
||||
b.ResetTimer()
|
||||
b.RunParallel(func(pb *testing.PB) {
|
||||
for pb.Next() {
|
||||
for i := parseCacheMaxLen; i < items; i++ {
|
||||
c.put(queries[i], v)
|
||||
c.get(queries[i])
|
||||
}
|
||||
}
|
||||
})
|
||||
maxElemnts := uint64(parseCacheMaxLen + parseBucketCount)
|
||||
if c.len() > maxElemnts {
|
||||
b.Errorf("cache length is more than expected; got %d, expected %d", c.len(), maxElemnts)
|
||||
}
|
||||
}
|
||||
|
||||
var testSimpleQueries = []string{
|
||||
`m{a="b"}`,
|
||||
`{a="b"}`,
|
||||
`m{c="d",a="b"}`,
|
||||
`{a="b",c="d"}`,
|
||||
`m1{a="foo"}`,
|
||||
`m2{a="bar"}`,
|
||||
`m1{b="foo"}`,
|
||||
`m2{b="bar"}`,
|
||||
`m1{a="foo",b="bar"}`,
|
||||
`m2{b="bar",c="x"}`,
|
||||
`{b="bar"}`,
|
||||
}
|
||||
|
||||
func BenchmarkParsePromQLWithCacheSimple(b *testing.B) {
|
||||
b.ReportAllocs()
|
||||
for i := 0; i < b.N; i++ {
|
||||
for j := 0; j < len(testSimpleQueries); j++ {
|
||||
_, err := parsePromQLWithCache(testSimpleQueries[j])
|
||||
if err != nil {
|
||||
b.Errorf("unexpected error: %s", err)
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
func BenchmarkParsePromQLWithCacheSimpleParallel(b *testing.B) {
|
||||
b.ReportAllocs()
|
||||
b.RunParallel(func(pb *testing.PB) {
|
||||
for pb.Next() {
|
||||
for i := 0; i < len(testSimpleQueries); i++ {
|
||||
_, err := parsePromQLWithCache(testSimpleQueries[i])
|
||||
if err != nil {
|
||||
b.Errorf("unexpected error: %s", err)
|
||||
}
|
||||
}
|
||||
}
|
||||
})
|
||||
}
|
||||
|
||||
var testComplexQueries = []string{
|
||||
`sort_desc(label_set(2, "foo", "bar") * ignoring(a) (label_set(time(), "foo", "bar") or label_set(10, "foo", "qwert")))`,
|
||||
`sum(a.b{c="d.e",x=~"a.b.+[.a]",y!~"aaa.bb|cc.dd"}) + avg_over_time(1,sum({x=~"aa.bb"}))`,
|
||||
`sort((label_set(time() offset 100s, "foo", "bar"), label_set(time()+10, "foo", "baz") offset 50s) offset 400s)`,
|
||||
`sort(label_map((
|
||||
label_set(time(), "label", "v1"),
|
||||
label_set(time()+100, "label", "v2"),
|
||||
label_set(time()+200, "label", "v3"),
|
||||
label_set(time()+300, "x", "y"),
|
||||
label_set(time()+400, "label", "v4"),
|
||||
), "label", "v1", "foo", "v2", "bar", "", "qwe", "v4", ""))`,
|
||||
`sort(labels_equal((
|
||||
label_set(10, "instance", "qwe", "host", "rty"),
|
||||
label_set(20, "instance", "qwe", "host", "qwe"),
|
||||
label_set(30, "aaa", "bbb", "instance", "foo", "host", "foo"),
|
||||
), "instance", "host"))`,
|
||||
`with (
|
||||
x = (
|
||||
label_set(time() > 1500, "foo", "123.456", "__name__", "aaa"),
|
||||
label_set(-time(), "foo", "bar", "__name__", "bbb"),
|
||||
label_set(-time(), "__name__", "bxs"),
|
||||
label_set(-time(), "foo", "45", "bar", "xs"),
|
||||
)
|
||||
)
|
||||
sort(x + label_value(x, "foo"))`,
|
||||
`label_replace(
|
||||
label_replace(
|
||||
label_replace(time(), "__name__", "x${1}y", "foo", ".*"),
|
||||
"xxx", "foo${1}bar(${1})", "__name__", "(.+)"),
|
||||
"xxx", "AA$1", "xxx", "foox(.+)"
|
||||
)`,
|
||||
`sort_desc(union(
|
||||
label_set(time() > 1400, "__name__", "x", "foo", "bar"),
|
||||
label_set(time() < 1700, "__name__", "y", "foo", "baz")) default 123)`,
|
||||
`sort(histogram_quantile(0.6,
|
||||
label_set(90, "foo", "bar", "le", "10")
|
||||
or label_set(100, "foo", "bar", "le", "30")
|
||||
or label_set(300, "foo", "bar", "le", "+Inf")
|
||||
or label_set(200, "tag", "xx", "le", "10")
|
||||
or label_set(300, "tag", "xx", "le", "30")
|
||||
))`,
|
||||
}
|
||||
|
||||
func BenchmarkParsePromQLWithCacheComplex(b *testing.B) {
|
||||
b.ReportAllocs()
|
||||
for i := 0; i < b.N; i++ {
|
||||
for j := 0; j < len(testComplexQueries); j++ {
|
||||
_, err := parsePromQLWithCache(testComplexQueries[j])
|
||||
if err != nil {
|
||||
b.Errorf("unexpected error: %s", err)
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
func BenchmarkParsePromQLWithCacheComplexParallel(b *testing.B) {
|
||||
b.ReportAllocs()
|
||||
b.RunParallel(func(pb *testing.PB) {
|
||||
for pb.Next() {
|
||||
for i := 0; i < len(testComplexQueries); i++ {
|
||||
_, err := parsePromQLWithCache(testComplexQueries[i])
|
||||
if err != nil {
|
||||
b.Errorf("unexpected error: %s", err)
|
||||
}
|
||||
}
|
||||
}
|
||||
})
|
||||
}
|
||||
@@ -374,8 +374,8 @@ func getRollupConfigs(funcName string, rf rollupFunc, expr metricsql.Expr, start
|
||||
preFunc := func(_ []float64, _ []int64) {}
|
||||
funcName = strings.ToLower(funcName)
|
||||
if rollupFuncsRemoveCounterResets[funcName] {
|
||||
preFunc = func(values []float64, _ []int64) {
|
||||
removeCounterResets(values)
|
||||
preFunc = func(values []float64, timestamps []int64) {
|
||||
removeCounterResets(values, timestamps, lookbackDelta)
|
||||
}
|
||||
}
|
||||
samplesScannedPerCall := rollupFuncsSamplesScannedPerCall[funcName]
|
||||
@@ -486,8 +486,8 @@ func getRollupConfigs(funcName string, rf rollupFunc, expr metricsql.Expr, start
|
||||
for _, aggrFuncName := range aggrFuncNames {
|
||||
if rollupFuncsRemoveCounterResets[aggrFuncName] {
|
||||
// There is no need to save the previous preFunc, since it is either empty or the same.
|
||||
preFunc = func(values []float64, _ []int64) {
|
||||
removeCounterResets(values)
|
||||
preFunc = func(values []float64, timestamps []int64) {
|
||||
removeCounterResets(values, timestamps, lookbackDelta)
|
||||
}
|
||||
}
|
||||
rf := rollupAggrFuncs[aggrFuncName]
|
||||
@@ -520,7 +520,8 @@ type rollupFuncArg struct {
|
||||
// Timestamps for values.
|
||||
timestamps []int64
|
||||
|
||||
// Real value preceding values without restrictions on staleness interval.
|
||||
// Real value preceding values.
|
||||
// Is populated if preceding value is within the -search.maxStalenessInterval (rc.LookbackDelta).
|
||||
realPrevValue float64
|
||||
|
||||
// Real value which goes after values.
|
||||
@@ -764,10 +765,18 @@ func (rc *rollupConfig) doInternal(dstValues []float64, tsm *timeseriesMap, valu
|
||||
}
|
||||
rfa.values = values[i:j]
|
||||
rfa.timestamps = timestamps[i:j]
|
||||
rfa.realPrevValue = nan
|
||||
if i > 0 {
|
||||
rfa.realPrevValue = values[i-1]
|
||||
} else {
|
||||
rfa.realPrevValue = nan
|
||||
prevValue, prevTimestamp := values[i-1], timestamps[i-1]
|
||||
// set realPrevValue if rc.LookbackDelta == 0
|
||||
// or if distance between datapoint in prev interval and beginning of this interval
|
||||
// doesn't exceed LookbackDelta.
|
||||
// https://github.com/VictoriaMetrics/VictoriaMetrics/pull/1381
|
||||
// https://github.com/VictoriaMetrics/VictoriaMetrics/issues/894
|
||||
// https://github.com/VictoriaMetrics/VictoriaMetrics/issues/8045
|
||||
if rc.LookbackDelta == 0 || (tStart-prevTimestamp) < rc.LookbackDelta {
|
||||
rfa.realPrevValue = prevValue
|
||||
}
|
||||
}
|
||||
if j < len(values) {
|
||||
rfa.realNextValue = values[j]
|
||||
@@ -891,7 +900,7 @@ func getMaxPrevInterval(scrapeInterval int64) int64 {
|
||||
return scrapeInterval + scrapeInterval/8
|
||||
}
|
||||
|
||||
func removeCounterResets(values []float64) {
|
||||
func removeCounterResets(values []float64, timestamps []int64, maxStalenessInterval int64) {
|
||||
// There is no need in handling NaNs here, since they are impossible
|
||||
// on values from vmstorage.
|
||||
if len(values) == 0 {
|
||||
@@ -910,6 +919,16 @@ func removeCounterResets(values []float64) {
|
||||
correction += prevValue
|
||||
}
|
||||
}
|
||||
if i > 0 && maxStalenessInterval > 0 {
|
||||
gap := timestamps[i] - timestamps[i-1]
|
||||
if gap > maxStalenessInterval {
|
||||
// reset correction if gap between samples exceeds staleness interval
|
||||
// see https://github.com/VictoriaMetrics/VictoriaMetrics/issues/8072
|
||||
correction = 0
|
||||
prevValue = v
|
||||
continue
|
||||
}
|
||||
}
|
||||
prevValue = v
|
||||
values[i] = v + correction
|
||||
// Check again, there could be precision error in float operations,
|
||||
@@ -1682,9 +1701,9 @@ func rollupRateOverSum(rfa *rollupFuncArg) float64 {
|
||||
}
|
||||
|
||||
func rollupRange(rfa *rollupFuncArg) float64 {
|
||||
max := rollupMax(rfa)
|
||||
min := rollupMin(rfa)
|
||||
return max - min
|
||||
maxV := rollupMax(rfa)
|
||||
minV := rollupMin(rfa)
|
||||
return maxV - minV
|
||||
}
|
||||
|
||||
func rollupSum2(rfa *rollupFuncArg) float64 {
|
||||
@@ -2192,38 +2211,38 @@ func rollupClose(rfa *rollupFuncArg) float64 {
|
||||
|
||||
func rollupHigh(rfa *rollupFuncArg) float64 {
|
||||
values := getCandlestickValues(rfa)
|
||||
max := getFirstValueForCandlestick(rfa)
|
||||
if math.IsNaN(max) {
|
||||
maxV := getFirstValueForCandlestick(rfa)
|
||||
if math.IsNaN(maxV) {
|
||||
if len(values) == 0 {
|
||||
return nan
|
||||
}
|
||||
max = values[0]
|
||||
maxV = values[0]
|
||||
values = values[1:]
|
||||
}
|
||||
for _, v := range values {
|
||||
if v > max {
|
||||
max = v
|
||||
if v > maxV {
|
||||
maxV = v
|
||||
}
|
||||
}
|
||||
return max
|
||||
return maxV
|
||||
}
|
||||
|
||||
func rollupLow(rfa *rollupFuncArg) float64 {
|
||||
values := getCandlestickValues(rfa)
|
||||
min := getFirstValueForCandlestick(rfa)
|
||||
if math.IsNaN(min) {
|
||||
minV := getFirstValueForCandlestick(rfa)
|
||||
if math.IsNaN(minV) {
|
||||
if len(values) == 0 {
|
||||
return nan
|
||||
}
|
||||
min = values[0]
|
||||
minV = values[0]
|
||||
values = values[1:]
|
||||
}
|
||||
for _, v := range values {
|
||||
if v < min {
|
||||
min = v
|
||||
if v < minV {
|
||||
minV = v
|
||||
}
|
||||
}
|
||||
return min
|
||||
return minV
|
||||
}
|
||||
|
||||
func rollupModeOverTime(rfa *rollupFuncArg) float64 {
|
||||
|
||||
@@ -5,6 +5,8 @@ import (
|
||||
"testing"
|
||||
|
||||
"github.com/VictoriaMetrics/metricsql"
|
||||
|
||||
"github.com/VictoriaMetrics/VictoriaMetrics/lib/decimal"
|
||||
)
|
||||
|
||||
var (
|
||||
@@ -115,31 +117,49 @@ func TestRollupIderivDuplicateTimestamps(t *testing.T) {
|
||||
}
|
||||
|
||||
func TestRemoveCounterResets(t *testing.T) {
|
||||
removeCounterResets(nil)
|
||||
removeCounterResets(nil, nil, 0)
|
||||
|
||||
values := append([]float64{}, testValues...)
|
||||
removeCounterResets(values)
|
||||
timestamps := append([]int64{}, testTimestamps...)
|
||||
removeCounterResets(values, timestamps, 0)
|
||||
valuesExpected := []float64{123, 157, 167, 188, 221, 255, 320, 332, 364, 396, 398, 398}
|
||||
testRowsEqual(t, values, testTimestamps, valuesExpected, testTimestamps)
|
||||
|
||||
// removeCounterResets doesn't expect negative values, so it doesn't work properly with them.
|
||||
values = []float64{-100, -200, -300, -400}
|
||||
removeCounterResets(values)
|
||||
valuesExpected = []float64{-100, -100, -100, -100}
|
||||
timestampsExpected := []int64{0, 1, 2, 3}
|
||||
removeCounterResets(values, timestampsExpected, 0)
|
||||
valuesExpected = []float64{-100, -100, -100, -100}
|
||||
testRowsEqual(t, values, timestampsExpected, valuesExpected, timestampsExpected)
|
||||
|
||||
// verify how partial counter reset is handled.
|
||||
// See https://github.com/VictoriaMetrics/VictoriaMetrics/issues/2787
|
||||
values = []float64{100, 95, 120, 119, 139, 50}
|
||||
removeCounterResets(values)
|
||||
valuesExpected = []float64{100, 100, 125, 125, 145, 195}
|
||||
timestampsExpected = []int64{0, 1, 2, 3, 4, 5}
|
||||
removeCounterResets(values, timestampsExpected, 0)
|
||||
valuesExpected = []float64{100, 100, 125, 125, 145, 195}
|
||||
testRowsEqual(t, values, timestampsExpected, valuesExpected, timestampsExpected)
|
||||
|
||||
// verify that staleness interval is respected during resets
|
||||
// see https://github.com/VictoriaMetrics/VictoriaMetrics/issues/8072
|
||||
values = []float64{10, 12, 14, 4, 6, 8, 6, 8, 4, 6}
|
||||
timestamps = []int64{10, 20, 30, 60, 70, 80, 90, 100, 120, 130}
|
||||
valuesExpected = []float64{10, 12, 14, 4, 6, 8, 14, 16, 4, 6}
|
||||
removeCounterResets(values, timestamps, 10)
|
||||
testRowsEqual(t, values, timestamps, valuesExpected, timestamps)
|
||||
|
||||
// verify that staleness is respected if there was no counter reset
|
||||
// but correction was made previously
|
||||
values = []float64{10, 12, 2, 4}
|
||||
timestamps = []int64{10, 20, 30, 60}
|
||||
valuesExpected = []float64{10, 12, 14, 4}
|
||||
removeCounterResets(values, timestamps, 10)
|
||||
testRowsEqual(t, values, timestamps, valuesExpected, timestamps)
|
||||
|
||||
// verify results always increase monotonically with possible float operations precision error
|
||||
values = []float64{34.094223, 2.7518, 2.140669, 0.044878, 1.887095, 2.546569, 2.490149, 0.045, 0.035684, 0.062454, 0.058296}
|
||||
removeCounterResets(values)
|
||||
timestampsExpected = []int64{0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10}
|
||||
removeCounterResets(values, timestampsExpected, 0)
|
||||
var prev float64
|
||||
for i, v := range values {
|
||||
if v < prev {
|
||||
@@ -164,7 +184,7 @@ func TestDeltaValues(t *testing.T) {
|
||||
|
||||
// remove counter resets
|
||||
values = append([]float64{}, testValues...)
|
||||
removeCounterResets(values)
|
||||
removeCounterResets(values, testTimestamps, 0)
|
||||
deltaValues(values)
|
||||
valuesExpected = []float64{34, 10, 21, 33, 34, 65, 12, 32, 32, 2, 0, 0}
|
||||
testRowsEqual(t, values, testTimestamps, valuesExpected, testTimestamps)
|
||||
@@ -186,7 +206,7 @@ func TestDerivValues(t *testing.T) {
|
||||
|
||||
// remove counter resets
|
||||
values = append([]float64{}, testValues...)
|
||||
removeCounterResets(values)
|
||||
removeCounterResets(values, testTimestamps, 0)
|
||||
derivValues(values, testTimestamps)
|
||||
valuesExpected = []float64{3400, 1111.111111111111, 1750, 2538.4615384615386, 3090.909090909091, 3611.1111111111113,
|
||||
6000, 1882.3529411764705, 1777.7777777777778, 400, 0, 0}
|
||||
@@ -217,7 +237,7 @@ func testRollupFunc(t *testing.T, funcName string, args []any, vExpected float64
|
||||
rfa.timestamps = append(rfa.timestamps, testTimestamps...)
|
||||
rfa.window = rfa.timestamps[len(rfa.timestamps)-1] - rfa.timestamps[0]
|
||||
if rollupFuncsRemoveCounterResets[funcName] {
|
||||
removeCounterResets(rfa.values)
|
||||
removeCounterResets(rfa.values, rfa.timestamps, 0)
|
||||
}
|
||||
for i := 0; i < 5; i++ {
|
||||
v := rf(&rfa)
|
||||
@@ -1587,3 +1607,229 @@ func TestRollupDelta(t *testing.T) {
|
||||
f(1, nan, nan, nil, 0)
|
||||
f(100, nan, nan, nil, 0)
|
||||
}
|
||||
|
||||
func TestRollupDeltaWithStaleness(t *testing.T) {
|
||||
// there is a gap between samples in the dataset below
|
||||
timestamps := []int64{0, 15000, 30000, 70000}
|
||||
values := []float64{1, 1, 1, 1}
|
||||
|
||||
// if step > gap, then delta will always respect value before gap
|
||||
t.Run("step>gap", func(t *testing.T) {
|
||||
rc := rollupConfig{
|
||||
Func: rollupDelta,
|
||||
Start: 0,
|
||||
End: 70000,
|
||||
Step: 45000,
|
||||
Window: 0,
|
||||
MaxPointsPerSeries: 1e4,
|
||||
}
|
||||
rc.Timestamps = rc.getTimestamps()
|
||||
gotValues, samplesScanned := rc.Do(nil, values, timestamps)
|
||||
if samplesScanned != 7 {
|
||||
t.Fatalf("expecting 8 samplesScanned from rollupConfig.Do; got %d", samplesScanned)
|
||||
}
|
||||
valuesExpected := []float64{1, 0}
|
||||
timestampsExpected := []int64{0, 45e3}
|
||||
testRowsEqual(t, gotValues, rc.Timestamps, valuesExpected, timestampsExpected)
|
||||
})
|
||||
// even if LookbackDelta < gap
|
||||
t.Run("step>gap;LookbackDelta<gap", func(t *testing.T) {
|
||||
rc := rollupConfig{
|
||||
Func: rollupDelta,
|
||||
Start: 0,
|
||||
End: 70000,
|
||||
Step: 45000,
|
||||
LookbackDelta: 10e3,
|
||||
Window: 0,
|
||||
MaxPointsPerSeries: 1e4,
|
||||
}
|
||||
rc.Timestamps = rc.getTimestamps()
|
||||
gotValues, samplesScanned := rc.Do(nil, values, timestamps)
|
||||
if samplesScanned != 7 {
|
||||
t.Fatalf("expecting 8 samplesScanned from rollupConfig.Do; got %d", samplesScanned)
|
||||
}
|
||||
valuesExpected := []float64{1, 0}
|
||||
timestampsExpected := []int64{0, 45e3}
|
||||
testRowsEqual(t, gotValues, rc.Timestamps, valuesExpected, timestampsExpected)
|
||||
})
|
||||
|
||||
// if step < gap and LookbackDelta==0 then delta will always respect value before gap
|
||||
// as LookbackDelta=0 ignores staleness
|
||||
t.Run("step<gap;LookbackDelta=0", func(t *testing.T) {
|
||||
rc := rollupConfig{
|
||||
Func: rollupDelta,
|
||||
Start: 0,
|
||||
End: 70000,
|
||||
Step: 10000,
|
||||
LookbackDelta: 0,
|
||||
Window: 0,
|
||||
MaxPointsPerSeries: 1e4,
|
||||
}
|
||||
rc.Timestamps = rc.getTimestamps()
|
||||
gotValues, samplesScanned := rc.Do(nil, values, timestamps)
|
||||
if samplesScanned != 8 {
|
||||
t.Fatalf("expecting 8 samplesScanned from rollupConfig.Do; got %d", samplesScanned)
|
||||
}
|
||||
valuesExpected := []float64{1, 0, 0, 0, 0, 0, 0, 0}
|
||||
timestampsExpected := []int64{0, 10e3, 20e3, 30e3, 40e3, 50e3, 60e3, 70e3}
|
||||
testRowsEqual(t, gotValues, rc.Timestamps, valuesExpected, timestampsExpected)
|
||||
})
|
||||
|
||||
// if step < gap and LookbackDelta>0 then delta will respect value before gap
|
||||
// only if it is not stale according to LookbackDelta
|
||||
t.Run("step<gap;LookbackDelta>0", func(t *testing.T) {
|
||||
rc := rollupConfig{
|
||||
Func: rollupDelta,
|
||||
Start: 0,
|
||||
End: 70000,
|
||||
Step: 10000,
|
||||
Window: 0,
|
||||
MaxPointsPerSeries: 1e4,
|
||||
LookbackDelta: 30e3,
|
||||
}
|
||||
rc.Timestamps = rc.getTimestamps()
|
||||
gotValues, samplesScanned := rc.Do(nil, values, timestamps)
|
||||
if samplesScanned != 8 {
|
||||
t.Fatalf("expecting 8 samplesScanned from rollupConfig.Do; got %d", samplesScanned)
|
||||
}
|
||||
valuesExpected := []float64{1, 0, 0, 0, 0, 0, 0, 1}
|
||||
timestampsExpected := []int64{0, 10e3, 20e3, 30e3, 40e3, 50e3, 60e3, 70e3}
|
||||
testRowsEqual(t, gotValues, rc.Timestamps, valuesExpected, timestampsExpected)
|
||||
})
|
||||
|
||||
// there is a staleness marker between samples in the dataset below
|
||||
timestamps = []int64{0, 10000, 20000, 30000, 40000}
|
||||
values = []float64{1, 1, 1, decimal.StaleNaN, 1}
|
||||
|
||||
t.Run("staleness marker", func(t *testing.T) {
|
||||
rc := rollupConfig{
|
||||
Func: rollupDelta,
|
||||
Start: 0,
|
||||
End: 40000,
|
||||
Step: 10000,
|
||||
Window: 0,
|
||||
MaxPointsPerSeries: 1e4,
|
||||
}
|
||||
rc.Timestamps = rc.getTimestamps()
|
||||
gotValues, samplesScanned := rc.Do(nil, values, timestamps)
|
||||
if samplesScanned != 10 {
|
||||
t.Fatalf("expecting 10 samplesScanned from rollupConfig.Do; got %d", samplesScanned)
|
||||
}
|
||||
valuesExpected := []float64{1, 0, 0, nan, 1}
|
||||
timestampsExpected := []int64{0, 10e3, 20e3, 30e3, 40e3}
|
||||
testRowsEqual(t, gotValues, rc.Timestamps, valuesExpected, timestampsExpected)
|
||||
})
|
||||
}
|
||||
|
||||
func TestRollupIncreasePureWithStaleness(t *testing.T) {
|
||||
// there is a gap between samples in the dataset below
|
||||
timestamps := []int64{0, 15000, 30000, 70000}
|
||||
values := []float64{1, 1, 1, 1}
|
||||
|
||||
// if step > gap, then delta will always respect value before gap
|
||||
t.Run("step>gap", func(t *testing.T) {
|
||||
rc := rollupConfig{
|
||||
Func: rollupIncreasePure,
|
||||
Start: 0,
|
||||
End: 70000,
|
||||
Step: 45000,
|
||||
Window: 0,
|
||||
MaxPointsPerSeries: 1e4,
|
||||
}
|
||||
rc.Timestamps = rc.getTimestamps()
|
||||
gotValues, samplesScanned := rc.Do(nil, values, timestamps)
|
||||
if samplesScanned != 7 {
|
||||
t.Fatalf("expecting 8 samplesScanned from rollupConfig.Do; got %d", samplesScanned)
|
||||
}
|
||||
valuesExpected := []float64{1, 0}
|
||||
timestampsExpected := []int64{0, 45e3}
|
||||
testRowsEqual(t, gotValues, rc.Timestamps, valuesExpected, timestampsExpected)
|
||||
})
|
||||
// even if LookbackDelta < gap
|
||||
t.Run("step>gap;LookbackDelta<gap", func(t *testing.T) {
|
||||
rc := rollupConfig{
|
||||
Func: rollupIncreasePure,
|
||||
Start: 0,
|
||||
End: 70000,
|
||||
Step: 45000,
|
||||
LookbackDelta: 10e3,
|
||||
Window: 0,
|
||||
MaxPointsPerSeries: 1e4,
|
||||
}
|
||||
rc.Timestamps = rc.getTimestamps()
|
||||
gotValues, samplesScanned := rc.Do(nil, values, timestamps)
|
||||
if samplesScanned != 7 {
|
||||
t.Fatalf("expecting 8 samplesScanned from rollupConfig.Do; got %d", samplesScanned)
|
||||
}
|
||||
valuesExpected := []float64{1, 0}
|
||||
timestampsExpected := []int64{0, 45e3}
|
||||
testRowsEqual(t, gotValues, rc.Timestamps, valuesExpected, timestampsExpected)
|
||||
})
|
||||
|
||||
// if step < gap and LookbackDelta==0 then delta will always respect value before gap
|
||||
// as LookbackDelta=0 ignores staleness
|
||||
t.Run("step<gap;LookbackDelta=0", func(t *testing.T) {
|
||||
rc := rollupConfig{
|
||||
Func: rollupIncreasePure,
|
||||
Start: 0,
|
||||
End: 70000,
|
||||
Step: 10000,
|
||||
LookbackDelta: 0,
|
||||
Window: 0,
|
||||
MaxPointsPerSeries: 1e4,
|
||||
}
|
||||
rc.Timestamps = rc.getTimestamps()
|
||||
gotValues, samplesScanned := rc.Do(nil, values, timestamps)
|
||||
if samplesScanned != 8 {
|
||||
t.Fatalf("expecting 8 samplesScanned from rollupConfig.Do; got %d", samplesScanned)
|
||||
}
|
||||
valuesExpected := []float64{1, 0, 0, 0, 0, 0, 0, 0}
|
||||
timestampsExpected := []int64{0, 10e3, 20e3, 30e3, 40e3, 50e3, 60e3, 70e3}
|
||||
testRowsEqual(t, gotValues, rc.Timestamps, valuesExpected, timestampsExpected)
|
||||
})
|
||||
|
||||
// if step < gap and LookbackDelta>0 then delta will respect value before gap
|
||||
// only if it is not stale according to LookbackDelta
|
||||
t.Run("step<gap;LookbackDelta>0", func(t *testing.T) {
|
||||
rc := rollupConfig{
|
||||
Func: rollupIncreasePure,
|
||||
Start: 0,
|
||||
End: 70000,
|
||||
Step: 10000,
|
||||
Window: 0,
|
||||
MaxPointsPerSeries: 1e4,
|
||||
LookbackDelta: 30e3,
|
||||
}
|
||||
rc.Timestamps = rc.getTimestamps()
|
||||
gotValues, samplesScanned := rc.Do(nil, values, timestamps)
|
||||
if samplesScanned != 8 {
|
||||
t.Fatalf("expecting 8 samplesScanned from rollupConfig.Do; got %d", samplesScanned)
|
||||
}
|
||||
valuesExpected := []float64{1, 0, 0, 0, 0, 0, 0, 1}
|
||||
timestampsExpected := []int64{0, 10e3, 20e3, 30e3, 40e3, 50e3, 60e3, 70e3}
|
||||
testRowsEqual(t, gotValues, rc.Timestamps, valuesExpected, timestampsExpected)
|
||||
})
|
||||
|
||||
// there is a staleness marker between samples in the dataset below
|
||||
timestamps = []int64{0, 10000, 20000, 30000, 40000}
|
||||
values = []float64{1, 1, 1, decimal.StaleNaN, 1}
|
||||
|
||||
t.Run("staleness marker", func(t *testing.T) {
|
||||
rc := rollupConfig{
|
||||
Func: rollupIncreasePure,
|
||||
Start: 0,
|
||||
End: 40000,
|
||||
Step: 10000,
|
||||
Window: 0,
|
||||
MaxPointsPerSeries: 1e4,
|
||||
}
|
||||
rc.Timestamps = rc.getTimestamps()
|
||||
gotValues, samplesScanned := rc.Do(nil, values, timestamps)
|
||||
if samplesScanned != 10 {
|
||||
t.Fatalf("expecting 10 samplesScanned from rollupConfig.Do; got %d", samplesScanned)
|
||||
}
|
||||
valuesExpected := []float64{1, 0, 0, nan, 1}
|
||||
timestampsExpected := []int64{0, 10e3, 20e3, 30e3, 40e3}
|
||||
testRowsEqual(t, gotValues, rc.Timestamps, valuesExpected, timestampsExpected)
|
||||
})
|
||||
}
|
||||
|
||||
@@ -15,6 +15,7 @@ import (
|
||||
|
||||
var (
|
||||
maxExportDuration = flag.Duration("search.maxExportDuration", time.Hour*24*30, "The maximum duration for /api/v1/export call")
|
||||
maxDeleteDuration = flag.Duration("search.maxDeleteDuration", time.Minute*5, "The maximum duration for /api/v1/admin/tsdb/delete_series call")
|
||||
maxQueryDuration = flag.Duration("search.maxQueryDuration", time.Second*30, "The maximum duration for query execution. It can be overridden to a smaller value on a per-query basis via 'timeout' query arg")
|
||||
maxStatusRequestDuration = flag.Duration("search.maxStatusRequestDuration", time.Minute*5, "The maximum duration for /api/v1/status/* requests")
|
||||
maxLabelsAPIDuration = flag.Duration("search.maxLabelsAPIDuration", time.Second*5, "The maximum duration for /api/v1/labels, /api/v1/label/.../values and /api/v1/series requests. "+
|
||||
@@ -58,6 +59,12 @@ func GetDeadlineForLabelsAPI(r *http.Request, startTime time.Time) Deadline {
|
||||
return getDeadlineWithMaxDuration(r, startTime, dMax, "-search.maxLabelsAPIDuration")
|
||||
}
|
||||
|
||||
// GetDeadlineForDelete returns deadline for the given request to /api/v1/admin/tsdb/delete_series.
|
||||
func GetDeadlineForDelete(r *http.Request, startTime time.Time) Deadline {
|
||||
dMax := maxDeleteDuration.Milliseconds()
|
||||
return getDeadlineWithMaxDuration(r, startTime, dMax, "-search.maxDeleteDuration")
|
||||
}
|
||||
|
||||
func getDeadlineWithMaxDuration(r *http.Request, startTime time.Time, dMax int64, flagHint string) Deadline {
|
||||
d, err := httputils.GetDuration(r, "timeout", 0)
|
||||
if err != nil {
|
||||
|
||||
@@ -1,13 +1,13 @@
|
||||
{
|
||||
"files": {
|
||||
"main.css": "./static/css/main.876c56b7.css",
|
||||
"main.js": "./static/js/main.caf36c39.js",
|
||||
"main.css": "./static/css/main.af583aad.css",
|
||||
"main.js": "./static/js/main.1413b18d.js",
|
||||
"static/js/685.f772060c.chunk.js": "./static/js/685.f772060c.chunk.js",
|
||||
"static/media/MetricsQL.md": "./static/media/MetricsQL.a00044c91d9781cf8557.md",
|
||||
"index.html": "./index.html"
|
||||
},
|
||||
"entrypoints": [
|
||||
"static/css/main.876c56b7.css",
|
||||
"static/js/main.caf36c39.js"
|
||||
"static/css/main.af583aad.css",
|
||||
"static/js/main.1413b18d.js"
|
||||
]
|
||||
}
|
||||
@@ -1 +1 @@
|
||||
<!doctype html><html lang="en"><head><meta charset="utf-8"/><link rel="icon" href="./favicon.svg"/><link rel="apple-touch-icon" href="./favicon.svg"/><link rel="mask-icon" href="./favicon.svg" color="#000000"><meta name="viewport" content="width=device-width,initial-scale=1,maximum-scale=5"/><meta name="theme-color" content="#000000"/><meta name="description" content="Explore and troubleshoot your VictoriaMetrics data"/><link rel="manifest" href="./manifest.json"/><title>vmui</title><script src="./dashboards/index.js" type="module"></script><meta name="twitter:card" content="summary"><meta name="twitter:title" content="UI for VictoriaMetrics"><meta name="twitter:site" content="@https://victoriametrics.com/"><meta name="twitter:description" content="Explore and troubleshoot your VictoriaMetrics data"><meta name="twitter:image" content="./preview.jpg"><meta property="og:type" content="website"><meta property="og:title" content="UI for VictoriaMetrics"><meta property="og:url" content="https://victoriametrics.com/"><meta property="og:description" content="Explore and troubleshoot your VictoriaMetrics data"><script defer="defer" src="./static/js/main.caf36c39.js"></script><link href="./static/css/main.876c56b7.css" rel="stylesheet"></head><body><noscript>You need to enable JavaScript to run this app.</noscript><div id="root"></div></body></html>
|
||||
<!doctype html><html lang="en"><head><meta charset="utf-8"/><link rel="icon" href="./favicon.svg"/><link rel="apple-touch-icon" href="./favicon.svg"/><link rel="mask-icon" href="./favicon.svg" color="#000000"><meta name="viewport" content="width=device-width,initial-scale=1,maximum-scale=5"/><meta name="theme-color" content="#000000"/><meta name="description" content="Explore and troubleshoot your VictoriaMetrics data"/><link rel="manifest" href="./manifest.json"/><title>vmui</title><script src="./dashboards/index.js" type="module"></script><meta name="twitter:card" content="summary"><meta name="twitter:title" content="UI for VictoriaMetrics"><meta name="twitter:site" content="@https://victoriametrics.com/"><meta name="twitter:description" content="Explore and troubleshoot your VictoriaMetrics data"><meta name="twitter:image" content="./preview.jpg"><meta property="og:type" content="website"><meta property="og:title" content="UI for VictoriaMetrics"><meta property="og:url" content="https://victoriametrics.com/"><meta property="og:description" content="Explore and troubleshoot your VictoriaMetrics data"><script defer="defer" src="./static/js/main.1413b18d.js"></script><link href="./static/css/main.af583aad.css" rel="stylesheet"></head><body><noscript>You need to enable JavaScript to run this app.</noscript><div id="root"></div></body></html>
|
||||
File diff suppressed because one or more lines are too long
1
app/vmselect/vmui/static/css/main.af583aad.css
Normal file
1
app/vmselect/vmui/static/css/main.af583aad.css
Normal file
File diff suppressed because one or more lines are too long
2
app/vmselect/vmui/static/js/main.1413b18d.js
Normal file
2
app/vmselect/vmui/static/js/main.1413b18d.js
Normal file
File diff suppressed because one or more lines are too long
File diff suppressed because one or more lines are too long
@@ -67,6 +67,8 @@ var (
|
||||
"See https://docs.victoriametrics.com/single-server-victoriametrics/#cache-tuning")
|
||||
cacheSizeIndexDBDataBlocks = flagutil.NewBytes("storage.cacheSizeIndexDBDataBlocks", 0, "Overrides max size for indexdb/dataBlocks cache. "+
|
||||
"See https://docs.victoriametrics.com/single-server-victoriametrics/#cache-tuning")
|
||||
cacheSizeIndexDBDataBlocksSparse = flagutil.NewBytes("storage.cacheSizeIndexDBDataBlocksSparse", 0, "Overrides max size for indexdb/dataBlocksSparse cache. "+
|
||||
"See https://docs.victoriametrics.com/single-server-victoriametrics/#cache-tuning")
|
||||
cacheSizeIndexDBTagFilters = flagutil.NewBytes("storage.cacheSizeIndexDBTagFilters", 0, "Overrides max size for indexdb/tagFiltersToMetricIDs cache. "+
|
||||
"See https://docs.victoriametrics.com/single-server-victoriametrics/#cache-tuning")
|
||||
)
|
||||
@@ -100,6 +102,7 @@ func Init(resetCacheIfNeeded func(mrs []storage.MetricRow)) {
|
||||
storage.SetTagFiltersCacheSize(cacheSizeIndexDBTagFilters.IntN())
|
||||
mergeset.SetIndexBlocksCacheSize(cacheSizeIndexDBIndexBlocks.IntN())
|
||||
mergeset.SetDataBlocksCacheSize(cacheSizeIndexDBDataBlocks.IntN())
|
||||
mergeset.SetDataBlocksSparseCacheSize(cacheSizeIndexDBDataBlocksSparse.IntN())
|
||||
|
||||
if retentionPeriod.Duration() < 24*time.Hour {
|
||||
logger.Fatalf("-retentionPeriod cannot be smaller than a day; got %s", retentionPeriod)
|
||||
@@ -581,6 +584,7 @@ func writeStorageMetrics(w io.Writer, strg *storage.Storage) {
|
||||
metrics.WriteGaugeUint64(w, `vm_cache_entries{type="storage/next_day_metric_ids"}`, m.NextDayMetricIDCacheSize)
|
||||
metrics.WriteGaugeUint64(w, `vm_cache_entries{type="storage/indexBlocks"}`, tm.IndexBlocksCacheSize)
|
||||
metrics.WriteGaugeUint64(w, `vm_cache_entries{type="indexdb/dataBlocks"}`, idbm.DataBlocksCacheSize)
|
||||
metrics.WriteGaugeUint64(w, `vm_cache_entries{type="indexdb/dataBlocksSparse"}`, idbm.DataBlocksSparseCacheSize)
|
||||
metrics.WriteGaugeUint64(w, `vm_cache_entries{type="indexdb/indexBlocks"}`, idbm.IndexBlocksCacheSize)
|
||||
metrics.WriteGaugeUint64(w, `vm_cache_entries{type="indexdb/tagFiltersToMetricIDs"}`, idbm.TagFiltersToMetricIDsCacheSize)
|
||||
metrics.WriteGaugeUint64(w, `vm_cache_entries{type="storage/regexps"}`, uint64(storage.RegexpCacheSize()))
|
||||
@@ -592,6 +596,7 @@ func writeStorageMetrics(w io.Writer, strg *storage.Storage) {
|
||||
metrics.WriteGaugeUint64(w, `vm_cache_size_bytes{type="storage/metricName"}`, m.MetricNameCacheSizeBytes)
|
||||
metrics.WriteGaugeUint64(w, `vm_cache_size_bytes{type="storage/indexBlocks"}`, tm.IndexBlocksCacheSizeBytes)
|
||||
metrics.WriteGaugeUint64(w, `vm_cache_size_bytes{type="indexdb/dataBlocks"}`, idbm.DataBlocksCacheSizeBytes)
|
||||
metrics.WriteGaugeUint64(w, `vm_cache_size_bytes{type="indexdb/dataBlocksSparse"}`, idbm.DataBlocksSparseCacheSizeBytes)
|
||||
metrics.WriteGaugeUint64(w, `vm_cache_size_bytes{type="indexdb/indexBlocks"}`, idbm.IndexBlocksCacheSizeBytes)
|
||||
metrics.WriteGaugeUint64(w, `vm_cache_size_bytes{type="storage/date_metricID"}`, m.DateMetricIDCacheSizeBytes)
|
||||
metrics.WriteGaugeUint64(w, `vm_cache_size_bytes{type="storage/hour_metric_ids"}`, m.HourMetricIDCacheSizeBytes)
|
||||
@@ -606,6 +611,7 @@ func writeStorageMetrics(w io.Writer, strg *storage.Storage) {
|
||||
metrics.WriteGaugeUint64(w, `vm_cache_size_max_bytes{type="storage/metricName"}`, m.MetricNameCacheSizeMaxBytes)
|
||||
metrics.WriteGaugeUint64(w, `vm_cache_size_max_bytes{type="storage/indexBlocks"}`, tm.IndexBlocksCacheSizeMaxBytes)
|
||||
metrics.WriteGaugeUint64(w, `vm_cache_size_max_bytes{type="indexdb/dataBlocks"}`, idbm.DataBlocksCacheSizeMaxBytes)
|
||||
metrics.WriteGaugeUint64(w, `vm_cache_size_max_bytes{type="indexdb/dataBlocksSparse"}`, idbm.DataBlocksSparseCacheSizeMaxBytes)
|
||||
metrics.WriteGaugeUint64(w, `vm_cache_size_max_bytes{type="indexdb/indexBlocks"}`, idbm.IndexBlocksCacheSizeMaxBytes)
|
||||
metrics.WriteGaugeUint64(w, `vm_cache_size_max_bytes{type="indexdb/tagFiltersToMetricIDs"}`, idbm.TagFiltersToMetricIDsCacheSizeMaxBytes)
|
||||
metrics.WriteGaugeUint64(w, `vm_cache_size_max_bytes{type="storage/regexps"}`, uint64(storage.RegexpCacheMaxSizeBytes()))
|
||||
@@ -616,6 +622,7 @@ func writeStorageMetrics(w io.Writer, strg *storage.Storage) {
|
||||
metrics.WriteCounterUint64(w, `vm_cache_requests_total{type="storage/metricName"}`, m.MetricNameCacheRequests)
|
||||
metrics.WriteCounterUint64(w, `vm_cache_requests_total{type="storage/indexBlocks"}`, tm.IndexBlocksCacheRequests)
|
||||
metrics.WriteCounterUint64(w, `vm_cache_requests_total{type="indexdb/dataBlocks"}`, idbm.DataBlocksCacheRequests)
|
||||
metrics.WriteCounterUint64(w, `vm_cache_requests_total{type="indexdb/dataBlocksSparse"}`, idbm.DataBlocksSparseCacheRequests)
|
||||
metrics.WriteCounterUint64(w, `vm_cache_requests_total{type="indexdb/indexBlocks"}`, idbm.IndexBlocksCacheRequests)
|
||||
metrics.WriteCounterUint64(w, `vm_cache_requests_total{type="indexdb/tagFiltersToMetricIDs"}`, idbm.TagFiltersToMetricIDsCacheRequests)
|
||||
metrics.WriteCounterUint64(w, `vm_cache_requests_total{type="storage/regexps"}`, storage.RegexpCacheRequests())
|
||||
@@ -626,6 +633,7 @@ func writeStorageMetrics(w io.Writer, strg *storage.Storage) {
|
||||
metrics.WriteCounterUint64(w, `vm_cache_misses_total{type="storage/metricName"}`, m.MetricNameCacheMisses)
|
||||
metrics.WriteCounterUint64(w, `vm_cache_misses_total{type="storage/indexBlocks"}`, tm.IndexBlocksCacheMisses)
|
||||
metrics.WriteCounterUint64(w, `vm_cache_misses_total{type="indexdb/dataBlocks"}`, idbm.DataBlocksCacheMisses)
|
||||
metrics.WriteCounterUint64(w, `vm_cache_misses_total{type="indexdb/dataBlocksSparse"}`, idbm.DataBlocksSparseCacheMisses)
|
||||
metrics.WriteCounterUint64(w, `vm_cache_misses_total{type="indexdb/indexBlocks"}`, idbm.IndexBlocksCacheMisses)
|
||||
metrics.WriteCounterUint64(w, `vm_cache_misses_total{type="indexdb/tagFiltersToMetricIDs"}`, idbm.TagFiltersToMetricIDsCacheMisses)
|
||||
metrics.WriteCounterUint64(w, `vm_cache_misses_total{type="storage/regexps"}`, storage.RegexpCacheMisses())
|
||||
|
||||
@@ -1,4 +1,4 @@
|
||||
FROM golang:1.23.4 AS build-web-stage
|
||||
FROM golang:1.23.5 AS build-web-stage
|
||||
COPY build /build
|
||||
|
||||
WORKDIR /build
|
||||
@@ -6,7 +6,7 @@ COPY web/ /build/
|
||||
RUN GOOS=linux GOARCH=amd64 CGO_ENABLED=0 go build -o web-amd64 github.com/VictoriMetrics/vmui/ && \
|
||||
GOOS=windows GOARCH=amd64 CGO_ENABLED=0 go build -o web-windows github.com/VictoriMetrics/vmui/
|
||||
|
||||
FROM alpine:3.21.0
|
||||
FROM alpine:3.21.2
|
||||
USER root
|
||||
|
||||
COPY --from=build-web-stage /build/web-amd64 /app/web
|
||||
|
||||
@@ -1,3 +1,6 @@
|
||||
import uPlot from "uplot";
|
||||
import { ReactNode } from "react";
|
||||
|
||||
export interface MetricBase {
|
||||
group: number;
|
||||
metric: {
|
||||
@@ -6,13 +9,13 @@ export interface MetricBase {
|
||||
}
|
||||
|
||||
export interface MetricResult extends MetricBase {
|
||||
values: [number, string][]
|
||||
values: [number, string][];
|
||||
}
|
||||
|
||||
|
||||
export interface InstantMetricResult extends MetricBase {
|
||||
value?: [number, string]
|
||||
values?: [number, string][]
|
||||
value?: [number, string];
|
||||
values?: [number, string][];
|
||||
}
|
||||
|
||||
export interface ExportMetricResult extends MetricBase {
|
||||
@@ -44,9 +47,23 @@ export interface LogHits {
|
||||
timestamps: string[];
|
||||
values: number[];
|
||||
total?: number;
|
||||
fields: {
|
||||
[key: string]: string;
|
||||
};
|
||||
fields: { [key: string]: string; };
|
||||
_isOther: boolean;
|
||||
}
|
||||
|
||||
export interface LegendLogHits {
|
||||
label: string;
|
||||
total: number;
|
||||
totalHits: number;
|
||||
isOther: boolean;
|
||||
fields: { [key: string]: string; };
|
||||
stroke?: uPlot.Series.Stroke;
|
||||
}
|
||||
|
||||
export interface LegendLogHitsMenu {
|
||||
title: string;
|
||||
icon?: ReactNode;
|
||||
handler?: () => void;
|
||||
}
|
||||
|
||||
export interface ReportMetaData {
|
||||
@@ -56,3 +73,8 @@ export interface ReportMetaData {
|
||||
comment: string;
|
||||
params: Record<string, string>;
|
||||
}
|
||||
|
||||
export interface LogsFiledValues {
|
||||
value: string;
|
||||
hits: number;
|
||||
}
|
||||
|
||||
@@ -1,22 +1,23 @@
|
||||
import React, { FC, useMemo, useRef, useState } from "preact/compat";
|
||||
import React, { FC, useCallback, useMemo, useRef, useState } from "preact/compat";
|
||||
import "./style.scss";
|
||||
import "uplot/dist/uPlot.min.css";
|
||||
import useElementSize from "../../../hooks/useElementSize";
|
||||
import uPlot, { AlignedData } from "uplot";
|
||||
import { useEffect } from "react";
|
||||
import useBarHitsOptions from "./hooks/useBarHitsOptions";
|
||||
import useBarHitsOptions, { getLabelFromLogHit } from "./hooks/useBarHitsOptions";
|
||||
import BarHitsTooltip from "./BarHitsTooltip/BarHitsTooltip";
|
||||
import { TimeParams } from "../../../types";
|
||||
import usePlotScale from "../../../hooks/uplot/usePlotScale";
|
||||
import useReadyChart from "../../../hooks/uplot/useReadyChart";
|
||||
import useZoomChart from "../../../hooks/uplot/useZoomChart";
|
||||
import classNames from "classnames";
|
||||
import { LogHits } from "../../../api/types";
|
||||
import { LegendLogHits, LogHits } from "../../../api/types";
|
||||
import { addSeries, delSeries, setBand } from "../../../utils/uplot";
|
||||
import { GraphOptions, GRAPH_STYLES } from "./types";
|
||||
import BarHitsOptions from "./BarHitsOptions/BarHitsOptions";
|
||||
import stack from "../../../utils/uplot/stack";
|
||||
import BarHitsLegend from "./BarHitsLegend/BarHitsLegend";
|
||||
import { calculateTotalHits, sortLogHits } from "../../../utils/logs";
|
||||
|
||||
interface Props {
|
||||
logHits: LogHits[];
|
||||
@@ -57,6 +58,29 @@ const BarHitsChart: FC<Props> = ({ logHits, data: _data, period, setPeriod, onAp
|
||||
graphOptions
|
||||
});
|
||||
|
||||
const prepareLegend = useCallback((hits: LogHits[], totalHits: number): LegendLogHits[] => {
|
||||
return hits.map((hit) => {
|
||||
const label = getLabelFromLogHit(hit);
|
||||
|
||||
const legendItem: LegendLogHits = {
|
||||
label,
|
||||
isOther: hit._isOther,
|
||||
fields: hit.fields,
|
||||
total: hit.total || 0,
|
||||
totalHits,
|
||||
stroke: series.find((s) => s.label === label)?.stroke,
|
||||
};
|
||||
|
||||
return legendItem;
|
||||
}).sort(sortLogHits("total"));
|
||||
}, [series]);
|
||||
|
||||
|
||||
const legendDetails: LegendLogHits[] = useMemo(() => {
|
||||
const totalHits = calculateTotalHits(logHits);
|
||||
return prepareLegend(logHits, totalHits);
|
||||
}, [logHits, prepareLegend]);
|
||||
|
||||
useEffect(() => {
|
||||
if (!uPlotInst) return;
|
||||
delSeries(uPlotInst);
|
||||
@@ -121,6 +145,7 @@ const BarHitsChart: FC<Props> = ({ logHits, data: _data, period, setPeriod, onAp
|
||||
<BarHitsLegend
|
||||
uPlotInst={uPlotInst}
|
||||
onApplyFilter={onApplyFilter}
|
||||
legendDetails={legendDetails}
|
||||
/>
|
||||
)}
|
||||
</div>
|
||||
|
||||
@@ -1,83 +1,53 @@
|
||||
import React, { FC, useCallback, useEffect, useState } from "preact/compat";
|
||||
import React, { FC, useEffect, useState } from "preact/compat";
|
||||
import uPlot, { Series } from "uplot";
|
||||
import "./style.scss";
|
||||
import "../../Line/Legend/style.scss";
|
||||
import classNames from "classnames";
|
||||
import { MouseEvent } from "react";
|
||||
import { isMacOs } from "../../../../utils/detect-device";
|
||||
import Tooltip from "../../../Main/Tooltip/Tooltip";
|
||||
import { getStreamPairs } from "../../../../utils/logs";
|
||||
import BarHitsLegendItem from "./BarHitsLegendItem";
|
||||
import { LegendLogHits } from "../../../../api/types";
|
||||
|
||||
interface Props {
|
||||
uPlotInst: uPlot;
|
||||
legendDetails: LegendLogHits[];
|
||||
onApplyFilter: (value: string) => void;
|
||||
}
|
||||
|
||||
const BarHitsLegend: FC<Props> = ({ uPlotInst, onApplyFilter }) => {
|
||||
const BarHitsLegend: FC<Props> = ({ uPlotInst, legendDetails, onApplyFilter }) => {
|
||||
const [series, setSeries] = useState<Series[]>([]);
|
||||
const [pairs, setPairs] = useState<string[][]>([]);
|
||||
const totalHits = legendDetails[0]?.totalHits || 0;
|
||||
|
||||
const updateSeries = useCallback(() => {
|
||||
const series = uPlotInst.series.filter(s => s.scale !== "x");
|
||||
setSeries(series);
|
||||
setPairs(series.map(s => getStreamPairs(s.label || "")));
|
||||
const getSeries = () => {
|
||||
return uPlotInst.series.filter(s => s.scale !== "x");
|
||||
};
|
||||
|
||||
const handleRedrawGraph = () => {
|
||||
uPlotInst.redraw();
|
||||
setSeries(getSeries());
|
||||
};
|
||||
|
||||
useEffect(() => {
|
||||
setSeries(getSeries());
|
||||
}, [uPlotInst]);
|
||||
|
||||
const handleClickByValue = (value: string) => (e: MouseEvent<HTMLDivElement>) => {
|
||||
const metaKey = e.metaKey || e.ctrlKey;
|
||||
if (!metaKey) return;
|
||||
onApplyFilter(`{${value}}` || "");
|
||||
updateSeries();
|
||||
uPlotInst.redraw();
|
||||
};
|
||||
|
||||
const handleClickByStream = (target: Series) => (e: MouseEvent<HTMLDivElement>) => {
|
||||
const metaKey = e.metaKey || e.ctrlKey;
|
||||
if (metaKey) return;
|
||||
target.show = !target.show;
|
||||
updateSeries();
|
||||
uPlotInst.redraw();
|
||||
};
|
||||
|
||||
useEffect(updateSeries, [uPlotInst]);
|
||||
|
||||
return (
|
||||
<div className="vm-bar-hits-legend">
|
||||
{series.map((s, i) => (
|
||||
<Tooltip
|
||||
key={s.label}
|
||||
title={(
|
||||
<ul className="vm-bar-hits-legend-info">
|
||||
<li>Click to {s.show ? "hide" : "show"} the _stream.</li>
|
||||
<li>{isMacOs() ? "Cmd" : "Ctrl"} + Click to filter by the _stream.</li>
|
||||
</ul>
|
||||
)}
|
||||
>
|
||||
<div
|
||||
className={classNames({
|
||||
"vm-bar-hits-legend-item": true,
|
||||
"vm-bar-hits-legend-item_hide": !s.show,
|
||||
})}
|
||||
onClick={handleClickByStream(s)}
|
||||
>
|
||||
<div
|
||||
className="vm-bar-hits-legend-item__marker"
|
||||
style={{ backgroundColor: `${(s?.stroke as () => string)?.()}` }}
|
||||
/>
|
||||
<div className="vm-bar-hits-legend-item-pairs">
|
||||
{pairs[i].map(value => (
|
||||
<span
|
||||
className="vm-bar-hits-legend-item-pairs__value"
|
||||
key={value}
|
||||
onClick={handleClickByValue(value)}
|
||||
>
|
||||
{value}
|
||||
</span>
|
||||
))}
|
||||
</div>
|
||||
</div>
|
||||
</Tooltip>
|
||||
{legendDetails.map((legend) => (
|
||||
<BarHitsLegendItem
|
||||
key={legend.label}
|
||||
legend={legend}
|
||||
series={series}
|
||||
onRedrawGraph={handleRedrawGraph}
|
||||
onApplyFilter={onApplyFilter}
|
||||
/>
|
||||
))}
|
||||
<div className="vm-bar-hits-legend-info">
|
||||
<div>
|
||||
Total hits: <b>{totalHits.toLocaleString("en-US")}</b>
|
||||
</div>
|
||||
<div>
|
||||
<code>L-Click</code> toggles visibility.
|
||||
<code>R-Click</code> opens menu.
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
);
|
||||
};
|
||||
|
||||
@@ -0,0 +1,92 @@
|
||||
import React, { FC, useMemo, useRef, useState } from "preact/compat";
|
||||
import classNames from "classnames";
|
||||
import { Series } from "uplot";
|
||||
import { MouseEvent } from "react";
|
||||
import { LegendLogHits } from "../../../../api/types";
|
||||
import { getStreamPairs } from "../../../../utils/logs";
|
||||
import { formatNumberShort } from "../../../../utils/math";
|
||||
import Popper from "../../../Main/Popper/Popper";
|
||||
import useBoolean from "../../../../hooks/useBoolean";
|
||||
import LegendHitsMenu from "../LegendHitsMenu/LegendHitsMenu";
|
||||
|
||||
interface Props {
|
||||
legend: LegendLogHits;
|
||||
series: Series[];
|
||||
onRedrawGraph: () => void;
|
||||
onApplyFilter: (value: string) => void;
|
||||
}
|
||||
|
||||
const BarHitsLegendItem: FC<Props> = ({ legend, series, onRedrawGraph, onApplyFilter }) => {
|
||||
const {
|
||||
value: openContextMenu,
|
||||
setTrue: handleOpenContextMenu,
|
||||
setFalse: handleCloseContextMenu,
|
||||
} = useBoolean(false);
|
||||
|
||||
const legendRef = useRef<HTMLDivElement>(null);
|
||||
const [clickPosition, setClickPosition] = useState<{ top: number; left: number } | null>(null);
|
||||
|
||||
const targetSeries = useMemo(() => series.find(s => s.label === legend.label), [series]);
|
||||
|
||||
const fields = useMemo(() => getStreamPairs(legend.label), [legend.label]);
|
||||
|
||||
const label = fields.join(", ");
|
||||
const totalShortFormatted = formatNumberShort(legend.total);
|
||||
|
||||
const handleClickByStream = (e: MouseEvent<HTMLDivElement>) => {
|
||||
if (!targetSeries) return;
|
||||
|
||||
if (e.metaKey || e.ctrlKey) {
|
||||
targetSeries.show = !targetSeries.show;
|
||||
} else {
|
||||
const isOnlyTargetVisible = series.every(s => s === targetSeries || !s.show);
|
||||
series.forEach(s => {
|
||||
s.show = isOnlyTargetVisible || (s === targetSeries);
|
||||
});
|
||||
}
|
||||
|
||||
onRedrawGraph();
|
||||
};
|
||||
|
||||
const handleContextMenu = (e: MouseEvent<HTMLDivElement>) => {
|
||||
e.preventDefault();
|
||||
setClickPosition({ top: e.clientY, left: e.clientX });
|
||||
handleOpenContextMenu();
|
||||
};
|
||||
|
||||
return (
|
||||
<div
|
||||
ref={legendRef}
|
||||
className={classNames({
|
||||
"vm-bar-hits-legend-item": true,
|
||||
"vm-bar-hits-legend-item_other": legend.isOther,
|
||||
"vm-bar-hits-legend-item_hide": !targetSeries?.show,
|
||||
})}
|
||||
onClick={handleClickByStream}
|
||||
onContextMenu={handleContextMenu}
|
||||
>
|
||||
<div
|
||||
className="vm-bar-hits-legend-item__marker"
|
||||
style={{ backgroundColor: `${legend.stroke}` }}
|
||||
/>
|
||||
<div className="vm-bar-hits-legend-item__label">{label}</div>
|
||||
<span className="vm-bar-hits-legend-item__total">({totalShortFormatted})</span>
|
||||
<Popper
|
||||
placement="fixed"
|
||||
open={openContextMenu}
|
||||
buttonRef={legendRef}
|
||||
placementPosition={clickPosition}
|
||||
onClose={handleCloseContextMenu}
|
||||
>
|
||||
<LegendHitsMenu
|
||||
legend={legend}
|
||||
fields={fields}
|
||||
onApplyFilter={onApplyFilter}
|
||||
onClose={handleCloseContextMenu}
|
||||
/>
|
||||
</Popper>
|
||||
</div>
|
||||
);
|
||||
};
|
||||
|
||||
export default BarHitsLegendItem;
|
||||
@@ -3,16 +3,16 @@
|
||||
.vm-bar-hits-legend {
|
||||
display: flex;
|
||||
flex-wrap: wrap;
|
||||
gap: $padding-small;
|
||||
padding: 0 $padding-small $padding-small;
|
||||
color: $color-text;
|
||||
|
||||
&-item {
|
||||
display: grid;
|
||||
grid-template-columns: auto 1fr;
|
||||
max-width: 50%;
|
||||
display: flex;
|
||||
align-items: center;
|
||||
gap: $padding-small;
|
||||
font-size: 12px;
|
||||
padding: 0 $padding-small;
|
||||
font-size: $font-size-small;
|
||||
padding: $padding-small $padding-global;
|
||||
border-radius: $border-radius-small;
|
||||
cursor: pointer;
|
||||
transition: 0.2s;
|
||||
@@ -27,34 +27,44 @@
|
||||
}
|
||||
|
||||
&__marker {
|
||||
width: 14px;
|
||||
min-width: 14px;
|
||||
max-width: 14px;
|
||||
height: 14px;
|
||||
border: $color-background-block;
|
||||
}
|
||||
|
||||
&-pairs {
|
||||
display: flex;
|
||||
gap: $padding-small;
|
||||
&__label {
|
||||
white-space: nowrap;
|
||||
overflow: hidden;
|
||||
text-overflow: ellipsis;
|
||||
}
|
||||
|
||||
&__value {
|
||||
padding: $padding-small 0;
|
||||
|
||||
&:hover {
|
||||
text-decoration: underline;
|
||||
}
|
||||
|
||||
&:after {
|
||||
content: ",";
|
||||
}
|
||||
|
||||
&:last-child:after {
|
||||
content: "";
|
||||
}
|
||||
}
|
||||
&__total {
|
||||
color: $color-text-secondary;
|
||||
font-style: italic;
|
||||
grid-column: 2;
|
||||
}
|
||||
}
|
||||
|
||||
&-info {
|
||||
list-style-position: inside;
|
||||
flex-grow: 1;
|
||||
width: 100%;
|
||||
display: flex;
|
||||
align-items: center;
|
||||
justify-content: space-between;
|
||||
padding-top: $padding-small;
|
||||
color: $color-text-secondary;
|
||||
font-size: $font-size-small;
|
||||
|
||||
code {
|
||||
display: inline-block;
|
||||
padding: calc($padding-small / 2) $padding-small;
|
||||
font-size: $font-size-small;
|
||||
text-align: center;
|
||||
background-color: $color-background-body;
|
||||
background-repeat: repeat-x;
|
||||
border: $border-divider;
|
||||
border-radius: 4px;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -5,7 +5,6 @@ import "./style.scss";
|
||||
import useStateSearchParams from "../../../../hooks/useStateSearchParams";
|
||||
import { useSearchParams } from "react-router-dom";
|
||||
import Button from "../../../Main/Button/Button";
|
||||
import classNames from "classnames";
|
||||
import { SettingsIcon, VisibilityIcon, VisibilityOffIcon } from "../../../Main/Icons";
|
||||
import Tooltip from "../../../Main/Tooltip/Tooltip";
|
||||
import Popper from "../../../Main/Popper/Popper";
|
||||
@@ -24,27 +23,20 @@ const BarHitsOptions: FC<Props> = ({ onChange }) => {
|
||||
setFalse: handleCloseOptions,
|
||||
} = useBoolean(false);
|
||||
|
||||
const [graphStyle, setGraphStyle] = useStateSearchParams(GRAPH_STYLES.LINE_STEPPED, "graph");
|
||||
const [stacked, setStacked] = useStateSearchParams(false, "stacked");
|
||||
const [fill, setFill] = useStateSearchParams(false, "fill");
|
||||
const [fill, setFill] = useStateSearchParams("true", "fill");
|
||||
const [hideChart, setHideChart] = useStateSearchParams(false, "hide_chart");
|
||||
|
||||
const options: GraphOptions = useMemo(() => ({
|
||||
graphStyle,
|
||||
graphStyle: GRAPH_STYLES.BAR,
|
||||
stacked,
|
||||
fill,
|
||||
fill: fill === "true",
|
||||
hideChart,
|
||||
}), [graphStyle, stacked, fill, hideChart]);
|
||||
|
||||
const handleChangeGraphStyle = (val: string) => () => {
|
||||
setGraphStyle(val as GRAPH_STYLES);
|
||||
searchParams.set("graph", val);
|
||||
setSearchParams(searchParams);
|
||||
};
|
||||
}), [stacked, fill, hideChart]);
|
||||
|
||||
const handleChangeFill = (val: boolean) => {
|
||||
setFill(val);
|
||||
val ? searchParams.set("fill", "true") : searchParams.delete("fill");
|
||||
setFill(`${val}`);
|
||||
searchParams.set("fill", `${val}`);
|
||||
setSearchParams(searchParams);
|
||||
};
|
||||
|
||||
@@ -97,21 +89,6 @@ const BarHitsOptions: FC<Props> = ({ onChange }) => {
|
||||
title={"Graph settings"}
|
||||
>
|
||||
<div className="vm-bar-hits-options-settings">
|
||||
<div className="vm-bar-hits-options-settings-item vm-bar-hits-options-settings-item_list">
|
||||
<p className="vm-bar-hits-options-settings-item__title">Graph style:</p>
|
||||
{Object.values(GRAPH_STYLES).map(style => (
|
||||
<div
|
||||
key={style}
|
||||
className={classNames({
|
||||
"vm-list-item": true,
|
||||
"vm-list-item_active": graphStyle === style,
|
||||
})}
|
||||
onClick={handleChangeGraphStyle(style)}
|
||||
>
|
||||
{style}
|
||||
</div>
|
||||
))}
|
||||
</div>
|
||||
<div className="vm-bar-hits-options-settings-item">
|
||||
<Switch
|
||||
label={"Stacked"}
|
||||
@@ -122,7 +99,7 @@ const BarHitsOptions: FC<Props> = ({ onChange }) => {
|
||||
<div className="vm-bar-hits-options-settings-item">
|
||||
<Switch
|
||||
label={"Fill"}
|
||||
value={fill}
|
||||
value={fill === "true"}
|
||||
onChange={handleChangeFill}
|
||||
/>
|
||||
</div>
|
||||
|
||||
@@ -11,12 +11,12 @@
|
||||
&-settings {
|
||||
display: grid;
|
||||
align-items: flex-start;
|
||||
gap: $padding-global;
|
||||
min-width: 200px;
|
||||
gap: $padding-global;
|
||||
padding-bottom: $padding-global;
|
||||
|
||||
&-item {
|
||||
border-bottom: $border-divider;
|
||||
padding: 0 $padding-global $padding-global;
|
||||
padding: 0 $padding-global;
|
||||
|
||||
&_list {
|
||||
padding: 0;
|
||||
|
||||
@@ -5,6 +5,7 @@ import { DATE_TIME_FORMAT } from "../../../../constants/date";
|
||||
import classNames from "classnames";
|
||||
import "./style.scss";
|
||||
import "../../ChartTooltip/style.scss";
|
||||
import { sortLogHits } from "../../../../utils/logs";
|
||||
|
||||
interface Props {
|
||||
data: AlignedData;
|
||||
@@ -26,7 +27,7 @@ const BarHitsTooltip: FC<Props> = ({ data, focusDataIdx, uPlotInst }) => {
|
||||
const tooltipItems = values.map((value, i) => {
|
||||
const targetSeries = series[i + 1];
|
||||
const stroke = (targetSeries?.stroke as () => string)?.();
|
||||
const label = targetSeries?.label || "other";
|
||||
const label = targetSeries?.label;
|
||||
const show = targetSeries?.show;
|
||||
return {
|
||||
label,
|
||||
@@ -34,7 +35,7 @@ const BarHitsTooltip: FC<Props> = ({ data, focusDataIdx, uPlotInst }) => {
|
||||
value,
|
||||
show
|
||||
};
|
||||
}).filter(item => item.value > 0 && item.show).sort((a, b) => b.value - a.value);
|
||||
}).filter(item => item.value > 0 && item.show).sort(sortLogHits("value"));
|
||||
|
||||
const point = {
|
||||
top: tooltipItems[0] ? uPlotInst?.valToPos?.(tooltipItems[0].value, "y") || 0 : 0,
|
||||
@@ -104,16 +105,19 @@ const BarHitsTooltip: FC<Props> = ({ data, focusDataIdx, uPlotInst }) => {
|
||||
className="vm-chart-tooltip-data__marker"
|
||||
style={{ background: item.stroke }}
|
||||
/>
|
||||
<p>
|
||||
{item.label}: <b>{item.value}</b>
|
||||
<p className="vm-bar-hits-tooltip-item">
|
||||
<span className="vm-bar-hits-tooltip-item__label">{item.label}</span>
|
||||
<span>{item.value.toLocaleString("en-US")}</span>
|
||||
</p>
|
||||
</div>
|
||||
))}
|
||||
</div>
|
||||
{tooltipData.values.length > 1 && (
|
||||
<div className="vm-chart-tooltip-data">
|
||||
<p>
|
||||
Total records: <b>{tooltipData.total}</b>
|
||||
<span/>
|
||||
<p className="vm-bar-hits-tooltip-item">
|
||||
<span className="vm-bar-hits-tooltip-item__label">Total</span>
|
||||
<span>{tooltipData.total.toLocaleString("en-US")}</span>
|
||||
</p>
|
||||
</div>
|
||||
)}
|
||||
|
||||
@@ -9,4 +9,19 @@
|
||||
opacity: 1;
|
||||
pointer-events: auto;
|
||||
}
|
||||
|
||||
&-item {
|
||||
display: grid;
|
||||
grid-template-columns: 1fr auto;
|
||||
align-items: center;
|
||||
gap: $padding-global;
|
||||
max-width: 100%;
|
||||
|
||||
&__label {
|
||||
display: inline-block;
|
||||
overflow: hidden;
|
||||
text-overflow: ellipsis;
|
||||
white-space: nowrap;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -0,0 +1,50 @@
|
||||
import React, { FC } from "preact/compat";
|
||||
import "./style.scss";
|
||||
import { LegendLogHits } from "../../../../api/types";
|
||||
import LegendHitsMenuStats from "./LegendHitsMenuStats";
|
||||
import LegendHitsMenuBase from "./LegendHitsMenuBase";
|
||||
import LegendHitsMenuRow from "./LegendHitsMenuRow";
|
||||
import LegendHitsMenuFields from "./LegendHitsMenuFields";
|
||||
import { LOGS_LIMIT_HITS } from "../../../../constants/logs";
|
||||
|
||||
const otherDescription = `aggregated results for fields not in the top ${LOGS_LIMIT_HITS}`;
|
||||
|
||||
interface Props {
|
||||
legend: LegendLogHits;
|
||||
fields: string[];
|
||||
onApplyFilter: (value: string) => void;
|
||||
onClose: () => void;
|
||||
}
|
||||
|
||||
const LegendHitsMenu: FC<Props> = ({ legend, fields, onApplyFilter, onClose }) => {
|
||||
return (
|
||||
<div className="vm-legend-hits-menu">
|
||||
<div className="vm-legend-hits-menu-section">
|
||||
<LegendHitsMenuRow
|
||||
className="vm-legend-hits-menu-row_info"
|
||||
title={legend.isOther ? otherDescription : legend.label}
|
||||
/>
|
||||
</div>
|
||||
|
||||
{!legend.isOther && (
|
||||
<LegendHitsMenuBase
|
||||
legend={legend}
|
||||
onApplyFilter={onApplyFilter}
|
||||
onClose={onClose}
|
||||
/>
|
||||
)}
|
||||
|
||||
{!legend.isOther && (
|
||||
<LegendHitsMenuFields
|
||||
fields={fields}
|
||||
onApplyFilter={onApplyFilter}
|
||||
onClose={onClose}
|
||||
/>
|
||||
)}
|
||||
|
||||
<LegendHitsMenuStats legend={legend}/>
|
||||
</div>
|
||||
);
|
||||
};
|
||||
|
||||
export default LegendHitsMenu;
|
||||
@@ -0,0 +1,64 @@
|
||||
import React, { FC } from "preact/compat";
|
||||
import LegendHitsMenuRow from "./LegendHitsMenuRow";
|
||||
import useCopyToClipboard from "../../../../hooks/useCopyToClipboard";
|
||||
import { CopyIcon, FilterIcon, FilterOffIcon } from "../../../Main/Icons";
|
||||
import { LegendLogHits, LegendLogHitsMenu } from "../../../../api/types";
|
||||
import { LOGS_GROUP_BY } from "../../../../constants/logs";
|
||||
|
||||
interface Props {
|
||||
legend: LegendLogHits;
|
||||
onApplyFilter: (value: string) => void;
|
||||
onClose: () => void;
|
||||
}
|
||||
|
||||
const LegendHitsMenuBase: FC<Props> = ({ legend, onApplyFilter, onClose }) => {
|
||||
const copyToClipboard = useCopyToClipboard();
|
||||
|
||||
const handleAddStreamToFilter = () => {
|
||||
onApplyFilter(`${LOGS_GROUP_BY}: ${legend.label}`);
|
||||
onClose();
|
||||
};
|
||||
|
||||
const handleExcludeStreamToFilter = () => {
|
||||
onApplyFilter(`(NOT ${LOGS_GROUP_BY}: ${legend.label})`);
|
||||
onClose();
|
||||
};
|
||||
|
||||
const handlerCopyLabel = async () => {
|
||||
await copyToClipboard(legend.label, `${legend.label} has been copied`);
|
||||
onClose();
|
||||
};
|
||||
|
||||
const options: LegendLogHitsMenu[] = [
|
||||
{
|
||||
title: `Copy ${LOGS_GROUP_BY} name`,
|
||||
icon: <CopyIcon/>,
|
||||
handler: handlerCopyLabel,
|
||||
},
|
||||
{
|
||||
title: `Add ${LOGS_GROUP_BY} to filter`,
|
||||
icon: <FilterIcon/>,
|
||||
handler: handleAddStreamToFilter,
|
||||
},
|
||||
{
|
||||
title: `Exclude ${LOGS_GROUP_BY} to filter`,
|
||||
icon: <FilterOffIcon/>,
|
||||
handler: handleExcludeStreamToFilter,
|
||||
}
|
||||
];
|
||||
|
||||
return (
|
||||
<div className="vm-legend-hits-menu-section">
|
||||
{options.map(({ icon, title, handler }) => (
|
||||
<LegendHitsMenuRow
|
||||
key={title}
|
||||
iconStart={icon}
|
||||
title={title}
|
||||
handler={handler}
|
||||
/>
|
||||
))}
|
||||
</div>
|
||||
);
|
||||
};
|
||||
|
||||
export default LegendHitsMenuBase;
|
||||
@@ -0,0 +1,74 @@
|
||||
import React, { FC, useMemo } from "preact/compat";
|
||||
import LegendHitsMenuRow from "./LegendHitsMenuRow";
|
||||
import { CopyIcon, FilterIcon, FilterOffIcon } from "../../../Main/Icons";
|
||||
import { convertToFieldFilter } from "../../../../utils/logs";
|
||||
import { LegendLogHitsMenu } from "../../../../api/types";
|
||||
import useCopyToClipboard from "../../../../hooks/useCopyToClipboard";
|
||||
|
||||
interface Props {
|
||||
fields: string[];
|
||||
onApplyFilter: (value: string) => void;
|
||||
onClose: () => void;
|
||||
}
|
||||
|
||||
const LegendHitsMenuFields: FC<Props> = ({ fields, onApplyFilter, onClose }) => {
|
||||
const copyToClipboard = useCopyToClipboard();
|
||||
|
||||
const handleCopy = (field: string) => async () => {
|
||||
await copyToClipboard(field, `${field} has been copied`);
|
||||
onClose();
|
||||
};
|
||||
|
||||
const handleAddToFilter = (field: string) => () => {
|
||||
onApplyFilter(field);
|
||||
onClose();
|
||||
};
|
||||
|
||||
const handleExcludeToFilter = (field: string) => () => {
|
||||
onApplyFilter(`-${field}`);
|
||||
onClose();
|
||||
};
|
||||
|
||||
const generateFieldMenu = (field: string): LegendLogHitsMenu[] => {
|
||||
return [
|
||||
{
|
||||
title: "Copy",
|
||||
icon: <CopyIcon/>,
|
||||
handler: handleCopy(field),
|
||||
},
|
||||
{
|
||||
title: "Add to filter",
|
||||
icon: <FilterIcon/>,
|
||||
handler: handleAddToFilter(field),
|
||||
},
|
||||
{
|
||||
title: "Exclude to filter",
|
||||
icon: <FilterOffIcon/>,
|
||||
handler: handleExcludeToFilter(field),
|
||||
}
|
||||
];
|
||||
};
|
||||
|
||||
const fieldsWithMenu: LegendLogHitsMenu[] = useMemo(() => {
|
||||
return fields.map(field => {
|
||||
const title = convertToFieldFilter(field);
|
||||
return {
|
||||
title,
|
||||
submenu: generateFieldMenu(title),
|
||||
};
|
||||
});
|
||||
}, [fields]);
|
||||
|
||||
return (
|
||||
<div className="vm-legend-hits-menu-section">
|
||||
{fieldsWithMenu?.map((field) => (
|
||||
<LegendHitsMenuRow
|
||||
key={field.title}
|
||||
{...field}
|
||||
/>
|
||||
))}
|
||||
</div>
|
||||
);
|
||||
};
|
||||
|
||||
export default LegendHitsMenuFields;
|
||||
@@ -0,0 +1,116 @@
|
||||
import React, { FC, useRef, useState } from "preact/compat";
|
||||
import classNames from "classnames";
|
||||
import { ReactNode, useEffect } from "react";
|
||||
import Tooltip from "../../../Main/Tooltip/Tooltip";
|
||||
import { LegendLogHitsMenu } from "../../../../api/types";
|
||||
import { ArrowDropDownIcon } from "../../../Main/Icons";
|
||||
import useClickOutside from "../../../../hooks/useClickOutside";
|
||||
|
||||
interface Props {
|
||||
title: string | ReactNode;
|
||||
handler?: () => void;
|
||||
iconStart?: ReactNode;
|
||||
iconEnd?: ReactNode;
|
||||
className?: string;
|
||||
submenu?: LegendLogHitsMenu[];
|
||||
}
|
||||
|
||||
/**
 * Single row of the legend hits context menu.
 *
 * Renders optional leading/trailing icons around a truncating title and,
 * when `submenu` entries are provided, toggles a nested menu on click.
 * The submenu flips to the row's left side when it would overflow the
 * viewport; an overflowing title is wrapped in a tooltip.
 */
const LegendHitsMenuRow: FC<Props> = ({ title, handler, iconStart, iconEnd, className, submenu }) => {
  const containerRef = useRef<HTMLDivElement>(null);
  const titleRef = useRef<HTMLDivElement>(null);
  const submenuRef = useRef<HTMLDivElement>(null);

  // Whether the title is clipped by ellipsis; if so it gets a tooltip.
  const [isOverflownTitle, setIsOverflownTitle] = useState(false);

  const [openSubmenu, setOpenSubmenu] = useState(false);
  // true -> render the submenu to the left of the row (viewport overflow).
  const [posSubmenuLeft, setPosSubmenuLeft] = useState(false);
  const hasSubmenu = !!submenu?.length;

  const handleToggleContextMenu = () => {
    setOpenSubmenu(prev => !prev);
  };

  const handleCloseContextMenu = () => {
    setOpenSubmenu(false);
  };

  // Row click: run the optional handler, then toggle the submenu if any.
  // NOTE(review): clicks on submenu rows bubble up to this container's
  // onClick, so selecting a submenu item also toggles (closes) the submenu —
  // confirm this close-on-select behavior is intended.
  const handleClick = () => {
    handler && handler();
    hasSubmenu && handleToggleContextMenu();
  };


  // Detect title truncation to decide whether to wrap it in a tooltip.
  useEffect(() => {
    if (!titleRef.current) return;
    setIsOverflownTitle(titleRef.current.scrollWidth > titleRef.current.clientWidth);
  }, [title, titleRef]);

  // After the submenu renders, measure it and flip it to the left side when
  // its right edge would leave the viewport. requestAnimationFrame defers
  // the measurement until after layout so the rect is accurate.
  useEffect(() => {
    requestAnimationFrame(() => {
      if (!openSubmenu || !submenuRef.current) {
        setPosSubmenuLeft(false);
        return;
      }

      const { left, width } = submenuRef.current.getBoundingClientRect();
      setPosSubmenuLeft(left + width > window.innerWidth);
    });
  }, [submenuRef, openSubmenu]);

  // Any click outside the row (including outside an open submenu) closes it.
  useClickOutside(containerRef, handleCloseContextMenu);

  const titleContent = (
    <div
      ref={titleRef}
      className="vm-legend-hits-menu-row__title"
    >
      {title}
    </div>
  );

  return (
    <div
      ref={containerRef}
      className={classNames({
        "vm-legend-hits-menu-row": true,
        "vm-legend-hits-menu-row_interactive": !!handler || hasSubmenu,
        [`${className}`]: className
      })}
      onClick={handleClick}
    >
      {iconStart && <div className="vm-legend-hits-menu-row__icon">{iconStart}</div>}
      {isOverflownTitle ? (<Tooltip title={title}>{titleContent}</Tooltip>) : titleContent}
      {iconEnd && !hasSubmenu && <div className="vm-legend-hits-menu-row__icon">{iconEnd}</div>}

      {hasSubmenu && (
        <div className="vm-legend-hits-menu-row__icon vm-legend-hits-menu-row__icon_drop">
          <ArrowDropDownIcon/>
        </div>
      )}

      {openSubmenu && submenu && (
        <div
          ref={submenuRef}
          className={classNames({
            "vm-legend-hits-menu": true,
            "vm-legend-hits-menu_submenu": true,
            "vm-legend-hits-menu_submenu_left": posSubmenuLeft
          })}
        >
          <div className="vm-legend-hits-menu-section">
            {submenu.map(({ icon, title, handler }) => (
              <LegendHitsMenuRow
                key={title}
                iconStart={icon}
                title={title}
                handler={handler}
              />
            ))}
          </div>
        </div>
      )}
    </div>
  );
};

export default LegendHitsMenuRow;
|
||||
@@ -0,0 +1,23 @@
|
||||
import React, { FC } from "preact/compat";
|
||||
import { LegendLogHits } from "../../../../api/types";
|
||||
|
||||
interface Props {
|
||||
legend: LegendLogHits;
|
||||
}
|
||||
|
||||
const LegendHitsMenuStats: FC<Props> = ({ legend }) => {
|
||||
const totalFormatted = legend.total.toLocaleString("en-US");
|
||||
const percentage = Math.round((legend.total / legend.totalHits) * 100);
|
||||
|
||||
return (
|
||||
<div className="vm-legend-hits-menu-section">
|
||||
<div className="vm-legend-hits-menu-row">
|
||||
<div className="vm-legend-hits-menu-row__title">
|
||||
Total: {totalFormatted} ({percentage}%)
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
);
|
||||
};
|
||||
|
||||
export default LegendHitsMenuStats;
|
||||
@@ -0,0 +1,178 @@
|
||||
@use "src/styles/variables" as *;

// Styles for the legend hits context menu: the popup itself, its nested
// submenu, menu rows, and the "other hits" list/table shown in a modal.
.vm-legend-hits-menu {
  min-width: 160px;
  z-index: 1;

  // Nested submenu, positioned to the right of its parent row by default.
  &_submenu {
    position: absolute;
    top: calc(-1 * $padding-small);
    background-color: $color-background-block;
    left: calc(100% + ($padding-small / 2));
    box-shadow: $box-shadow-popper;
    border-radius: $border-radius-small;
    animation: vm-submenu-show 150ms cubic-bezier(0.280, 0.840, 0.2, 1);
    transform-origin: top left;

    // Flipped variant used when the submenu would overflow the viewport.
    &_left {
      left: auto;
      right: calc(100% + ($padding-small / 2));
      transform-origin: top right;
    }
  }

  // Group of rows; sections are separated by a divider except the last one.
  &-section {
    border-bottom: $border-divider;

    &:last-child {
      border-bottom: none;
    }
  }

  // Single menu row: [iconStart] title [iconEnd | submenu arrow].
  &-row {
    position: relative;
    display: flex;
    gap: $padding-small;
    align-items: center;
    justify-content: flex-start;
    padding: 0 $padding-global;
    transition: background-color 0.3s;
    color: $color-text;

    // Rows with a click handler or submenu get hover feedback.
    &_interactive {
      cursor: pointer;

      &:hover {
        background-color: rgba(0, 0, 0, 0.05);
      }
    }

    // Compact informational row (non-actionable).
    &_info {
      font-size: $font-size-small;
      font-weight: 500;
      padding-block: $padding-small;
    }

    &_info &__icon {
      color: $color-info;
    }

    &__icon {
      display: flex;
      align-items: center;
      justify-content: center;
      width: 14px;
      height: 14px;

      // Dropdown arrow rotated to point at the submenu on the right.
      &_drop {
        transform: rotate(-90deg);
      }
    }

    // Title truncates with ellipsis; overflow triggers a tooltip in JS.
    &__title {
      flex-grow: 1;
      padding: $padding-global 0;
      position: relative;
      max-width: 400px;
      white-space: nowrap;
      overflow: hidden;
      text-overflow: ellipsis;
    }
  }

  // Scrollable modal listing the hits rolled up into the "other" series.
  &-other-list {
    width: 80vw;
    height: 80vh;
    overflow: auto;

    // Search input pinned above the table while scrolling.
    &__search {
      position: sticky;
      top: 0;
      padding: $padding-small 0;
      background-color: $color-background-block;
      border-bottom: $border-divider;
      z-index: 2;
    }

    &-row {
      border-bottom: $border-divider;

      // Header row sticks just below the search input (65px offset).
      &_header {
        border-bottom: none;
        position: sticky;
        top: 65px;
        background-color: $color-background-block;
        z-index: 1;
        width: 100%;

        // Divider drawn with a pseudo-element so it sticks with the row.
        &:after {
          content: '';
          position: absolute;
          bottom: 0;
          left: 0;
          width: 100%;
          height: 1px;
          border-bottom: $border-divider;
        }
      }
    }

    &-cell {
      padding: calc($padding-small / 2) 0;
      text-align: left;

      &_header {
        padding: $padding-small;
        font-weight: 500;
      }

      // Numeric columns: right-aligned with tabular digits for alignment.
      &_number {
        padding: $padding-small;
        text-align: right;
        font-variant-numeric: tabular-nums;
      }

      &_fields {
        width: 100%;
      }
    }

    // Inline, comma-separated list of a hit's field values.
    &-fields {
      display: flex;
      flex-wrap: wrap;
      align-items: center;
      justify-content: flex-start;

      &__field {
        padding: calc($padding-small / 2) $padding-small;
        border-radius: $border-radius-small;
        transition: background-color 0.3s;

        &:hover {
          background-color: $color-hover-black;
        }

        // Comma separators rendered via CSS, skipped on the last item.
        &:not(:last-child) {
          &:after {
            content: ',';
          }
        }
      }
    }

    &-actions {
      display: flex;
      align-items: center;
      justify-content: center;
    }
  }
}

// Fade-in used when the submenu opens.
@keyframes vm-submenu-show {
  0% {
    opacity: 0;
  }
  100% {
    opacity: 1;
  }
}
|
||||
@@ -36,6 +36,14 @@ interface UseGetBarHitsOptionsArgs {
|
||||
graphOptions: GraphOptions;
|
||||
}
|
||||
|
||||
export const OTHER_HITS_LABEL = "other";
|
||||
|
||||
export const getLabelFromLogHit = (logHit: LogHits) => {
|
||||
if (logHit?._isOther) return OTHER_HITS_LABEL;
|
||||
const fields = Object.values(logHit?.fields || {});
|
||||
return fields.map((value) => value || "\"\"").join(", ");
|
||||
};
|
||||
|
||||
const useBarHitsOptions = ({
|
||||
data,
|
||||
logHits,
|
||||
@@ -59,12 +67,12 @@ const useBarHitsOptions = ({
|
||||
let colorN = 0;
|
||||
return data.map((_d, i) => {
|
||||
if (i === 0) return {}; // 0 index is xAxis(timestamps)
|
||||
const fields = Object.values(logHits?.[i - 1]?.fields || {});
|
||||
const label = fields.map((value) => value || "\"\"").join(", ");
|
||||
const color = getCssVariable(label ? seriesColors[colorN] : "color-log-hits-bar-0");
|
||||
if (label) colorN++;
|
||||
const target = logHits?.[i - 1];
|
||||
const label = getLabelFromLogHit(target);
|
||||
const color = getCssVariable(target?._isOther ? "color-log-hits-bar-0" : seriesColors[colorN]);
|
||||
if (!target?._isOther) colorN++;
|
||||
return {
|
||||
label: label || "other",
|
||||
label,
|
||||
width: strokeWidth[graphOptions.graphStyle],
|
||||
spanGaps: true,
|
||||
stroke: color,
|
||||
|
||||
@@ -32,6 +32,11 @@ $chart-tooltip-y: -1 * ($padding-global + $chart-tooltip-half-icon);
|
||||
max-width: calc(100vw/3);
|
||||
}
|
||||
|
||||
&_hits &-data {
|
||||
display: grid;
|
||||
grid-template-columns: $font-size 1fr;
|
||||
}
|
||||
|
||||
&_sticky {
|
||||
pointer-events: auto;
|
||||
z-index: 99;
|
||||
@@ -90,6 +95,8 @@ $chart-tooltip-y: -1 * ($padding-global + $chart-tooltip-half-icon);
|
||||
}
|
||||
|
||||
&__marker {
|
||||
min-width: $font-size;
|
||||
max-width: $font-size;
|
||||
width: $font-size;
|
||||
height: $font-size;
|
||||
border: 1px solid rgba($color-white, 0.5);
|
||||
|
||||
@@ -0,0 +1,154 @@
|
||||
import React, { FC, useCallback, useEffect, useMemo, useState } from "preact/compat";
|
||||
import Autocomplete, { AutocompleteOptions } from "../../../Main/Autocomplete/Autocomplete";
|
||||
import { AUTOCOMPLETE_LIMITS } from "../../../../constants/queryAutocomplete";
|
||||
import { QueryEditorAutocompleteProps } from "../QueryEditor";
|
||||
import { getContextData, splitLogicalParts } from "./parser";
|
||||
import { ContextType, LogicalPart, LogicalPartType } from "./types";
|
||||
import { useFetchLogsQLOptions } from "./useFetchLogsQLOptions";
|
||||
import { pipeList } from "./pipes";
|
||||
|
||||
/**
 * LogsQL-aware autocomplete popup for the query editor.
 *
 * Splits the query into logical parts, determines what is being typed at
 * the caret (field name, field value, or pipe), fetches matching options,
 * and positions the popup at the caret's pixel offset inside the editor.
 */
const LogsQueryEditorAutocomplete: FC<QueryEditorAutocompleteProps> = ({
  value,
  anchorEl,
  caretPosition,
  hasHelperText,
  onSelect,
  onFoundOptions
}) => {
  // Pixel offset of the popup relative to the editor's anchor element.
  const [offsetPos, setOffsetPos] = useState({ top: 0, left: 0 });

  // Text on either side of the caret; with a non-collapsed selection the
  // whole value is treated as "before" and nothing as "after".
  const fullValue = useMemo(() => {
    if (caretPosition[0] !== caretPosition[1]) return { valueBeforeCursor: value, valueAfterCursor: "" };
    const valueBeforeCursor = value.substring(0, caretPosition[0]);
    const valueAfterCursor = value.substring(caretPosition[1]);
    return { valueBeforeCursor, valueAfterCursor };
  }, [value, caretPosition]);

  const logicalParts = useMemo(() => {
    return splitLogicalParts(value);
  }, [value]);

  // The logical part under the caret plus its completion context; undefined
  // when a selection is active or the caret is between parts.
  const contextData = useMemo(() => {
    if (caretPosition[0] !== caretPosition[1]) return;
    const part = logicalParts.find(p => caretPosition[0] >= p.position[0] && caretPosition[0] <= p.position[1]);
    if (!part) return;
    const cursorStartPosition = caretPosition[0] - part.position[0];
    return {
      ...part,
      ...getContextData(part, cursorStartPosition)
    };
  }, [logicalParts, caretPosition]);

  const { fieldNames, fieldValues, loading } = useFetchLogsQLOptions(contextData);

  // Option list depends on what the caret is completing.
  const options = useMemo(() => {
    switch (contextData?.contextType) {
      case ContextType.FilterName:
      case ContextType.FilterUnknown:
        return fieldNames;
      case ContextType.FilterValue:
        return fieldValues;
      case ContextType.PipeName:
        return pipeList;
      default:
        return [];
    }
  }, [contextData, fieldNames, fieldValues]);

  // Rebuilds the full query, substituting `insertValue` for the part with
  // the given id and re-joining parts with " | " (pipes) or " " (filters).
  const getUpdatedValue = (insertValue: string, logicalParts: LogicalPart[], id?: number) => {
    return logicalParts.reduce((acc, part) => {
      const value = part.id === id ? insertValue : part.value;
      const separator = part.type === LogicalPartType.Pipe ? " | " : " ";
      return `${acc}${separator}${value}`;
    }, "").trim();
  };

  // Adjusts the raw suggestion before insertion: field names get a trailing
  // colon; field values are quoted (except _stream selectors) and prefixed
  // with their filter name.
  const getModifyInsert = (insert: string, contextType: ContextType, value = "", insertType?: string) => {
    let modifiedInsert = insert;

    if (insertType === ContextType.FilterName) {
      modifiedInsert += ":";
    } else if (contextType === ContextType.FilterValue) {
      const insertWithQuotes = value.startsWith("_stream:") ? modifiedInsert : `"${modifiedInsert}"`;
      modifiedInsert = `${contextData?.filterName || ""}:${insertWithQuotes}`;
    }

    return modifiedInsert;
  };

  // Applies a selected suggestion and moves the caret past the insertion
  // (one extra step for pipe names to skip the following space).
  const handleSelect = useCallback((insert: string, item: AutocompleteOptions) => {
    const {
      id,
      contextType = ContextType.FilterUnknown,
      value = "",
      position = [0, 0]
    } = contextData || {};

    const insertValue = getModifyInsert(insert, contextType, value, item.type);
    const newValue = getUpdatedValue(insertValue, logicalParts, id);
    const updatedPosition = (position[0] || 1) + insertValue.length + (item.type === ContextType.PipeName ? 1 : 0);

    onSelect(newValue, updatedPosition);
  }, [contextData, logicalParts]);


  // Measures the caret's pixel position by mirroring the editor's text into
  // a temporary div with identical font/box metrics, placing a marker span
  // at the caret, and reading the marker's offset. The mirror is appended
  // to document.body only for the measurement and removed immediately.
  useEffect(() => {
    if (!anchorEl.current) {
      setOffsetPos({ top: 0, left: 0 });
      return;
    }

    const element = anchorEl.current.querySelector("textarea") || anchorEl.current;
    const style = window.getComputedStyle(element);
    const fontSize = `${style.getPropertyValue("font-size")}`;
    const fontFamily = `${style.getPropertyValue("font-family")}`;
    const lineHeight = parseInt(`${style.getPropertyValue("line-height")}`);

    const span = document.createElement("div");
    span.style.font = `${fontSize} ${fontFamily}`;
    span.style.padding = style.getPropertyValue("padding");
    span.style.lineHeight = `${lineHeight}px`;
    span.style.width = `${element.offsetWidth}px`;
    span.style.maxWidth = `${element.offsetWidth}px`;
    span.style.whiteSpace = style.getPropertyValue("white-space");
    span.style.overflowWrap = style.getPropertyValue("overflow-wrap");

    const marker = document.createElement("span");
    span.appendChild(document.createTextNode(fullValue.valueBeforeCursor || ""));
    span.appendChild(marker);
    span.appendChild(document.createTextNode(fullValue.valueAfterCursor || ""));
    document.body.appendChild(span);

    const spanRect = span.getBoundingClientRect();
    const markerRect = marker.getBoundingClientRect();

    // Shift one line up when the editor renders a helper-text line.
    const leftOffset = markerRect.left - spanRect.left;
    const topOffset = markerRect.bottom - spanRect.bottom - (hasHelperText ? lineHeight : 0);
    setOffsetPos({ top: topOffset, left: leftOffset });

    span.remove();
    marker.remove();
  }, [anchorEl, caretPosition, hasHelperText, fullValue]);

  return (
    <>
      <Autocomplete
        loading={loading}
        disabledFullScreen
        value={contextData?.valueContext || ""}
        options={options}
        anchor={anchorEl}
        minLength={0}
        offset={offsetPos}
        onSelect={handleSelect}
        onFoundOptions={onFoundOptions}
        maxDisplayResults={{
          limit: AUTOCOMPLETE_LIMITS.displayResults,
          message: "Please, specify the query more precisely."
        }}
      />
    </>
  );
};

export default LogsQueryEditorAutocomplete;
|
||||
@@ -0,0 +1,117 @@
|
||||
import { ContextData, ContextType, LogicalPart, LogicalPartPosition, LogicalPartType } from "./types";
|
||||
import { pipeList } from "./pipes";
|
||||
|
||||
// Logical operators recognized between filters (case-insensitive).
const BUILDER_OPERATORS = ["AND", "OR", "NOT"];
// Known pipe names, used to tell "typing a pipe name" from "typing its args".
const PIPE_NAMES = pipeList.map(p => p.value);

/**
 * Splits a raw LogsQL query into logical parts (filters, operators, pipes)
 * while respecting quotes and brackets, recording each part's character
 * range in the original string so the caret can be mapped back to a part.
 */
export const splitLogicalParts = (expr: string) => {
  // Positions must map 1:1 to the raw input, so no normalization is applied
  // (a former space-around-colon rewrite is intentionally disabled).
  const input = expr; //.replace(/\s*:\s*/g, ":");
  const parts: LogicalPart[] = [];
  let currentPart = "";
  // Becomes true once the first top-level "|" is seen; everything after it
  // is treated as pipe parts.
  let isPipePart = false;

  // Quote tracking: a part is not split while inside a quoted string.
  const quotes = ["'", "\"", "`"];
  let insideQuotes = false;
  let expectedQuote = "";

  // Bracket tracking: a part is not split while inside (), [] or {}.
  const openBrackets = ["(", "[", "{"];
  const closeBrackets = [")", "]", "}"];
  const brackets = [...openBrackets, ...closeBrackets];
  let insideBrackets = 0;

  let startIndex = 0;

  for (let i = 0; i < input.length; i++) {
    const char = input[i];

    // Toggle quote state; only the matching quote char closes the string.
    if (quotes.includes(char)) {
      const isClosedQuote: boolean = insideQuotes && (char === expectedQuote);
      insideQuotes = !isClosedQuote;
      expectedQuote = isClosedQuote ? "" : char;
    }

    // Track bracket nesting depth outside of quotes.
    if (!insideQuotes && brackets.includes(char)) {
      const dir = openBrackets.includes(char) ? 1 : -1;
      insideBrackets += dir;
    }

    // Top-level pipe: flush the accumulated part, trimming its recorded
    // range by the surrounding spaces.
    // NOTE(review): the flushed part is always pushed as a pipe (`true`),
    // so in "error|stats" the leading filter "error" would be classified
    // as a pipe — confirm whether the segment before the first "|" should
    // keep its pre-pipe classification.
    if ((!insideQuotes && !insideBrackets && char === "|")) {
      isPipePart = true;
      const countStartSpaces = currentPart.match(/^ */)?.[0].length || 0;
      const countEndSpaces = currentPart.match(/ *$/)?.[0].length || 0;
      pushPart(currentPart, true, [startIndex + countStartSpaces, i - countEndSpaces - 1], parts);
      currentPart = "";
      startIndex = i + 1;
      continue;
    }

    // Top-level space before any pipe: split filters/operators, but keep
    // "name: value" together (space adjacent to a colon does not split).
    if (!isPipePart && !insideQuotes && !insideBrackets && char === " ") {
      const nextStr = input.slice(i).replace(/^\s*/, "");
      const prevStr = input.slice(0, i).replace(/\s*$/, "");
      if (!nextStr.startsWith(":") && !prevStr.endsWith(":")) {
        pushPart(currentPart, false, [startIndex, i - 1], parts);
        currentPart = "";
        startIndex = i + 1;
        continue;
      }
    }

    currentPart += char;
  }

  // push the last part
  pushPart(currentPart, isPipePart, [startIndex, input.length], parts);

  return parts;
};
|
||||
|
||||
const pushPart = (currentPart: string, isPipePart: boolean, position: LogicalPartPosition, parts: LogicalPart[]) => {
|
||||
const trimmedPart = currentPart.trim();
|
||||
if (!trimmedPart) return;
|
||||
const isOperator = BUILDER_OPERATORS.includes(trimmedPart.toUpperCase());
|
||||
parts.push({
|
||||
id: parts.length,
|
||||
value: trimmedPart,
|
||||
position,
|
||||
type: isPipePart
|
||||
? LogicalPartType.Pipe
|
||||
: isOperator ? LogicalPartType.Operator : LogicalPartType.Filter,
|
||||
});
|
||||
};
|
||||
|
||||
/**
 * Derives the autocomplete context for the logical part under the cursor.
 *
 * Splits the part's value at the cursor and classifies what is being typed:
 * a filter field name, a filter value (with `filterName` extracted), or a
 * pipe name vs. pipe arguments. One layer of surrounding quotes is stripped
 * from `valueContext` so suggestions match the bare value.
 */
export const getContextData = (part: LogicalPart, cursorPos: number) => {
  const valueBeforeCursor = part.value.substring(0, cursorPos);
  const valueAfterCursor = part.value.substring(cursorPos);

  const metaData: ContextData = {
    valueBeforeCursor,
    valueAfterCursor,
    valueContext: part.value,
    contextType: ContextType.Unknown,
  };

  if (part.type === LogicalPartType.Filter) {
    // No colon anywhere: could be either a field name or a word filter.
    const noColon = !valueBeforeCursor.includes(":") && !valueAfterCursor.includes(":");
    if (noColon) {
      metaData.contextType = ContextType.FilterUnknown;
    } else if (valueBeforeCursor.includes(":")) {
      // Cursor sits after the colon: completing the filter's value.
      // NOTE(review): split(":") keeps only the segment between the first
      // and second colon; values containing ":" get truncated — confirm.
      const [filterName, filterValue] = valueBeforeCursor.split(":");
      metaData.contextType = ContextType.FilterValue;
      metaData.filterName = filterName;
      metaData.valueContext = filterValue;
    } else {
      // Colon exists only after the cursor: completing the field name.
      metaData.contextType = ContextType.FilterName;
    }
  } else if (part.type === LogicalPartType.Pipe) {
    // A known pipe-name prefix means the pipe's arguments are being typed.
    const valueStartWithPipe = PIPE_NAMES.some(p => part.value.startsWith(p));
    metaData.contextType = valueStartWithPipe ? ContextType.PipeValue : ContextType.PipeName;
  }

  // Strip a single leading/trailing quote character, if present.
  metaData.valueContext = metaData.valueContext.replace(/^["']|["']$/g, "");
  return metaData;
};
|
||||
@@ -0,0 +1,130 @@
|
||||
import React from "react";
|
||||
import { ContextType } from "./types";
|
||||
import { FunctionIcon } from "../../../Main/Icons";
|
||||
|
||||
const docsUrl = "https://docs.victoriametrics.com/victorialogs/logsql";
|
||||
const classLink = "vm-link vm-link_colored";
|
||||
|
||||
const prepareDescription = (text: string): string => {
|
||||
const replaceClass = `$1 target="_blank" class="${classLink}" $2`;
|
||||
const replaceHref = `$1 $2${docsUrl}#`;
|
||||
return text
|
||||
.replace(/(<a) (href=")#/gm, replaceHref)
|
||||
.replace(/(<a) (href="[^"]+")/gm, replaceClass);
|
||||
};
|
||||
|
||||
export const pipeList = [
|
||||
{
|
||||
"value": "copy",
|
||||
"description": "<a href=\"#copy-pipe\"><code>copy</code></a> copies <a href=\"https://docs.victoriametrics.com/victorialogs/keyconcepts/#data-model\">log fields</a>."
|
||||
},
|
||||
{
|
||||
"value": "delete",
|
||||
"description": "<a href=\"#delete-pipe\"><code>delete</code></a> deletes <a href=\"https://docs.victoriametrics.com/victorialogs/keyconcepts/#data-model\">log fields</a>."
|
||||
},
|
||||
{
|
||||
"value": "drop_empty_fields",
|
||||
"description": "<a href=\"#drop_empty_fields-pipe\"><code>drop_empty_fields</code></a> drops <a href=\"https://docs.victoriametrics.com/victorialogs/keyconcepts/#data-model\">log fields</a> with empty values."
|
||||
},
|
||||
{
|
||||
"value": "extract",
|
||||
"description": "<a href=\"#extract-pipe\"><code>extract</code></a> extracts the specified text into the given log fields."
|
||||
},
|
||||
{
|
||||
"value": "extract_regexp",
|
||||
"description": "<a href=\"#extract_regexp-pipe\"><code>extract_regexp</code></a> extracts the specified text into the given log fields via <a href=\"https://github.com/google/re2/wiki/Syntax\" rel=\"external\" target=\"_blank\">RE2 regular expressions</a>."
|
||||
},
|
||||
{
|
||||
"value": "field_names",
|
||||
"description": "<a href=\"#field_names-pipe\"><code>field_names</code></a> returns all the names of <a href=\"https://docs.victoriametrics.com/victorialogs/keyconcepts/#data-model\">log fields</a>."
|
||||
},
|
||||
{
|
||||
"value": "field_values",
|
||||
"description": "<a href=\"#field_values-pipe\"><code>field_values</code></a> returns all the values for the given <a href=\"https://docs.victoriametrics.com/victorialogs/keyconcepts/#data-model\">log field</a>."
|
||||
},
|
||||
{
|
||||
"value": "fields",
|
||||
"description": "<a href=\"#fields-pipe\"><code>fields</code></a> selects the given set of <a href=\"https://docs.victoriametrics.com/victorialogs/keyconcepts/#data-model\">log fields</a>."
|
||||
},
|
||||
{
|
||||
"value": "filter",
|
||||
"description": "<a href=\"#filter-pipe\"><code>filter</code></a> applies additional <a href=\"#filters\">filters</a> to results."
|
||||
},
|
||||
{
|
||||
"value": "format",
|
||||
"description": "<a href=\"#format-pipe\"><code>format</code></a> formats output field from input <a href=\"https://docs.victoriametrics.com/victorialogs/keyconcepts/#data-model\">log fields</a>."
|
||||
},
|
||||
{
|
||||
"value": "limit",
|
||||
"description": "<a href=\"#limit-pipe\"><code>limit</code></a> limits the number selected logs."
|
||||
},
|
||||
{
|
||||
"value": "math",
|
||||
"description": "<a href=\"#math-pipe\"><code>math</code></a> performs mathematical calculations over <a href=\"https://docs.victoriametrics.com/victorialogs/keyconcepts/#data-model\">log fields</a>."
|
||||
},
|
||||
{
|
||||
"value": "offset",
|
||||
"description": "<a href=\"#offset-pipe\"><code>offset</code></a> skips the given number of selected logs."
|
||||
},
|
||||
{
|
||||
"value": "pack_json",
|
||||
"description": "<a href=\"#pack_json-pipe\"><code>pack_json</code></a> packs <a href=\"https://docs.victoriametrics.com/victorialogs/keyconcepts/#data-model\">log fields</a> into JSON object."
|
||||
},
|
||||
{
|
||||
"value": "pack_logfmt",
|
||||
"description": "<a href=\"#pack_logfmt-pipe\"><code>pack_logfmt</code></a> packs <a href=\"https://docs.victoriametrics.com/victorialogs/keyconcepts/#data-model\">log fields</a> into <a href=\"https://brandur.org/logfmt\" rel=\"external\" target=\"_blank\">logfmt</a> message."
|
||||
},
|
||||
{
|
||||
"value": "rename",
|
||||
"description": "<a href=\"#rename-pipe\"><code>rename</code></a> renames <a href=\"https://docs.victoriametrics.com/victorialogs/keyconcepts/#data-model\">log fields</a>."
|
||||
},
|
||||
{
|
||||
"value": "replace",
|
||||
"description": "<a href=\"#replace-pipe\"><code>replace</code></a> replaces substrings in the specified <a href=\"https://docs.victoriametrics.com/victorialogs/keyconcepts/#data-model\">log fields</a>."
|
||||
},
|
||||
{
|
||||
"value": "replace_regexp",
|
||||
"description": "<a href=\"#replace_regexp-pipe\"><code>replace_regexp</code></a> updates <a href=\"https://docs.victoriametrics.com/victorialogs/keyconcepts/#data-model\">log fields</a> with regular expressions."
|
||||
},
|
||||
{
|
||||
"value": "sort",
|
||||
"description": "<a href=\"#sort-pipe\"><code>sort</code></a> sorts logs by the given <a href=\"https://docs.victoriametrics.com/victorialogs/keyconcepts/#data-model\">fields</a>."
|
||||
},
|
||||
{
|
||||
"value": "stats",
|
||||
"description": "<a href=\"#stats-pipe\"><code>stats</code></a> calculates various stats over the selected logs."
|
||||
},
|
||||
{
|
||||
"value": "stream_context",
|
||||
"description": "<a href=\"#stream_context-pipe\"><code>stream_context</code></a> allows selecting surrounding logs in front and after the matching logs\nper each <a href=\"/victorialogs/keyconcepts/#stream-fields\">log stream</a>."
|
||||
},
|
||||
{
|
||||
"value": "top",
|
||||
"description": "<a href=\"#top-pipe\"><code>top</code></a> returns top <code>N</code> field sets with the maximum number of matching logs."
|
||||
},
|
||||
{
|
||||
"value": "uniq",
|
||||
"description": "<a href=\"#uniq-pipe\"><code>uniq</code></a> returns unique log entires."
|
||||
},
|
||||
{
|
||||
"value": "unpack_json",
|
||||
"description": "<a href=\"#unpack_json-pipe\"><code>unpack_json</code></a> unpacks JSON messages from <a href=\"https://docs.victoriametrics.com/victorialogs/keyconcepts/#data-model\">log fields</a>."
|
||||
},
|
||||
{
|
||||
"value": "unpack_logfmt",
|
||||
"description": "<a href=\"#unpack_logfmt-pipe\"><code>unpack_logfmt</code></a> unpacks <a href=\"https://brandur.org/logfmt\" rel=\"external\" target=\"_blank\">logfmt</a> messages from <a href=\"https://docs.victoriametrics.com/victorialogs/keyconcepts/#data-model\">log fields</a>."
|
||||
},
|
||||
{
|
||||
"value": "unpack_syslog",
|
||||
"description": "<a href=\"#unpack_syslog-pipe\"><code>unpack_syslog</code></a> unpacks <a href=\"https://en.wikipedia.org/wiki/Syslog\" rel=\"external\" target=\"_blank\">syslog</a> messages from <a href=\"https://docs.victoriametrics.com/victorialogs/keyconcepts/#data-model\">log fields</a>."
|
||||
},
|
||||
{
|
||||
"value": "unroll",
|
||||
"description": "<a href=\"#unroll-pipe\"><code>unroll</code></a> unrolls JSON arrays from <a href=\"https://docs.victoriametrics.com/victorialogs/keyconcepts/#data-model\">log fields</a>."
|
||||
}
|
||||
].map(item => ({
|
||||
...item,
|
||||
type: ContextType.PipeName,
|
||||
icon: <FunctionIcon/>,
|
||||
description: prepareDescription(item.description),
|
||||
}));
|
||||
@@ -0,0 +1,31 @@
|
||||
/** Classification of a whitespace/pipe-separated chunk of a LogsQL query. */
export enum LogicalPartType {
  Filter = "Filter",
  Pipe = "Pipe",
  Operator = "Operator",
}

/** Character range of a part in the raw query string: [start, end]. */
export type LogicalPartPosition = [start: number, end: number];

/** A single parsed chunk of the query with its location and classification. */
export interface LogicalPart {
  // Ordinal index of the part within the parsed query.
  id: number;
  // Trimmed text of the part.
  value: string;
  type: LogicalPartType;
  position: LogicalPartPosition;
}

/** Autocomplete context computed for the part under the cursor. */
export interface ContextData {
  // Part text to the left of the cursor.
  valueBeforeCursor: string;
  // Part text to the right of the cursor.
  valueAfterCursor: string;
  contextType: ContextType;
  // Text used to filter suggestions (surrounding quotes stripped).
  valueContext: string;
  // Field name, set only for FilterValue contexts.
  filterName?: string;
}

/** What kind of token is currently being completed at the cursor. */
export enum ContextType {
  FilterName = "FilterName",
  FilterUnknown = "FilterUnknown",
  FilterValue = "FilterValue",
  // NOTE(review): value intentionally(?) differs from the key ("Pipes") and
  // is interpolated into strings elsewhere — confirm before renaming.
  PipeName = "Pipes",
  PipeValue = "PipeValue",
  Unknown = "Unknown",
}
|
||||
@@ -0,0 +1,137 @@
|
||||
import React, { useEffect, useState, useRef, Dispatch, SetStateAction } from "preact/compat";
|
||||
import dayjs from "dayjs";
|
||||
import { ContextData, ContextType } from "./types";
|
||||
import { FunctionIcon, LabelIcon, MetricIcon, ValueIcon } from "../../../Main/Icons";
|
||||
import { AutocompleteOptions } from "../../../Main/Autocomplete/Autocomplete";
|
||||
import { useAppState } from "../../../../state/common/StateContext";
|
||||
import { useTimeState } from "../../../../state/time/TimeStateContext";
|
||||
import { useCallback } from "react";
|
||||
import { AUTOCOMPLETE_LIMITS } from "../../../../constants/queryAutocomplete";
|
||||
import { LogsFiledValues } from "../../../../api/types";
|
||||
import { useLogsDispatch, useLogsState } from "../../../../state/logsPanel/LogsStateContext";
|
||||
|
||||
// Arguments for a single autocomplete fetch against the logsql select API.
type FetchDataArgs = {
  // API path suffix, e.g. "field_names" or "field_values".
  urlSuffix: string;
  // State setter receiving the processed options.
  setter: Dispatch<SetStateAction<AutocompleteOptions[]>>
  // Context type attached to every produced option (also selects the icon).
  type: ContextType;
  // Query-string parameters for the request.
  params?: URLSearchParams;
}

// Icon rendered next to a suggestion, keyed by its completion context.
const icons = {
  [ContextType.FilterName]: <MetricIcon/>,
  [ContextType.FilterUnknown]: <MetricIcon/>,
  [ContextType.FilterValue]: <ValueIcon/>,
  [ContextType.PipeName]: <FunctionIcon/>,
  [ContextType.PipeValue]: <LabelIcon/>,
  [ContextType.Unknown]: <ValueIcon/>
};
|
||||
|
||||
/**
 * Fetches LogsQL autocomplete options (field names and field values) for
 * the current completion context.
 *
 * Results are cached in the logs-panel state keyed by URL+params; any new
 * request aborts the in-flight one. Field names are fetched for
 * FilterName/FilterUnknown contexts, field values for FilterValue contexts.
 */
export const useFetchLogsQLOptions = (contextData?: ContextData) => {
  const { serverUrl } = useAppState();
  const { period: { start, end } } = useTimeState();
  const { autocompleteCache } = useLogsState();
  const dispatch = useLogsDispatch();

  const [loading, setLoading] = useState(false);

  const [fieldNames, setFieldNames] = useState<AutocompleteOptions[]>([]);
  const [fieldValues, setFieldValues] = useState<AutocompleteOptions[]>([]);

  // Shared controller so each new fetch cancels the previous one.
  const abortControllerRef = useRef(new AbortController());

  // Builds request params, widening the selected period to whole days so
  // cache keys stay stable while the user browses within a day.
  const getQueryParams = useCallback((params?: Record<string, string>) => {
    const startDay = dayjs(start * 1000).startOf("day").valueOf() / 1000;
    const endDay = dayjs(end * 1000).endOf("day").valueOf() / 1000;

    return new URLSearchParams({
      ...(params || {}),
      limit: `${AUTOCOMPLETE_LIMITS.queryLimit}`,
      start: `${startDay}`,
      end: `${endDay}`
    });
  }, [start, end]);

  // Converts raw API values into autocomplete options with context icons.
  const processData = (values: LogsFiledValues[], type: ContextType): AutocompleteOptions[] => {
    return values.map(v => ({
      value: v.value,
      type: `${type}`,
      icon: icons[type]
    }));
  };

  // Serves from cache when possible, otherwise fetches and caches the
  // result (an empty list is cached on failure to avoid refetch storms).
  // NOTE(review): when a request is aborted, `loading` is left true until
  // the replacing request settles — confirm this is acceptable.
  const fetchData = async ({ urlSuffix, setter, type, params }: FetchDataArgs) => {
    abortControllerRef.current.abort();
    abortControllerRef.current = new AbortController();
    const { signal } = abortControllerRef.current;
    const key = `${urlSuffix}?${params?.toString()}`;
    setLoading(true);
    try {
      const cachedData = autocompleteCache.get(key);
      if (cachedData) {
        setter(processData(cachedData, type));
        setLoading(false);
        return;
      }
      const response = await fetch(`${serverUrl}/select/logsql/${urlSuffix}?${params}`, { signal });
      if (response.ok) {
        const data = await response.json();
        const value = (data?.values || []) as LogsFiledValues[];
        setter(value ? processData(value, type) : []);
        dispatch({ type: "SET_AUTOCOMPLETE_CACHE", payload: { key, value } });
      }
      setLoading(false);
    } catch (e) {
      // AbortError is expected when a newer request supersedes this one.
      if (e instanceof Error && e.name !== "AbortError") {
        dispatch({ type: "SET_AUTOCOMPLETE_CACHE", payload: { key, value: [] } });
        setLoading(false);
        console.error(e);
      }
    }
  };

  // fetch field names
  useEffect(() => {
    const validContexts = [ContextType.FilterName, ContextType.FilterUnknown];
    const isInvalidContext = !validContexts.includes(contextData?.contextType || ContextType.Unknown);
    if (!serverUrl || isInvalidContext) {
      return;
    }

    setFieldNames([]);

    fetchData({
      urlSuffix: "field_names",
      setter: setFieldNames,
      type: ContextType.FilterName,
      params: getQueryParams({ query: "*" })
    });

    return () => abortControllerRef.current?.abort();
  }, [serverUrl, contextData]);

  // fetch field values
  useEffect(() => {
    const isInvalidContext = contextData?.contextType !== ContextType.FilterValue;
    if (!serverUrl || isInvalidContext || !contextData?.filterName) {
      return;
    }

    setFieldValues([]);

    fetchData({
      urlSuffix: "field_values",
      setter: setFieldValues,
      type: ContextType.FilterValue,
      params: getQueryParams({ query: "*", field: contextData.filterName })
    });

    return () => abortControllerRef.current?.abort();
  }, [serverUrl, contextData]);

  return {
    fieldNames,
    fieldValues,
    loading,
  };
};
|
||||
@@ -2,7 +2,6 @@ import React, { FC, useEffect, useRef, useState } from "preact/compat";
|
||||
import { KeyboardEvent } from "react";
|
||||
import { ErrorTypes } from "../../../types";
|
||||
import TextField from "../../Main/TextField/TextField";
|
||||
import QueryEditorAutocomplete from "./QueryEditorAutocomplete";
|
||||
import "./style.scss";
|
||||
import { QueryStats } from "../../../api/types";
|
||||
import { partialWarning, seriesFetchedWarning } from "./warningText";
|
||||
@@ -11,6 +10,16 @@ import useDeviceDetect from "../../../hooks/useDeviceDetect";
|
||||
import { useQueryState } from "../../../state/query/QueryStateContext";
|
||||
import debounce from "lodash.debounce";
|
||||
|
||||
export interface QueryEditorAutocompleteProps {
|
||||
value: string;
|
||||
anchorEl: React.RefObject<HTMLInputElement>;
|
||||
caretPosition: [number, number]; // [start, end]
|
||||
hasHelperText: boolean;
|
||||
includeFunctions: boolean;
|
||||
onSelect: (val: string, caretPosition: number) => void;
|
||||
onFoundOptions: (val: AutocompleteOptions[]) => void;
|
||||
}
|
||||
|
||||
export interface QueryEditorProps {
|
||||
onChange: (query: string) => void;
|
||||
onEnter: () => void;
|
||||
@@ -19,6 +28,7 @@ export interface QueryEditorProps {
|
||||
value: string;
|
||||
oneLiner?: boolean;
|
||||
autocomplete: boolean;
|
||||
autocompleteEl?: FC<QueryEditorAutocompleteProps>;
|
||||
error?: ErrorTypes | string;
|
||||
stats?: QueryStats;
|
||||
label: string;
|
||||
@@ -33,6 +43,7 @@ const QueryEditor: FC<QueryEditorProps> = ({
|
||||
onArrowUp,
|
||||
onArrowDown,
|
||||
autocomplete,
|
||||
autocompleteEl: AutocompleteEl,
|
||||
error,
|
||||
stats,
|
||||
label,
|
||||
@@ -47,7 +58,7 @@ const QueryEditor: FC<QueryEditorProps> = ({
|
||||
const [caretPositionInput, setCaretPositionInput] = useState<[number, number]>([0, 0]);
|
||||
const autocompleteAnchorEl = useRef<HTMLInputElement>(null);
|
||||
|
||||
const [showAutocomplete, setShowAutocomplete] = useState(autocomplete);
|
||||
const [showAutocomplete, setShowAutocomplete] = useState(!!AutocompleteEl);
|
||||
const debouncedSetShowAutocomplete = useRef(debounce(setShowAutocomplete, 500)).current;
|
||||
|
||||
const warning = [
|
||||
@@ -113,7 +124,7 @@ const QueryEditor: FC<QueryEditorProps> = ({
|
||||
};
|
||||
|
||||
useEffect(() => {
|
||||
setOpenAutocomplete(autocomplete);
|
||||
setOpenAutocomplete(!!AutocompleteEl && autocompleteQuick);
|
||||
}, [autocompleteQuick]);
|
||||
|
||||
useEffect(() => {
|
||||
@@ -140,8 +151,8 @@ const QueryEditor: FC<QueryEditorProps> = ({
|
||||
inputmode={"search"}
|
||||
caretPosition={caretPositionInput}
|
||||
/>
|
||||
{showAutocomplete && autocomplete && (
|
||||
<QueryEditorAutocomplete
|
||||
{showAutocomplete && autocomplete && AutocompleteEl && (
|
||||
<AutocompleteEl
|
||||
value={value}
|
||||
anchorEl={autocompleteAnchorEl}
|
||||
caretPosition={caretPositionAutocomplete}
|
||||
|
||||
@@ -1,20 +1,11 @@
|
||||
import React, { FC, useState, useEffect, useMemo, useCallback } from "preact/compat";
|
||||
import Autocomplete, { AutocompleteOptions } from "../../Main/Autocomplete/Autocomplete";
|
||||
import Autocomplete from "../../Main/Autocomplete/Autocomplete";
|
||||
import { useFetchQueryOptions } from "../../../hooks/useFetchQueryOptions";
|
||||
import { escapeRegexp, hasUnclosedQuotes } from "../../../utils/regexp";
|
||||
import useGetMetricsQL from "../../../hooks/useGetMetricsQL";
|
||||
import { QueryContextType } from "../../../types";
|
||||
import { AUTOCOMPLETE_LIMITS } from "../../../constants/queryAutocomplete";
|
||||
|
||||
interface QueryEditorAutocompleteProps {
|
||||
value: string;
|
||||
anchorEl: React.RefObject<HTMLElement>;
|
||||
caretPosition: [number, number]; // [start, end]
|
||||
hasHelperText: boolean;
|
||||
includeFunctions: boolean;
|
||||
onSelect: (val: string, caretPosition: number) => void;
|
||||
onFoundOptions: (val: AutocompleteOptions[]) => void;
|
||||
}
|
||||
import { QueryEditorAutocompleteProps } from "./QueryEditor";
|
||||
|
||||
const QueryEditorAutocomplete: FC<QueryEditorAutocompleteProps> = ({
|
||||
value,
|
||||
|
||||
@@ -0,0 +1,246 @@
|
||||
import React, { FC, useMemo, useState } from "preact/compat";
|
||||
import useBoolean from "../../../hooks/useBoolean";
|
||||
import { RestartIcon, SettingsIcon } from "../../Main/Icons";
|
||||
import Button from "../../Main/Button/Button";
|
||||
import Modal from "../../Main/Modal/Modal";
|
||||
import Tooltip from "../../Main/Tooltip/Tooltip";
|
||||
import { Logs } from "../../../api/types";
|
||||
import Select from "../../Main/Select/Select";
|
||||
import { useSearchParams } from "react-router-dom";
|
||||
import "./style.scss";
|
||||
import Switch from "../../Main/Switch/Switch";
|
||||
import TextField from "../../Main/TextField/TextField";
|
||||
import dayjs from "dayjs";
|
||||
import Hyperlink from "../../Main/Hyperlink/Hyperlink";
|
||||
import {
|
||||
LOGS_DISPLAY_FIELDS,
|
||||
LOGS_GROUP_BY,
|
||||
LOGS_DATE_FORMAT,
|
||||
LOGS_URL_PARAMS,
|
||||
WITHOUT_GROUPING
|
||||
} from "../../../constants/logs";
|
||||
|
||||
const {
|
||||
GROUP_BY,
|
||||
NO_WRAP_LINES,
|
||||
COMPACT_GROUP_HEADER,
|
||||
DISPLAY_FIELDS,
|
||||
DATE_FORMAT
|
||||
} = LOGS_URL_PARAMS;
|
||||
|
||||
const title = "Group view settings";
|
||||
|
||||
interface Props {
|
||||
logs: Logs[];
|
||||
}
|
||||
|
||||
const GroupLogsConfigurators: FC<Props> = ({ logs }) => {
|
||||
const [searchParams, setSearchParams] = useSearchParams();
|
||||
|
||||
const groupBy = searchParams.get(GROUP_BY) || LOGS_GROUP_BY;
|
||||
const noWrapLines = searchParams.get(NO_WRAP_LINES) === "true";
|
||||
const compactGroupHeader = searchParams.get(COMPACT_GROUP_HEADER) === "true";
|
||||
const displayFieldsString = searchParams.get(DISPLAY_FIELDS) || "";
|
||||
const displayFields = displayFieldsString ? displayFieldsString.split(",") : [];
|
||||
|
||||
const [dateFormat, setDateFormat] = useState(searchParams.get(DATE_FORMAT) || LOGS_DATE_FORMAT);
|
||||
const [errorFormat, setErrorFormat] = useState("");
|
||||
|
||||
const isGroupChanged = groupBy !== LOGS_GROUP_BY;
|
||||
const isDisplayFieldsChanged = displayFields.length > 0;
|
||||
const isTimeChanged = searchParams.get(DATE_FORMAT) !== LOGS_DATE_FORMAT;
|
||||
const hasChanges = [
|
||||
isGroupChanged,
|
||||
isDisplayFieldsChanged,
|
||||
noWrapLines,
|
||||
compactGroupHeader,
|
||||
isTimeChanged
|
||||
].some(Boolean);
|
||||
|
||||
const logsKeys = useMemo(() => {
|
||||
const excludeKeys = ["_msg", "_time"];
|
||||
const uniqKeys = Array.from(new Set(logs.map(l => Object.keys(l)).flat()));
|
||||
return uniqKeys.filter(k => !excludeKeys.includes(k));
|
||||
}, [logs]);
|
||||
|
||||
const {
|
||||
value: openModal,
|
||||
toggle: toggleOpen,
|
||||
setFalse: handleClose,
|
||||
} = useBoolean(false);
|
||||
|
||||
const handleSelectGroupBy = (key: string) => {
|
||||
searchParams.set(GROUP_BY, key);
|
||||
setSearchParams(searchParams);
|
||||
};
|
||||
|
||||
const handleSelectDisplayField = (value: string) => {
|
||||
const prev = displayFields;
|
||||
const newDisplayFields = prev.includes(value) ? prev.filter(v => v !== value) : [...prev, value];
|
||||
searchParams.set(DISPLAY_FIELDS, newDisplayFields.join(","));
|
||||
setSearchParams(searchParams);
|
||||
};
|
||||
|
||||
const handleResetDisplayFields = () => {
|
||||
searchParams.delete(DISPLAY_FIELDS);
|
||||
setSearchParams(searchParams);
|
||||
};
|
||||
|
||||
const toggleWrapLines = () => {
|
||||
searchParams.set(NO_WRAP_LINES, String(!noWrapLines));
|
||||
setSearchParams(searchParams);
|
||||
};
|
||||
|
||||
const toggleCompactGroupHeader = () => {
|
||||
searchParams.set(COMPACT_GROUP_HEADER, String(!compactGroupHeader));
|
||||
setSearchParams(searchParams);
|
||||
};
|
||||
|
||||
const handleChangeDateFormat = (format: string) => {
|
||||
const date = new Date();
|
||||
if (!dayjs(date, format, true).isValid()) {
|
||||
setErrorFormat("Invalid date format");
|
||||
}
|
||||
setDateFormat(format);
|
||||
};
|
||||
|
||||
const handleSaveAndClose = () => {
|
||||
searchParams.set(DATE_FORMAT, dateFormat);
|
||||
setSearchParams(searchParams);
|
||||
handleClose();
|
||||
};
|
||||
|
||||
const tooltipContent = () => {
|
||||
if (!hasChanges) return title;
|
||||
return (
|
||||
<div className="vm-group-logs-configurator__tooltip">
|
||||
<p>{title}</p>
|
||||
<hr/>
|
||||
<ul>
|
||||
{isGroupChanged && <li>Group by <code>{`"${groupBy}"`}</code></li>}
|
||||
{isDisplayFieldsChanged && <li>Display fields: {displayFields.length || 1}</li>}
|
||||
{noWrapLines && <li>Single-line text is enabled</li>}
|
||||
{compactGroupHeader && <li>Compact group header is enabled</li>}
|
||||
{isTimeChanged && <li>Date format: <code>{dateFormat}</code></li>}
|
||||
</ul>
|
||||
</div>
|
||||
);
|
||||
};
|
||||
|
||||
return (
|
||||
<>
|
||||
<div className="vm-group-logs-configurator-button">
|
||||
<Tooltip title={tooltipContent()}>
|
||||
<Button
|
||||
variant="text"
|
||||
startIcon={<SettingsIcon/>}
|
||||
onClick={toggleOpen}
|
||||
ariaLabel={title}
|
||||
/>
|
||||
</Tooltip>
|
||||
{hasChanges && <span className="vm-group-logs-configurator-button__marker"/>}
|
||||
</div>
|
||||
{openModal && (
|
||||
<Modal
|
||||
title={title}
|
||||
onClose={handleSaveAndClose}
|
||||
>
|
||||
<div className="vm-group-logs-configurator">
|
||||
<div className="vm-group-logs-configurator-item">
|
||||
<Select
|
||||
value={groupBy}
|
||||
list={[WITHOUT_GROUPING, ...logsKeys]}
|
||||
label="Group by field"
|
||||
placeholder="Group by field"
|
||||
onChange={handleSelectGroupBy}
|
||||
searchable
|
||||
/>
|
||||
<Tooltip title={"Reset grouping"}>
|
||||
<Button
|
||||
variant="text"
|
||||
color="primary"
|
||||
startIcon={<RestartIcon/>}
|
||||
onClick={() => handleSelectGroupBy(LOGS_GROUP_BY)}
|
||||
/>
|
||||
</Tooltip>
|
||||
<span className="vm-group-logs-configurator-item__info">
|
||||
Select a field to group logs by (default: <code>{LOGS_GROUP_BY}</code>).
|
||||
</span>
|
||||
</div>
|
||||
|
||||
<div className="vm-group-logs-configurator-item">
|
||||
<Select
|
||||
value={displayFields}
|
||||
list={logsKeys}
|
||||
label="Display fields"
|
||||
placeholder="Display fields"
|
||||
onChange={handleSelectDisplayField}
|
||||
searchable
|
||||
/>
|
||||
<Tooltip title={"Clear fields"}>
|
||||
<Button
|
||||
variant="text"
|
||||
color="primary"
|
||||
startIcon={<RestartIcon/>}
|
||||
onClick={handleResetDisplayFields}
|
||||
/>
|
||||
</Tooltip>
|
||||
<span className="vm-group-logs-configurator-item__info">
|
||||
Select fields to display instead of the message (default: <code>{LOGS_DISPLAY_FIELDS}</code>).
|
||||
</span>
|
||||
</div>
|
||||
|
||||
<div className="vm-group-logs-configurator-item">
|
||||
<TextField
|
||||
autofocus
|
||||
label="Date format"
|
||||
value={dateFormat}
|
||||
onChange={handleChangeDateFormat}
|
||||
error={errorFormat}
|
||||
/>
|
||||
<Tooltip title={"Reset format"}>
|
||||
<Button
|
||||
variant="text"
|
||||
color="primary"
|
||||
startIcon={<RestartIcon/>}
|
||||
onClick={() => setDateFormat(LOGS_DATE_FORMAT)}
|
||||
/>
|
||||
</Tooltip>
|
||||
<span className="vm-group-logs-configurator-item__info vm-group-logs-configurator-item__info_input">
|
||||
Set the date format (e.g., <code>YYYY-MM-DD HH:mm:ss</code>).
|
||||
Learn more in <Hyperlink
|
||||
href="https://day.js.org/docs/en/display/format"
|
||||
>this documentation</Hyperlink>. <br/>
|
||||
Your current date format: <code>{dayjs().format(dateFormat || LOGS_DATE_FORMAT)}</code>
|
||||
</span>
|
||||
</div>
|
||||
|
||||
<div className="vm-group-logs-configurator-item">
|
||||
<Switch
|
||||
value={noWrapLines}
|
||||
onChange={toggleWrapLines}
|
||||
label="Single-line message"
|
||||
/>
|
||||
<span className="vm-group-logs-configurator-item__info">
|
||||
Displays message in a single line and truncates it with an ellipsis if it exceeds the available space
|
||||
</span>
|
||||
</div>
|
||||
|
||||
<div className="vm-group-logs-configurator-item">
|
||||
<Switch
|
||||
value={compactGroupHeader}
|
||||
onChange={toggleCompactGroupHeader}
|
||||
label="Compact group header"
|
||||
/>
|
||||
<span className="vm-group-logs-configurator-item__info">
|
||||
Shows group headers in one line with a "+N more" badge for extra fields.
|
||||
</span>
|
||||
</div>
|
||||
</div>
|
||||
</Modal>
|
||||
)}
|
||||
</>
|
||||
);
|
||||
};
|
||||
|
||||
export default GroupLogsConfigurators;
|
||||
@@ -0,0 +1,48 @@
|
||||
@use "src/styles/variables" as *;
|
||||
|
||||
.vm-group-logs-configurator {
|
||||
display: grid;
|
||||
gap: calc($padding-large * 2);
|
||||
padding: $padding-global 0;
|
||||
width: 600px;
|
||||
|
||||
&-item {
|
||||
display: grid;
|
||||
grid-template-columns: 1fr 31px;
|
||||
align-items: center;
|
||||
justify-content: stretch;
|
||||
gap: 0 $padding-small;
|
||||
|
||||
&__info {
|
||||
margin-top: $padding-small;
|
||||
grid-column: 1/span 2;
|
||||
font-size: $font-size-small;
|
||||
color: $color-text-secondary;
|
||||
line-height: 130%;
|
||||
|
||||
&_input {
|
||||
margin-top: 0;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
&-button {
|
||||
position: relative;
|
||||
|
||||
&__marker {
|
||||
position: absolute;
|
||||
top: 6px;
|
||||
left: 6px;
|
||||
width: 5px;
|
||||
height: 5px;
|
||||
border-radius: 50%;
|
||||
background-color: $color-secondary;
|
||||
}
|
||||
}
|
||||
|
||||
&__tooltip {
|
||||
ul {
|
||||
list-style-position: inside;
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -30,6 +30,10 @@ const Accordion: FC<AccordionProps> = ({
|
||||
onChange && onChange(isOpen);
|
||||
}, [isOpen]);
|
||||
|
||||
useEffect(() => {
|
||||
setIsOpen(defaultExpanded);
|
||||
}, [defaultExpanded]);
|
||||
|
||||
return (
|
||||
<>
|
||||
<header
|
||||
|
||||
@@ -28,7 +28,7 @@ interface AutocompleteProps {
|
||||
offset?: {top: number, left: number}
|
||||
maxDisplayResults?: {limit: number, message?: string}
|
||||
loading?: boolean;
|
||||
onSelect: (val: string) => void
|
||||
onSelect: (val: string, item: AutocompleteOptions) => void
|
||||
onOpenAutocomplete?: (val: boolean) => void
|
||||
onFoundOptions?: (val: AutocompleteOptions[]) => void
|
||||
onChangeWrapperRef?: (elementRef: React.RefObject<HTMLElement>) => void
|
||||
@@ -97,9 +97,9 @@ const Autocomplete: FC<AutocompleteProps> = ({
|
||||
return noOptionsText && !foundOptions.length;
|
||||
}, [noOptionsText,foundOptions]);
|
||||
|
||||
const createHandlerSelect = (item: string) => () => {
|
||||
const createHandlerSelect = (item: AutocompleteOptions) => () => {
|
||||
if (disabled) return;
|
||||
onSelect(item);
|
||||
onSelect(item.value, item);
|
||||
if (!selected) handleCloseAutocomplete();
|
||||
};
|
||||
|
||||
@@ -141,7 +141,7 @@ const Autocomplete: FC<AutocompleteProps> = ({
|
||||
|
||||
if (key === "Enter") {
|
||||
const item = foundOptions[focusOption.index];
|
||||
item && onSelect(item.value);
|
||||
item && onSelect(item.value, item);
|
||||
if (!selected) handleCloseAutocomplete();
|
||||
}
|
||||
|
||||
@@ -206,7 +206,7 @@ const Autocomplete: FC<AutocompleteProps> = ({
|
||||
})}
|
||||
id={`$autocomplete$${option.value}`}
|
||||
key={`${i}${option.value}`}
|
||||
onClick={createHandlerSelect(option.value)}
|
||||
onClick={createHandlerSelect(option)}
|
||||
onMouseEnter={createHandlerMouseEnter(i)}
|
||||
onMouseLeave={handlerMouseLeave}
|
||||
>
|
||||
|
||||
@@ -581,3 +581,45 @@ export const CommentIcon = () => (
|
||||
></path>
|
||||
</svg>
|
||||
);
|
||||
|
||||
export const FilterIcon = () => (
|
||||
<svg
|
||||
viewBox="0 0 24 24"
|
||||
fill="currentColor"
|
||||
>
|
||||
<path
|
||||
d="M4.25 5.61C6.27 8.2 10 13 10 13v6c0 .55.45 1 1 1h2c.55 0 1-.45 1-1v-6s3.72-4.8 5.74-7.39c.51-.66.04-1.61-.79-1.61H5.04c-.83 0-1.3.95-.79 1.61"
|
||||
></path>
|
||||
</svg>
|
||||
);
|
||||
|
||||
export const FilterOffIcon = () => (
|
||||
<svg
|
||||
viewBox="0 0 24 24"
|
||||
fill="currentColor"
|
||||
>
|
||||
<path
|
||||
d="M19.79 5.61C20.3 4.95 19.83 4 19 4H6.83l7.97 7.97zM2.81 2.81 1.39 4.22 10 13v6c0 .55.45 1 1 1h2c.55 0 1-.45 1-1v-2.17l5.78 5.78 1.41-1.41z"
|
||||
></path>
|
||||
</svg>
|
||||
);
|
||||
|
||||
export const OpenNewIcon = () => (
|
||||
<svg
|
||||
viewBox="0 0 24 24"
|
||||
fill="currentColor"
|
||||
>
|
||||
<path
|
||||
d="M19 19H5V5h7V3H5c-1.11 0-2 .9-2 2v14c0 1.1.89 2 2 2h14c1.1 0 2-.9 2-2v-7h-2zM14 3v2h3.59l-9.83 9.83 1.41 1.41L19 6.41V10h2V3z"
|
||||
></path>
|
||||
</svg>
|
||||
);
|
||||
|
||||
export const ModalIcon = () => (
|
||||
<svg
|
||||
viewBox="0 0 24 24"
|
||||
fill="currentColor"
|
||||
>
|
||||
<path d="M19 4H5c-1.11 0-2 .9-2 2v12c0 1.1.89 2 2 2h14c1.1 0 2-.9 2-2V6c0-1.1-.89-2-2-2m0 14H5V8h14z"></path>
|
||||
</svg>
|
||||
);
|
||||
|
||||
@@ -67,11 +67,11 @@ const Modal: FC<ModalProps> = ({
|
||||
})}
|
||||
onMouseDown={onClose}
|
||||
>
|
||||
<div className="vm-modal-content">
|
||||
<div
|
||||
className="vm-modal-content-header"
|
||||
onMouseDown={handleMouseDown}
|
||||
>
|
||||
<div
|
||||
className="vm-modal-content"
|
||||
onMouseDown={handleMouseDown}
|
||||
>
|
||||
<div className="vm-modal-content-header">
|
||||
{title && (
|
||||
<div className="vm-modal-content-header__title">
|
||||
{title}
|
||||
@@ -91,7 +91,6 @@ const Modal: FC<ModalProps> = ({
|
||||
{/* tabIndex to fix Ctrl-A */}
|
||||
<div
|
||||
className="vm-modal-content-body"
|
||||
onMouseDown={handleMouseDown}
|
||||
tabIndex={0}
|
||||
>
|
||||
{children}
|
||||
|
||||
@@ -15,9 +15,10 @@ interface PopperProps {
|
||||
open: boolean
|
||||
onClose: () => void
|
||||
buttonRef: React.RefObject<HTMLElement>
|
||||
placement?: "bottom-right" | "bottom-left" | "top-left" | "top-right"
|
||||
placement?: "bottom-right" | "bottom-left" | "top-left" | "top-right" | "fixed"
|
||||
placementPosition?: { top: number, left: number } | null
|
||||
animation?: string
|
||||
offset?: {top: number, left: number}
|
||||
offset?: { top: number, left: number }
|
||||
clickOutside?: boolean,
|
||||
fullWidth?: boolean
|
||||
title?: string
|
||||
@@ -29,6 +30,7 @@ const Popper: FC<PopperProps> = ({
|
||||
children,
|
||||
buttonRef,
|
||||
placement = "bottom-left",
|
||||
placementPosition,
|
||||
open = false,
|
||||
onClose,
|
||||
offset = { top: 6, left: 0 },
|
||||
@@ -92,13 +94,18 @@ const Popper: FC<PopperProps> = ({
|
||||
if (needAlignRight) position.left = buttonPos.right - popperSize.width;
|
||||
if (needAlignTop) position.top = buttonPos.top - popperSize.height - offsetTop;
|
||||
|
||||
const { innerWidth, innerHeight } = window;
|
||||
const margin = 20;
|
||||
if (placement === "fixed" && placementPosition) {
|
||||
position.top = Math.max(placementPosition.top + offset.top, 0);
|
||||
position.left = Math.max(placementPosition.left + offset.left, 0);
|
||||
return position;
|
||||
}
|
||||
|
||||
const isOverflowBottom = (position.top + popperSize.height + margin) > innerHeight;
|
||||
const isOverflowTop = (position.top - margin) < 0;
|
||||
const isOverflowRight = (position.left + popperSize.width + margin) > innerWidth;
|
||||
const isOverflowLeft = (position.left - margin) < 0;
|
||||
const { innerWidth, innerHeight } = window;
|
||||
|
||||
const isOverflowBottom = (position.top + popperSize.height) > innerHeight;
|
||||
const isOverflowTop = (position.top) < 0;
|
||||
const isOverflowRight = (position.left + popperSize.width) > innerWidth;
|
||||
const isOverflowLeft = (position.left) < 0;
|
||||
|
||||
if (isOverflowBottom) position.top = buttonPos.top - popperSize.height - offsetTop;
|
||||
if (isOverflowTop) position.top = buttonPos.height + buttonPos.top + offsetTop;
|
||||
@@ -106,11 +113,11 @@ const Popper: FC<PopperProps> = ({
|
||||
if (isOverflowLeft) position.left = buttonPos.left + offsetLeft;
|
||||
|
||||
if (fullWidth) position.width = `${buttonPos.width}px`;
|
||||
if (position.top < 0) position.top = 20;
|
||||
if (position.left < 0) position.left = 20;
|
||||
if (position.top < 0) position.top = 0;
|
||||
if (position.left < 0) position.left = 0;
|
||||
|
||||
return position;
|
||||
},[buttonRef, placement, isOpen, children, fullWidth]);
|
||||
}, [buttonRef, placement, isOpen, children, fullWidth]);
|
||||
|
||||
const handleClickClose = (e: ReactMouseEvent<HTMLButtonElement, MouseEvent>) => {
|
||||
e.stopPropagation();
|
||||
@@ -131,10 +138,10 @@ const Popper: FC<PopperProps> = ({
|
||||
if (!popperRef.current || !isOpen || (isMobile && !disabledFullScreen)) return;
|
||||
const { right, width } = popperRef.current.getBoundingClientRect();
|
||||
if (right > window.innerWidth) {
|
||||
const left = window.innerWidth - 20 - width;
|
||||
popperRef.current.style.left = left < window.innerWidth ? "0" : `${left}px`;
|
||||
const left = window.innerWidth - width;
|
||||
popperRef.current.style.left = `${left}px`;
|
||||
}
|
||||
}, [isOpen, popperRef]);
|
||||
}, [isOpen, popperRef, placementPosition]);
|
||||
|
||||
const handlePopstate = useCallback(() => {
|
||||
if (isOpen && isMobile && !disabledFullScreen) {
|
||||
|
||||
@@ -11,7 +11,7 @@
|
||||
border-radius: $border-radius-small;
|
||||
|
||||
&_open {
|
||||
z-index: 101;
|
||||
z-index: 100;
|
||||
opacity: 1;
|
||||
transform-origin: top center;
|
||||
animation: vm-slider 150ms cubic-bezier(0.280, 0.840, 0.420, 1.1);
|
||||
|
||||
@@ -33,9 +33,9 @@
|
||||
align-items: center;
|
||||
justify-content: center;
|
||||
background-color: $color-hover-black;
|
||||
padding: 2px 2px 2px 6px;
|
||||
padding: 2px 2px 2px $padding-small;
|
||||
border-radius: $border-radius-small;
|
||||
font-size: $font-size;
|
||||
font-size: $font-size-small;
|
||||
line-height: $font-size;
|
||||
max-width: 100%;
|
||||
|
||||
|
||||
@@ -11,7 +11,7 @@ import useBoolean from "../../../hooks/useBoolean";
|
||||
import TextField from "../../Main/TextField/TextField";
|
||||
import { KeyboardEvent, useState } from "react";
|
||||
import Modal from "../../Main/Modal/Modal";
|
||||
import { getFromStorage, removeFromStorage, saveToStorage } from "../../../utils/storage";
|
||||
import { useSearchParams } from "react-router-dom";
|
||||
|
||||
const title = "Table settings";
|
||||
|
||||
@@ -30,6 +30,8 @@ const TableSettings: FC<TableSettingsProps> = ({
|
||||
onChangeColumns,
|
||||
toggleTableCompact
|
||||
}) => {
|
||||
const [searchParams, setSearchParams] = useSearchParams();
|
||||
|
||||
const buttonRef = useRef<HTMLDivElement>(null);
|
||||
|
||||
const {
|
||||
@@ -38,11 +40,6 @@ const TableSettings: FC<TableSettingsProps> = ({
|
||||
setFalse: handleClose,
|
||||
} = useBoolean(false);
|
||||
|
||||
const {
|
||||
value: saveColumns,
|
||||
toggle: toggleSaveColumns,
|
||||
} = useBoolean(Boolean(getFromStorage("TABLE_COLUMNS")));
|
||||
|
||||
const [searchColumn, setSearchColumn] = useState("");
|
||||
const [indexFocusItem, setIndexFocusItem] = useState(-1);
|
||||
|
||||
@@ -60,15 +57,34 @@ const TableSettings: FC<TableSettingsProps> = ({
|
||||
return filteredColumns.every(col => selectedColumns.includes(col));
|
||||
}, [selectedColumns, filteredColumns]);
|
||||
|
||||
const handleChangeDisplayColumns = (displayColumns: string[]) => {
|
||||
onChangeColumns(displayColumns);
|
||||
|
||||
const updatedParams = new URLSearchParams(searchParams.toString());
|
||||
const isAllCheck = displayColumns.length === columns.length;
|
||||
|
||||
if (isAllCheck) {
|
||||
updatedParams.delete("columns");
|
||||
} else {
|
||||
updatedParams.set("columns", displayColumns.map(encodeURIComponent).join(","));
|
||||
}
|
||||
|
||||
setSearchParams(updatedParams);
|
||||
};
|
||||
|
||||
const handleChange = (key: string) => {
|
||||
onChangeColumns(selectedColumns.includes(key) ? selectedColumns.filter(col => col !== key) : [...selectedColumns, key]);
|
||||
const displayColumns = selectedColumns.includes(key)
|
||||
? selectedColumns.filter(col => col !== key)
|
||||
: [...selectedColumns, key];
|
||||
|
||||
handleChangeDisplayColumns(displayColumns);
|
||||
};
|
||||
|
||||
const toggleAllColumns = () => {
|
||||
if (isAllChecked) {
|
||||
onChangeColumns(selectedColumns.filter(col => !filteredColumns.includes(col)));
|
||||
handleChangeDisplayColumns(selectedColumns.filter(col => !filteredColumns.includes(col)));
|
||||
} else {
|
||||
onChangeColumns(filteredColumns);
|
||||
handleChangeDisplayColumns(filteredColumns);
|
||||
}
|
||||
};
|
||||
|
||||
@@ -95,22 +111,16 @@ const TableSettings: FC<TableSettingsProps> = ({
|
||||
};
|
||||
|
||||
useEffect(() => {
|
||||
if (arrayEquals(columns, selectedColumns) || saveColumns) return;
|
||||
if (arrayEquals(columns, selectedColumns) || searchParams.has("columns")) return;
|
||||
onChangeColumns(columns);
|
||||
}, [columns]);
|
||||
|
||||
useEffect(() => {
|
||||
if (!saveColumns) {
|
||||
removeFromStorage(["TABLE_COLUMNS"]);
|
||||
} else if (selectedColumns.length) {
|
||||
saveToStorage("TABLE_COLUMNS", selectedColumns.join(","));
|
||||
}
|
||||
}, [saveColumns, selectedColumns]);
|
||||
|
||||
useEffect(() => {
|
||||
const saveColumns = getFromStorage("TABLE_COLUMNS") as string;
|
||||
if (!saveColumns) return;
|
||||
onChangeColumns(saveColumns.split(","));
|
||||
const hasColumns = searchParams.has("columns");
|
||||
if (!hasColumns) return;
|
||||
const columnsParam = searchParams.get("columns") || "";
|
||||
const columnsArray = columnsParam.split(",").map(decodeURIComponent).filter(Boolean);
|
||||
onChangeColumns(columnsArray);
|
||||
}, []);
|
||||
|
||||
return (
|
||||
@@ -183,19 +193,6 @@ const TableSettings: FC<TableSettingsProps> = ({
|
||||
</div>
|
||||
))}
|
||||
</div>
|
||||
<div className="vm-table-settings-modal-preserve">
|
||||
<Checkbox
|
||||
checked={saveColumns}
|
||||
onChange={toggleSaveColumns}
|
||||
label={"Preserve column settings"}
|
||||
disabled={tableCompact}
|
||||
color={"primary"}
|
||||
/>
|
||||
<p className="vm-table-settings-modal-preserve__info">
|
||||
This label indicates that when the checkbox is activated,
|
||||
the current column configurations will not be reset.
|
||||
</p>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
<div className="vm-table-settings-modal-section">
|
||||
|
||||
@@ -3,6 +3,7 @@
|
||||
.vm-table-settings {
|
||||
&-modal {
|
||||
.vm-modal-content-body {
|
||||
min-width: clamp(300px, 600px, 90vw);
|
||||
padding: 0;
|
||||
}
|
||||
|
||||
@@ -83,16 +84,5 @@
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
&-preserve {
|
||||
padding: $padding-global;
|
||||
|
||||
&__info {
|
||||
padding-top: $padding-small;
|
||||
font-size: $font-size-small;
|
||||
color: $color-text-secondary;
|
||||
line-height: 130%;
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,2 +1,22 @@
|
||||
import { DATE_TIME_FORMAT } from "./date";
|
||||
|
||||
export const LOGS_ENTRIES_LIMIT = 50;
|
||||
export const LOGS_BARS_VIEW = 100;
|
||||
export const LOGS_LIMIT_HITS = 5;
|
||||
|
||||
// "Ungrouped" is a string that is used as a value for the "groupBy" parameter.
|
||||
export const WITHOUT_GROUPING = "Ungrouped";
|
||||
|
||||
// Default values for the logs configurators.
|
||||
export const LOGS_GROUP_BY = "_stream";
|
||||
export const LOGS_DISPLAY_FIELDS = "_msg";
|
||||
export const LOGS_DATE_FORMAT = `${DATE_TIME_FORMAT}.SSS`;
|
||||
|
||||
// URL parameters for the logs page.
|
||||
export const LOGS_URL_PARAMS = {
|
||||
GROUP_BY: "groupBy",
|
||||
DISPLAY_FIELDS: "displayFields",
|
||||
NO_WRAP_LINES: "noWrapLines",
|
||||
COMPACT_GROUP_HEADER: "compactGroupHeader",
|
||||
DATE_FORMAT: "dateFormat",
|
||||
};
|
||||
|
||||
@@ -20,7 +20,7 @@ const useClickOutside = <T extends HTMLElement = HTMLElement>(
|
||||
handler(event); // Call the handler only if the click is outside of the element passed.
|
||||
}, [ref, handler]);
|
||||
|
||||
useEventListener("mousedown", listener);
|
||||
useEventListener("mouseup", listener);
|
||||
useEventListener("touchstart", listener);
|
||||
};
|
||||
|
||||
|
||||
@@ -25,6 +25,7 @@ import { QueryStats } from "../../../api/types";
|
||||
import { usePrettifyQuery } from "./hooks/usePrettifyQuery";
|
||||
import QueryHistory from "../QueryHistory/QueryHistory";
|
||||
import AnomalyConfig from "../../../components/ExploreAnomaly/AnomalyConfig";
|
||||
import QueryEditorAutocomplete from "../../../components/Configurators/QueryEditor/QueryEditorAutocomplete";
|
||||
|
||||
export interface QueryConfiguratorProps {
|
||||
queryErrors: string[];
|
||||
@@ -216,6 +217,7 @@ const QueryConfigurator: FC<QueryConfiguratorProps> = ({
|
||||
<QueryEditor
|
||||
value={stateQuery[i]}
|
||||
autocomplete={!hideButtons?.autocomplete && (autocomplete || autocompleteQuick)}
|
||||
autocompleteEl={QueryEditorAutocomplete}
|
||||
error={queryErrors[i]}
|
||||
stats={stats[i]}
|
||||
onArrowUp={createHandlerArrow(-1, i)}
|
||||
|
||||
@@ -69,7 +69,7 @@ const ExploreLogs: FC = () => {
|
||||
};
|
||||
|
||||
const handleApplyFilter = (val: string) => {
|
||||
setQuery(prev => `_stream: ${val === "other" ? "{}" : val} AND (${prev})`);
|
||||
setQuery(prev => `${val} AND (${prev})`);
|
||||
setIsUpdatingQuery(true);
|
||||
};
|
||||
|
||||
|
||||
@@ -6,6 +6,9 @@ import useDeviceDetect from "../../../hooks/useDeviceDetect";
|
||||
import Button from "../../../components/Main/Button/Button";
|
||||
import QueryEditor from "../../../components/Configurators/QueryEditor/QueryEditor";
|
||||
import TextField from "../../../components/Main/TextField/TextField";
|
||||
import LogsQueryEditorAutocomplete from "../../../components/Configurators/QueryEditor/LogsQL/LogsQueryEditorAutocomplete";
|
||||
import { useQueryDispatch, useQueryState } from "../../../state/query/QueryStateContext";
|
||||
import Switch from "../../../components/Main/Switch/Switch";
|
||||
|
||||
export interface ExploreLogHeaderProps {
|
||||
query: string;
|
||||
@@ -27,6 +30,8 @@ const ExploreLogsHeader: FC<ExploreLogHeaderProps> = ({
|
||||
onRun,
|
||||
}) => {
|
||||
const { isMobile } = useDeviceDetect();
|
||||
const { autocomplete } = useQueryState();
|
||||
const queryDispatch = useQueryDispatch();
|
||||
|
||||
const [errorLimit, setErrorLimit] = useState("");
|
||||
const [limitInput, setLimitInput] = useState(limit);
|
||||
@@ -42,6 +47,10 @@ const ExploreLogsHeader: FC<ExploreLogHeaderProps> = ({
|
||||
}
|
||||
};
|
||||
|
||||
const onChangeAutocomplete = () => {
|
||||
queryDispatch({ type: "TOGGLE_AUTOCOMPLETE" });
|
||||
};
|
||||
|
||||
useEffect(() => {
|
||||
setLimitInput(limit);
|
||||
}, [limit]);
|
||||
@@ -57,7 +66,8 @@ const ExploreLogsHeader: FC<ExploreLogHeaderProps> = ({
|
||||
<div className="vm-explore-logs-header-top">
|
||||
<QueryEditor
|
||||
value={query}
|
||||
autocomplete={false}
|
||||
autocomplete={autocomplete}
|
||||
autocompleteEl={LogsQueryEditorAutocomplete}
|
||||
onArrowUp={() => null}
|
||||
onArrowDown={() => null}
|
||||
onEnter={onRun}
|
||||
@@ -75,7 +85,14 @@ const ExploreLogsHeader: FC<ExploreLogHeaderProps> = ({
|
||||
/>
|
||||
</div>
|
||||
<div className="vm-explore-logs-header-bottom">
|
||||
<div className="vm-explore-logs-header-bottom-contols"></div>
|
||||
<div className="vm-explore-logs-header-bottom-contols">
|
||||
<Switch
|
||||
label={"Autocomplete"}
|
||||
value={autocomplete}
|
||||
onChange={onChangeAutocomplete}
|
||||
fullWidth={isMobile}
|
||||
/>
|
||||
</div>
|
||||
<div className="vm-explore-logs-header-bottom-helpful">
|
||||
<a
|
||||
className="vm-link vm-link_with-icon"
|
||||
|
||||
@@ -26,6 +26,9 @@
|
||||
}
|
||||
|
||||
&-contols {
|
||||
display: flex;
|
||||
align-items: center;
|
||||
justify-content: flex-start;
|
||||
flex-grow: 1;
|
||||
}
|
||||
|
||||
|
||||
@@ -1,24 +1,19 @@
|
||||
import React, { FC, useCallback, useEffect, useMemo, useRef } from "preact/compat";
|
||||
import { MouseEvent, useState } from "react";
|
||||
import React, { FC, useCallback, useEffect, useMemo } from "preact/compat";
|
||||
import { useState } from "react";
|
||||
import "./style.scss";
|
||||
import { Logs } from "../../../api/types";
|
||||
import Accordion from "../../../components/Main/Accordion/Accordion";
|
||||
import { groupByMultipleKeys } from "../../../utils/array";
|
||||
import Tooltip from "../../../components/Main/Tooltip/Tooltip";
|
||||
import useCopyToClipboard from "../../../hooks/useCopyToClipboard";
|
||||
import GroupLogsItem from "./GroupLogsItem";
|
||||
import { useAppState } from "../../../state/common/StateContext";
|
||||
import classNames from "classnames";
|
||||
import Button from "../../../components/Main/Button/Button";
|
||||
import { CollapseIcon, ExpandIcon, StorageIcon } from "../../../components/Main/Icons";
|
||||
import Popper from "../../../components/Main/Popper/Popper";
|
||||
import TextField from "../../../components/Main/TextField/TextField";
|
||||
import useBoolean from "../../../hooks/useBoolean";
|
||||
import useStateSearchParams from "../../../hooks/useStateSearchParams";
|
||||
import { CollapseIcon, ExpandIcon } from "../../../components/Main/Icons";
|
||||
import { useSearchParams } from "react-router-dom";
|
||||
import { getStreamPairs } from "../../../utils/logs";
|
||||
|
||||
const WITHOUT_GROUPING = "No Grouping";
|
||||
import GroupLogsConfigurators
|
||||
from "../../../components/LogsConfigurators/GroupLogsConfigurators/GroupLogsConfigurators";
|
||||
import GroupLogsHeader from "./GroupLogsHeader";
|
||||
import { LOGS_DISPLAY_FIELDS, LOGS_GROUP_BY, LOGS_URL_PARAMS, WITHOUT_GROUPING } from "../../../constants/logs";
|
||||
|
||||
interface Props {
|
||||
logs: Logs[];
|
||||
@@ -26,73 +21,31 @@ interface Props {
|
||||
}
|
||||
|
||||
const GroupLogs: FC<Props> = ({ logs, settingsRef }) => {
|
||||
const { isDarkTheme } = useAppState();
|
||||
const copyToClipboard = useCopyToClipboard();
|
||||
const [searchParams, setSearchParams] = useSearchParams();
|
||||
const [searchParams] = useSearchParams();
|
||||
|
||||
const [expandGroups, setExpandGroups] = useState<boolean[]>([]);
|
||||
const [groupBy, setGroupBy] = useStateSearchParams("_stream", "groupBy");
|
||||
const [copied, setCopied] = useState<string | null>(null);
|
||||
const [searchKey, setSearchKey] = useState("");
|
||||
const optionsButtonRef = useRef<HTMLDivElement>(null);
|
||||
|
||||
const {
|
||||
value: openOptions,
|
||||
toggle: toggleOpenOptions,
|
||||
setFalse: handleCloseOptions,
|
||||
} = useBoolean(false);
|
||||
const groupBy = searchParams.get(LOGS_URL_PARAMS.GROUP_BY) || LOGS_GROUP_BY;
|
||||
const displayFieldsString = searchParams.get(LOGS_URL_PARAMS.DISPLAY_FIELDS) || LOGS_DISPLAY_FIELDS;
|
||||
const displayFields = displayFieldsString.split(",");
|
||||
|
||||
const expandAll = useMemo(() => expandGroups.every(Boolean), [expandGroups]);
|
||||
|
||||
const logsKeys = useMemo(() => {
|
||||
const excludeKeys = ["_msg", "_time"];
|
||||
const uniqKeys = Array.from(new Set(logs.map(l => Object.keys(l)).flat()));
|
||||
return [WITHOUT_GROUPING, ...uniqKeys.filter(k => !excludeKeys.includes(k))];
|
||||
}, [logs]);
|
||||
|
||||
const filteredLogsKeys = useMemo(() => {
|
||||
if (!searchKey) return logsKeys;
|
||||
try {
|
||||
const regexp = new RegExp(searchKey, "i");
|
||||
return logsKeys.filter(item => regexp.test(item))
|
||||
.sort((a, b) => (a.match(regexp)?.index || 0) - (b.match(regexp)?.index || 0));
|
||||
} catch (e) {
|
||||
return [];
|
||||
}
|
||||
}, [logsKeys, searchKey]);
|
||||
|
||||
const groupData = useMemo(() => {
|
||||
return groupByMultipleKeys(logs, [groupBy]).map((item) => {
|
||||
const streamValue = item.values[0]?.[groupBy] || "";
|
||||
const pairs = getStreamPairs(streamValue);
|
||||
// values sorting by time
|
||||
const values = item.values.sort((a,b) => new Date(b._time).getTime() - new Date(a._time).getTime());
|
||||
const values = item.values.sort((a, b) => new Date(b._time).getTime() - new Date(a._time).getTime());
|
||||
return {
|
||||
keys: item.keys,
|
||||
keysString: item.keys.join(""),
|
||||
values,
|
||||
pairs,
|
||||
};
|
||||
}).sort((a, b) => a.keysString.localeCompare(b.keysString)); // groups sorting
|
||||
}).sort((a, b) => b.values.length - a.values.length); // groups sorting
|
||||
}, [logs, groupBy]);
|
||||
|
||||
const handleClickByPair = (value: string) => async (e: MouseEvent<HTMLDivElement>) => {
|
||||
e.stopPropagation();
|
||||
const isKeyValue = /(.+)?=(".+")/.test(value);
|
||||
const copyValue = isKeyValue ? `${value.replace(/=/, ": ")}` : `${groupBy}: "${value}"`;
|
||||
const isCopied = await copyToClipboard(copyValue);
|
||||
if (isCopied) {
|
||||
setCopied(value);
|
||||
}
|
||||
};
|
||||
|
||||
const handleSelectGroupBy = (key: string) => () => {
|
||||
setGroupBy(key);
|
||||
searchParams.set("groupBy", key);
|
||||
setSearchParams(searchParams);
|
||||
handleCloseOptions();
|
||||
};
|
||||
|
||||
const handleToggleExpandAll = useCallback(() => {
|
||||
setExpandGroups(new Array(groupData.length).fill(!expandAll));
|
||||
}, [expandAll, groupData.length]);
|
||||
@@ -105,11 +58,6 @@ const GroupLogs: FC<Props> = ({ logs, settingsRef }) => {
|
||||
});
|
||||
}, []);
|
||||
|
||||
useEffect(() => {
|
||||
if (copied === null) return;
|
||||
const timeout = setTimeout(() => setCopied(null), 2000);
|
||||
return () => clearTimeout(timeout);
|
||||
}, [copied]);
|
||||
|
||||
useEffect(() => {
|
||||
setExpandGroups(new Array(groupData.length).fill(true));
|
||||
@@ -124,38 +72,16 @@ const GroupLogs: FC<Props> = ({ logs, settingsRef }) => {
|
||||
key={item.keysString}
|
||||
>
|
||||
<Accordion
|
||||
key={String(expandGroups[i])}
|
||||
defaultExpanded={expandGroups[i]}
|
||||
onChange={handleChangeExpand(i)}
|
||||
title={groupBy !== WITHOUT_GROUPING && (
|
||||
<div className="vm-group-logs-section-keys">
|
||||
<span className="vm-group-logs-section-keys__title">Group by <code>{groupBy}</code>:</span>
|
||||
{item.pairs.map((pair) => (
|
||||
<Tooltip
|
||||
title={copied === pair ? "Copied" : "Copy to clipboard"}
|
||||
key={`${item.keysString}_${pair}`}
|
||||
placement={"top-center"}
|
||||
>
|
||||
<div
|
||||
className={classNames({
|
||||
"vm-group-logs-section-keys__pair": true,
|
||||
"vm-group-logs-section-keys__pair_dark": isDarkTheme
|
||||
})}
|
||||
onClick={handleClickByPair(pair)}
|
||||
>
|
||||
{pair}
|
||||
</div>
|
||||
</Tooltip>
|
||||
))}
|
||||
<span className="vm-group-logs-section-keys__count">{item.values.length} entries</span>
|
||||
</div>
|
||||
)}
|
||||
title={groupBy !== WITHOUT_GROUPING && <GroupLogsHeader group={item}/>}
|
||||
>
|
||||
<div className="vm-group-logs-section-rows">
|
||||
{item.values.map((value) => (
|
||||
<GroupLogsItem
|
||||
key={`${value._msg}${value._time}`}
|
||||
log={value}
|
||||
displayFields={displayFields}
|
||||
/>
|
||||
))}
|
||||
</div>
|
||||
@@ -175,47 +101,7 @@ const GroupLogs: FC<Props> = ({ logs, settingsRef }) => {
|
||||
ariaLabel={expandAll ? "Collapse All" : "Expand All"}
|
||||
/>
|
||||
</Tooltip>
|
||||
<Tooltip title={"Group by"}>
|
||||
<div ref={optionsButtonRef}>
|
||||
<Button
|
||||
variant="text"
|
||||
startIcon={<StorageIcon/>}
|
||||
onClick={toggleOpenOptions}
|
||||
ariaLabel={"Group by"}
|
||||
/>
|
||||
</div>
|
||||
</Tooltip>
|
||||
{
|
||||
<Popper
|
||||
open={openOptions}
|
||||
placement="bottom-right"
|
||||
onClose={handleCloseOptions}
|
||||
buttonRef={optionsButtonRef}
|
||||
>
|
||||
<div className="vm-list vm-group-logs-header-keys">
|
||||
<div className="vm-group-logs-header-keys__search">
|
||||
<TextField
|
||||
label="Search key"
|
||||
value={searchKey}
|
||||
onChange={setSearchKey}
|
||||
type="search"
|
||||
/>
|
||||
</div>
|
||||
{filteredLogsKeys.map(id => (
|
||||
<div
|
||||
className={classNames({
|
||||
"vm-list-item": true,
|
||||
"vm-list-item_active": id === groupBy
|
||||
})}
|
||||
key={id}
|
||||
onClick={handleSelectGroupBy(id)}
|
||||
>
|
||||
{id}
|
||||
</div>
|
||||
))}
|
||||
</div>
|
||||
</Popper>
|
||||
}
|
||||
<GroupLogsConfigurators logs={logs}/>
|
||||
</div>
|
||||
), settingsRef.current)}
|
||||
</>
|
||||
|
||||
@@ -1,8 +1,10 @@
|
||||
import React, { FC, memo, useCallback, useEffect, useState } from "preact/compat";
|
||||
import Tooltip from "../../../components/Main/Tooltip/Tooltip";
|
||||
import Button from "../../../components/Main/Button/Button";
|
||||
import { CopyIcon } from "../../../components/Main/Icons";
|
||||
import { CopyIcon, StorageIcon, VisibilityIcon } from "../../../components/Main/Icons";
|
||||
import useCopyToClipboard from "../../../hooks/useCopyToClipboard";
|
||||
import { useSearchParams } from "react-router-dom";
|
||||
import { LOGS_GROUP_BY, LOGS_URL_PARAMS } from "../../../constants/logs";
|
||||
|
||||
interface Props {
|
||||
field: string;
|
||||
@@ -11,8 +13,17 @@ interface Props {
|
||||
|
||||
const GroupLogsFieldRow: FC<Props> = ({ field, value }) => {
|
||||
const copyToClipboard = useCopyToClipboard();
|
||||
const [searchParams, setSearchParams] = useSearchParams();
|
||||
|
||||
const [copied, setCopied] = useState<boolean>(false);
|
||||
|
||||
const groupBy = searchParams.get(LOGS_URL_PARAMS.GROUP_BY) || LOGS_GROUP_BY;
|
||||
const displayFieldsString = searchParams.get(LOGS_URL_PARAMS.DISPLAY_FIELDS) || "";
|
||||
const displayFields = displayFieldsString ? displayFieldsString.split(",") : [];
|
||||
|
||||
const isSelectedField = displayFields.includes(field);
|
||||
const isGroupByField = groupBy === field;
|
||||
|
||||
const handleCopy = useCallback(async () => {
|
||||
if (copied) return;
|
||||
try {
|
||||
@@ -23,6 +34,18 @@ const GroupLogsFieldRow: FC<Props> = ({ field, value }) => {
|
||||
}
|
||||
}, [copied, copyToClipboard]);
|
||||
|
||||
const handleSelectDisplayField = () => {
|
||||
const prev = displayFields;
|
||||
const newDisplayFields = prev.includes(field) ? prev.filter(v => v !== field) : [...prev, field];
|
||||
searchParams.set(LOGS_URL_PARAMS.DISPLAY_FIELDS, newDisplayFields.join(","));
|
||||
setSearchParams(searchParams);
|
||||
};
|
||||
|
||||
const handleSelectGroupBy = () => {
|
||||
isGroupByField ? searchParams.delete(LOGS_URL_PARAMS.GROUP_BY) : searchParams.set(LOGS_URL_PARAMS.GROUP_BY, field);
|
||||
setSearchParams(searchParams);
|
||||
};
|
||||
|
||||
useEffect(() => {
|
||||
if (copied === null) return;
|
||||
const timeout = setTimeout(() => setCopied(false), 2000);
|
||||
@@ -35,6 +58,7 @@ const GroupLogsFieldRow: FC<Props> = ({ field, value }) => {
|
||||
<div className="vm-group-logs-row-fields-item-controls__wrapper">
|
||||
<Tooltip title={copied ? "Copied" : "Copy to clipboard"}>
|
||||
<Button
|
||||
className="vm-group-logs-row-fields-item-controls__button"
|
||||
variant="text"
|
||||
color="gray"
|
||||
size="small"
|
||||
@@ -43,6 +67,34 @@ const GroupLogsFieldRow: FC<Props> = ({ field, value }) => {
|
||||
ariaLabel="copy to clipboard"
|
||||
/>
|
||||
</Tooltip>
|
||||
<Tooltip
|
||||
key={`${field}_${isSelectedField}_${isGroupByField}`}
|
||||
title={isSelectedField ? "Hide this field" : "Show this field instead of the message"}
|
||||
>
|
||||
<Button
|
||||
className="vm-group-logs-row-fields-item-controls__button"
|
||||
variant="text"
|
||||
color={isSelectedField ? "secondary" : "gray"}
|
||||
size="small"
|
||||
startIcon={isSelectedField ? <VisibilityIcon/> : <VisibilityIcon/>}
|
||||
onClick={handleSelectDisplayField}
|
||||
ariaLabel="copy to clipboard"
|
||||
/>
|
||||
</Tooltip>
|
||||
<Tooltip
|
||||
key={`${field}_${isSelectedField}_${isGroupByField}`}
|
||||
title={isGroupByField ? "Ungroup this field" : "Group by this field"}
|
||||
>
|
||||
<Button
|
||||
className="vm-group-logs-row-fields-item-controls__button"
|
||||
variant="text"
|
||||
color={isGroupByField ? "secondary" : "gray"}
|
||||
size="small"
|
||||
startIcon={<StorageIcon/>}
|
||||
onClick={handleSelectGroupBy}
|
||||
ariaLabel="copy to clipboard"
|
||||
/>
|
||||
</Tooltip>
|
||||
</div>
|
||||
</td>
|
||||
<td className="vm-group-logs-row-fields-item__key">{field}</td>
|
||||
|
||||
Some files were not shown because too many files have changed in this diff Show More
Reference in New Issue
Block a user