apptest: do not use "at" and "pb" import aliases for apptest and prompbmarshal packages

The import aliases may complicate maintenance of the code in the long term
if they aren't used consistently, e.g. if one file imports the apptest package under its default name
while another file imports it under the "at" alias.

The aliases also complicate grepping the code for apptest.* or prompbmarshal.* usages.
This commit is contained in:
Aliaksandr Valialkin
2025-07-26 01:04:47 +02:00
parent da5c065f29
commit 73015bccb9
19 changed files with 711 additions and 707 deletions

View File

@@ -445,9 +445,11 @@ test-full:
test-full-386:
GOEXPERIMENT=synctest GOARCH=386 go test -coverprofile=coverage.txt -covermode=atomic ./lib/... ./app/...
integration-test: apptest
integration-test:
$(MAKE) apptest
apptest: victoria-metrics vmagent vmalert vmauth vmctl vmbackup vmrestore
apptest:
$(MAKE) victoria-metrics vmagent vmalert vmauth vmctl vmbackup vmrestore
go test ./apptest/... -skip="^TestCluster.*"
benchmark:

View File

@@ -15,7 +15,7 @@ import (
"testing"
"time"
pb "github.com/VictoriaMetrics/VictoriaMetrics/lib/prompbmarshal"
"github.com/VictoriaMetrics/VictoriaMetrics/lib/prompbmarshal"
)
// PrometheusQuerier contains methods available to Prometheus-like HTTP API for Querying
@@ -40,7 +40,7 @@ type PrometheusQuerier interface {
// Writer contains methods for writing new data
type Writer interface {
// Prometheus APIs
PrometheusAPIV1Write(t *testing.T, records []pb.TimeSeries, opts QueryOpts)
PrometheusAPIV1Write(t *testing.T, records []prompbmarshal.TimeSeries, opts QueryOpts)
PrometheusAPIV1ImportPrometheus(t *testing.T, records []string, opts QueryOpts)
PrometheusAPIV1ImportCSV(t *testing.T, records []string, opts QueryOpts)
PrometheusAPIV1ImportNative(t *testing.T, data []byte, opts QueryOpts)

View File

@@ -7,24 +7,24 @@ import (
"testing"
"time"
at "github.com/VictoriaMetrics/VictoriaMetrics/apptest"
"github.com/VictoriaMetrics/VictoriaMetrics/apptest"
)
type testBackupRestoreOpts struct {
startSUT func() at.PrometheusWriteQuerier
startSUT func() apptest.PrometheusWriteQuerier
stopSUT func()
storageDataPaths []string
snapshotCreateURLs func(at.PrometheusWriteQuerier) []string
snapshotCreateURLs func(apptest.PrometheusWriteQuerier) []string
}
func TestSingleBackupRestore(t *testing.T) {
tc := at.NewTestCase(t)
tc := apptest.NewTestCase(t)
defer tc.Stop()
storageDataPath := filepath.Join(tc.Dir(), "vmsingle")
opts := testBackupRestoreOpts{
startSUT: func() at.PrometheusWriteQuerier {
startSUT: func() apptest.PrometheusWriteQuerier {
return tc.MustStartVmsingle("vmsingle", []string{
"-storageDataPath=" + storageDataPath,
"-retentionPeriod=100y",
@@ -37,9 +37,9 @@ func TestSingleBackupRestore(t *testing.T) {
storageDataPaths: []string{
storageDataPath,
},
snapshotCreateURLs: func(sut at.PrometheusWriteQuerier) []string {
snapshotCreateURLs: func(sut apptest.PrometheusWriteQuerier) []string {
return []string{
sut.(*at.Vmsingle).SnapshotCreateURL(),
sut.(*apptest.Vmsingle).SnapshotCreateURL(),
}
},
}
@@ -48,15 +48,15 @@ func TestSingleBackupRestore(t *testing.T) {
}
func TestClusterBackupRestore(t *testing.T) {
tc := at.NewTestCase(t)
tc := apptest.NewTestCase(t)
defer tc.Stop()
storage1DataPath := filepath.Join(tc.Dir(), "vmstorage1")
storage2DataPath := filepath.Join(tc.Dir(), "vmstorage2")
opts := testBackupRestoreOpts{
startSUT: func() at.PrometheusWriteQuerier {
return tc.MustStartCluster(&at.ClusterOptions{
startSUT: func() apptest.PrometheusWriteQuerier {
return tc.MustStartCluster(&apptest.ClusterOptions{
Vmstorage1Instance: "vmstorage1",
Vmstorage1Flags: []string{
"-storageDataPath=" + storage1DataPath,
@@ -85,8 +85,8 @@ func TestClusterBackupRestore(t *testing.T) {
storage1DataPath,
storage2DataPath,
},
snapshotCreateURLs: func(sut at.PrometheusWriteQuerier) []string {
c := sut.(*at.Vmcluster)
snapshotCreateURLs: func(sut apptest.PrometheusWriteQuerier) []string {
c := sut.(*apptest.Vmcluster)
return []string{
c.Vmstorages[0].SnapshotCreateURL(),
c.Vmstorages[1].SnapshotCreateURL(),
@@ -97,14 +97,14 @@ func TestClusterBackupRestore(t *testing.T) {
testBackupRestore(tc, opts)
}
func testBackupRestore(tc *at.TestCase, opts testBackupRestoreOpts) {
func testBackupRestore(tc *apptest.TestCase, opts testBackupRestoreOpts) {
t := tc.T()
const msecPerMinute = 60 * 1000
genData := func(count int, prefix string, start int64) (recs []string, wantSeries []map[string]string, wantQueryResults []*at.QueryResult) {
genData := func(count int, prefix string, start int64) (recs []string, wantSeries []map[string]string, wantQueryResults []*apptest.QueryResult) {
recs = make([]string, count)
wantSeries = make([]map[string]string, count)
wantQueryResults = make([]*at.QueryResult, count)
wantQueryResults = make([]*apptest.QueryResult, count)
for i := range count {
name := fmt.Sprintf("%s_%03d", prefix, i)
value := float64(i)
@@ -112,9 +112,9 @@ func testBackupRestore(tc *at.TestCase, opts testBackupRestoreOpts) {
recs[i] = fmt.Sprintf("%s %f %d", name, value, timestamp)
wantSeries[i] = map[string]string{"__name__": name}
wantQueryResults[i] = &at.QueryResult{
wantQueryResults[i] = &apptest.QueryResult{
Metric: map[string]string{"__name__": name},
Samples: []*at.Sample{{Timestamp: timestamp, Value: value}},
Samples: []*apptest.Sample{{Timestamp: timestamp, Value: value}},
}
}
return recs, wantSeries, wantQueryResults
@@ -127,18 +127,18 @@ func testBackupRestore(tc *at.TestCase, opts testBackupRestoreOpts) {
// assertSeries retrieves set of all metric names from the storage and
// compares it with the expected set.
assertSeries := func(app at.PrometheusQuerier, query string, start, end int64, want []map[string]string) {
assertSeries := func(app apptest.PrometheusQuerier, query string, start, end int64, want []map[string]string) {
t.Helper()
tc.Assert(&at.AssertOptions{
tc.Assert(&apptest.AssertOptions{
Msg: "unexpected /api/v1/series response",
Got: func() any {
return app.PrometheusAPIV1Series(t, query, at.QueryOpts{
return app.PrometheusAPIV1Series(t, query, apptest.QueryOpts{
Start: fmt.Sprintf("%d", start),
End: fmt.Sprintf("%d", end),
}).Sort()
},
Want: &at.PrometheusAPIV1SeriesResponse{
Want: &apptest.PrometheusAPIV1SeriesResponse{
Status: "success",
Data: want,
},
@@ -148,20 +148,20 @@ func testBackupRestore(tc *at.TestCase, opts testBackupRestoreOpts) {
// assertSeries retrieves all data from the storage and compares it with the
// expected result.
assertQueryResults := func(app at.PrometheusQuerier, query string, start, end int64, want []*at.QueryResult) {
assertQueryResults := func(app apptest.PrometheusQuerier, query string, start, end int64, want []*apptest.QueryResult) {
t.Helper()
tc.Assert(&at.AssertOptions{
tc.Assert(&apptest.AssertOptions{
Msg: "unexpected /api/v1/query_range response",
Got: func() any {
return app.PrometheusAPIV1QueryRange(t, query, at.QueryOpts{
return app.PrometheusAPIV1QueryRange(t, query, apptest.QueryOpts{
Start: fmt.Sprintf("%d", start),
End: fmt.Sprintf("%d", end),
Step: "60s",
})
},
Want: &at.PrometheusAPIV1QueryResponse{
Want: &apptest.PrometheusAPIV1QueryResponse{
Status: "success",
Data: &at.QueryData{
Data: &apptest.QueryData{
ResultType: "matrix",
Result: want,
},
@@ -171,7 +171,7 @@ func testBackupRestore(tc *at.TestCase, opts testBackupRestoreOpts) {
})
}
createBackup := func(sut at.PrometheusWriteQuerier, name string) {
createBackup := func(sut apptest.PrometheusWriteQuerier, name string) {
for i, storageDataPath := range opts.storageDataPaths {
replica := fmt.Sprintf("replica-%d", i)
instance := fmt.Sprintf("vmbackup-%s-%s", name, replica)
@@ -216,13 +216,13 @@ func testBackupRestore(tc *at.TestCase, opts testBackupRestoreOpts) {
sut := opts.startSUT()
sut.PrometheusAPIV1ImportPrometheus(t, batch1Data, at.QueryOpts{})
sut.PrometheusAPIV1ImportPrometheus(t, batch1Data, apptest.QueryOpts{})
sut.ForceFlush(t)
assertSeries(sut, `{__name__=~"batch1.*"}`, start, end, wantBatch1Series)
assertQueryResults(sut, `{__name__=~"batch1.*"}`, start, end, wantBatch1QueryResults)
createBackup(sut, "batch1")
sut.PrometheusAPIV1ImportPrometheus(t, batch2Data, at.QueryOpts{})
sut.PrometheusAPIV1ImportPrometheus(t, batch2Data, apptest.QueryOpts{})
sut.ForceFlush(t)
assertSeries(sut, `{__name__=~"batch(1|2).*"}`, start, end, wantBatch12Series)
assertQueryResults(sut, `{__name__=~"batch(1|2).*"}`, start, end, wantBatch12QueryResults)

View File

@@ -5,15 +5,16 @@ import (
"testing"
"time"
at "github.com/VictoriaMetrics/VictoriaMetrics/apptest"
"github.com/VictoriaMetrics/VictoriaMetrics/lib/decimal"
pb "github.com/VictoriaMetrics/VictoriaMetrics/lib/prompbmarshal"
"github.com/google/go-cmp/cmp"
"github.com/google/go-cmp/cmp/cmpopts"
"github.com/VictoriaMetrics/VictoriaMetrics/apptest"
"github.com/VictoriaMetrics/VictoriaMetrics/lib/decimal"
"github.com/VictoriaMetrics/VictoriaMetrics/lib/prompbmarshal"
)
func TestSingleDeduplication_dedulicationIsOff(t *testing.T) {
tc := at.NewTestCase(t)
tc := apptest.NewTestCase(t)
defer tc.Stop()
sut := tc.MustStartVmsingle("vmsingle", []string{
@@ -26,7 +27,7 @@ func TestSingleDeduplication_dedulicationIsOff(t *testing.T) {
}
func TestSingleDeduplication_dedulicationIsOn(t *testing.T) {
tc := at.NewTestCase(t)
tc := apptest.NewTestCase(t)
defer tc.Stop()
sut := tc.MustStartVmsingle("vmsingle", []string{
@@ -39,10 +40,10 @@ func TestSingleDeduplication_dedulicationIsOn(t *testing.T) {
}
func TestClusterDeduplication_deduplicationIsOff(t *testing.T) {
tc := at.NewTestCase(t)
tc := apptest.NewTestCase(t)
defer tc.Stop()
sut := tc.MustStartCluster(&at.ClusterOptions{
sut := tc.MustStartCluster(&apptest.ClusterOptions{
Vmstorage1Instance: "vmstorage1",
Vmstorage1Flags: []string{
"-dedup.minScrapeInterval=0",
@@ -59,10 +60,10 @@ func TestClusterDeduplication_deduplicationIsOff(t *testing.T) {
}
func TestClusterDeduplication_deduplicationIsOn(t *testing.T) {
tc := at.NewTestCase(t)
tc := apptest.NewTestCase(t)
defer tc.Stop()
sut := tc.MustStartCluster(&at.ClusterOptions{
sut := tc.MustStartCluster(&apptest.ClusterOptions{
Vmstorage1Instance: "vmstorage1",
Vmstorage1Flags: []string{
"-dedup.minScrapeInterval=10s",
@@ -79,7 +80,7 @@ func TestClusterDeduplication_deduplicationIsOn(t *testing.T) {
}
// See https://docs.victoriametrics.com/victoriametrics/single-server-victoriametrics/#deduplication
func testDeduplication(tc *at.TestCase, sut at.PrometheusWriteQuerier, deduplicationIsOn bool) {
func testDeduplication(tc *apptest.TestCase, sut apptest.PrometheusWriteQuerier, deduplicationIsOn bool) {
t := tc.T()
firstDayOfThisMonth := func() time.Time {
@@ -98,34 +99,34 @@ func testDeduplication(tc *at.TestCase, sut at.PrometheusWriteQuerier, deduplica
ts3 := start.Add(3 * time.Second).UnixMilli()
ts5 := start.Add(5 * time.Second).UnixMilli()
ts10 := start.Add(10 * time.Second).UnixMilli()
data := []pb.TimeSeries{
data := []prompbmarshal.TimeSeries{
{
Labels: []pb.Label{{Name: "__name__", Value: "metric1"}},
Samples: []pb.Sample{
Labels: []prompbmarshal.Label{{Name: "__name__", Value: "metric1"}},
Samples: []prompbmarshal.Sample{
{Timestamp: ts1, Value: 3},
{Timestamp: ts3, Value: 10},
{Timestamp: ts5, Value: 5},
},
},
{
Labels: []pb.Label{{Name: "__name__", Value: "metric2"}},
Samples: []pb.Sample{
Labels: []prompbmarshal.Label{{Name: "__name__", Value: "metric2"}},
Samples: []prompbmarshal.Sample{
{Timestamp: ts1, Value: 3},
{Timestamp: ts3, Value: decimal.StaleNaN},
{Timestamp: ts5, Value: 5},
},
},
{
Labels: []pb.Label{{Name: "__name__", Value: "metric3"}},
Samples: []pb.Sample{
Labels: []prompbmarshal.Label{{Name: "__name__", Value: "metric3"}},
Samples: []prompbmarshal.Sample{
{Timestamp: ts10, Value: 30},
{Timestamp: ts10, Value: 100},
{Timestamp: ts10, Value: 50},
},
},
{
Labels: []pb.Label{{Name: "__name__", Value: "metric4"}},
Samples: []pb.Sample{
Labels: []prompbmarshal.Label{{Name: "__name__", Value: "metric4"}},
Samples: []prompbmarshal.Sample{
{Timestamp: ts10, Value: 30},
{Timestamp: ts10, Value: decimal.StaleNaN},
{Timestamp: ts10, Value: 50},
@@ -133,31 +134,31 @@ func testDeduplication(tc *at.TestCase, sut at.PrometheusWriteQuerier, deduplica
},
}
sut.PrometheusAPIV1Write(t, data, at.QueryOpts{})
sut.PrometheusAPIV1Write(t, data, apptest.QueryOpts{})
sut.ForceFlush(t)
sut.ForceMerge(t)
wantDuplicates := &at.PrometheusAPIV1QueryResponse{
wantDuplicates := &apptest.PrometheusAPIV1QueryResponse{
Status: "success",
Data: &at.QueryData{
Data: &apptest.QueryData{
ResultType: "matrix",
Result: []*at.QueryResult{
{Metric: map[string]string{"__name__": "metric1"}, Samples: []*at.Sample{
Result: []*apptest.QueryResult{
{Metric: map[string]string{"__name__": "metric1"}, Samples: []*apptest.Sample{
{Timestamp: ts1, Value: 3},
{Timestamp: ts3, Value: 10},
{Timestamp: ts5, Value: 5},
}},
{Metric: map[string]string{"__name__": "metric2"}, Samples: []*at.Sample{
{Metric: map[string]string{"__name__": "metric2"}, Samples: []*apptest.Sample{
{Timestamp: ts1, Value: 3},
{Timestamp: ts3, Value: decimal.StaleNaN},
{Timestamp: ts5, Value: 5},
}},
{Metric: map[string]string{"__name__": "metric3"}, Samples: []*at.Sample{
{Metric: map[string]string{"__name__": "metric3"}, Samples: []*apptest.Sample{
{Timestamp: ts10, Value: 30},
{Timestamp: ts10, Value: 50},
{Timestamp: ts10, Value: 100},
}},
{Metric: map[string]string{"__name__": "metric4"}, Samples: []*at.Sample{
{Metric: map[string]string{"__name__": "metric4"}, Samples: []*apptest.Sample{
{Timestamp: ts10, Value: 30},
{Timestamp: ts10, Value: 50},
{Timestamp: ts10, Value: decimal.StaleNaN},
@@ -165,30 +166,30 @@ func testDeduplication(tc *at.TestCase, sut at.PrometheusWriteQuerier, deduplica
},
},
}
wantDeduped := &at.PrometheusAPIV1QueryResponse{
wantDeduped := &apptest.PrometheusAPIV1QueryResponse{
Status: "success",
Data: &at.QueryData{
Data: &apptest.QueryData{
ResultType: "matrix",
Result: []*at.QueryResult{
{Metric: map[string]string{"__name__": "metric1"}, Samples: []*at.Sample{
Result: []*apptest.QueryResult{
{Metric: map[string]string{"__name__": "metric1"}, Samples: []*apptest.Sample{
// VictoriaMetrics leaves a single raw sample with the
// biggest timestamp for each time series per each
// -dedup.minScrapeInterval discrete interval if
// -dedup.minScrapeInterval is set to positive duration.
{Timestamp: ts5, Value: 5},
}},
{Metric: map[string]string{"__name__": "metric2"}, Samples: []*at.Sample{
{Metric: map[string]string{"__name__": "metric2"}, Samples: []*apptest.Sample{
// Even if NaN is present among duplicates, VictoriaMetrics
// still chooses the sample with the biggest timestamp.
{Timestamp: ts5, Value: 5},
}},
{Metric: map[string]string{"__name__": "metric3"}, Samples: []*at.Sample{
{Metric: map[string]string{"__name__": "metric3"}, Samples: []*apptest.Sample{
// If multiple raw samples have the same timestamp on the
// given -dedup.minScrapeInterval discrete interval, then
// the sample with the biggest value is kept.
{Timestamp: ts10, Value: 100},
}},
{Metric: map[string]string{"__name__": "metric4"}, Samples: []*at.Sample{
{Metric: map[string]string{"__name__": "metric4"}, Samples: []*apptest.Sample{
// If multiple raw samples have the same timestamp on the
// given -dedup.minScrapeInterval discrete interval, then
// stale markers are preferred over any other value.
@@ -203,10 +204,10 @@ func testDeduplication(tc *at.TestCase, sut at.PrometheusWriteQuerier, deduplica
want = wantDeduped
}
tc.Assert(&at.AssertOptions{
tc.Assert(&apptest.AssertOptions{
Msg: "unexpected response",
Got: func() any {
got := sut.PrometheusAPIV1Export(t, `{__name__=~"metric.*"}`, at.QueryOpts{
got := sut.PrometheusAPIV1Export(t, `{__name__=~"metric.*"}`, apptest.QueryOpts{
ReduceMemUsage: "1",
Start: fmt.Sprintf("%d", start.UnixMilli()),
End: fmt.Sprintf("%d", end.UnixMilli()),

View File

@@ -6,14 +6,14 @@ import (
"github.com/google/go-cmp/cmp"
"github.com/google/go-cmp/cmp/cmpopts"
at "github.com/VictoriaMetrics/VictoriaMetrics/apptest"
"github.com/VictoriaMetrics/VictoriaMetrics/apptest"
"github.com/VictoriaMetrics/VictoriaMetrics/lib/fs"
)
func TestSingleExportImportNative(t *testing.T) {
fs.MustRemoveDir(t.Name())
tc := at.NewTestCase(t)
tc := apptest.NewTestCase(t)
defer tc.Stop()
sut := tc.MustStartDefaultVmsingle()
@@ -24,7 +24,7 @@ func TestSingleExportImportNative(t *testing.T) {
func TestClusterExportImportNative(t *testing.T) {
fs.MustRemoveDir(t.Name())
tc := at.NewTestCase(t)
tc := apptest.NewTestCase(t)
defer tc.Stop()
sut := tc.MustStartDefaultCluster()
@@ -34,39 +34,39 @@ func TestClusterExportImportNative(t *testing.T) {
// testExportImportNative test export and import in VictoriaMetrics native format.
// see: https://docs.victoriametrics.com/#how-to-import-data-in-native-format
func testExportImportNative(t *testing.T, sut at.PrometheusWriteQuerier) {
func testExportImportNative(t *testing.T, sut apptest.PrometheusWriteQuerier) {
// create test data
sut.PrometheusAPIV1ImportPrometheus(t, []string{
`native_export_import 10 1707123456700`, // 2024-02-05T08:57:36.700Z
}, at.QueryOpts{
}, apptest.QueryOpts{
ExtraLabels: []string{"el1=elv1", "el2=elv2"},
})
sut.ForceFlush(t)
// export test data via native export API
exportResult := sut.PrometheusAPIV1ExportNative(t, "native_export_import", at.QueryOpts{
exportResult := sut.PrometheusAPIV1ExportNative(t, "native_export_import", apptest.QueryOpts{
Start: "2024-02-05T08:50:00.700Z",
End: "2024-02-05T09:00:00.700Z",
})
// re-import test data via native import API
sut.PrometheusAPIV1ImportNative(t, exportResult, at.QueryOpts{})
sut.PrometheusAPIV1ImportNative(t, exportResult, apptest.QueryOpts{})
sut.ForceFlush(t)
// check query result
got := sut.PrometheusAPIV1QueryRange(t, "native_export_import", at.QueryOpts{
got := sut.PrometheusAPIV1QueryRange(t, "native_export_import", apptest.QueryOpts{
Start: "2024-02-05T08:57:36.700Z",
End: "2024-02-05T08:57:36.700Z",
Step: "60s",
})
cmpOptions := []cmp.Option{
cmpopts.IgnoreFields(at.PrometheusAPIV1QueryResponse{}, "Status", "Data.ResultType"),
cmpopts.IgnoreFields(apptest.PrometheusAPIV1QueryResponse{}, "Status", "Data.ResultType"),
cmpopts.EquateNaNs(),
}
want := at.NewPrometheusAPIV1QueryResponse(t, `{"data": {"result": [{"metric": {"__name__": "native_export_import", "el1": "elv1", "el2":"elv2"}, "values": []}]}}`)
want.Data.Result[0].Samples = []*at.Sample{
at.NewSample(t, "2024-02-05T08:57:36.700Z", 10),
want := apptest.NewPrometheusAPIV1QueryResponse(t, `{"data": {"result": [{"metric": {"__name__": "native_export_import", "el1": "elv1", "el2":"elv2"}, "values": []}]}}`)
want.Data.Result[0].Samples = []*apptest.Sample{
apptest.NewSample(t, "2024-02-05T08:57:36.700Z", 10),
}
if diff := cmp.Diff(want, got, cmpOptions...); diff != "" {
t.Errorf("unexpected response (-want, +got):\n%s", diff)

View File

@@ -5,14 +5,14 @@ import (
"github.com/google/go-cmp/cmp"
at "github.com/VictoriaMetrics/VictoriaMetrics/apptest"
"github.com/VictoriaMetrics/VictoriaMetrics/apptest"
)
func testMetricsIndex(t *testing.T, sut at.PrometheusWriteQuerier) {
func testMetricsIndex(t *testing.T, sut apptest.PrometheusWriteQuerier) {
// verify index is empty at the start
expected := at.GraphiteMetricsIndexResponse{}
expected := apptest.GraphiteMetricsIndexResponse{}
tenant := "1:2"
got := sut.GraphiteMetricsIndex(t, at.QueryOpts{Tenant: tenant})
got := sut.GraphiteMetricsIndex(t, apptest.QueryOpts{Tenant: tenant})
if diff := cmp.Diff(expected, got); diff != "" {
t.Errorf("unexpected response (-want, +got):\n%s", diff)
}
@@ -31,20 +31,20 @@ func testMetricsIndex(t *testing.T, sut at.PrometheusWriteQuerier) {
dataSet[idx] += ingestTimestamp
}
sut.PrometheusAPIV1ImportPrometheus(t, dataSet, at.QueryOpts{Tenant: tenant})
sut.PrometheusAPIV1ImportPrometheus(t, dataSet, apptest.QueryOpts{Tenant: tenant})
sut.ForceFlush(t)
// verify ingested metrics correctly returned in index response
expected = []string{"metric_name_1", "metric_name_2", "metric_name_3"}
got = sut.GraphiteMetricsIndex(t, at.QueryOpts{Tenant: tenant})
got = sut.GraphiteMetricsIndex(t, apptest.QueryOpts{Tenant: tenant})
if diff := cmp.Diff(expected, got); diff != "" {
t.Errorf("unexpected response (-want, +got):\n%s", diff)
}
}
func TestSingleMetricsIndex(t *testing.T) {
tc := at.NewTestCase(t)
tc := apptest.NewTestCase(t)
defer tc.Stop()
sut := tc.MustStartDefaultVmsingle()
@@ -53,7 +53,7 @@ func TestSingleMetricsIndex(t *testing.T) {
}
func TestClusterMetricsIndex(t *testing.T) {
tc := at.NewTestCase(t)
tc := apptest.NewTestCase(t)
defer tc.Stop()
sut := tc.MustStartDefaultCluster()

View File

@@ -6,44 +6,44 @@ import (
"github.com/google/go-cmp/cmp"
"github.com/google/go-cmp/cmp/cmpopts"
at "github.com/VictoriaMetrics/VictoriaMetrics/apptest"
"github.com/VictoriaMetrics/VictoriaMetrics/apptest"
"github.com/VictoriaMetrics/VictoriaMetrics/lib/fs"
pb "github.com/VictoriaMetrics/VictoriaMetrics/lib/prompbmarshal"
"github.com/VictoriaMetrics/VictoriaMetrics/lib/prompbmarshal"
)
func TestSingleIngestionProtocols(t *testing.T) {
fs.MustRemoveDir(t.Name())
tc := at.NewTestCase(t)
tc := apptest.NewTestCase(t)
defer tc.Stop()
sut := tc.MustStartDefaultVmsingle()
type opts struct {
query string
wantMetrics []map[string]string
wantSamples []*at.Sample
wantSamples []*apptest.Sample
}
f := func(sut at.PrometheusQuerier, opts *opts) {
f := func(sut apptest.PrometheusQuerier, opts *opts) {
t.Helper()
wantResult := []*at.QueryResult{}
wantResult := []*apptest.QueryResult{}
for idx, wm := range opts.wantMetrics {
wantResult = append(wantResult, &at.QueryResult{
wantResult = append(wantResult, &apptest.QueryResult{
Metric: wm,
Samples: []*at.Sample{opts.wantSamples[idx]},
Samples: []*apptest.Sample{opts.wantSamples[idx]},
})
}
tc.Assert(&at.AssertOptions{
tc.Assert(&apptest.AssertOptions{
Msg: "unexpected /export query response",
Got: func() any {
got := sut.PrometheusAPIV1Export(t, opts.query, at.QueryOpts{
got := sut.PrometheusAPIV1Export(t, opts.query, apptest.QueryOpts{
Start: "2024-02-05T08:50:00.700Z",
End: "2024-02-05T09:00:00.700Z",
})
got.Sort()
return got
},
Want: &at.PrometheusAPIV1QueryResponse{Data: &at.QueryData{Result: wantResult}},
Want: &apptest.PrometheusAPIV1QueryResponse{Data: &apptest.QueryData{Result: wantResult}},
CmpOpts: []cmp.Option{
cmpopts.IgnoreFields(at.PrometheusAPIV1QueryResponse{}, "Status", "Data.ResultType"),
cmpopts.IgnoreFields(apptest.PrometheusAPIV1QueryResponse{}, "Status", "Data.ResultType"),
},
})
}
@@ -52,7 +52,7 @@ func TestSingleIngestionProtocols(t *testing.T) {
sut.InfluxWrite(t, []string{
`influxline series1=10 1707123456700`, // 2024-02-05T08:57:36.700Z
`influxline,label=foo1,label1=value1,label2=value2 series2=40 1707123456800`, // 2024-02-05T08:57:36.800Z
}, at.QueryOpts{
}, apptest.QueryOpts{
ExtraLabels: []string{"el1=elv1", "el2=elv2"},
})
sut.ForceFlush(t)
@@ -73,7 +73,7 @@ func TestSingleIngestionProtocols(t *testing.T) {
"el2": "elv2",
},
},
wantSamples: []*at.Sample{
wantSamples: []*apptest.Sample{
{Timestamp: 1707123456700, Value: 10},
{Timestamp: 1707123456800, Value: 40},
},
@@ -83,7 +83,7 @@ func TestSingleIngestionProtocols(t *testing.T) {
sut.OpenTSDBAPIPut(t, []string{
`{"metric":"opentsdbimport.foo","value":45.34, "timestamp": "1707123457"}`,
`{"metric":"opentsdbimport.bar","value":43, "timestamp": "1707123456"}`,
}, at.QueryOpts{
}, apptest.QueryOpts{
ExtraLabels: []string{"el1=elv1", "el2=elv2"},
})
sut.ForceFlush(t)
@@ -101,7 +101,7 @@ func TestSingleIngestionProtocols(t *testing.T) {
"el2": "elv2",
},
},
wantSamples: []*at.Sample{
wantSamples: []*apptest.Sample{
{Timestamp: 1707123456000, Value: 43},
{Timestamp: 1707123457000, Value: 45.34},
},
@@ -111,7 +111,7 @@ func TestSingleIngestionProtocols(t *testing.T) {
sut.PrometheusAPIV1ImportCSV(t, []string{
`GOOG,1.23,4.56,NYSE,1707123457`,
`MSFT,23,56,NASDAQ,1707123457`,
}, at.QueryOpts{
}, apptest.QueryOpts{
ExtraLabels: []string{"el1=elv1", "el2=elv2"},
Format: "2:metric:csv_import,3:metric:csv_import_v2,1:label:ticker,4:label:market,5:time:unix_s",
})
@@ -148,7 +148,7 @@ func TestSingleIngestionProtocols(t *testing.T) {
"el2": "elv2",
},
},
wantSamples: []*at.Sample{
wantSamples: []*apptest.Sample{
{Timestamp: 1707123457000, Value: 23},
{Timestamp: 1707123457000, Value: 1.23},
{Timestamp: 1707123457000, Value: 56},
@@ -160,7 +160,7 @@ func TestSingleIngestionProtocols(t *testing.T) {
sut.PrometheusAPIV1ImportPrometheus(t, []string{
`importprometheus_series 10 1707123456700`, // 2024-02-05T08:57:36.700Z
`importprometheus_series2{label="foo",label1="value1"} 20 1707123456800`, // 2024-02-05T08:57:36.800Z
}, at.QueryOpts{
}, apptest.QueryOpts{
ExtraLabels: []string{"el1=elv1", "el2=elv2"},
})
sut.ForceFlush(t)
@@ -180,22 +180,22 @@ func TestSingleIngestionProtocols(t *testing.T) {
"el2": "elv2",
},
},
wantSamples: []*at.Sample{
wantSamples: []*apptest.Sample{
{Timestamp: 1707123456700, Value: 10},
{Timestamp: 1707123456800, Value: 20},
},
})
// prometheus remote write format
pbData := []pb.TimeSeries{
pbData := []prompbmarshal.TimeSeries{
{
Labels: []pb.Label{
Labels: []prompbmarshal.Label{
{
Name: "__name__",
Value: "prometheusrw_series",
},
},
Samples: []pb.Sample{
Samples: []prompbmarshal.Sample{
{
Value: 10,
Timestamp: 1707123456700, // 2024-02-05T08:57:36.700Z
@@ -204,7 +204,7 @@ func TestSingleIngestionProtocols(t *testing.T) {
},
},
{
Labels: []pb.Label{
Labels: []prompbmarshal.Label{
{
Name: "__name__",
Value: "prometheusrw_series2",
@@ -218,7 +218,7 @@ func TestSingleIngestionProtocols(t *testing.T) {
Value: "value1",
},
},
Samples: []pb.Sample{
Samples: []prompbmarshal.Sample{
{
Value: 20,
Timestamp: 1707123456800, // 2024-02-05T08:57:36.800Z
@@ -226,7 +226,7 @@ func TestSingleIngestionProtocols(t *testing.T) {
},
},
}
sut.PrometheusAPIV1Write(t, pbData, at.QueryOpts{})
sut.PrometheusAPIV1Write(t, pbData, apptest.QueryOpts{})
sut.ForceFlush(t)
f(sut, &opts{
query: `{__name__=~"prometheusrw.+"}`,
@@ -240,7 +240,7 @@ func TestSingleIngestionProtocols(t *testing.T) {
"label1": "value1",
},
},
wantSamples: []*at.Sample{
wantSamples: []*apptest.Sample{
{Timestamp: 1707123456700, Value: 10}, // 2024-02-05T08:57:36.700Z
{Timestamp: 1707123456800, Value: 20}, // 2024-02-05T08:57:36.700Z
},
@@ -250,7 +250,7 @@ func TestSingleIngestionProtocols(t *testing.T) {
func TestClusterIngestionProtocols(t *testing.T) {
fs.MustRemoveDir(t.Name())
tc := at.NewTestCase(t)
tc := apptest.NewTestCase(t)
defer tc.Stop()
vmstorage := tc.MustStartVmstorage("vmstorage", []string{
"-storageDataPath=" + tc.Dir() + "/vmstorage",
@@ -266,31 +266,31 @@ func TestClusterIngestionProtocols(t *testing.T) {
type opts struct {
query string
wantMetrics []map[string]string
wantSamples []*at.Sample
wantSamples []*apptest.Sample
}
f := func(opts *opts) {
t.Helper()
wantResult := []*at.QueryResult{}
wantResult := []*apptest.QueryResult{}
for idx, wm := range opts.wantMetrics {
wantResult = append(wantResult, &at.QueryResult{
wantResult = append(wantResult, &apptest.QueryResult{
Metric: wm,
Samples: []*at.Sample{opts.wantSamples[idx]},
Samples: []*apptest.Sample{opts.wantSamples[idx]},
})
}
tc.Assert(&at.AssertOptions{
tc.Assert(&apptest.AssertOptions{
Msg: "unexpected /export query response",
Got: func() any {
got := vmselect.PrometheusAPIV1Export(t, opts.query, at.QueryOpts{
got := vmselect.PrometheusAPIV1Export(t, opts.query, apptest.QueryOpts{
Start: "2024-02-05T08:50:00.700Z",
End: "2024-02-05T09:00:00.700Z",
})
got.Sort()
return got
},
Want: &at.PrometheusAPIV1QueryResponse{Data: &at.QueryData{Result: wantResult}},
Want: &apptest.PrometheusAPIV1QueryResponse{Data: &apptest.QueryData{Result: wantResult}},
CmpOpts: []cmp.Option{
cmpopts.IgnoreFields(at.PrometheusAPIV1QueryResponse{}, "Status", "Data.ResultType"),
cmpopts.IgnoreFields(apptest.PrometheusAPIV1QueryResponse{}, "Status", "Data.ResultType"),
},
})
}
@@ -299,7 +299,7 @@ func TestClusterIngestionProtocols(t *testing.T) {
vminsert.PrometheusAPIV1ImportPrometheus(t, []string{
`importprometheus_series 10 1707123456700`, // 2024-02-05T08:57:36.700Z
`importprometheus_series2{label="foo",label1="value1"} 20 1707123456800`, // 2024-02-05T08:57:36.800Z
}, at.QueryOpts{
}, apptest.QueryOpts{
ExtraLabels: []string{"el1=elv1", "el2=elv2"},
})
vmstorage.ForceFlush(t)
@@ -319,7 +319,7 @@ func TestClusterIngestionProtocols(t *testing.T) {
"el2": "elv2",
},
},
wantSamples: []*at.Sample{
wantSamples: []*apptest.Sample{
{Timestamp: 1707123456700, Value: 10},
{Timestamp: 1707123456800, Value: 20},
},
@@ -329,7 +329,7 @@ func TestClusterIngestionProtocols(t *testing.T) {
vminsert.InfluxWrite(t, []string{
`influxline series1=10 1707123456700`, // 2024-02-05T08:57:36.700Z
`influxline,label=foo1,label1=value1,label2=value2 series2=40 1707123456800`, // 2024-02-05T08:57:36.800Z
}, at.QueryOpts{
}, apptest.QueryOpts{
ExtraLabels: []string{"el1=elv1", "el2=elv2"},
})
vmstorage.ForceFlush(t)
@@ -350,7 +350,7 @@ func TestClusterIngestionProtocols(t *testing.T) {
"el2": "elv2",
},
},
wantSamples: []*at.Sample{
wantSamples: []*apptest.Sample{
{Timestamp: 1707123456700, Value: 10},
{Timestamp: 1707123456800, Value: 40},
},
@@ -360,7 +360,7 @@ func TestClusterIngestionProtocols(t *testing.T) {
vminsert.PrometheusAPIV1ImportCSV(t, []string{
`GOOG,1.23,4.56,NYSE,1707123457`, // 2024-02-05T08:57:37.000Z
`MSFT,23,56,NASDAQ,1707123457`, // 2024-02-05T08:57:37.000Z
}, at.QueryOpts{
}, apptest.QueryOpts{
ExtraLabels: []string{"el1=elv1", "el2=elv2"},
Format: "2:metric:csv_import,3:metric:csv_import_v2,1:label:ticker,4:label:market,5:time:unix_s",
})
@@ -397,7 +397,7 @@ func TestClusterIngestionProtocols(t *testing.T) {
"el2": "elv2",
},
},
wantSamples: []*at.Sample{
wantSamples: []*apptest.Sample{
{Timestamp: 1707123457000, Value: 23}, // 2024-02-05T08:57:37.000Z
{Timestamp: 1707123457000, Value: 1.23}, // 2024-02-05T08:57:37.000Z
{Timestamp: 1707123457000, Value: 56}, // 2024-02-05T08:57:37.000Z
@@ -409,7 +409,7 @@ func TestClusterIngestionProtocols(t *testing.T) {
vminsert.OpenTSDBAPIPut(t, []string{
`{"metric":"opentsdbimport.foo","value":45.34, "timestamp": "1707123457"}`, // 2024-02-05T08:57:37.000Z
`{"metric":"opentsdbimport.bar","value":43, "timestamp": "1707123456"}`, // 2024-02-05T08:57:36.000Z
}, at.QueryOpts{
}, apptest.QueryOpts{
ExtraLabels: []string{"el1=elv1", "el2=elv2"},
})
vmstorage.ForceFlush(t)
@@ -427,22 +427,22 @@ func TestClusterIngestionProtocols(t *testing.T) {
"el2": "elv2",
},
},
wantSamples: []*at.Sample{
wantSamples: []*apptest.Sample{
{Timestamp: 1707123456000, Value: 43}, // 2024-02-05T08:57:36.000Z
{Timestamp: 1707123457000, Value: 45.34}, // 2024-02-05T08:57:37.000Z
},
})
// prometheus remote write format
pbData := []pb.TimeSeries{
pbData := []prompbmarshal.TimeSeries{
{
Labels: []pb.Label{
Labels: []prompbmarshal.Label{
{
Name: "__name__",
Value: "prometheusrw_series",
},
},
Samples: []pb.Sample{
Samples: []prompbmarshal.Sample{
{
Value: 10,
Timestamp: 1707123456700, // 2024-02-05T08:57:36.700Z
@@ -451,7 +451,7 @@ func TestClusterIngestionProtocols(t *testing.T) {
},
},
{
Labels: []pb.Label{
Labels: []prompbmarshal.Label{
{
Name: "__name__",
Value: "prometheusrw_series2",
@@ -465,7 +465,7 @@ func TestClusterIngestionProtocols(t *testing.T) {
Value: "value1",
},
},
Samples: []pb.Sample{
Samples: []prompbmarshal.Sample{
{
Value: 20,
Timestamp: 1707123456800, // 2024-02-05T08:57:36.800Z
@@ -473,7 +473,7 @@ func TestClusterIngestionProtocols(t *testing.T) {
},
},
}
vminsert.PrometheusAPIV1Write(t, pbData, at.QueryOpts{})
vminsert.PrometheusAPIV1Write(t, pbData, apptest.QueryOpts{})
vmstorage.ForceFlush(t)
f(&opts{
query: `{__name__=~"prometheusrw.+"}`,
@@ -487,7 +487,7 @@ func TestClusterIngestionProtocols(t *testing.T) {
"label1": "value1",
},
},
wantSamples: []*at.Sample{
wantSamples: []*apptest.Sample{
{Timestamp: 1707123456700, Value: 10}, // 2024-02-05T08:57:36.700Z
{Timestamp: 1707123456800, Value: 20}, // 2024-02-05T08:57:36.700Z
},

View File

@@ -6,7 +6,7 @@ import (
"github.com/google/go-cmp/cmp"
"github.com/google/go-cmp/cmp/cmpopts"
at "github.com/VictoriaMetrics/VictoriaMetrics/apptest"
"github.com/VictoriaMetrics/VictoriaMetrics/apptest"
)
// Data used in examples in
@@ -31,7 +31,7 @@ var docData = []string{
// TestSingleKeyConceptsQuery verifies cases from https://docs.victoriametrics.com/victoriametrics/keyconcepts/#query-data
// for vm-single.
func TestSingleKeyConceptsQuery(t *testing.T) {
tc := at.NewTestCase(t)
tc := apptest.NewTestCase(t)
defer tc.Stop()
sut := tc.MustStartDefaultVmsingle()
@@ -42,7 +42,7 @@ func TestSingleKeyConceptsQuery(t *testing.T) {
// TestClusterKeyConceptsQueryData verifies cases from https://docs.victoriametrics.com/victoriametrics/keyconcepts/#query-data
// for vm-cluster.
func TestClusterKeyConceptsQueryData(t *testing.T) {
tc := at.NewTestCase(t)
tc := apptest.NewTestCase(t)
defer tc.Stop()
sut := tc.MustStartDefaultCluster()
@@ -51,10 +51,10 @@ func TestClusterKeyConceptsQueryData(t *testing.T) {
}
// testKeyConceptsQueryData verifies cases from https://docs.victoriametrics.com/victoriametrics/keyconcepts/#query-data
func testKeyConceptsQueryData(t *testing.T, sut at.PrometheusWriteQuerier) {
func testKeyConceptsQueryData(t *testing.T, sut apptest.PrometheusWriteQuerier) {
// Insert example data from documentation.
sut.PrometheusAPIV1ImportPrometheus(t, docData, at.QueryOpts{})
sut.PrometheusAPIV1ImportPrometheus(t, docData, apptest.QueryOpts{})
sut.ForceFlush(t)
testInstantQuery(t, sut)
@@ -65,14 +65,14 @@ func testKeyConceptsQueryData(t *testing.T, sut at.PrometheusWriteQuerier) {
// testInstantQuery verifies the statements made in the `Instant query` section
// of the VictoriaMetrics documentation. See:
// https://docs.victoriametrics.com/victoriametrics/keyconcepts/#instant-query
func testInstantQuery(t *testing.T, q at.PrometheusQuerier) {
func testInstantQuery(t *testing.T, q apptest.PrometheusQuerier) {
// Get the value of the foo_bar time series at 2022-05-10T08:03:00Z with the
// step of 5m and timeout 5s. There is no sample at exactly this timestamp.
// Therefore, VictoriaMetrics will search for the nearest sample within the
// [time-5m..time] interval.
got := q.PrometheusAPIV1Query(t, "foo_bar", at.QueryOpts{Time: "2022-05-10T08:03:00.000Z", Step: "5m"})
want := at.NewPrometheusAPIV1QueryResponse(t, `{"data":{"result":[{"metric":{"__name__":"foo_bar"},"value":[1652169780,"3"]}]}}`)
opt := cmpopts.IgnoreFields(at.PrometheusAPIV1QueryResponse{}, "Status", "Data.ResultType")
got := q.PrometheusAPIV1Query(t, "foo_bar", apptest.QueryOpts{Time: "2022-05-10T08:03:00.000Z", Step: "5m"})
want := apptest.NewPrometheusAPIV1QueryResponse(t, `{"data":{"result":[{"metric":{"__name__":"foo_bar"},"value":[1652169780,"3"]}]}}`)
opt := cmpopts.IgnoreFields(apptest.PrometheusAPIV1QueryResponse{}, "Status", "Data.ResultType")
if diff := cmp.Diff(want, got, opt); diff != "" {
t.Errorf("unexpected response (-want, +got):\n%s", diff)
}
@@ -82,7 +82,7 @@ func testInstantQuery(t *testing.T, q at.PrometheusQuerier) {
// Therefore, VictoriaMetrics will search for the nearest sample within the
// [time-1m..time] interval. Since the nearest sample is 2m away and the
// step is 1m, then the VictoriaMetrics must return empty response.
got = q.PrometheusAPIV1Query(t, "foo_bar", at.QueryOpts{Time: "2022-05-10T08:18:00.000Z", Step: "1m"})
got = q.PrometheusAPIV1Query(t, "foo_bar", apptest.QueryOpts{Time: "2022-05-10T08:18:00.000Z", Step: "1m"})
if len(got.Data.Result) > 0 {
t.Errorf("unexpected response: got non-empty result, want empty result:\n%v", got)
}
@@ -91,14 +91,14 @@ func testInstantQuery(t *testing.T, q at.PrometheusQuerier) {
// testRangeQuery verifies the statements made in the `Range query` section of
// the VictoriaMetrics documentation. See:
// https://docs.victoriametrics.com/victoriametrics/keyconcepts/#range-query
func testRangeQuery(t *testing.T, q at.PrometheusQuerier) {
f := func(start, end, step string, wantSamples []*at.Sample) {
func testRangeQuery(t *testing.T, q apptest.PrometheusQuerier) {
f := func(start, end, step string, wantSamples []*apptest.Sample) {
t.Helper()
got := q.PrometheusAPIV1QueryRange(t, "foo_bar", at.QueryOpts{Start: start, End: end, Step: step})
want := at.NewPrometheusAPIV1QueryResponse(t, `{"data": {"result": [{"metric": {"__name__": "foo_bar"}, "values": []}]}}`)
got := q.PrometheusAPIV1QueryRange(t, "foo_bar", apptest.QueryOpts{Start: start, End: end, Step: step})
want := apptest.NewPrometheusAPIV1QueryResponse(t, `{"data": {"result": [{"metric": {"__name__": "foo_bar"}, "values": []}]}}`)
want.Data.Result[0].Samples = wantSamples
opt := cmpopts.IgnoreFields(at.PrometheusAPIV1QueryResponse{}, "Status", "Data.ResultType")
opt := cmpopts.IgnoreFields(apptest.PrometheusAPIV1QueryResponse{}, "Status", "Data.ResultType")
if diff := cmp.Diff(want, got, opt); diff != "" {
t.Errorf("unexpected response (-want, +got):\n%s", diff)
}
@@ -107,53 +107,53 @@ func testRangeQuery(t *testing.T, q at.PrometheusQuerier) {
// Verify the statement that the query result for
// [2022-05-10T07:59:00Z..2022-05-10T08:17:00Z] time range and 1m step will
// contain 17 points.
f("2022-05-10T07:59:00.000Z", "2022-05-10T08:17:00.000Z", "1m", []*at.Sample{
f("2022-05-10T07:59:00.000Z", "2022-05-10T08:17:00.000Z", "1m", []*apptest.Sample{
// Sample for 2022-05-10T07:59:00Z is missing because the time series has
// samples only starting from 8:00.
at.NewSample(t, "2022-05-10T08:00:00Z", 1),
at.NewSample(t, "2022-05-10T08:01:00Z", 2),
at.NewSample(t, "2022-05-10T08:02:00Z", 3),
at.NewSample(t, "2022-05-10T08:03:00Z", 3),
at.NewSample(t, "2022-05-10T08:04:00Z", 5),
at.NewSample(t, "2022-05-10T08:05:00Z", 5),
at.NewSample(t, "2022-05-10T08:06:00Z", 5.5),
at.NewSample(t, "2022-05-10T08:07:00Z", 5.5),
at.NewSample(t, "2022-05-10T08:08:00Z", 4),
at.NewSample(t, "2022-05-10T08:09:00Z", 4),
apptest.NewSample(t, "2022-05-10T08:00:00Z", 1),
apptest.NewSample(t, "2022-05-10T08:01:00Z", 2),
apptest.NewSample(t, "2022-05-10T08:02:00Z", 3),
apptest.NewSample(t, "2022-05-10T08:03:00Z", 3),
apptest.NewSample(t, "2022-05-10T08:04:00Z", 5),
apptest.NewSample(t, "2022-05-10T08:05:00Z", 5),
apptest.NewSample(t, "2022-05-10T08:06:00Z", 5.5),
apptest.NewSample(t, "2022-05-10T08:07:00Z", 5.5),
apptest.NewSample(t, "2022-05-10T08:08:00Z", 4),
apptest.NewSample(t, "2022-05-10T08:09:00Z", 4),
// Sample for 2022-05-10T08:10:00Z is missing because there is no sample
// within the [8:10 - 1m .. 8:10] interval.
at.NewSample(t, "2022-05-10T08:11:00Z", 3.5),
at.NewSample(t, "2022-05-10T08:12:00Z", 3.25),
at.NewSample(t, "2022-05-10T08:13:00Z", 3),
at.NewSample(t, "2022-05-10T08:14:00Z", 2),
at.NewSample(t, "2022-05-10T08:15:00Z", 1),
at.NewSample(t, "2022-05-10T08:16:00Z", 4),
at.NewSample(t, "2022-05-10T08:17:00Z", 4),
apptest.NewSample(t, "2022-05-10T08:11:00Z", 3.5),
apptest.NewSample(t, "2022-05-10T08:12:00Z", 3.25),
apptest.NewSample(t, "2022-05-10T08:13:00Z", 3),
apptest.NewSample(t, "2022-05-10T08:14:00Z", 2),
apptest.NewSample(t, "2022-05-10T08:15:00Z", 1),
apptest.NewSample(t, "2022-05-10T08:16:00Z", 4),
apptest.NewSample(t, "2022-05-10T08:17:00Z", 4),
})
// Verify the statement that a query is executed at start, start+step,
// start+2*step, …, step+N*step timestamps, where N is the whole number
// of steps that fit between start and end.
f("2022-05-10T08:00:01.000Z", "2022-05-10T08:02:00.000Z", "1m", []*at.Sample{
at.NewSample(t, "2022-05-10T08:00:01Z", 1),
at.NewSample(t, "2022-05-10T08:01:01Z", 2),
f("2022-05-10T08:00:01.000Z", "2022-05-10T08:02:00.000Z", "1m", []*apptest.Sample{
apptest.NewSample(t, "2022-05-10T08:00:01Z", 1),
apptest.NewSample(t, "2022-05-10T08:01:01Z", 2),
})
// Verify the statement that a query is executed at start, start+step,
// start+2*step, …, end timestamps, when end = start + N*step.
f("2022-05-10T08:00:00.000Z", "2022-05-10T08:02:00.000Z", "1m", []*at.Sample{
at.NewSample(t, "2022-05-10T08:00:00Z", 1),
at.NewSample(t, "2022-05-10T08:01:00Z", 2),
at.NewSample(t, "2022-05-10T08:02:00Z", 3),
f("2022-05-10T08:00:00.000Z", "2022-05-10T08:02:00.000Z", "1m", []*apptest.Sample{
apptest.NewSample(t, "2022-05-10T08:00:00Z", 1),
apptest.NewSample(t, "2022-05-10T08:01:00Z", 2),
apptest.NewSample(t, "2022-05-10T08:02:00Z", 3),
})
// If the step isnt set, then it defaults to 5m (5 minutes).
f("2022-05-10T07:59:00.000Z", "2022-05-10T08:17:00.000Z", "", []*at.Sample{
f("2022-05-10T07:59:00.000Z", "2022-05-10T08:17:00.000Z", "", []*apptest.Sample{
// Sample for 2022-05-10T07:59:00Z is missing because the time series has
// samples only starting from 8:00.
at.NewSample(t, "2022-05-10T08:04:00Z", 5),
at.NewSample(t, "2022-05-10T08:09:00Z", 4),
at.NewSample(t, "2022-05-10T08:14:00Z", 2),
apptest.NewSample(t, "2022-05-10T08:04:00Z", 5),
apptest.NewSample(t, "2022-05-10T08:09:00Z", 4),
apptest.NewSample(t, "2022-05-10T08:14:00Z", 2),
})
}
@@ -164,11 +164,11 @@ func testRangeQuery(t *testing.T, q at.PrometheusQuerier) {
// will not produce ephemeral points.
//
// See: https://docs.victoriametrics.com/victoriametrics/keyconcepts/#range-query
func testRangeQueryIsEquivalentToManyInstantQueries(t *testing.T, q at.PrometheusQuerier) {
f := func(timestamp string, want *at.Sample) {
func testRangeQueryIsEquivalentToManyInstantQueries(t *testing.T, q apptest.PrometheusQuerier) {
f := func(timestamp string, want *apptest.Sample) {
t.Helper()
gotInstant := q.PrometheusAPIV1Query(t, "foo_bar", at.QueryOpts{Time: timestamp, Step: "1m"})
gotInstant := q.PrometheusAPIV1Query(t, "foo_bar", apptest.QueryOpts{Time: timestamp, Step: "1m"})
if want == nil {
if got, want := len(gotInstant.Data.Result), 0; got != want {
t.Errorf("unexpected instant result size: got %d, want %d", got, want)
@@ -181,7 +181,7 @@ func testRangeQueryIsEquivalentToManyInstantQueries(t *testing.T, q at.Prometheu
}
}
rangeRes := q.PrometheusAPIV1QueryRange(t, "foo_bar", at.QueryOpts{
rangeRes := q.PrometheusAPIV1QueryRange(t, "foo_bar", apptest.QueryOpts{
Start: "2022-05-10T07:59:00.000Z",
End: "2022-05-10T08:17:00.000Z",
Step: "1m",
@@ -210,7 +210,7 @@ func testRangeQueryIsEquivalentToManyInstantQueries(t *testing.T, q at.Prometheu
}
func TestSingleMillisecondPrecisionInInstantQueries(t *testing.T) {
tc := at.NewTestCase(t)
tc := apptest.NewTestCase(t)
defer tc.Stop()
sut := tc.MustStartDefaultVmsingle()
@@ -219,7 +219,7 @@ func TestSingleMillisecondPrecisionInInstantQueries(t *testing.T) {
}
func TestClusterMillisecondPrecisionInInstantQueries(t *testing.T) {
tc := at.NewTestCase(t)
tc := apptest.NewTestCase(t)
defer tc.Stop()
sut := tc.MustStartDefaultCluster()
@@ -227,7 +227,7 @@ func TestClusterMillisecondPrecisionInInstantQueries(t *testing.T) {
testMillisecondPrecisionInInstantQueries(tc, sut)
}
func testMillisecondPrecisionInInstantQueries(tc *at.TestCase, sut at.PrometheusWriteQuerier) {
func testMillisecondPrecisionInInstantQueries(tc *apptest.TestCase, sut apptest.PrometheusWriteQuerier) {
t := tc.T()
type opts struct {
@@ -235,30 +235,30 @@ func testMillisecondPrecisionInInstantQueries(tc *at.TestCase, sut at.Prometheus
qtime string
step string
wantMetric map[string]string
wantSample *at.Sample
wantSamples []*at.Sample
wantSample *apptest.Sample
wantSamples []*apptest.Sample
}
f := func(sut at.PrometheusQuerier, opts *opts) {
f := func(sut apptest.PrometheusQuerier, opts *opts) {
t.Helper()
wantResult := []*at.QueryResult{}
wantResult := []*apptest.QueryResult{}
if opts.wantMetric != nil && (opts.wantSample != nil || len(opts.wantSamples) > 0) {
wantResult = append(wantResult, &at.QueryResult{
wantResult = append(wantResult, &apptest.QueryResult{
Metric: opts.wantMetric,
Sample: opts.wantSample,
Samples: opts.wantSamples,
})
}
tc.Assert(&at.AssertOptions{
tc.Assert(&apptest.AssertOptions{
Msg: "unexpected /api/v1/query response",
Got: func() any {
return sut.PrometheusAPIV1Query(t, opts.query, at.QueryOpts{
return sut.PrometheusAPIV1Query(t, opts.query, apptest.QueryOpts{
Time: opts.qtime,
Step: opts.step,
})
},
Want: &at.PrometheusAPIV1QueryResponse{Data: &at.QueryData{Result: wantResult}},
Want: &apptest.PrometheusAPIV1QueryResponse{Data: &apptest.QueryData{Result: wantResult}},
CmpOpts: []cmp.Option{
cmpopts.IgnoreFields(at.PrometheusAPIV1QueryResponse{}, "Status", "Data.ResultType"),
cmpopts.IgnoreFields(apptest.PrometheusAPIV1QueryResponse{}, "Status", "Data.ResultType"),
},
})
}
@@ -266,7 +266,7 @@ func testMillisecondPrecisionInInstantQueries(tc *at.TestCase, sut at.Prometheus
sut.PrometheusAPIV1ImportPrometheus(t, []string{
`series1{label="foo"} 10 1707123456700`, // 2024-02-05T08:57:36.700Z
`series1{label="foo"} 20 1707123456800`, // 2024-02-05T08:57:36.800Z
}, at.QueryOpts{})
}, apptest.QueryOpts{})
sut.ForceFlush(t)
// Verify that both points were created correctly. Fetch both points by
@@ -276,7 +276,7 @@ func testMillisecondPrecisionInInstantQueries(tc *at.TestCase, sut at.Prometheus
query: "series1[101ms]",
qtime: "1707123456800", // 2024-02-05T08:57:36.800Z
wantMetric: map[string]string{"__name__": "series1", "label": "foo"},
wantSamples: []*at.Sample{
wantSamples: []*apptest.Sample{
{Timestamp: 1707123456700, Value: 10},
{Timestamp: 1707123456800, Value: 20},
},
@@ -288,7 +288,7 @@ func testMillisecondPrecisionInInstantQueries(tc *at.TestCase, sut at.Prometheus
qtime: "1707123456700", // 2024-02-05T08:57:36.700Z
step: "1ms",
wantMetric: map[string]string{"__name__": "series1", "label": "foo"},
wantSample: &at.Sample{Timestamp: 1707123456700, Value: 10},
wantSample: &apptest.Sample{Timestamp: 1707123456700, Value: 10},
})
// Search the first point at 1ms past its own timestamp.
@@ -306,7 +306,7 @@ func testMillisecondPrecisionInInstantQueries(tc *at.TestCase, sut at.Prometheus
qtime: "1707123456800", // 2024-02-05T08:57:36.800Z
step: "1ms",
wantMetric: map[string]string{"__name__": "series1", "label": "foo"},
wantSample: &at.Sample{Timestamp: 1707123456800, Value: 20},
wantSample: &apptest.Sample{Timestamp: 1707123456800, Value: 20},
})
// Fetch the last point at its timestamp with step 1ms.
@@ -317,7 +317,7 @@ func testMillisecondPrecisionInInstantQueries(tc *at.TestCase, sut at.Prometheus
qtime: "1707123456801", // 2024-02-05T08:57:36.801Z
step: "1ms",
// wantMetric: map[string]string{"__name__": "series1", "label": "foo"},
// wantSample: &at.Sample{Timestamp: 1707123456801, Value: 20},
// wantSample: &apptest.Sample{Timestamp: 1707123456801, Value: 20},
})
// Insert samples with different dates. The difference in ms between the two
@@ -325,7 +325,7 @@ func testMillisecondPrecisionInInstantQueries(tc *at.TestCase, sut at.Prometheus
sut.PrometheusAPIV1ImportPrometheus(t, []string{
`series2{label="foo"} 10 1638564958042`, // 2021-12-03T20:55:58.042Z
`series2{label="foo"} 20 1642801537346`, // 2022-01-21T21:45:37.346Z
}, at.QueryOpts{})
}, apptest.QueryOpts{})
sut.ForceFlush(t)
// Both Prometheus and VictoriaMetrics exclude the leftmost millisecond,
@@ -335,7 +335,7 @@ func testMillisecondPrecisionInInstantQueries(tc *at.TestCase, sut at.Prometheus
qtime: "1642801537346",
step: "1ms",
wantMetric: map[string]string{"__name__": "series2", "label": "foo"},
wantSamples: []*at.Sample{
wantSamples: []*apptest.Sample{
{Timestamp: 1642801537346, Value: 20},
},
})
@@ -344,7 +344,7 @@ func testMillisecondPrecisionInInstantQueries(tc *at.TestCase, sut at.Prometheus
qtime: "1642801537346", // 2022-01-21T21:45:37.346Z
step: "1ms",
wantMetric: map[string]string{"label": "foo"},
wantSample: &at.Sample{Timestamp: 1642801537346, Value: 1},
wantSample: &apptest.Sample{Timestamp: 1642801537346, Value: 1},
})
// Adding 1ms to the duration (4236579305ms) causes queries to return 2
@@ -354,7 +354,7 @@ func testMillisecondPrecisionInInstantQueries(tc *at.TestCase, sut at.Prometheus
qtime: "1642801537346",
step: "1ms",
wantMetric: map[string]string{"__name__": "series2", "label": "foo"},
wantSamples: []*at.Sample{
wantSamples: []*apptest.Sample{
{Timestamp: 1638564958042, Value: 10}, // 2021-12-03T20:55:58.042Z
{Timestamp: 1642801537346, Value: 20},
},
@@ -364,6 +364,6 @@ func testMillisecondPrecisionInInstantQueries(tc *at.TestCase, sut at.Prometheus
qtime: "1642801537346", // 2022-01-21T21:45:37.346Z
step: "1ms",
wantMetric: map[string]string{"label": "foo"},
wantSample: &at.Sample{Timestamp: 1642801537346, Value: 2},
wantSample: &apptest.Sample{Timestamp: 1642801537346, Value: 2},
})
}

View File

@@ -8,13 +8,13 @@ import (
"github.com/google/go-cmp/cmp"
"github.com/google/go-cmp/cmp/cmpopts"
at "github.com/VictoriaMetrics/VictoriaMetrics/apptest"
"github.com/VictoriaMetrics/VictoriaMetrics/apptest"
"github.com/VictoriaMetrics/VictoriaMetrics/lib/fs"
)
func TestSingleMetricNamesStats(t *testing.T) {
fs.MustRemoveDir(t.Name())
tc := at.NewTestCase(t)
tc := apptest.NewTestCase(t)
defer tc.Stop()
sut := tc.MustStartVmsingle("vmsingle", []string{"-storage.trackMetricNamesStats=true", "-retentionPeriod=100y"})
@@ -34,101 +34,101 @@ func TestSingleMetricNamesStats(t *testing.T) {
for idx := range dataSet {
dataSet[idx] += ingestTimestamp
}
tsdbMetricNameEntryCmpOpts := cmpopts.IgnoreFields(at.TSDBStatusResponseMetricNameEntry{}, "LastRequestTimestamp")
tsdbMetricNameEntryCmpOpts := cmpopts.IgnoreFields(apptest.TSDBStatusResponseMetricNameEntry{}, "LastRequestTimestamp")
sut.PrometheusAPIV1ImportPrometheus(t, dataSet, at.QueryOpts{})
sut.PrometheusAPIV1ImportPrometheus(t, dataSet, apptest.QueryOpts{})
sut.ForceFlush(t)
// verify ingest request correctly registered
expected := at.MetricNamesStatsResponse{
Records: []at.MetricNamesStatsRecord{
expected := apptest.MetricNamesStatsResponse{
Records: []apptest.MetricNamesStatsRecord{
{MetricName: largeMetricName},
{MetricName: "metric_name_1"},
{MetricName: "metric_name_2"},
{MetricName: "metric_name_3"},
},
}
got := sut.APIV1StatusMetricNamesStats(t, "", "", "", at.QueryOpts{})
got := sut.APIV1StatusMetricNamesStats(t, "", "", "", apptest.QueryOpts{})
if diff := cmp.Diff(expected, got); diff != "" {
t.Errorf("unexpected response (-want, +got):\n%s", diff)
}
// verify query request correctly registered
sut.PrometheusAPIV1Query(t, `{__name__!=""}`, at.QueryOpts{Time: ingestDateTime})
expected = at.MetricNamesStatsResponse{
Records: []at.MetricNamesStatsRecord{
sut.PrometheusAPIV1Query(t, `{__name__!=""}`, apptest.QueryOpts{Time: ingestDateTime})
expected = apptest.MetricNamesStatsResponse{
Records: []apptest.MetricNamesStatsRecord{
{MetricName: largeMetricName, QueryRequestsCount: 1},
{MetricName: "metric_name_1", QueryRequestsCount: 3},
{MetricName: "metric_name_2", QueryRequestsCount: 1},
{MetricName: "metric_name_3", QueryRequestsCount: 1},
},
}
got = sut.APIV1StatusMetricNamesStats(t, "", "", "", at.QueryOpts{})
got = sut.APIV1StatusMetricNamesStats(t, "", "", "", apptest.QueryOpts{})
if diff := cmp.Diff(expected, got); diff != "" {
t.Errorf("unexpected response (-want, +got):\n%s", diff)
}
expectedStatsResponse := at.TSDBStatusResponse{
Data: at.TSDBStatusResponseData{
expectedStatsResponse := apptest.TSDBStatusResponse{
Data: apptest.TSDBStatusResponseData{
TotalSeries: 6,
TotalLabelValuePairs: 12,
SeriesCountByMetricName: []at.TSDBStatusResponseMetricNameEntry{
SeriesCountByMetricName: []apptest.TSDBStatusResponseMetricNameEntry{
{Name: "metric_name_1", RequestsCount: 3},
{Name: largeMetricName, RequestsCount: 1},
{Name: "metric_name_2", RequestsCount: 1},
{Name: "metric_name_3", RequestsCount: 1},
},
SeriesCountByLabelName: []at.TSDBStatusResponseEntry{{Name: "__name__"}, {Name: "label"}},
SeriesCountByFocusLabelValue: []at.TSDBStatusResponseEntry{},
SeriesCountByLabelValuePair: []at.TSDBStatusResponseEntry{
SeriesCountByLabelName: []apptest.TSDBStatusResponseEntry{{Name: "__name__"}, {Name: "label"}},
SeriesCountByFocusLabelValue: []apptest.TSDBStatusResponseEntry{},
SeriesCountByLabelValuePair: []apptest.TSDBStatusResponseEntry{
{Name: "__name__=" + largeMetricName},
{Name: "__name__=metric_name_1"}, {Name: "label=baz"},
{Name: "__name__=metric_name_2"}, {Name: "__name__=metric_name_3"},
{Name: "label=bar"}, {Name: "label=foo"},
},
LabelValueCountByLabelName: []at.TSDBStatusResponseEntry{{Name: "__name__"}, {Name: "label"}},
LabelValueCountByLabelName: []apptest.TSDBStatusResponseEntry{{Name: "__name__"}, {Name: "label"}},
},
}
expectedStatsResponse.Sort()
gotStatus := sut.APIV1StatusTSDB(t, "", date, "", at.QueryOpts{})
gotStatus := sut.APIV1StatusTSDB(t, "", date, "", apptest.QueryOpts{})
if diff := cmp.Diff(expectedStatsResponse, gotStatus, tsdbMetricNameEntryCmpOpts); diff != "" {
t.Errorf("unexpected APIV1StatusTSDB response (-want, +got):\n%s", diff)
}
// perform query request for single metric and check counter increase
sut.PrometheusAPIV1Query(t, `metric_name_2`, at.QueryOpts{Time: ingestDateTime})
expected = at.MetricNamesStatsResponse{
Records: []at.MetricNamesStatsRecord{
sut.PrometheusAPIV1Query(t, `metric_name_2`, apptest.QueryOpts{Time: ingestDateTime})
expected = apptest.MetricNamesStatsResponse{
Records: []apptest.MetricNamesStatsRecord{
{MetricName: largeMetricName, QueryRequestsCount: 1},
{MetricName: "metric_name_1", QueryRequestsCount: 3},
{MetricName: "metric_name_2", QueryRequestsCount: 2},
{MetricName: "metric_name_3", QueryRequestsCount: 1},
},
}
got = sut.APIV1StatusMetricNamesStats(t, "", "", "", at.QueryOpts{})
got = sut.APIV1StatusMetricNamesStats(t, "", "", "", apptest.QueryOpts{})
if diff := cmp.Diff(expected, got); diff != "" {
t.Errorf("unexpected response (-want, +got):\n%s", diff)
}
// verify le filter
expected = at.MetricNamesStatsResponse{
Records: []at.MetricNamesStatsRecord{
expected = apptest.MetricNamesStatsResponse{
Records: []apptest.MetricNamesStatsRecord{
{MetricName: largeMetricName, QueryRequestsCount: 1},
{MetricName: "metric_name_2", QueryRequestsCount: 2},
{MetricName: "metric_name_3", QueryRequestsCount: 1},
},
}
got = sut.APIV1StatusMetricNamesStats(t, "", "2", "", at.QueryOpts{})
got = sut.APIV1StatusMetricNamesStats(t, "", "2", "", apptest.QueryOpts{})
if diff := cmp.Diff(expected, got); diff != "" {
t.Errorf("unexpected response (-want, +got):\n%s", diff)
}
// reset state and check empty request response
sut.APIV1AdminStatusMetricNamesStatsReset(t, at.QueryOpts{})
expected = at.MetricNamesStatsResponse{
Records: []at.MetricNamesStatsRecord{},
sut.APIV1AdminStatusMetricNamesStatsReset(t, apptest.QueryOpts{})
expected = apptest.MetricNamesStatsResponse{
Records: []apptest.MetricNamesStatsRecord{},
}
got = sut.APIV1StatusMetricNamesStats(t, "", "", "", at.QueryOpts{})
got = sut.APIV1StatusMetricNamesStats(t, "", "", "", apptest.QueryOpts{})
if diff := cmp.Diff(expected, got); diff != "" {
t.Errorf("unexpected response (-want, +got):\n%s", diff)
}
@@ -138,7 +138,7 @@ func TestSingleMetricNamesStats(t *testing.T) {
func TestClusterMetricNamesStats(t *testing.T) {
fs.MustRemoveDir(t.Name())
tc := at.NewTestCase(t)
tc := apptest.NewTestCase(t)
defer tc.Stop()
vmstorage1 := tc.MustStartVmstorage("vmstorage-1", []string{
"-storageDataPath=" + tc.Dir() + "/vmstorage-1",
@@ -158,7 +158,7 @@ func TestClusterMetricNamesStats(t *testing.T) {
fmt.Sprintf("-storageNode=%s,%s", vmstorage1.VmselectAddr(), vmstorage2.VmselectAddr()),
})
// verify empty stats
resp := vmselect.MetricNamesStats(t, "", "", "", at.QueryOpts{Tenant: "0:0"})
resp := vmselect.MetricNamesStats(t, "", "", "", apptest.QueryOpts{Tenant: "0:0"})
if len(resp.Records) != 0 {
t.Fatalf("unexpected resp Records: %d, want: %d", len(resp.Records), 0)
}
@@ -180,92 +180,92 @@ func TestClusterMetricNamesStats(t *testing.T) {
dataSet[idx] += ingestTimestamp
}
tsdbMetricNameEntryCmpOpts := cmpopts.IgnoreFields(at.TSDBStatusResponseMetricNameEntry{}, "LastRequestTimestamp")
tsdbMetricNameEntryCmpOpts := cmpopts.IgnoreFields(apptest.TSDBStatusResponseMetricNameEntry{}, "LastRequestTimestamp")
// ingest per tenant data and verify it with search
tenantIDs := []string{"1:1", "1:15", "15:15"}
for _, tenantID := range tenantIDs {
vminsert.PrometheusAPIV1ImportPrometheus(t, dataSet, at.QueryOpts{Tenant: tenantID})
vminsert.PrometheusAPIV1ImportPrometheus(t, dataSet, apptest.QueryOpts{Tenant: tenantID})
vmstorage1.ForceFlush(t)
vmstorage2.ForceFlush(t)
// verify ingest request correctly registered
expected := at.MetricNamesStatsResponse{
Records: []at.MetricNamesStatsRecord{
expected := apptest.MetricNamesStatsResponse{
Records: []apptest.MetricNamesStatsRecord{
{MetricName: largeMetricName},
{MetricName: "metric_name_1"},
{MetricName: "metric_name_2"},
{MetricName: "metric_name_3"},
},
}
gotStats := vmselect.MetricNamesStats(t, "", "", "", at.QueryOpts{Tenant: tenantID})
gotStats := vmselect.MetricNamesStats(t, "", "", "", apptest.QueryOpts{Tenant: tenantID})
if diff := cmp.Diff(expected, gotStats); diff != "" {
t.Errorf("unexpected response (-want, +got):\n%s", diff)
}
// verify query request registered correctly
vmselect.PrometheusAPIV1Query(t, `{__name__!=""}`, at.QueryOpts{
vmselect.PrometheusAPIV1Query(t, `{__name__!=""}`, apptest.QueryOpts{
Tenant: tenantID, Time: ingestDateTime,
})
expected = at.MetricNamesStatsResponse{
Records: []at.MetricNamesStatsRecord{
expected = apptest.MetricNamesStatsResponse{
Records: []apptest.MetricNamesStatsRecord{
{MetricName: largeMetricName, QueryRequestsCount: 1},
{MetricName: "metric_name_2", QueryRequestsCount: 1},
{MetricName: "metric_name_3", QueryRequestsCount: 1},
{MetricName: "metric_name_1", QueryRequestsCount: 3},
},
}
gotStats = vmselect.MetricNamesStats(t, "", "", "", at.QueryOpts{Tenant: tenantID})
gotStats = vmselect.MetricNamesStats(t, "", "", "", apptest.QueryOpts{Tenant: tenantID})
if diff := cmp.Diff(expected, gotStats); diff != "" {
t.Errorf("unexpected response tenant: %s (-want, +got):\n%s", tenantID, diff)
}
expectedStatsResponse := at.TSDBStatusResponse{
Data: at.TSDBStatusResponseData{
expectedStatsResponse := apptest.TSDBStatusResponse{
Data: apptest.TSDBStatusResponseData{
TotalSeries: 6,
TotalLabelValuePairs: 12,
SeriesCountByMetricName: []at.TSDBStatusResponseMetricNameEntry{
SeriesCountByMetricName: []apptest.TSDBStatusResponseMetricNameEntry{
{Name: "metric_name_1", RequestsCount: 3},
{Name: largeMetricName, RequestsCount: 1},
{Name: "metric_name_2", RequestsCount: 1},
{Name: "metric_name_3", RequestsCount: 1},
},
SeriesCountByLabelName: []at.TSDBStatusResponseEntry{{Name: "__name__"}, {Name: "label"}},
SeriesCountByFocusLabelValue: []at.TSDBStatusResponseEntry{},
SeriesCountByLabelValuePair: []at.TSDBStatusResponseEntry{
SeriesCountByLabelName: []apptest.TSDBStatusResponseEntry{{Name: "__name__"}, {Name: "label"}},
SeriesCountByFocusLabelValue: []apptest.TSDBStatusResponseEntry{},
SeriesCountByLabelValuePair: []apptest.TSDBStatusResponseEntry{
{Name: "__name__=" + largeMetricName},
{Name: "__name__=metric_name_1"}, {Name: "label=baz"},
{Name: "__name__=metric_name_2"}, {Name: "__name__=metric_name_3"},
{Name: "label=bar"}, {Name: "label=foo"},
},
LabelValueCountByLabelName: []at.TSDBStatusResponseEntry{{Name: "__name__"}, {Name: "label"}},
LabelValueCountByLabelName: []apptest.TSDBStatusResponseEntry{{Name: "__name__"}, {Name: "label"}},
},
}
expectedStatsResponse.Sort()
gotStatus := vmselect.APIV1StatusTSDB(t, "", date, "", at.QueryOpts{Tenant: tenantID})
gotStatus := vmselect.APIV1StatusTSDB(t, "", date, "", apptest.QueryOpts{Tenant: tenantID})
if diff := cmp.Diff(expectedStatsResponse, gotStatus, tsdbMetricNameEntryCmpOpts); diff != "" {
t.Errorf("unexpected APIV1StatusTSDB response tenant: %s (-want, +got):\n%s", tenantID, diff)
}
}
// verify multitenant stats
expected := at.MetricNamesStatsResponse{
Records: []at.MetricNamesStatsRecord{
expected := apptest.MetricNamesStatsResponse{
Records: []apptest.MetricNamesStatsRecord{
{MetricName: largeMetricName, QueryRequestsCount: 3},
{MetricName: "metric_name_2", QueryRequestsCount: 3},
{MetricName: "metric_name_3", QueryRequestsCount: 3},
{MetricName: "metric_name_1", QueryRequestsCount: 9},
},
}
gotStats := vmselect.MetricNamesStats(t, "", "", "", at.QueryOpts{Tenant: "multitenant"})
gotStats := vmselect.MetricNamesStats(t, "", "", "", apptest.QueryOpts{Tenant: "multitenant"})
if diff := cmp.Diff(expected, gotStats); diff != "" {
t.Errorf("unexpected response (-want, +got):\n%s", diff)
}
// reset cache and check empty state
vmselect.MetricNamesStatsReset(t, at.QueryOpts{})
resp = vmselect.MetricNamesStats(t, "", "", "", at.QueryOpts{Tenant: "multitenant"})
vmselect.MetricNamesStatsReset(t, apptest.QueryOpts{})
resp = vmselect.MetricNamesStats(t, "", "", "", apptest.QueryOpts{Tenant: "multitenant"})
if len(resp.Records) != 0 {
t.Fatalf("want 0 records, got: %d", len(resp.Records))
}

View File

@@ -11,7 +11,7 @@ import (
"github.com/VictoriaMetrics/VictoriaMetrics/apptest"
"github.com/VictoriaMetrics/VictoriaMetrics/lib/decimal"
pb "github.com/VictoriaMetrics/VictoriaMetrics/lib/prompbmarshal"
"github.com/VictoriaMetrics/VictoriaMetrics/lib/prompbmarshal"
)
func millis(s string) int64 {
@@ -47,13 +47,13 @@ func TestClusterInstantQuery(t *testing.T) {
}
func testInstantQueryWithUTFNames(t *testing.T, sut apptest.PrometheusWriteQuerier) {
data := []pb.TimeSeries{
data := []prompbmarshal.TimeSeries{
{
Labels: []pb.Label{
Labels: []prompbmarshal.Label{
{Name: "__name__", Value: "3fooµ¥"},
{Name: "3👋tfにちは", Value: "漢©®€£"},
},
Samples: []pb.Sample{
Samples: []prompbmarshal.Sample{
{Value: 1, Timestamp: millis("2024-01-01T00:01:00Z")},
},
},
@@ -89,16 +89,16 @@ func testInstantQueryWithUTFNames(t *testing.T, sut apptest.PrometheusWriteQueri
fn(`{"3👋tfにちは"="漢©®€£"}`)
}
var staleNaNsData = func() []pb.TimeSeries {
return []pb.TimeSeries{
var staleNaNsData = func() []prompbmarshal.TimeSeries {
return []prompbmarshal.TimeSeries{
{
Labels: []pb.Label{
Labels: []prompbmarshal.Label{
{
Name: "__name__",
Value: "metric",
},
},
Samples: []pb.Sample{
Samples: []prompbmarshal.Sample{
{
Value: 1,
Timestamp: millis("2024-01-01T00:01:00Z"),
@@ -185,20 +185,20 @@ func testInstantQueryDoesNotReturnStaleNaNs(t *testing.T, sut apptest.Prometheus
// However, conversion of math.NaN to int64 could behave differently depending on platform and Go version.
// Hence, this test could succeed for some platforms even if fix is rolled back.
func testQueryRangeWithAtModifier(t *testing.T, sut apptest.PrometheusWriteQuerier) {
data := []pb.TimeSeries{
data := []prompbmarshal.TimeSeries{
{
Labels: []pb.Label{
Labels: []prompbmarshal.Label{
{Name: "__name__", Value: "up"},
},
Samples: []pb.Sample{
Samples: []prompbmarshal.Sample{
{Value: 1, Timestamp: millis("2025-01-01T00:01:00Z")},
},
},
{
Labels: []pb.Label{
Labels: []prompbmarshal.Label{
{Name: "__name__", Value: "metricNaN"},
},
Samples: []pb.Sample{
Samples: []prompbmarshal.Sample{
{Value: decimal.StaleNaN, Timestamp: millis("2025-01-01T00:01:00Z")},
},
},

View File

@@ -4,14 +4,14 @@ import (
"fmt"
"testing"
at "github.com/VictoriaMetrics/VictoriaMetrics/apptest"
"github.com/VictoriaMetrics/VictoriaMetrics/apptest"
)
func TestSingleSearchWithDisabledPerDayIndex(t *testing.T) {
tc := at.NewTestCase(t)
tc := apptest.NewTestCase(t)
defer tc.Stop()
testSearchWithDisabledPerDayIndex(tc, func(name string, disablePerDayIndex bool) at.PrometheusWriteQuerier {
testSearchWithDisabledPerDayIndex(tc, func(name string, disablePerDayIndex bool) apptest.PrometheusWriteQuerier {
return tc.MustStartVmsingle("vmsingle-"+name, []string{
"-storageDataPath=" + tc.Dir() + "/vmsingle",
"-retentionPeriod=100y",
@@ -22,10 +22,10 @@ func TestSingleSearchWithDisabledPerDayIndex(t *testing.T) {
}
func TestClusterSearchWithDisabledPerDayIndex(t *testing.T) {
tc := at.NewTestCase(t)
tc := apptest.NewTestCase(t)
defer tc.Stop()
testSearchWithDisabledPerDayIndex(tc, func(name string, disablePerDayIndex bool) at.PrometheusWriteQuerier {
testSearchWithDisabledPerDayIndex(tc, func(name string, disablePerDayIndex bool) apptest.PrometheusWriteQuerier {
// Using static ports for vmstorage because random ports may cause
// changes in how data is sharded.
vmstorage1 := tc.MustStartVmstorage("vmstorage1-"+name, []string{
@@ -51,15 +51,15 @@ func TestClusterSearchWithDisabledPerDayIndex(t *testing.T) {
"-storageNode=" + vmstorage1.VmselectAddr() + "," + vmstorage2.VmselectAddr(),
"-search.maxStalenessInterval=1m",
})
return &at.Vmcluster{
Vmstorages: []*at.Vmstorage{vmstorage1, vmstorage2},
return &apptest.Vmcluster{
Vmstorages: []*apptest.Vmstorage{vmstorage1, vmstorage2},
Vminsert: vminsert,
Vmselect: vmselect,
}
})
}
type startSUTFunc func(name string, disablePerDayIndex bool) at.PrometheusWriteQuerier
type startSUTFunc func(name string, disablePerDayIndex bool) apptest.PrometheusWriteQuerier
// testDisablePerDayIndex_Search shows what search results to expect when data
// is first inserted with per-day index enabled and then with per-day index
@@ -70,41 +70,41 @@ type startSUTFunc func(name string, disablePerDayIndex bool) at.PrometheusWriteQ
//
// The data inserted with disabled per-day index is not searchable with per-day
// index enabled unless the search time range is > 40 days.
func testSearchWithDisabledPerDayIndex(tc *at.TestCase, start startSUTFunc) {
func testSearchWithDisabledPerDayIndex(tc *apptest.TestCase, start startSUTFunc) {
t := tc.T()
type opts struct {
start, end string
wantSeries []map[string]string
wantQueryResults []*at.QueryResult
wantQueryResults []*apptest.QueryResult
}
assertSearchResults := func(sut at.PrometheusQuerier, opts *opts) {
assertSearchResults := func(sut apptest.PrometheusQuerier, opts *opts) {
t.Helper()
tc.Assert(&at.AssertOptions{
tc.Assert(&apptest.AssertOptions{
Msg: "unexpected /api/v1/series response",
Got: func() any {
return sut.PrometheusAPIV1Series(t, `{__name__=~".*"}`, at.QueryOpts{
return sut.PrometheusAPIV1Series(t, `{__name__=~".*"}`, apptest.QueryOpts{
Start: opts.start,
End: opts.end,
}).Sort()
},
Want: &at.PrometheusAPIV1SeriesResponse{
Want: &apptest.PrometheusAPIV1SeriesResponse{
Status: "success",
Data: opts.wantSeries,
},
})
tc.Assert(&at.AssertOptions{
tc.Assert(&apptest.AssertOptions{
Msg: "unexpected /api/v1/query_range response",
Got: func() any {
return sut.PrometheusAPIV1QueryRange(t, `{__name__=~".*"}`, at.QueryOpts{
return sut.PrometheusAPIV1QueryRange(t, `{__name__=~".*"}`, apptest.QueryOpts{
Start: opts.start,
End: opts.end,
Step: "1d",
})
},
Want: &at.PrometheusAPIV1QueryResponse{
Want: &apptest.PrometheusAPIV1QueryResponse{
Status: "success",
Data: &at.QueryData{
Data: &apptest.QueryData{
ResultType: "matrix",
Result: opts.wantQueryResults,
},
@@ -116,16 +116,16 @@ func testSearchWithDisabledPerDayIndex(tc *at.TestCase, start startSUTFunc) {
// is searchable.
sut := start("with-per-day-index", false)
sample1 := []string{"metric1 111 1704067200000"} // 2024-01-01T00:00:00Z
sut.PrometheusAPIV1ImportPrometheus(t, sample1, at.QueryOpts{})
sut.PrometheusAPIV1ImportPrometheus(t, sample1, apptest.QueryOpts{})
sut.ForceFlush(t)
assertSearchResults(sut, &opts{
start: "2024-01-01T00:00:00Z",
end: "2024-01-01T23:59:59Z",
wantSeries: []map[string]string{{"__name__": "metric1"}},
wantQueryResults: []*at.QueryResult{
wantQueryResults: []*apptest.QueryResult{
{
Metric: map[string]string{"__name__": "metric1"},
Samples: []*at.Sample{{Timestamp: 1704067200000, Value: float64(111)}},
Samples: []*apptest.Sample{{Timestamp: 1704067200000, Value: float64(111)}},
},
},
})
@@ -135,7 +135,7 @@ func testSearchWithDisabledPerDayIndex(tc *at.TestCase, start startSUTFunc) {
tc.StopPrometheusWriteQuerier(sut)
sut = start("without-per-day-index", true)
sample2 := []string{"metric2 222 1704067200000"} // 2024-01-01T00:00:00Z
sut.PrometheusAPIV1ImportPrometheus(t, sample2, at.QueryOpts{})
sut.PrometheusAPIV1ImportPrometheus(t, sample2, apptest.QueryOpts{})
sut.ForceFlush(t)
assertSearchResults(sut, &opts{
start: "2024-01-01T00:00:00Z",
@@ -144,14 +144,14 @@ func testSearchWithDisabledPerDayIndex(tc *at.TestCase, start startSUTFunc) {
{"__name__": "metric1"},
{"__name__": "metric2"},
},
wantQueryResults: []*at.QueryResult{
wantQueryResults: []*apptest.QueryResult{
{
Metric: map[string]string{"__name__": "metric1"},
Samples: []*at.Sample{{Timestamp: 1704067200000, Value: float64(111)}},
Samples: []*apptest.Sample{{Timestamp: 1704067200000, Value: float64(111)}},
},
{
Metric: map[string]string{"__name__": "metric2"},
Samples: []*at.Sample{{Timestamp: 1704067200000, Value: float64(222)}},
Samples: []*apptest.Sample{{Timestamp: 1704067200000, Value: float64(222)}},
},
},
})
@@ -165,7 +165,7 @@ func testSearchWithDisabledPerDayIndex(tc *at.TestCase, start startSUTFunc) {
// - sample2 is not searchable when the time range is <= 40 days
// - sample2 becomes searchable when the time range is > 40 days
sample3 := []string{"metric1 333 1705708800000"} // 2024-01-20T00:00:00Z
sut.PrometheusAPIV1ImportPrometheus(t, sample3, at.QueryOpts{})
sut.PrometheusAPIV1ImportPrometheus(t, sample3, apptest.QueryOpts{})
sut.ForceFlush(t)
tc.StopPrometheusWriteQuerier(sut)
sut = start("with-per-day-index2", false)
@@ -177,10 +177,10 @@ func testSearchWithDisabledPerDayIndex(tc *at.TestCase, start startSUTFunc) {
wantSeries: []map[string]string{
{"__name__": "metric1"},
},
wantQueryResults: []*at.QueryResult{
wantQueryResults: []*apptest.QueryResult{
{
Metric: map[string]string{"__name__": "metric1"},
Samples: []*at.Sample{{Timestamp: 1704067200000, Value: float64(111)}},
Samples: []*apptest.Sample{{Timestamp: 1704067200000, Value: float64(111)}},
},
},
})
@@ -190,7 +190,7 @@ func testSearchWithDisabledPerDayIndex(tc *at.TestCase, start startSUTFunc) {
start: "2024-01-20T00:00:00Z",
end: "2024-01-20T23:59:59Z",
wantSeries: []map[string]string{},
wantQueryResults: []*at.QueryResult{},
wantQueryResults: []*apptest.QueryResult{},
})
// Time range is 20 days (Jan 1st-20th) <= 40 days
@@ -200,10 +200,10 @@ func testSearchWithDisabledPerDayIndex(tc *at.TestCase, start startSUTFunc) {
wantSeries: []map[string]string{
{"__name__": "metric1"},
},
wantQueryResults: []*at.QueryResult{
wantQueryResults: []*apptest.QueryResult{
{
Metric: map[string]string{"__name__": "metric1"},
Samples: []*at.Sample{
Samples: []*apptest.Sample{
{Timestamp: 1704067200000, Value: float64(111)},
{Timestamp: 1705708800000, Value: float64(333)},
},
@@ -219,17 +219,17 @@ func testSearchWithDisabledPerDayIndex(tc *at.TestCase, start startSUTFunc) {
{"__name__": "metric1"},
{"__name__": "metric2"},
},
wantQueryResults: []*at.QueryResult{
wantQueryResults: []*apptest.QueryResult{
{
Metric: map[string]string{"__name__": "metric1"},
Samples: []*at.Sample{
Samples: []*apptest.Sample{
{Timestamp: 1704067200000, Value: float64(111)},
{Timestamp: 1705708800000, Value: float64(333)},
},
},
{
Metric: map[string]string{"__name__": "metric2"},
Samples: []*at.Sample{
Samples: []*apptest.Sample{
{Timestamp: 1704067200000, Value: float64(222)},
},
},
@@ -246,7 +246,7 @@ func TestSingleActiveTimeseriesMetric_disabledPerDayIndex(t *testing.T) {
}
func testSingleActiveTimeseriesMetric(t *testing.T, disablePerDayIndex bool) {
tc := at.NewTestCase(t)
tc := apptest.NewTestCase(t)
defer tc.Stop()
vmsingle := tc.MustStartVmsingle("vmsingle", []string{
@@ -268,7 +268,7 @@ func TestClusterActiveTimeseriesMetric_disabledPerDayIndex(t *testing.T) {
}
func testClusterActiveTimeseriesMetric(t *testing.T, disablePerDayIndex bool) {
tc := at.NewTestCase(t)
tc := apptest.NewTestCase(t)
defer tc.Stop()
vmstorage1 := tc.MustStartVmstorage("vmstorage1", []string{
@@ -283,8 +283,8 @@ func testClusterActiveTimeseriesMetric(t *testing.T, disablePerDayIndex bool) {
"-storageNode=" + vmstorage1.VminsertAddr() + "," + vmstorage2.VminsertAddr(),
})
vmcluster := &at.Vmcluster{
Vmstorages: []*at.Vmstorage{vmstorage1, vmstorage2},
vmcluster := &apptest.Vmcluster{
Vmstorages: []*apptest.Vmstorage{vmstorage1, vmstorage2},
Vminsert: vminsert,
}
@@ -295,16 +295,16 @@ func testClusterActiveTimeseriesMetric(t *testing.T, disablePerDayIndex bool) {
})
}
func testActiveTimeseriesMetric(tc *at.TestCase, sut at.PrometheusWriteQuerier, getActiveTimeseries func() int) {
func testActiveTimeseriesMetric(tc *apptest.TestCase, sut apptest.PrometheusWriteQuerier, getActiveTimeseries func() int) {
t := tc.T()
const numSamples = 1000
samples := make([]string, numSamples)
for i := range numSamples {
samples[i] = fmt.Sprintf("metric_%03d %d", i, i)
}
sut.PrometheusAPIV1ImportPrometheus(t, samples, at.QueryOpts{})
sut.PrometheusAPIV1ImportPrometheus(t, samples, apptest.QueryOpts{})
sut.ForceFlush(t)
tc.Assert(&at.AssertOptions{
tc.Assert(&apptest.AssertOptions{
Msg: `unexpected vm_cache_entries{type="storage/hour_metric_ids"} metric value`,
Got: func() any {
return getActiveTimeseries()

View File

@@ -7,13 +7,13 @@ import (
"github.com/google/go-cmp/cmp"
"github.com/google/go-cmp/cmp/cmpopts"
at "github.com/VictoriaMetrics/VictoriaMetrics/apptest"
"github.com/VictoriaMetrics/VictoriaMetrics/apptest"
"github.com/VictoriaMetrics/VictoriaMetrics/lib/fs"
pb "github.com/VictoriaMetrics/VictoriaMetrics/lib/prompbmarshal"
"github.com/VictoriaMetrics/VictoriaMetrics/lib/prompbmarshal"
)
func TestSingleIngestionWithRelabeling(t *testing.T) {
tc := at.NewTestCase(t)
tc := apptest.NewTestCase(t)
defer tc.Stop()
const relabelFileName = "relabel_config.yaml"
relabelingRules := `
@@ -53,29 +53,29 @@ func TestSingleIngestionWithRelabeling(t *testing.T) {
qtime string
step string
wantMetrics []map[string]string
wantSamples []*at.Sample
wantSamples []*apptest.Sample
}
f := func(sut at.PrometheusQuerier, opts *opts) {
f := func(sut apptest.PrometheusQuerier, opts *opts) {
t.Helper()
wantResult := []*at.QueryResult{}
wantResult := []*apptest.QueryResult{}
for idx, wm := range opts.wantMetrics {
wantResult = append(wantResult, &at.QueryResult{
wantResult = append(wantResult, &apptest.QueryResult{
Metric: wm,
Samples: []*at.Sample{opts.wantSamples[idx]},
Samples: []*apptest.Sample{opts.wantSamples[idx]},
})
}
tc.Assert(&at.AssertOptions{
tc.Assert(&apptest.AssertOptions{
Msg: "unexpected /api/v1/query response",
Got: func() any {
return sut.PrometheusAPIV1Query(t, opts.query, at.QueryOpts{
return sut.PrometheusAPIV1Query(t, opts.query, apptest.QueryOpts{
Time: opts.qtime,
Step: opts.step,
})
},
Want: &at.PrometheusAPIV1QueryResponse{Data: &at.QueryData{Result: wantResult}},
Want: &apptest.PrometheusAPIV1QueryResponse{Data: &apptest.QueryData{Result: wantResult}},
CmpOpts: []cmp.Option{
cmpopts.IgnoreFields(at.PrometheusAPIV1QueryResponse{}, "Status", "Data.ResultType"),
cmpopts.IgnoreFields(apptest.PrometheusAPIV1QueryResponse{}, "Status", "Data.ResultType"),
},
})
}
@@ -83,7 +83,7 @@ func TestSingleIngestionWithRelabeling(t *testing.T) {
sut.PrometheusAPIV1ImportPrometheus(t, []string{
`importprometheus_series{label="foo"} 10 1707123456700`, // 2024-02-05T08:57:36.700Z
`must_drop_series{label="foo"} 20 1707123456800`, // 2024-02-05T08:57:36.800Z
}, at.QueryOpts{})
}, apptest.QueryOpts{})
sut.ForceFlush(t)
f(sut, &opts{
query: `{label="foo"}[120ms]`,
@@ -99,7 +99,7 @@ func TestSingleIngestionWithRelabeling(t *testing.T) {
"ingestion_protocol": "importprometheus",
},
},
wantSamples: []*at.Sample{
wantSamples: []*apptest.Sample{
{Timestamp: 1707123456700, Value: 10},
},
})
@@ -109,7 +109,7 @@ func TestSingleIngestionWithRelabeling(t *testing.T) {
sut.InfluxWrite(t, []string{
`influxline,label=foo1 series1=10,series2=30 1707123456700`, // 2024-02-05T08:57:36.700Z
`must_drop,label=foo1 series1=20,series2=40 1707123456800`, // 2024-02-05T08:57:36.800Z
}, at.QueryOpts{})
}, apptest.QueryOpts{})
sut.ForceFlush(t)
f(sut, &opts{
query: `{label="foo1"}[120ms]`,
@@ -133,15 +133,15 @@ func TestSingleIngestionWithRelabeling(t *testing.T) {
"label4": "value4",
"ingestion_protocol": "influxline"},
},
wantSamples: []*at.Sample{
wantSamples: []*apptest.Sample{
{Timestamp: 1707123456700, Value: 10},
{Timestamp: 1707123456700, Value: 30},
},
})
pbData := []pb.TimeSeries{
pbData := []prompbmarshal.TimeSeries{
{
Labels: []pb.Label{
Labels: []prompbmarshal.Label{
{
Name: "__name__",
Value: "prometheusrw_series",
@@ -151,7 +151,7 @@ func TestSingleIngestionWithRelabeling(t *testing.T) {
Value: "foo2",
},
},
Samples: []pb.Sample{
Samples: []prompbmarshal.Sample{
{
Value: 10,
Timestamp: 1707123456700, // 2024-02-05T08:57:36.700Z
@@ -160,7 +160,7 @@ func TestSingleIngestionWithRelabeling(t *testing.T) {
},
},
{
Labels: []pb.Label{
Labels: []prompbmarshal.Label{
{
Name: "__name__",
Value: "must_drop_series",
@@ -170,7 +170,7 @@ func TestSingleIngestionWithRelabeling(t *testing.T) {
Value: "foo2",
},
},
Samples: []pb.Sample{
Samples: []prompbmarshal.Sample{
{
Value: 20,
Timestamp: 1707123456800, // 2024-02-05T08:57:36.800Z
@@ -178,7 +178,7 @@ func TestSingleIngestionWithRelabeling(t *testing.T) {
},
},
}
sut.PrometheusAPIV1Write(t, pbData, at.QueryOpts{})
sut.PrometheusAPIV1Write(t, pbData, apptest.QueryOpts{})
sut.ForceFlush(t)
f(sut, &opts{
query: `{label="foo2"}[120ms]`,
@@ -194,7 +194,7 @@ func TestSingleIngestionWithRelabeling(t *testing.T) {
"ingestion_protocol": "prometheusrw",
},
},
wantSamples: []*at.Sample{
wantSamples: []*apptest.Sample{
{Timestamp: 1707123456700, Value: 10}, // 2024-02-05T08:57:36.700Z
},
})

View File

@@ -7,28 +7,29 @@ import (
"testing"
"time"
at "github.com/VictoriaMetrics/VictoriaMetrics/apptest"
"github.com/google/go-cmp/cmp"
"github.com/google/go-cmp/cmp/cmpopts"
"github.com/VictoriaMetrics/VictoriaMetrics/apptest"
)
type clusterWithReplication struct {
vmstorages []*at.Vmstorage
vminsert *at.Vminsert
vmselect *at.Vmselect
vmselectDedup *at.Vmselect
vmselectRF *at.Vmselect
vmselectRFSkip *at.Vmselect
vmstorages []*apptest.Vmstorage
vminsert *apptest.Vminsert
vmselect *apptest.Vmselect
vmselectDedup *apptest.Vmselect
vmselectRF *apptest.Vmselect
vmselectRFSkip *apptest.Vmselect
}
func newClusterWithReplication(tc *at.TestCase, replicationFactor int) *clusterWithReplication {
func newClusterWithReplication(tc *apptest.TestCase, replicationFactor int) *clusterWithReplication {
tc.T().Helper()
c := &clusterWithReplication{}
vmstorageCount := 2*replicationFactor + 1
c.vmstorages = make([]*at.Vmstorage, vmstorageCount)
c.vmstorages = make([]*apptest.Vmstorage, vmstorageCount)
vminsertAddrs := make([]string, vmstorageCount)
vmselectAddrs := make([]string, vmstorageCount)
for i := range vmstorageCount {
@@ -80,7 +81,7 @@ func newClusterWithReplication(tc *at.TestCase, replicationFactor int) *clusterW
//
// See: https://docs.victoriametrics.com/victoriametrics/cluster-victoriametrics/#replication-and-data-safety
func TestClusterReplication_DataIsWrittenSeveralTimes(t *testing.T) {
tc := at.NewTestCase(t)
tc := apptest.NewTestCase(t)
defer tc.Stop()
const replicationFactor = 2
@@ -93,13 +94,13 @@ func TestClusterReplication_DataIsWrittenSeveralTimes(t *testing.T) {
for i := range numRecs {
recs[i] = fmt.Sprintf("metric_%d %d", i, rand.IntN(1000))
}
c.vminsert.PrometheusAPIV1ImportPrometheus(t, recs, at.QueryOpts{})
c.vminsert.PrometheusAPIV1ImportPrometheus(t, recs, apptest.QueryOpts{})
tc.ForceFlush(c.vmstorages...)
// Verify that each storage node has metrics and that total metric count across
// all vmstorages is replicationFactor*numRecs.
getMetricsReadTotal := func(app *at.Vmstorage) int {
getMetricsReadTotal := func(app *apptest.Vmstorage) int {
t.Helper()
got := app.GetIntMetric(t, "vm_vminsert_metrics_read_total")
if got <= 0 {
@@ -129,7 +130,7 @@ func TestClusterReplication_DataIsWrittenSeveralTimes(t *testing.T) {
//
// See: https://docs.victoriametrics.com/victoriametrics/cluster-victoriametrics/#replication-and-data-safety
func TestClusterReplication_Deduplication(t *testing.T) {
tc := at.NewTestCase(t)
tc := apptest.NewTestCase(t)
defer tc.Stop()
const replicationFactor = 2
@@ -150,7 +151,7 @@ func TestClusterReplication_Deduplication(t *testing.T) {
ts = ts.Add(1 * time.Minute)
}
}
c.vminsert.PrometheusAPIV1ImportPrometheus(t, recs, at.QueryOpts{})
c.vminsert.PrometheusAPIV1ImportPrometheus(t, recs, apptest.QueryOpts{})
tc.ForceFlush(c.vmstorages...)
// Check /api/v1/series response.
@@ -158,17 +159,17 @@ func TestClusterReplication_Deduplication(t *testing.T) {
// vmselect is expected to return no duplicates regardless whether
// -dedup.minScrapeInterval is set or not.
assertSeries := func(app *at.Vmselect) {
assertSeries := func(app *apptest.Vmselect) {
t.Helper()
tc.Assert(&at.AssertOptions{
tc.Assert(&apptest.AssertOptions{
Msg: "unexpected /api/v1/series response",
Got: func() any {
return app.PrometheusAPIV1Series(t, `{__name__=~".*"}`, at.QueryOpts{
return app.PrometheusAPIV1Series(t, `{__name__=~".*"}`, apptest.QueryOpts{
Start: "2024-01-01T00:00:00Z",
End: "2024-01-31T00:00:00Z",
}).Sort()
},
Want: &at.PrometheusAPIV1SeriesResponse{
Want: &apptest.PrometheusAPIV1SeriesResponse{
Status: "success",
IsPartial: false,
Data: []map[string]string{
@@ -188,24 +189,24 @@ func TestClusterReplication_Deduplication(t *testing.T) {
// For queries that do not return range vector, vmselect returns no
// duplicates regardless whether -dedup.minScrapeInterval is set or not.
assertQuery := func(app *at.Vmselect) {
assertQuery := func(app *apptest.Vmselect) {
t.Helper()
tc.Assert(&at.AssertOptions{
tc.Assert(&apptest.AssertOptions{
Msg: "unexpected /api/v1/query response",
Got: func() any {
return app.PrometheusAPIV1Query(t, "metric_1", at.QueryOpts{
return app.PrometheusAPIV1Query(t, "metric_1", apptest.QueryOpts{
Time: "2024-01-01T00:05:00Z",
Step: "5m",
})
},
Want: &at.PrometheusAPIV1QueryResponse{
Want: &apptest.PrometheusAPIV1QueryResponse{
Status: "success",
Data: &at.QueryData{
Data: &apptest.QueryData{
ResultType: "vector",
Result: []*at.QueryResult{
Result: []*apptest.QueryResult{
{
Metric: map[string]string{"__name__": "metric_1"},
Sample: at.NewSample(t, "2024-01-01T00:05:00Z", 5),
Sample: apptest.NewSample(t, "2024-01-01T00:05:00Z", 5),
},
},
},
@@ -220,8 +221,8 @@ func TestClusterReplication_Deduplication(t *testing.T) {
// For queries that return range vector, vmselect is expected to
// return duplicates when -dedup.minScrapeInterval is not set.
duplicateNTimes := func(n int, samples []*at.Sample) []*at.Sample {
dupedSamples := make([]*at.Sample, len(samples)*n)
duplicateNTimes := func(n int, samples []*apptest.Sample) []*apptest.Sample {
dupedSamples := make([]*apptest.Sample, len(samples)*n)
for i, s := range samples {
for j := range n {
dupedSamples[n*i+j] = s
@@ -230,29 +231,29 @@ func TestClusterReplication_Deduplication(t *testing.T) {
return dupedSamples
}
assertQueryRangeVector := func(app *at.Vmselect, wantDuplicates int) {
assertQueryRangeVector := func(app *apptest.Vmselect, wantDuplicates int) {
t.Helper()
tc.Assert(&at.AssertOptions{
tc.Assert(&apptest.AssertOptions{
Msg: "unexpected /api/v1/query response",
Got: func() any {
return app.PrometheusAPIV1Query(t, "metric_1[5m]", at.QueryOpts{
return app.PrometheusAPIV1Query(t, "metric_1[5m]", apptest.QueryOpts{
Time: "2024-01-01T00:05:00Z",
Step: "5m",
})
},
Want: &at.PrometheusAPIV1QueryResponse{
Want: &apptest.PrometheusAPIV1QueryResponse{
Status: "success",
Data: &at.QueryData{
Data: &apptest.QueryData{
ResultType: "matrix",
Result: []*at.QueryResult{
Result: []*apptest.QueryResult{
{
Metric: map[string]string{"__name__": "metric_1"},
Samples: duplicateNTimes(wantDuplicates, []*at.Sample{
at.NewSample(t, "2024-01-01T00:01:00Z", 1),
at.NewSample(t, "2024-01-01T00:02:00Z", 2),
at.NewSample(t, "2024-01-01T00:03:00Z", 3),
at.NewSample(t, "2024-01-01T00:04:00Z", 4),
at.NewSample(t, "2024-01-01T00:05:00Z", 5),
Samples: duplicateNTimes(wantDuplicates, []*apptest.Sample{
apptest.NewSample(t, "2024-01-01T00:01:00Z", 1),
apptest.NewSample(t, "2024-01-01T00:02:00Z", 2),
apptest.NewSample(t, "2024-01-01T00:03:00Z", 3),
apptest.NewSample(t, "2024-01-01T00:04:00Z", 4),
apptest.NewSample(t, "2024-01-01T00:05:00Z", 5),
}),
},
},
@@ -268,27 +269,27 @@ func TestClusterReplication_Deduplication(t *testing.T) {
// For range queries, vmselect is expected to return no duplicates
// regardless whether -dedup.minScrapeInterval is set or not.
assertQueryRange := func(app *at.Vmselect) {
tc.Assert(&at.AssertOptions{
assertQueryRange := func(app *apptest.Vmselect) {
tc.Assert(&apptest.AssertOptions{
Msg: "unexpected /api/v1/query_range response",
Got: func() any {
return app.PrometheusAPIV1QueryRange(t, "metric_1", at.QueryOpts{
return app.PrometheusAPIV1QueryRange(t, "metric_1", apptest.QueryOpts{
Start: "2024-01-01T00:00:00Z",
End: "2024-01-01T00:10:00Z",
Step: "5m",
})
},
Want: &at.PrometheusAPIV1QueryResponse{
Want: &apptest.PrometheusAPIV1QueryResponse{
Status: "success",
Data: &at.QueryData{
Data: &apptest.QueryData{
ResultType: "matrix",
Result: []*at.QueryResult{
Result: []*apptest.QueryResult{
{
Metric: map[string]string{"__name__": "metric_1"},
Samples: []*at.Sample{
at.NewSample(t, "2024-01-01T00:00:00Z", 0),
at.NewSample(t, "2024-01-01T00:05:00Z", 5),
at.NewSample(t, "2024-01-01T00:10:00Z", 10),
Samples: []*apptest.Sample{
apptest.NewSample(t, "2024-01-01T00:00:00Z", 0),
apptest.NewSample(t, "2024-01-01T00:05:00Z", 5),
apptest.NewSample(t, "2024-01-01T00:10:00Z", 10),
},
},
},
@@ -304,27 +305,27 @@ func TestClusterReplication_Deduplication(t *testing.T) {
// // vmselect is expected to return duplicates when
// -dedup.minScrapeInterval is not set.
assertExport := func(app *at.Vmselect, wantDuplicates int) {
tc.Assert(&at.AssertOptions{
assertExport := func(app *apptest.Vmselect, wantDuplicates int) {
tc.Assert(&apptest.AssertOptions{
Msg: "unexpected /api/v1/export response",
Got: func() any {
return app.PrometheusAPIV1Export(t, `{__name__="metric_1"}`, at.QueryOpts{
return app.PrometheusAPIV1Export(t, `{__name__="metric_1"}`, apptest.QueryOpts{
Start: "2024-01-01T00:00:00Z",
End: "2024-01-01T00:03:00Z",
})
},
Want: &at.PrometheusAPIV1QueryResponse{
Want: &apptest.PrometheusAPIV1QueryResponse{
Status: "success",
Data: &at.QueryData{
Data: &apptest.QueryData{
ResultType: "matrix",
Result: []*at.QueryResult{
Result: []*apptest.QueryResult{
{
Metric: map[string]string{"__name__": "metric_1"},
Samples: duplicateNTimes(wantDuplicates, []*at.Sample{
at.NewSample(t, "2024-01-01T00:00:00Z", 0),
at.NewSample(t, "2024-01-01T00:01:00Z", 1),
at.NewSample(t, "2024-01-01T00:02:00Z", 2),
at.NewSample(t, "2024-01-01T00:03:00Z", 3),
Samples: duplicateNTimes(wantDuplicates, []*apptest.Sample{
apptest.NewSample(t, "2024-01-01T00:00:00Z", 0),
apptest.NewSample(t, "2024-01-01T00:01:00Z", 1),
apptest.NewSample(t, "2024-01-01T00:02:00Z", 2),
apptest.NewSample(t, "2024-01-01T00:03:00Z", 3),
}),
},
},
@@ -346,7 +347,7 @@ func TestClusterReplication_Deduplication(t *testing.T) {
//
// See: https://docs.victoriametrics.com/victoriametrics/cluster-victoriametrics/#replication-and-data-safety
func TestClusterReplication_PartialResponse(t *testing.T) {
tc := at.NewTestCase(t)
tc := apptest.NewTestCase(t)
defer tc.Stop()
const replicationFactor = 2
@@ -359,24 +360,24 @@ func TestClusterReplication_PartialResponse(t *testing.T) {
for i := range numRecs {
recs[i] = fmt.Sprintf("metric_%d %d", i, rand.IntN(1000))
}
c.vminsert.PrometheusAPIV1ImportPrometheus(t, recs, at.QueryOpts{})
c.vminsert.PrometheusAPIV1ImportPrometheus(t, recs, apptest.QueryOpts{})
tc.ForceFlush(c.vmstorages...)
// Verify partial vs full response.
assertSeries := func(app *at.Vmselect, wantPartial bool) {
assertSeries := func(app *apptest.Vmselect, wantPartial bool) {
t.Helper()
tc.Assert(&at.AssertOptions{
tc.Assert(&apptest.AssertOptions{
Msg: "unexpected /api/v1/series response",
Got: func() any {
return app.PrometheusAPIV1Series(t, `{__name__=~".*"}`, at.QueryOpts{}).Sort()
return app.PrometheusAPIV1Series(t, `{__name__=~".*"}`, apptest.QueryOpts{}).Sort()
},
Want: &at.PrometheusAPIV1SeriesResponse{
Want: &apptest.PrometheusAPIV1SeriesResponse{
Status: "success",
IsPartial: wantPartial,
},
CmpOpts: []cmp.Option{
cmpopts.IgnoreFields(at.PrometheusAPIV1SeriesResponse{}, "Data"),
cmpopts.IgnoreFields(apptest.PrometheusAPIV1SeriesResponse{}, "Data"),
},
})
}
@@ -426,7 +427,7 @@ func TestClusterReplication_PartialResponse(t *testing.T) {
//
// See: https://docs.victoriametrics.com/victoriametrics/cluster-victoriametrics/#replication-and-data-safety
func TestClusterReplication_SkipSlowReplicas(t *testing.T) {
tc := at.NewTestCase(t)
tc := apptest.NewTestCase(t)
defer tc.Stop()
const replicationFactor = 2
@@ -436,7 +437,7 @@ func TestClusterReplication_SkipSlowReplicas(t *testing.T) {
const numRecs = 1000
recs := make([]string, numRecs)
wantSeries := &at.PrometheusAPIV1SeriesResponse{
wantSeries := &apptest.PrometheusAPIV1SeriesResponse{
Status: "success",
Data: make([]map[string]string, numRecs),
}
@@ -446,23 +447,23 @@ func TestClusterReplication_SkipSlowReplicas(t *testing.T) {
wantSeries.Data[i] = map[string]string{"__name__": name}
}
wantSeries.Sort()
c.vminsert.PrometheusAPIV1ImportPrometheus(t, recs, at.QueryOpts{})
c.vminsert.PrometheusAPIV1ImportPrometheus(t, recs, apptest.QueryOpts{})
tc.ForceFlush(c.vmstorages...)
// Verify skipping slow replicas by counting the number of skipSlowReplicas
// messages in request trace.
assertSeries := func(app *at.Vmselect, want int) {
assertSeries := func(app *apptest.Vmselect, want int) {
t.Helper()
tc.Assert(&at.AssertOptions{
tc.Assert(&apptest.AssertOptions{
Msg: "unexpected /api/v1/series response",
Got: func() any {
return app.PrometheusAPIV1Series(t, `{__name__=~".*"}`, at.QueryOpts{}).Sort()
return app.PrometheusAPIV1Series(t, `{__name__=~".*"}`, apptest.QueryOpts{}).Sort()
},
Want: wantSeries,
})
res := app.PrometheusAPIV1Series(t, `{__name__=~".*"}`, at.QueryOpts{Trace: "1"})
res := app.PrometheusAPIV1Series(t, `{__name__=~".*"}`, apptest.QueryOpts{Trace: "1"})
got := res.Trace.Contains("cancel request because -search.skipSlowReplicas is set and every group returned the needed number of responses according to replicationFactor")
if got != want {
t.Errorf("unexpected number of skipSlowReplicas messages in request trace: got %d, want %d (full trace:\n%v)", got, want, res.Trace)
@@ -474,11 +475,11 @@ func TestClusterReplication_SkipSlowReplicas(t *testing.T) {
}
type storageGroup struct {
vmstorages []*at.Vmstorage
vminsert *at.Vminsert
vmstorages []*apptest.Vmstorage
vminsert *apptest.Vminsert
}
func (g *storageGroup) stopNodes(tc *at.TestCase, n int) {
func (g *storageGroup) stopNodes(tc *apptest.TestCase, n int) {
if n > len(g.vmstorages) {
n = len(g.vmstorages)
}
@@ -490,24 +491,24 @@ func (g *storageGroup) stopNodes(tc *at.TestCase, n int) {
type clusterWithGroupReplication struct {
storageGroups []*storageGroup
vminsert *at.Vminsert
vmselect *at.Vmselect
vmselectDedup *at.Vmselect
vmselectGroupRF *at.Vmselect
vmselectGlobalRF *at.Vmselect
vmselectGroupGlobalRF *at.Vmselect
vmselectGroupRFSkip *at.Vmselect
vmselectGlobalRFSkip *at.Vmselect
vmselectGroupGlobalRFSkip *at.Vmselect
vminsert *apptest.Vminsert
vmselect *apptest.Vmselect
vmselectDedup *apptest.Vmselect
vmselectGroupRF *apptest.Vmselect
vmselectGlobalRF *apptest.Vmselect
vmselectGroupGlobalRF *apptest.Vmselect
vmselectGroupRFSkip *apptest.Vmselect
vmselectGlobalRFSkip *apptest.Vmselect
vmselectGroupGlobalRFSkip *apptest.Vmselect
}
func (c *clusterWithGroupReplication) forceFlush(tc *at.TestCase) {
func (c *clusterWithGroupReplication) forceFlush(tc *apptest.TestCase) {
for _, g := range c.storageGroups {
tc.ForceFlush(g.vmstorages...)
}
}
func newClusterWithGroupReplication(tc *at.TestCase, groupRFs []int, globalRF int) *clusterWithGroupReplication {
func newClusterWithGroupReplication(tc *apptest.TestCase, groupRFs []int, globalRF int) *clusterWithGroupReplication {
tc.T().Helper()
if len(groupRFs) < 1 {
@@ -530,7 +531,7 @@ func newClusterWithGroupReplication(tc *at.TestCase, groupRFs []int, globalRF in
groupName := fmt.Sprintf("group%d", g)
vmstorageCount := 2*rf + 1
c.storageGroups[g] = &storageGroup{
vmstorages: make([]*at.Vmstorage, vmstorageCount),
vmstorages: make([]*apptest.Vmstorage, vmstorageCount),
}
groupVminsertAddrs := make([]string, vmstorageCount)
groupVmselectAddrs := make([]string, vmstorageCount)
@@ -625,7 +626,7 @@ func newClusterWithGroupReplication(tc *at.TestCase, groupRFs []int, globalRF in
// See: https://docs.victoriametrics.com/victoriametrics/cluster-victoriametrics/#vmstorage-groups-at-vmselect
// and https://docs.victoriametrics.com/victoriametrics/cluster-victoriametrics/#replication-and-data-safety
func TestClusterGroupReplication(t *testing.T) {
tc := at.NewTestCase(t)
tc := apptest.NewTestCase(t)
defer tc.Stop()
// Feel free to change globalRF and groupRF,
@@ -653,7 +654,7 @@ func TestClusterGroupReplication(t *testing.T) {
numRecs = numMetrics * numSamples
)
var recs []string
wantSeries := &at.PrometheusAPIV1SeriesResponse{
wantSeries := &apptest.PrometheusAPIV1SeriesResponse{
Status: "success",
Data: make([]map[string]string, numMetrics),
}
@@ -667,7 +668,7 @@ func TestClusterGroupReplication(t *testing.T) {
}
}
wantSeries.Sort()
c.vminsert.PrometheusAPIV1ImportPrometheus(t, recs, at.QueryOpts{})
c.vminsert.PrometheusAPIV1ImportPrometheus(t, recs, apptest.QueryOpts{})
c.forceFlush(tc)
opts := &testGroupReplicationOpts{
@@ -693,15 +694,15 @@ type testGroupReplicationOpts struct {
numGroups int
numNodes int
numRecs int
wantSeries *at.PrometheusAPIV1SeriesResponse
wantSeries *apptest.PrometheusAPIV1SeriesResponse
}
// testGroupDataIsWrittenSeveralTimes checks that multiple
// copies of data is stored within the custer when the replication is enabled.
func testGroupDataIsWrittenSeveralTimes(tc *at.TestCase, opts *testGroupReplicationOpts) {
func testGroupDataIsWrittenSeveralTimes(tc *apptest.TestCase, opts *testGroupReplicationOpts) {
t := tc.T()
getMetricsReadTotal := func(app *at.Vmstorage) int {
getMetricsReadTotal := func(app *apptest.Vmstorage) int {
t.Helper()
got := app.GetIntMetric(t, "vm_vminsert_metrics_read_total")
if got <= 0 {
@@ -733,7 +734,7 @@ func testGroupDataIsWrittenSeveralTimes(tc *at.TestCase, opts *testGroupReplicat
// Most of the API endpoints remove duplicates by default. However, some API
// endpoints will return duplicates unless -dedup.minScrapeInterval flag is set.
// See mergeSortBlocks() in app/vmselect/netstorage/netstorage.go.
func testGroupDeduplication(tc *at.TestCase, opts *testGroupReplicationOpts) {
func testGroupDeduplication(tc *apptest.TestCase, opts *testGroupReplicationOpts) {
t := tc.T()
// Check /api/v1/series response.
@@ -741,12 +742,12 @@ func testGroupDeduplication(tc *at.TestCase, opts *testGroupReplicationOpts) {
// vmselect is expected to return no duplicates regardless whether
// -dedup.minScrapeInterval is set or not.
assertSeries := func(app *at.Vmselect) {
assertSeries := func(app *apptest.Vmselect) {
t.Helper()
tc.Assert(&at.AssertOptions{
tc.Assert(&apptest.AssertOptions{
Msg: "unexpected /api/v1/series response",
Got: func() any {
return app.PrometheusAPIV1Series(t, `{__name__=~".*"}`, at.QueryOpts{
return app.PrometheusAPIV1Series(t, `{__name__=~".*"}`, apptest.QueryOpts{
Start: "2024-01-01T00:00:00Z",
End: "2024-01-31T00:00:00Z",
}).Sort()
@@ -762,24 +763,24 @@ func testGroupDeduplication(tc *at.TestCase, opts *testGroupReplicationOpts) {
// For queries that do not return range vector, vmselect returns no
// duplicates regardless whether -dedup.minScrapeInterval is set or not.
assertQuery := func(app *at.Vmselect) {
assertQuery := func(app *apptest.Vmselect) {
t.Helper()
tc.Assert(&at.AssertOptions{
tc.Assert(&apptest.AssertOptions{
Msg: "unexpected /api/v1/query response",
Got: func() any {
return app.PrometheusAPIV1Query(t, "metric_1", at.QueryOpts{
return app.PrometheusAPIV1Query(t, "metric_1", apptest.QueryOpts{
Time: "2024-01-01T00:05:00Z",
Step: "5m",
})
},
Want: &at.PrometheusAPIV1QueryResponse{
Want: &apptest.PrometheusAPIV1QueryResponse{
Status: "success",
Data: &at.QueryData{
Data: &apptest.QueryData{
ResultType: "vector",
Result: []*at.QueryResult{
Result: []*apptest.QueryResult{
{
Metric: map[string]string{"__name__": "metric_1"},
Sample: at.NewSample(t, "2024-01-01T00:05:00Z", 5),
Sample: apptest.NewSample(t, "2024-01-01T00:05:00Z", 5),
},
},
},
@@ -794,8 +795,8 @@ func testGroupDeduplication(tc *at.TestCase, opts *testGroupReplicationOpts) {
// For queries that return range vector, vmselect is expected to
// return duplicates when -dedup.minScrapeInterval is not set.
duplicateNTimes := func(n int, samples []*at.Sample) []*at.Sample {
dupedSamples := make([]*at.Sample, len(samples)*n)
duplicateNTimes := func(n int, samples []*apptest.Sample) []*apptest.Sample {
dupedSamples := make([]*apptest.Sample, len(samples)*n)
for i, s := range samples {
for j := range n {
dupedSamples[n*i+j] = s
@@ -804,29 +805,29 @@ func testGroupDeduplication(tc *at.TestCase, opts *testGroupReplicationOpts) {
return dupedSamples
}
assertQueryRangeVector := func(app *at.Vmselect, wantDuplicates int) {
assertQueryRangeVector := func(app *apptest.Vmselect, wantDuplicates int) {
t.Helper()
tc.Assert(&at.AssertOptions{
tc.Assert(&apptest.AssertOptions{
Msg: "unexpected /api/v1/query response",
Got: func() any {
return app.PrometheusAPIV1Query(t, "metric_1[5m]", at.QueryOpts{
return app.PrometheusAPIV1Query(t, "metric_1[5m]", apptest.QueryOpts{
Time: "2024-01-01T00:05:00Z",
Step: "5m",
})
},
Want: &at.PrometheusAPIV1QueryResponse{
Want: &apptest.PrometheusAPIV1QueryResponse{
Status: "success",
Data: &at.QueryData{
Data: &apptest.QueryData{
ResultType: "matrix",
Result: []*at.QueryResult{
Result: []*apptest.QueryResult{
{
Metric: map[string]string{"__name__": "metric_1"},
Samples: duplicateNTimes(wantDuplicates, []*at.Sample{
at.NewSample(t, "2024-01-01T00:01:00Z", 1),
at.NewSample(t, "2024-01-01T00:02:00Z", 2),
at.NewSample(t, "2024-01-01T00:03:00Z", 3),
at.NewSample(t, "2024-01-01T00:04:00Z", 4),
at.NewSample(t, "2024-01-01T00:05:00Z", 5),
Samples: duplicateNTimes(wantDuplicates, []*apptest.Sample{
apptest.NewSample(t, "2024-01-01T00:01:00Z", 1),
apptest.NewSample(t, "2024-01-01T00:02:00Z", 2),
apptest.NewSample(t, "2024-01-01T00:03:00Z", 3),
apptest.NewSample(t, "2024-01-01T00:04:00Z", 4),
apptest.NewSample(t, "2024-01-01T00:05:00Z", 5),
}),
},
},
@@ -842,27 +843,27 @@ func testGroupDeduplication(tc *at.TestCase, opts *testGroupReplicationOpts) {
// For range queries, vmselect is expected to return no duplicates
// regardless whether -dedup.minScrapeInterval is set or not.
assertQueryRange := func(app *at.Vmselect) {
tc.Assert(&at.AssertOptions{
assertQueryRange := func(app *apptest.Vmselect) {
tc.Assert(&apptest.AssertOptions{
Msg: "unexpected /api/v1/query_range response",
Got: func() any {
return app.PrometheusAPIV1QueryRange(t, "metric_1", at.QueryOpts{
return app.PrometheusAPIV1QueryRange(t, "metric_1", apptest.QueryOpts{
Start: "2024-01-01T00:00:00Z",
End: "2024-01-01T00:10:00Z",
Step: "5m",
})
},
Want: &at.PrometheusAPIV1QueryResponse{
Want: &apptest.PrometheusAPIV1QueryResponse{
Status: "success",
Data: &at.QueryData{
Data: &apptest.QueryData{
ResultType: "matrix",
Result: []*at.QueryResult{
Result: []*apptest.QueryResult{
{
Metric: map[string]string{"__name__": "metric_1"},
Samples: []*at.Sample{
at.NewSample(t, "2024-01-01T00:00:00Z", 0),
at.NewSample(t, "2024-01-01T00:05:00Z", 5),
at.NewSample(t, "2024-01-01T00:10:00Z", 10),
Samples: []*apptest.Sample{
apptest.NewSample(t, "2024-01-01T00:00:00Z", 0),
apptest.NewSample(t, "2024-01-01T00:05:00Z", 5),
apptest.NewSample(t, "2024-01-01T00:10:00Z", 10),
},
},
},
@@ -878,27 +879,27 @@ func testGroupDeduplication(tc *at.TestCase, opts *testGroupReplicationOpts) {
// // vmselect is expected to return duplicates when
// -dedup.minScrapeInterval is not set.
assertExport := func(app *at.Vmselect, wantDuplicates int) {
tc.Assert(&at.AssertOptions{
assertExport := func(app *apptest.Vmselect, wantDuplicates int) {
tc.Assert(&apptest.AssertOptions{
Msg: "unexpected /api/v1/export response",
Got: func() any {
return app.PrometheusAPIV1Export(t, `{__name__="metric_1"}`, at.QueryOpts{
return app.PrometheusAPIV1Export(t, `{__name__="metric_1"}`, apptest.QueryOpts{
Start: "2024-01-01T00:00:00Z",
End: "2024-01-01T00:03:00Z",
})
},
Want: &at.PrometheusAPIV1QueryResponse{
Want: &apptest.PrometheusAPIV1QueryResponse{
Status: "success",
Data: &at.QueryData{
Data: &apptest.QueryData{
ResultType: "matrix",
Result: []*at.QueryResult{
Result: []*apptest.QueryResult{
{
Metric: map[string]string{"__name__": "metric_1"},
Samples: duplicateNTimes(wantDuplicates, []*at.Sample{
at.NewSample(t, "2024-01-01T00:00:00Z", 0),
at.NewSample(t, "2024-01-01T00:01:00Z", 1),
at.NewSample(t, "2024-01-01T00:02:00Z", 2),
at.NewSample(t, "2024-01-01T00:03:00Z", 3),
Samples: duplicateNTimes(wantDuplicates, []*apptest.Sample{
apptest.NewSample(t, "2024-01-01T00:00:00Z", 0),
apptest.NewSample(t, "2024-01-01T00:01:00Z", 1),
apptest.NewSample(t, "2024-01-01T00:02:00Z", 2),
apptest.NewSample(t, "2024-01-01T00:03:00Z", 3),
}),
},
},
@@ -918,17 +919,17 @@ func testGroupDeduplication(tc *at.TestCase, opts *testGroupReplicationOpts) {
// -replicationFactor and -globalReplicationFactor flags) it will still wait for
// results from all the vmstorage nodes. A vmselect can be configured to skip
// slow replicas using -search.skipSlowReplicas flag.
func testGroupSkipSlowReplicas(tc *at.TestCase, opts *testGroupReplicationOpts) {
func testGroupSkipSlowReplicas(tc *apptest.TestCase, opts *testGroupReplicationOpts) {
t := tc.T()
assertSeries := func(app *at.Vmselect, wantMin, wantMax int) {
assertSeries := func(app *apptest.Vmselect, wantMin, wantMax int) {
t.Helper()
// Ensure that the response contains full dataset.
tc.Assert(&at.AssertOptions{
tc.Assert(&apptest.AssertOptions{
Msg: "unexpected /api/v1/series response",
Got: func() any {
return app.PrometheusAPIV1Series(t, `{__name__=~".*"}`, at.QueryOpts{
return app.PrometheusAPIV1Series(t, `{__name__=~".*"}`, apptest.QueryOpts{
Start: "2024-01-01T00:00:00Z",
End: "2024-01-31T00:00:00Z",
}).Sort()
@@ -936,7 +937,7 @@ func testGroupSkipSlowReplicas(tc *at.TestCase, opts *testGroupReplicationOpts)
Want: opts.wantSeries,
})
res := app.PrometheusAPIV1Series(t, `{__name__=~".*"}`, at.QueryOpts{Trace: "1"})
res := app.PrometheusAPIV1Series(t, `{__name__=~".*"}`, apptest.QueryOpts{Trace: "1"})
got := res.Trace.Contains("cancel request because -search.skipSlowReplicas is set and every group returned the needed number of responses according to replicationFactor")
if got < wantMin || got > wantMax {
t.Errorf("unexpected number of skipSlowReplicas messages in request trace: got %d, %d <= want <= %d (full trace:\n%v)", got, wantMin, wantMax, res.Trace)
@@ -1011,25 +1012,25 @@ func testGroupSkipSlowReplicas(tc *at.TestCase, opts *testGroupReplicationOpts)
// passing -replicationFactor=N and -globalReplicationFactor command-line flag
// to vmselect instructs it to not mark responses as partial even if less
// vmstorage nodes are unavailable during the query.
func testGroupPartialResponse(tc *at.TestCase, opts *testGroupReplicationOpts) {
func testGroupPartialResponse(tc *apptest.TestCase, opts *testGroupReplicationOpts) {
t := tc.T()
assertSeries := func(app *at.Vmselect, wantPartial bool) {
assertSeries := func(app *apptest.Vmselect, wantPartial bool) {
t.Helper()
tc.Assert(&at.AssertOptions{
tc.Assert(&apptest.AssertOptions{
Msg: "unexpected /api/v1/series response",
Got: func() any {
return app.PrometheusAPIV1Series(t, `{__name__=~".*"}`, at.QueryOpts{
return app.PrometheusAPIV1Series(t, `{__name__=~".*"}`, apptest.QueryOpts{
Start: "2024-01-01T00:00:00Z",
End: "2024-01-31T00:00:00Z",
}).Sort()
},
Want: &at.PrometheusAPIV1SeriesResponse{
Want: &apptest.PrometheusAPIV1SeriesResponse{
Status: "success",
IsPartial: wantPartial,
},
CmpOpts: []cmp.Option{
cmpopts.IgnoreFields(at.PrometheusAPIV1SeriesResponse{}, "Data"),
cmpopts.IgnoreFields(apptest.PrometheusAPIV1SeriesResponse{}, "Data"),
},
})
}
@@ -1113,7 +1114,7 @@ func testGroupPartialResponse(tc *at.TestCase, opts *testGroupReplicationOpts) {
//
// See: https://docs.victoriametrics.com/victoriametrics/cluster-victoriametrics/#replication-and-data-safety
func TestClusterReplication_PartialResponseMultitenant(t *testing.T) {
tc := at.NewTestCase(t)
tc := apptest.NewTestCase(t)
defer tc.Stop()
const replicationFactor = 2
@@ -1127,30 +1128,30 @@ func TestClusterReplication_PartialResponseMultitenant(t *testing.T) {
recs[i] = fmt.Sprintf("metric_%d %d", i, rand.IntN(1000))
}
c.vminsert.PrometheusAPIV1ImportPrometheus(t, recs, at.QueryOpts{
c.vminsert.PrometheusAPIV1ImportPrometheus(t, recs, apptest.QueryOpts{
Tenant: "0",
})
c.vminsert.PrometheusAPIV1ImportPrometheus(t, recs, at.QueryOpts{
c.vminsert.PrometheusAPIV1ImportPrometheus(t, recs, apptest.QueryOpts{
Tenant: "1",
})
tc.ForceFlush(c.vmstorages...)
// Verify partial vs full response.
assertSeries := func(app *at.Vmselect, wantPartial bool) {
assertSeries := func(app *apptest.Vmselect, wantPartial bool) {
t.Helper()
tc.Assert(&at.AssertOptions{
tc.Assert(&apptest.AssertOptions{
Msg: "unexpected /api/v1/query response",
Got: func() any {
qo := at.QueryOpts{Tenant: "multitenant", Trace: "1"}
qo := apptest.QueryOpts{Tenant: "multitenant", Trace: "1"}
return app.PrometheusAPIV1Query(t, `{__name__=~"metric_.*"}`, qo)
},
Want: &at.PrometheusAPIV1QueryResponse{
Want: &apptest.PrometheusAPIV1QueryResponse{
Status: "success",
IsPartial: wantPartial,
},
CmpOpts: []cmp.Option{
cmpopts.IgnoreFields(at.PrometheusAPIV1QueryResponse{}, "Data"),
cmpopts.IgnoreFields(apptest.PrometheusAPIV1QueryResponse{}, "Data"),
},
})
}

View File

@@ -7,7 +7,7 @@ import (
"github.com/google/go-cmp/cmp"
at "github.com/VictoriaMetrics/VictoriaMetrics/apptest"
"github.com/VictoriaMetrics/VictoriaMetrics/apptest"
)
// snapshotNameRE covers years 1970-2099.
@@ -17,7 +17,7 @@ import (
var snapshotNameRE = regexp.MustCompile(`^(19[789]\d|20[0-9]{2})(0\d|1[0-2])([0-2]\d|3[01])([01]\d|2[0-3])[0-5]\d[0-5]\d-[0-9,A-F]{16}$`)
func TestSingleSnapshots_CreateListDelete(t *testing.T) {
tc := at.NewTestCase(t)
tc := apptest.NewTestCase(t)
defer tc.Stop()
sut := tc.MustStartDefaultVmsingle()
@@ -28,7 +28,7 @@ func TestSingleSnapshots_CreateListDelete(t *testing.T) {
for i := range numSamples {
samples[i] = fmt.Sprintf("metric_%03d %d", i, i)
}
sut.PrometheusAPIV1ImportPrometheus(t, samples, at.QueryOpts{})
sut.PrometheusAPIV1ImportPrometheus(t, samples, apptest.QueryOpts{})
sut.ForceFlush(t)
// Create several snapshots using VictoriaMetrics and Prometheus endpoints.
@@ -60,7 +60,7 @@ func TestSingleSnapshots_CreateListDelete(t *testing.T) {
assertSnapshotList := func(want []string) {
gotRes := sut.SnapshotList(t)
wantRes := &at.SnapshotListResponse{
wantRes := &apptest.SnapshotListResponse{
Status: "ok",
Snapshots: want,
}
@@ -72,7 +72,7 @@ func TestSingleSnapshots_CreateListDelete(t *testing.T) {
// Delete non-existent snapshot.
gotDeletedSnapshot := sut.SnapshotDelete(t, "does-not-exist")
wantDeletedSnapshot := &at.SnapshotDeleteResponse{
wantDeletedSnapshot := &apptest.SnapshotDeleteResponse{
Status: "error",
Msg: `cannot find snapshot "does-not-exist"`,
}
@@ -82,7 +82,7 @@ func TestSingleSnapshots_CreateListDelete(t *testing.T) {
// Delete the first snapshot.
gotDeletedSnapshot = sut.SnapshotDelete(t, snapshots[0])
wantDeletedSnapshot = &at.SnapshotDeleteResponse{
wantDeletedSnapshot = &apptest.SnapshotDeleteResponse{
Status: "ok",
}
if diff := cmp.Diff(wantDeletedSnapshot, gotDeletedSnapshot); diff != "" {
@@ -92,7 +92,7 @@ func TestSingleSnapshots_CreateListDelete(t *testing.T) {
// Delete the rest of the snapshots.
gotDeleteAllRes := sut.SnapshotDeleteAll(t)
wantDeleteAllRes := &at.SnapshotDeleteAllResponse{
wantDeleteAllRes := &apptest.SnapshotDeleteAllResponse{
Status: "ok",
}
if diff := cmp.Diff(wantDeleteAllRes, gotDeleteAllRes); diff != "" {
@@ -102,7 +102,7 @@ func TestSingleSnapshots_CreateListDelete(t *testing.T) {
}
func TestClusterSnapshots_CreateListDelete(t *testing.T) {
tc := at.NewTestCase(t)
tc := apptest.NewTestCase(t)
defer tc.Stop()
sut := tc.MustStartDefaultCluster()
@@ -113,7 +113,7 @@ func TestClusterSnapshots_CreateListDelete(t *testing.T) {
for i := range numSamples {
samples[i] = fmt.Sprintf("metric_%03d %d", i, i)
}
sut.PrometheusAPIV1ImportPrometheus(t, samples, at.QueryOpts{})
sut.PrometheusAPIV1ImportPrometheus(t, samples, apptest.QueryOpts{})
sut.ForceFlush(t)
// Create several snapshots for both vmstorage replicas using
@@ -142,7 +142,7 @@ func TestClusterSnapshots_CreateListDelete(t *testing.T) {
assertSnapshotList := func(i int, wantNames []string) {
t.Helper()
got := sut.Vmstorages[i].SnapshotList(t)
want := &at.SnapshotListResponse{
want := &apptest.SnapshotListResponse{
Status: "ok",
Snapshots: wantNames,
}
@@ -157,7 +157,7 @@ func TestClusterSnapshots_CreateListDelete(t *testing.T) {
assertDeleteNonExistent := func(i int) {
t.Helper()
got := sut.Vmstorages[i].SnapshotDelete(t, "does-not-exist")
want := &at.SnapshotDeleteResponse{
want := &apptest.SnapshotDeleteResponse{
Status: "error",
Msg: `cannot find snapshot "does-not-exist"`,
}
@@ -172,7 +172,7 @@ func TestClusterSnapshots_CreateListDelete(t *testing.T) {
deleteSnapshot := func(i int, snapshotName string) {
t.Helper()
got := sut.Vmstorages[i].SnapshotDelete(t, snapshotName)
want := &at.SnapshotDeleteResponse{
want := &apptest.SnapshotDeleteResponse{
Status: "ok",
}
if diff := cmp.Diff(want, got); diff != "" {
@@ -188,7 +188,7 @@ func TestClusterSnapshots_CreateListDelete(t *testing.T) {
deleteAllSnapshots := func(i int) {
t.Helper()
got := sut.Vmstorages[i].SnapshotDeleteAll(t)
want := &at.SnapshotDeleteAllResponse{
want := &apptest.SnapshotDeleteAllResponse{
Status: "ok",
}
if diff := cmp.Diff(want, got); diff != "" {

View File

@@ -7,7 +7,7 @@ import (
"github.com/google/go-cmp/cmp"
"github.com/google/go-cmp/cmp/cmpopts"
at "github.com/VictoriaMetrics/VictoriaMetrics/apptest"
"github.com/VictoriaMetrics/VictoriaMetrics/apptest"
"github.com/VictoriaMetrics/VictoriaMetrics/lib/fs"
)
@@ -16,7 +16,7 @@ import (
// Most of these cases are based on user feedback. Refer to the corresponding GitHub issue for details on each case.
func TestSingleSpecialQueryRegression(t *testing.T) {
fs.MustRemoveDir(t.Name())
tc := at.NewTestCase(t)
tc := apptest.NewTestCase(t)
defer tc.Stop()
sut := tc.MustStartDefaultVmsingle()
@@ -24,10 +24,10 @@ func TestSingleSpecialQueryRegression(t *testing.T) {
}
func TestClusterSpecialQueryRegression(t *testing.T) {
tc := at.NewTestCase(t)
tc := apptest.NewTestCase(t)
defer tc.Stop()
sut := tc.MustStartCluster(&at.ClusterOptions{
sut := tc.MustStartCluster(&apptest.ClusterOptions{
Vmstorage1Instance: "vmstorage1",
Vmstorage1Flags: []string{
"-storageDataPath=" + filepath.Join(tc.Dir(), "vmstorage1"),
@@ -45,7 +45,7 @@ func TestClusterSpecialQueryRegression(t *testing.T) {
testSpecialQueryRegression(tc, sut)
}
func testSpecialQueryRegression(tc *at.TestCase, sut at.PrometheusWriteQuerier) {
func testSpecialQueryRegression(tc *apptest.TestCase, sut apptest.PrometheusWriteQuerier) {
// prometheus
testCaseSensitiveRegex(tc, sut)
testDuplicateLabel(tc, sut)
@@ -61,7 +61,7 @@ func testSpecialQueryRegression(tc *at.TestCase, sut at.PrometheusWriteQuerier)
testSubqueryAggregation(tc, sut)
}
func testCaseSensitiveRegex(tc *at.TestCase, sut at.PrometheusWriteQuerier) {
func testCaseSensitiveRegex(tc *apptest.TestCase, sut apptest.PrometheusWriteQuerier) {
t := tc.T()
// case-sensitive-regex
@@ -69,102 +69,102 @@ func testCaseSensitiveRegex(tc *at.TestCase, sut at.PrometheusWriteQuerier) {
sut.PrometheusAPIV1ImportPrometheus(t, []string{
`prometheus.sensitiveRegex{label="sensitiveRegex"} 10 1707123456700`, // 2024-02-05T08:57:36.700Z
`prometheus.sensitiveRegex{label="SensitiveRegex"} 10 1707123456700`, // 2024-02-05T08:57:36.700Z
}, at.QueryOpts{})
}, apptest.QueryOpts{})
sut.ForceFlush(t)
tc.Assert(&at.AssertOptions{
tc.Assert(&apptest.AssertOptions{
Msg: "unexpected /api/v1/export response",
Got: func() any {
return sut.PrometheusAPIV1Export(t, `{label=~'(?i)sensitiveregex'}`, at.QueryOpts{
return sut.PrometheusAPIV1Export(t, `{label=~'(?i)sensitiveregex'}`, apptest.QueryOpts{
Start: "2024-02-05T08:50:00.700Z",
End: "2024-02-05T09:00:00.700Z",
})
},
Want: &at.PrometheusAPIV1QueryResponse{
Want: &apptest.PrometheusAPIV1QueryResponse{
Status: "success",
Data: &at.QueryData{
Data: &apptest.QueryData{
ResultType: "matrix",
Result: []*at.QueryResult{
Result: []*apptest.QueryResult{
{
Metric: map[string]string{"__name__": "prometheus.sensitiveRegex", "label": "SensitiveRegex"},
Samples: []*at.Sample{{Timestamp: 1707123456700, Value: 10}},
Samples: []*apptest.Sample{{Timestamp: 1707123456700, Value: 10}},
},
{
Metric: map[string]string{"__name__": "prometheus.sensitiveRegex", "label": "sensitiveRegex"},
Samples: []*at.Sample{{Timestamp: 1707123456700, Value: 10}},
Samples: []*apptest.Sample{{Timestamp: 1707123456700, Value: 10}},
},
},
},
},
CmpOpts: []cmp.Option{
cmpopts.IgnoreFields(at.PrometheusAPIV1QueryResponse{}, "Status", "Data.ResultType"),
cmpopts.IgnoreFields(apptest.PrometheusAPIV1QueryResponse{}, "Status", "Data.ResultType"),
},
})
}
func testDuplicateLabel(tc *at.TestCase, sut at.PrometheusWriteQuerier) {
func testDuplicateLabel(tc *apptest.TestCase, sut apptest.PrometheusWriteQuerier) {
t := tc.T()
// duplicate_label
// https://github.com/VictoriaMetrics/VictoriaMetrics/issues/172
sut.PrometheusAPIV1ImportPrometheus(t, []string{
`prometheus.duplicate_label{label="duplicate", label="duplicate"} 10 1707123456700`, // 2024-02-05T08:57:36.700Z
}, at.QueryOpts{})
}, apptest.QueryOpts{})
sut.ForceFlush(t)
tc.Assert(&at.AssertOptions{
tc.Assert(&apptest.AssertOptions{
Msg: "unexpected /api/v1/export response",
Got: func() any {
return sut.PrometheusAPIV1Export(t, `{__name__='prometheus.duplicate_label'}`, at.QueryOpts{
return sut.PrometheusAPIV1Export(t, `{__name__='prometheus.duplicate_label'}`, apptest.QueryOpts{
Start: "2024-02-05T08:50:00.700Z",
End: "2024-02-05T09:00:00.700Z",
})
},
Want: &at.PrometheusAPIV1QueryResponse{
Want: &apptest.PrometheusAPIV1QueryResponse{
Status: "success",
Data: &at.QueryData{
Data: &apptest.QueryData{
ResultType: "matrix",
Result: []*at.QueryResult{
Result: []*apptest.QueryResult{
{
Metric: map[string]string{"__name__": "prometheus.duplicate_label", "label": "duplicate"},
Samples: []*at.Sample{{Timestamp: 1707123456700, Value: 10}},
Samples: []*apptest.Sample{{Timestamp: 1707123456700, Value: 10}},
},
},
},
},
CmpOpts: []cmp.Option{
cmpopts.IgnoreFields(at.PrometheusAPIV1QueryResponse{}, "Status", "Data.ResultType"),
cmpopts.IgnoreFields(apptest.PrometheusAPIV1QueryResponse{}, "Status", "Data.ResultType"),
},
})
}
func testTooBigLookbehindWindow(tc *at.TestCase, sut at.PrometheusWriteQuerier) {
func testTooBigLookbehindWindow(tc *apptest.TestCase, sut apptest.PrometheusWriteQuerier) {
t := tc.T()
// too big look-behind window
// https://github.com/VictoriaMetrics/VictoriaMetrics/issues/5553
sut.PrometheusAPIV1ImportPrometheus(t, []string{
`prometheus.too_big_lookbehind{label="foo"} 10 1707123456700`, // 2024-02-05T08:57:36.700Z
}, at.QueryOpts{})
}, apptest.QueryOpts{})
sut.ForceFlush(t)
tc.Assert(&at.AssertOptions{
tc.Assert(&apptest.AssertOptions{
Msg: "unexpected /api/v1/query response",
Got: func() any {
return sut.PrometheusAPIV1Query(t, `prometheus.too_big_lookbehind{label="foo"}[100y]`, at.QueryOpts{
return sut.PrometheusAPIV1Query(t, `prometheus.too_big_lookbehind{label="foo"}[100y]`, apptest.QueryOpts{
Step: "5m",
Time: "2024-02-05T08:57:36.700Z",
})
},
Want: &at.PrometheusAPIV1QueryResponse{
Want: &apptest.PrometheusAPIV1QueryResponse{
Status: "success",
Data: &at.QueryData{
Data: &apptest.QueryData{
ResultType: "matrix",
Result: []*at.QueryResult{
Result: []*apptest.QueryResult{
{
Metric: map[string]string{"__name__": "prometheus.too_big_lookbehind", "label": "foo"},
Samples: []*at.Sample{
at.NewSample(t, "2024-02-05T08:57:36.700Z", 10),
Samples: []*apptest.Sample{
apptest.NewSample(t, "2024-02-05T08:57:36.700Z", 10),
},
},
},
@@ -179,30 +179,30 @@ func testTooBigLookbehindWindow(tc *at.TestCase, sut at.PrometheusWriteQuerier)
`prometheus.too_big_lookbehind_range{label="foo"} 12 1707123466700`, // 2024-02-05T08:57:46.700Z
`prometheus.too_big_lookbehind_range{label="foo"} 11 1707123436700`, // 2024-02-05T08:57:16.700Z
`prometheus.too_big_lookbehind_range{label="foo"} 10 1707123406700`, // 2024-02-05T08:56:46.700Z
}, at.QueryOpts{})
}, apptest.QueryOpts{})
sut.ForceFlush(t)
tc.Assert(&at.AssertOptions{
tc.Assert(&apptest.AssertOptions{
Msg: "unexpected /api/v1/query_range response",
Got: func() any {
return sut.PrometheusAPIV1QueryRange(t, `prometheus.too_big_lookbehind_range{label="foo"}`, at.QueryOpts{
return sut.PrometheusAPIV1QueryRange(t, `prometheus.too_big_lookbehind_range{label="foo"}`, apptest.QueryOpts{
Start: "2024-02-05T08:56:46.700Z",
End: "2024-02-05T08:58:16.700Z",
Step: "30s",
})
},
Want: &at.PrometheusAPIV1QueryResponse{
Want: &apptest.PrometheusAPIV1QueryResponse{
Status: "success",
Data: &at.QueryData{
Data: &apptest.QueryData{
ResultType: "matrix",
Result: []*at.QueryResult{
Result: []*apptest.QueryResult{
{
Metric: map[string]string{"__name__": "prometheus.too_big_lookbehind_range", "label": "foo"},
Samples: []*at.Sample{
at.NewSample(t, "2024-02-05T08:56:46.700Z", 10),
at.NewSample(t, "2024-02-05T08:57:16.700Z", 11),
at.NewSample(t, "2024-02-05T08:57:46.700Z", 12),
at.NewSample(t, "2024-02-05T08:58:16.700Z", 13),
Samples: []*apptest.Sample{
apptest.NewSample(t, "2024-02-05T08:56:46.700Z", 10),
apptest.NewSample(t, "2024-02-05T08:57:16.700Z", 11),
apptest.NewSample(t, "2024-02-05T08:57:46.700Z", 12),
apptest.NewSample(t, "2024-02-05T08:58:16.700Z", 13),
},
},
},
@@ -211,7 +211,7 @@ func testTooBigLookbehindWindow(tc *at.TestCase, sut at.PrometheusWriteQuerier)
})
}
func testMatchSeries(tc *at.TestCase, sut at.PrometheusWriteQuerier) {
func testMatchSeries(tc *apptest.TestCase, sut apptest.PrometheusWriteQuerier) {
t := tc.T()
// match_series
@@ -221,18 +221,18 @@ func testMatchSeries(tc *at.TestCase, sut at.PrometheusWriteQuerier) {
`GenBearTemp{db="TenMinute",Park="2",TurbineType="V112"} 10 1707123456700`, // 2024-02-05T08:57:36.700Z
`GenBearTemp{db="TenMinute",Park="3",TurbineType="V112"} 10 1707123456700`, // 2024-02-05T08:57:36.700Z
`GenBearTemp{db="TenMinute",Park="4",TurbineType="V112"} 10 1707123456700`, // 2024-02-05T08:57:36.700Z
}, at.QueryOpts{})
}, apptest.QueryOpts{})
sut.ForceFlush(t)
tc.Assert(&at.AssertOptions{
tc.Assert(&apptest.AssertOptions{
Msg: "unexpected /api/v1/series response",
Got: func() any {
return sut.PrometheusAPIV1Series(t, `{__name__="GenBearTemp"}`, at.QueryOpts{
return sut.PrometheusAPIV1Series(t, `{__name__="GenBearTemp"}`, apptest.QueryOpts{
Start: "2024-02-04T08:57:36.700Z",
End: "2024-02-05T08:57:36.700Z",
}).Sort()
},
Want: &at.PrometheusAPIV1SeriesResponse{
Want: &apptest.PrometheusAPIV1SeriesResponse{
Status: "success",
IsPartial: false,
Data: []map[string]string{
@@ -245,7 +245,7 @@ func testMatchSeries(tc *at.TestCase, sut at.PrometheusWriteQuerier) {
})
}
func testNegativeIncrease(tc *at.TestCase, sut at.PrometheusWriteQuerier) {
func testNegativeIncrease(tc *apptest.TestCase, sut apptest.PrometheusWriteQuerier) {
t := tc.T()
// negative increase when user overrides staleness interval
@@ -256,34 +256,34 @@ func testNegativeIncrease(tc *at.TestCase, sut at.PrometheusWriteQuerier) {
// gap 75s
`foo 1 1750109333514`, // 2025-06-16 21:28:53:514
`foo 1 1750109348514`, // 2025-06-16 21:29:08:514
}, at.QueryOpts{})
}, apptest.QueryOpts{})
sut.ForceFlush(t)
tc.Assert(&at.AssertOptions{
tc.Assert(&apptest.AssertOptions{
Msg: "regression for https://github.com/VictoriaMetrics/VictoriaMetrics/issues/8935#issuecomment-2978728661",
DoNotRetry: true,
Got: func() any {
return sut.PrometheusAPIV1QueryRange(t, `increase(foo[1m])`, at.QueryOpts{
return sut.PrometheusAPIV1QueryRange(t, `increase(foo[1m])`, apptest.QueryOpts{
Start: "2025-06-16T21:28:40.700Z",
End: "2025-06-16T21:29:30.700Z",
Step: "9s",
MaxLookback: "65s",
})
},
Want: &at.PrometheusAPIV1QueryResponse{
Want: &apptest.PrometheusAPIV1QueryResponse{
Status: "success",
Data: &at.QueryData{
Data: &apptest.QueryData{
ResultType: "matrix",
Result: []*at.QueryResult{
Result: []*apptest.QueryResult{
{
Metric: map[string]string{},
Samples: []*at.Sample{
at.NewSample(t, "2025-06-16T21:28:40.700Z", 0),
at.NewSample(t, "2025-06-16T21:28:49.700Z", 0),
at.NewSample(t, "2025-06-16T21:28:58.700Z", 1),
at.NewSample(t, "2025-06-16T21:29:07.700Z", 1),
at.NewSample(t, "2025-06-16T21:29:16.700Z", 0),
at.NewSample(t, "2025-06-16T21:29:25.700Z", 0),
Samples: []*apptest.Sample{
apptest.NewSample(t, "2025-06-16T21:28:40.700Z", 0),
apptest.NewSample(t, "2025-06-16T21:28:49.700Z", 0),
apptest.NewSample(t, "2025-06-16T21:28:58.700Z", 1),
apptest.NewSample(t, "2025-06-16T21:29:07.700Z", 1),
apptest.NewSample(t, "2025-06-16T21:29:16.700Z", 0),
apptest.NewSample(t, "2025-06-16T21:29:25.700Z", 0),
},
},
},
@@ -292,7 +292,7 @@ func testNegativeIncrease(tc *at.TestCase, sut at.PrometheusWriteQuerier) {
})
}
func testComparisonNotInfNotNan(tc *at.TestCase, sut at.PrometheusWriteQuerier) {
func testComparisonNotInfNotNan(tc *apptest.TestCase, sut apptest.PrometheusWriteQuerier) {
t := tc.T()
// comparison-not-inf-not-nan
@@ -303,8 +303,8 @@ func testComparisonNotInfNotNan(tc *at.TestCase, sut at.PrometheusWriteQuerier)
"not_nan_not_inf;item=x 1 1707123455", // 2024-02-05T08:57:35.000Z
"not_nan_not_inf;item=y 3 1707123456", // 2024-02-05T08:57:36.000Z
"not_nan_not_inf;item=y 1 1707123455", // 2024-02-05T08:57:35.000Z
}, at.QueryOpts{})
tc.Assert(&at.AssertOptions{
}, apptest.QueryOpts{})
tc.Assert(&apptest.AssertOptions{
Msg: "unexpected row inserted metrics check",
Got: func() any {
return (getRowsInsertedTotal(t, sut) - rowInserted) >= 4
@@ -313,24 +313,24 @@ func testComparisonNotInfNotNan(tc *at.TestCase, sut at.PrometheusWriteQuerier)
})
sut.ForceFlush(t)
tc.Assert(&at.AssertOptions{
tc.Assert(&apptest.AssertOptions{
Msg: "unexpected /api/v1/query_range response",
Got: func() any {
return sut.PrometheusAPIV1QueryRange(t, `1/(not_nan_not_inf-1)!=inf!=nan`, at.QueryOpts{
return sut.PrometheusAPIV1QueryRange(t, `1/(not_nan_not_inf-1)!=inf!=nan`, apptest.QueryOpts{
Start: "2024-02-05T06:50:36.000Z",
End: "2024-02-05T09:58:37.000Z",
Step: "60",
})
},
Want: &at.PrometheusAPIV1QueryResponse{
Want: &apptest.PrometheusAPIV1QueryResponse{
Status: "success",
Data: &at.QueryData{
Data: &apptest.QueryData{
ResultType: "matrix",
Result: []*at.QueryResult{
Result: []*apptest.QueryResult{
{
Metric: map[string]string{"item": "y"},
Samples: []*at.Sample{
at.NewSample(t, "2024-02-05T08:58:00.000Z", 0.5),
Samples: []*apptest.Sample{
apptest.NewSample(t, "2024-02-05T08:58:00.000Z", 0.5),
},
},
},
@@ -339,7 +339,7 @@ func testComparisonNotInfNotNan(tc *at.TestCase, sut at.PrometheusWriteQuerier)
})
}
func testEmptyLabelMatch(tc *at.TestCase, sut at.PrometheusWriteQuerier) {
func testEmptyLabelMatch(tc *apptest.TestCase, sut apptest.PrometheusWriteQuerier) {
t := tc.T()
// empty-label-match
@@ -349,8 +349,8 @@ func testEmptyLabelMatch(tc *at.TestCase, sut at.PrometheusWriteQuerier) {
"empty_label_match 1 1707123456", // 2024-02-05T08:57:36.000Z
"empty_label_match;foo=bar 2 1707123456", // 2024-02-05T08:57:36.000Z
"empty_label_match;foo=baz 3 1707123456", // 2024-02-05T08:57:36.000Z
}, at.QueryOpts{})
tc.Assert(&at.AssertOptions{
}, apptest.QueryOpts{})
tc.Assert(&apptest.AssertOptions{
Msg: "unexpected row inserted metrics check",
Got: func() any {
return (getRowsInsertedTotal(t, sut) - rowInserted) >= 3
@@ -359,30 +359,30 @@ func testEmptyLabelMatch(tc *at.TestCase, sut at.PrometheusWriteQuerier) {
})
sut.ForceFlush(t)
tc.Assert(&at.AssertOptions{
tc.Assert(&apptest.AssertOptions{
Msg: "unexpected /api/v1/query_range response",
Got: func() any {
return sut.PrometheusAPIV1QueryRange(t, `empty_label_match{foo=~'bar|'}`, at.QueryOpts{
return sut.PrometheusAPIV1QueryRange(t, `empty_label_match{foo=~'bar|'}`, apptest.QueryOpts{
Start: "2024-02-05T08:55:36.000Z",
End: "2024-02-05T08:57:36.000Z",
Step: "60s",
})
},
Want: &at.PrometheusAPIV1QueryResponse{
Want: &apptest.PrometheusAPIV1QueryResponse{
Status: "success",
Data: &at.QueryData{
Data: &apptest.QueryData{
ResultType: "matrix",
Result: []*at.QueryResult{
Result: []*apptest.QueryResult{
{
Metric: map[string]string{"__name__": "empty_label_match"},
Samples: []*at.Sample{
at.NewSample(t, "2024-02-05T08:57:36.000Z", 1),
Samples: []*apptest.Sample{
apptest.NewSample(t, "2024-02-05T08:57:36.000Z", 1),
},
},
{
Metric: map[string]string{"__name__": "empty_label_match", "foo": "bar"},
Samples: []*at.Sample{
at.NewSample(t, "2024-02-05T08:57:36.000Z", 2),
Samples: []*apptest.Sample{
apptest.NewSample(t, "2024-02-05T08:57:36.000Z", 2),
},
},
},
@@ -391,7 +391,7 @@ func testEmptyLabelMatch(tc *at.TestCase, sut at.PrometheusWriteQuerier) {
})
}
func testMaxLookbehind(tc *at.TestCase, sut at.PrometheusWriteQuerier) {
func testMaxLookbehind(tc *apptest.TestCase, sut apptest.PrometheusWriteQuerier) {
t := tc.T()
// max_lookback_set
@@ -402,8 +402,8 @@ func testMaxLookbehind(tc *at.TestCase, sut at.PrometheusWriteQuerier) {
"max_lookback_set 2 1707123396", // 2024-02-05T08:56:36.000Z
"max_lookback_set 3 1707123336", // 2024-02-05T08:55:36.000Z",
"max_lookback_set 4 1707123306", // 2024-02-05T08:55:06.000Z
}, at.QueryOpts{})
tc.Assert(&at.AssertOptions{
}, apptest.QueryOpts{})
tc.Assert(&apptest.AssertOptions{
Msg: "unexpected row inserted metrics check",
Got: func() any {
return (getRowsInsertedTotal(t, sut) - rowInserted) >= 4
@@ -412,28 +412,28 @@ func testMaxLookbehind(tc *at.TestCase, sut at.PrometheusWriteQuerier) {
})
sut.ForceFlush(t)
tc.Assert(&at.AssertOptions{
tc.Assert(&apptest.AssertOptions{
Msg: "unexpected /api/v1/query_range response",
Got: func() any {
return sut.PrometheusAPIV1QueryRange(t, `max_lookback_set{foo=~'bar|'}`, at.QueryOpts{
return sut.PrometheusAPIV1QueryRange(t, `max_lookback_set{foo=~'bar|'}`, apptest.QueryOpts{
Start: "2024-02-05T08:55:06.000Z",
End: "2024-02-05T08:57:37.000Z",
Step: "10s",
MaxLookback: "1s",
})
},
Want: &at.PrometheusAPIV1QueryResponse{
Want: &apptest.PrometheusAPIV1QueryResponse{
Status: "success",
Data: &at.QueryData{
Data: &apptest.QueryData{
ResultType: "matrix",
Result: []*at.QueryResult{
Result: []*apptest.QueryResult{
{
Metric: map[string]string{"__name__": "max_lookback_set"},
Samples: []*at.Sample{
at.NewSample(t, "2024-02-05T08:55:06.000Z", 4),
at.NewSample(t, "2024-02-05T08:55:36.000Z", 3),
at.NewSample(t, "2024-02-05T08:56:36.000Z", 2),
at.NewSample(t, "2024-02-05T08:57:06.000Z", 1),
Samples: []*apptest.Sample{
apptest.NewSample(t, "2024-02-05T08:55:06.000Z", 4),
apptest.NewSample(t, "2024-02-05T08:55:36.000Z", 3),
apptest.NewSample(t, "2024-02-05T08:56:36.000Z", 2),
apptest.NewSample(t, "2024-02-05T08:57:06.000Z", 1),
},
},
},
@@ -449,8 +449,8 @@ func testMaxLookbehind(tc *at.TestCase, sut at.PrometheusWriteQuerier) {
"max_lookback_unset 2 1707123396", // 2024-02-05T08:56:36.000Z
"max_lookback_unset 3 1707123336", // 2024-02-05T08:55:36.000Z
"max_lookback_unset 4 1707123306", // 2024-02-05T08:55:06.000Z
}, at.QueryOpts{})
tc.Assert(&at.AssertOptions{
}, apptest.QueryOpts{})
tc.Assert(&apptest.AssertOptions{
Msg: "unexpected row inserted metrics check",
Got: func() any {
return (getRowsInsertedTotal(t, sut) - rowInserted) >= 4
@@ -459,38 +459,38 @@ func testMaxLookbehind(tc *at.TestCase, sut at.PrometheusWriteQuerier) {
})
sut.ForceFlush(t)
tc.Assert(&at.AssertOptions{
tc.Assert(&apptest.AssertOptions{
Msg: "unexpected /api/v1/query_range response",
Got: func() any {
return sut.PrometheusAPIV1QueryRange(t, `max_lookback_unset{foo=~'bar|'}`, at.QueryOpts{
return sut.PrometheusAPIV1QueryRange(t, `max_lookback_unset{foo=~'bar|'}`, apptest.QueryOpts{
Start: "2024-02-05T08:55:06.000Z",
End: "2024-02-05T08:57:37.000Z",
Step: "10s",
})
},
Want: &at.PrometheusAPIV1QueryResponse{
Want: &apptest.PrometheusAPIV1QueryResponse{
Status: "success",
Data: &at.QueryData{
Data: &apptest.QueryData{
ResultType: "matrix",
Result: []*at.QueryResult{
Result: []*apptest.QueryResult{
{
Metric: map[string]string{"__name__": "max_lookback_unset"},
Samples: []*at.Sample{
at.NewSample(t, "2024-02-05T08:55:06.000Z", 4),
at.NewSample(t, "2024-02-05T08:55:16.000Z", 4),
at.NewSample(t, "2024-02-05T08:55:26.000Z", 4),
at.NewSample(t, "2024-02-05T08:55:36.000Z", 3),
at.NewSample(t, "2024-02-05T08:55:46.000Z", 3),
at.NewSample(t, "2024-02-05T08:55:56.000Z", 3),
at.NewSample(t, "2024-02-05T08:56:06.000Z", 3),
at.NewSample(t, "2024-02-05T08:56:16.000Z", 3),
at.NewSample(t, "2024-02-05T08:56:36.000Z", 2),
at.NewSample(t, "2024-02-05T08:56:46.000Z", 2),
at.NewSample(t, "2024-02-05T08:56:56.000Z", 2),
at.NewSample(t, "2024-02-05T08:57:06.000Z", 1),
at.NewSample(t, "2024-02-05T08:57:16.000Z", 1),
at.NewSample(t, "2024-02-05T08:57:26.000Z", 1),
at.NewSample(t, "2024-02-05T08:57:36.000Z", 1),
Samples: []*apptest.Sample{
apptest.NewSample(t, "2024-02-05T08:55:06.000Z", 4),
apptest.NewSample(t, "2024-02-05T08:55:16.000Z", 4),
apptest.NewSample(t, "2024-02-05T08:55:26.000Z", 4),
apptest.NewSample(t, "2024-02-05T08:55:36.000Z", 3),
apptest.NewSample(t, "2024-02-05T08:55:46.000Z", 3),
apptest.NewSample(t, "2024-02-05T08:55:56.000Z", 3),
apptest.NewSample(t, "2024-02-05T08:56:06.000Z", 3),
apptest.NewSample(t, "2024-02-05T08:56:16.000Z", 3),
apptest.NewSample(t, "2024-02-05T08:56:36.000Z", 2),
apptest.NewSample(t, "2024-02-05T08:56:46.000Z", 2),
apptest.NewSample(t, "2024-02-05T08:56:56.000Z", 2),
apptest.NewSample(t, "2024-02-05T08:57:06.000Z", 1),
apptest.NewSample(t, "2024-02-05T08:57:16.000Z", 1),
apptest.NewSample(t, "2024-02-05T08:57:26.000Z", 1),
apptest.NewSample(t, "2024-02-05T08:57:36.000Z", 1),
},
},
},
@@ -499,7 +499,7 @@ func testMaxLookbehind(tc *at.TestCase, sut at.PrometheusWriteQuerier) {
})
}
func testNonNanAsMissingData(tc *at.TestCase, sut at.PrometheusWriteQuerier) {
func testNonNanAsMissingData(tc *apptest.TestCase, sut apptest.PrometheusWriteQuerier) {
t := tc.T()
// not-nan-as-missing-data
@@ -510,8 +510,8 @@ func testNonNanAsMissingData(tc *at.TestCase, sut at.PrometheusWriteQuerier) {
"not_nan_as_missing_data;item=x 1 1707123455", // 2024-02-05T08:57:35.000Z
"not_nan_as_missing_data;item=y 4 1707123454", // 2024-02-05T08:57:34.000Z
"not_nan_as_missing_data;item=y 3 1707123455", // 2024-02-05T08:57:35.000Z
}, at.QueryOpts{})
tc.Assert(&at.AssertOptions{
}, apptest.QueryOpts{})
tc.Assert(&apptest.AssertOptions{
Msg: "unexpected row inserted metrics check",
Got: func() any {
return (getRowsInsertedTotal(t, sut) - rowInserted) >= 4
@@ -520,32 +520,32 @@ func testNonNanAsMissingData(tc *at.TestCase, sut at.PrometheusWriteQuerier) {
})
sut.ForceFlush(t)
tc.Assert(&at.AssertOptions{
tc.Assert(&apptest.AssertOptions{
Msg: "unexpected /api/v1/query_range response",
Got: func() any {
return sut.PrometheusAPIV1QueryRange(t, `not_nan_as_missing_data>1`, at.QueryOpts{
return sut.PrometheusAPIV1QueryRange(t, `not_nan_as_missing_data>1`, apptest.QueryOpts{
Start: "2024-02-05T08:57:34.000Z",
End: "2024-02-05T08:57:36.000Z",
Step: "1s",
})
},
Want: &at.PrometheusAPIV1QueryResponse{
Want: &apptest.PrometheusAPIV1QueryResponse{
Status: "success",
Data: &at.QueryData{
Data: &apptest.QueryData{
ResultType: "matrix",
Result: []*at.QueryResult{
Result: []*apptest.QueryResult{
{
Metric: map[string]string{"__name__": "not_nan_as_missing_data", "item": "x"},
Samples: []*at.Sample{
at.NewSample(t, "2024-02-05T08:57:34.000Z", 2),
Samples: []*apptest.Sample{
apptest.NewSample(t, "2024-02-05T08:57:34.000Z", 2),
},
},
{
Metric: map[string]string{"__name__": "not_nan_as_missing_data", "item": "y"},
Samples: []*at.Sample{
at.NewSample(t, "2024-02-05T08:57:34.000Z", 4),
at.NewSample(t, "2024-02-05T08:57:35.000Z", 3),
at.NewSample(t, "2024-02-05T08:57:36.000Z", 3),
Samples: []*apptest.Sample{
apptest.NewSample(t, "2024-02-05T08:57:34.000Z", 4),
apptest.NewSample(t, "2024-02-05T08:57:35.000Z", 3),
apptest.NewSample(t, "2024-02-05T08:57:36.000Z", 3),
},
},
},
@@ -554,7 +554,7 @@ func testNonNanAsMissingData(tc *at.TestCase, sut at.PrometheusWriteQuerier) {
})
}
func testSubqueryAggregation(tc *at.TestCase, sut at.PrometheusWriteQuerier) {
func testSubqueryAggregation(tc *apptest.TestCase, sut apptest.PrometheusWriteQuerier) {
t := tc.T()
// subquery-aggregation
@@ -565,8 +565,8 @@ func testSubqueryAggregation(tc *at.TestCase, sut at.PrometheusWriteQuerier) {
"forms_daily_count;item=x 2 1707123336", // 2024-02-05T08:55:36.000Z
"forms_daily_count;item=y 3 1707123396", // 2024-02-05T08:56:36.000Z
"forms_daily_count;item=y 4 1707123336", // 2024-02-05T08:55:36.000Z
}, at.QueryOpts{})
tc.Assert(&at.AssertOptions{
}, apptest.QueryOpts{})
tc.Assert(&apptest.AssertOptions{
Msg: "unexpected row inserted metrics check",
Got: func() any {
return (getRowsInsertedTotal(t, sut) - rowInserted) >= 4
@@ -575,28 +575,28 @@ func testSubqueryAggregation(tc *at.TestCase, sut at.PrometheusWriteQuerier) {
})
sut.ForceFlush(t)
tc.Assert(&at.AssertOptions{
tc.Assert(&apptest.AssertOptions{
Msg: "unexpected /api/v1/query response",
Got: func() any {
got := sut.PrometheusAPIV1Query(t, `min by (item) (min_over_time(forms_daily_count[10m:1m]))`, at.QueryOpts{
got := sut.PrometheusAPIV1Query(t, `min by (item) (min_over_time(forms_daily_count[10m:1m]))`, apptest.QueryOpts{
Time: "2024-02-05T08:56:35.000Z",
LatencyOffset: "1ms",
})
got.Sort()
return got
},
Want: &at.PrometheusAPIV1QueryResponse{
Want: &apptest.PrometheusAPIV1QueryResponse{
Status: "success",
Data: &at.QueryData{
Data: &apptest.QueryData{
ResultType: "vector",
Result: []*at.QueryResult{
Result: []*apptest.QueryResult{
{
Metric: map[string]string{"item": "x"},
Sample: at.NewSample(t, "2024-02-05T08:56:35.000Z", 2),
Sample: apptest.NewSample(t, "2024-02-05T08:56:35.000Z", 2),
},
{
Metric: map[string]string{"item": "y"},
Sample: at.NewSample(t, "2024-02-05T08:56:35.000Z", 4),
Sample: apptest.NewSample(t, "2024-02-05T08:56:35.000Z", 4),
},
},
},
@@ -604,14 +604,14 @@ func testSubqueryAggregation(tc *at.TestCase, sut at.PrometheusWriteQuerier) {
})
}
func getRowsInsertedTotal(t *testing.T, sut at.PrometheusWriteQuerier) int {
func getRowsInsertedTotal(t *testing.T, sut apptest.PrometheusWriteQuerier) int {
t.Helper()
selector := `vm_rows_inserted_total{type="graphite"}`
switch tt := sut.(type) {
case *at.Vmsingle:
case *apptest.Vmsingle:
return tt.GetIntMetric(t, selector)
case *at.Vmcluster:
case *apptest.Vmcluster:
return tt.Vminsert.GetIntMetric(t, selector)
default:
t.Fatalf("unexpected type: got %T, want *Vmsingle or *Vminsert", sut)

View File

@@ -7,13 +7,13 @@ import (
"sync"
"testing"
at "github.com/VictoriaMetrics/VictoriaMetrics/apptest"
"github.com/VictoriaMetrics/VictoriaMetrics/apptest"
"github.com/VictoriaMetrics/VictoriaMetrics/lib/fs"
)
// TestSingleVMAgentReloadConfigs verifies that vmagent reload new configurations on SIGHUP signal
func TestSingleVMAgentReloadConfigs(t *testing.T) {
tc := at.NewTestCase(t)
tc := apptest.NewTestCase(t)
defer tc.Stop()
vmsingle := tc.MustStartDefaultVmsingle()
@@ -35,19 +35,19 @@ func TestSingleVMAgentReloadConfigs(t *testing.T) {
vmagent.APIV1ImportPrometheus(t, []string{
"foo_bar 1 1652169600000", // 2022-05-10T08:00:00Z
}, at.QueryOpts{})
}, apptest.QueryOpts{})
vmsingle.ForceFlush(t)
tc.Assert(&at.AssertOptions{
tc.Assert(&apptest.AssertOptions{
Msg: `unexpected metrics stored on vmagent remote write`,
Got: func() any {
return vmsingle.PrometheusAPIV1Series(t, `{__name__="foo_bar"}`, at.QueryOpts{
return vmsingle.PrometheusAPIV1Series(t, `{__name__="foo_bar"}`, apptest.QueryOpts{
Start: "2022-05-10T00:00:00Z",
End: "2022-05-10T23:59:59Z",
}).Sort()
},
Want: &at.PrometheusAPIV1SeriesResponse{
Want: &apptest.PrometheusAPIV1SeriesResponse{
Status: "success",
Data: []map[string]string{{"__name__": "foo_bar", "label1": "value1"}},
},
@@ -64,19 +64,19 @@ func TestSingleVMAgentReloadConfigs(t *testing.T) {
vmagent.APIV1ImportPrometheus(t, []string{
"bar_foo 1 1652169600001", // 2022-05-10T08:00:00Z
}, at.QueryOpts{})
}, apptest.QueryOpts{})
vmsingle.ForceFlush(t)
tc.Assert(&at.AssertOptions{
tc.Assert(&apptest.AssertOptions{
Msg: `unexpected metrics stored on vmagent remote write`,
Got: func() any {
return vmsingle.PrometheusAPIV1Series(t, `{__name__="bar_foo"}`, at.QueryOpts{
return vmsingle.PrometheusAPIV1Series(t, `{__name__="bar_foo"}`, apptest.QueryOpts{
Start: "2022-05-10T00:00:00Z",
End: "2022-05-10T23:59:59Z",
}).Sort()
},
Want: &at.PrometheusAPIV1SeriesResponse{
Want: &apptest.PrometheusAPIV1SeriesResponse{
Status: "success",
Data: []map[string]string{{"__name__": "bar_foo", "label1": "value2"}},
},
@@ -96,7 +96,7 @@ func TestSingleVMAgentSnappyRemoteWrite(t *testing.T) {
}
func testSingleVMAgentRemoteWrite(t *testing.T, forcePromProto bool) {
tc := at.NewTestCase(t)
tc := apptest.NewTestCase(t)
defer tc.Stop()
vmsingle := tc.MustStartDefaultVmsingle()
@@ -110,19 +110,19 @@ func testSingleVMAgentRemoteWrite(t *testing.T, forcePromProto bool) {
vmagent.APIV1ImportPrometheus(t, []string{
"foo_bar 1 1652169600000", // 2022-05-10T08:00:00Z
}, at.QueryOpts{})
}, apptest.QueryOpts{})
vmsingle.ForceFlush(t)
tc.Assert(&at.AssertOptions{
tc.Assert(&apptest.AssertOptions{
Msg: `unexpected metrics stored on vmagent remote write`,
Got: func() any {
return vmsingle.PrometheusAPIV1Series(t, `{__name__="foo_bar"}`, at.QueryOpts{
return vmsingle.PrometheusAPIV1Series(t, `{__name__="foo_bar"}`, apptest.QueryOpts{
Start: "2022-05-10T00:00:00Z",
End: "2022-05-10T23:59:59Z",
}).Sort()
},
Want: &at.PrometheusAPIV1SeriesResponse{
Want: &apptest.PrometheusAPIV1SeriesResponse{
Status: "success",
Data: []map[string]string{{"__name__": "foo_bar"}},
},
@@ -133,7 +133,7 @@ func testSingleVMAgentRemoteWrite(t *testing.T, forcePromProto bool) {
// - Starts with Prometheus remote write protocol using `snappy`.
// - Does not retry `snappy`-encoded requests if they fail; instead, they are dropped.
func TestSingleVMAgentUnsupportedMediaTypeDropIfSnappy(t *testing.T) {
tc := at.NewTestCase(t)
tc := apptest.NewTestCase(t)
defer tc.Stop()
var remoteWriteContentEncodingsMux sync.Mutex
@@ -159,13 +159,13 @@ func TestSingleVMAgentUnsupportedMediaTypeDropIfSnappy(t *testing.T) {
vmagent.APIV1ImportPrometheusNoWaitFlush(t, []string{
"foo_bar 1 1652169600000", // 2022-05-10T08:00:00Z
}, at.QueryOpts{})
}, apptest.QueryOpts{})
vmagent.APIV1ImportPrometheusNoWaitFlush(t, []string{
"foo_bar 1 1652169600000", // 2022-05-10T08:00:00Z
}, at.QueryOpts{})
}, apptest.QueryOpts{})
tc.Assert(&at.AssertOptions{
tc.Assert(&apptest.AssertOptions{
Msg: `unexpected content encoding headers sent to remote write server; expected zstd`,
Got: func() any {
remoteWriteContentEncodingsMux.Lock()
@@ -192,7 +192,7 @@ func TestSingleVMAgentUnsupportedMediaTypeDropIfSnappy(t *testing.T) {
// - Re-packs and retries failed requests.
// - Sends all subsequent requests using `snappy`.
func TestSingleVMAgentDowngradeRemoteWriteProtocol(t *testing.T) {
tc := at.NewTestCase(t)
tc := apptest.NewTestCase(t)
defer tc.Stop()
var remoteWriteContentEncodings []string
@@ -223,14 +223,14 @@ func TestSingleVMAgentDowngradeRemoteWriteProtocol(t *testing.T) {
// Send request encoded with `zstd`; it fails, gets repacked as `snappy`, and retries successfully.
vmagent.APIV1ImportPrometheus(t, []string{
"foo_bar 1 1652169600000", // 2022-05-10T08:00:00Z
}, at.QueryOpts{})
}, apptest.QueryOpts{})
// Send request encoded with `snappy` immediately; it succeeds without retries.
vmagent.APIV1ImportPrometheus(t, []string{
"foo_bar 1 1652169600000", // 2022-05-10T08:00:00Z
}, at.QueryOpts{})
}, apptest.QueryOpts{})
tc.Assert(&at.AssertOptions{
tc.Assert(&apptest.AssertOptions{
Msg: `unexpected content encoding headers sent to remote write server`,
Got: func() any {
return remoteWriteContentEncodings

View File

@@ -8,14 +8,14 @@ import (
"github.com/google/go-cmp/cmp/cmpopts"
"github.com/prometheus/prometheus/prompb"
at "github.com/VictoriaMetrics/VictoriaMetrics/apptest"
"github.com/VictoriaMetrics/VictoriaMetrics/apptest"
"github.com/VictoriaMetrics/VictoriaMetrics/lib/fs"
)
func TestSingleVmctlRemoteReadProtocol(t *testing.T) {
fs.MustRemoveDir(t.Name())
tc := at.NewTestCase(t)
tc := apptest.NewTestCase(t)
defer tc.Stop()
vmsingleDst := tc.MustStartDefaultVmsingle()
@@ -35,7 +35,7 @@ func TestSingleVmctlRemoteReadProtocol(t *testing.T) {
func TestSingleVmctlRemoteReadStreamProtocol(t *testing.T) {
fs.MustRemoveDir(t.Name())
tc := at.NewTestCase(t)
tc := apptest.NewTestCase(t)
defer tc.Stop()
vmsingleDst := tc.MustStartDefaultVmsingle()
@@ -56,7 +56,7 @@ func TestSingleVmctlRemoteReadStreamProtocol(t *testing.T) {
func TestClusterVmctlRemoteReadProtocol(t *testing.T) {
fs.MustRemoveDir(t.Name())
tc := at.NewTestCase(t)
tc := apptest.NewTestCase(t)
defer tc.Stop()
clusterDst := tc.MustStartDefaultCluster()
@@ -75,7 +75,7 @@ func TestClusterVmctlRemoteReadProtocol(t *testing.T) {
testRemoteReadProtocol(tc, clusterDst, newRemoteReadServer, vmctlFlags)
}
func testRemoteReadProtocol(tc *at.TestCase, sut at.PrometheusWriteQuerier, newRemoteReadServer func(t *testing.T) *RemoteReadServer, vmctlFlags []string) {
func testRemoteReadProtocol(tc *apptest.TestCase, sut apptest.PrometheusWriteQuerier, newRemoteReadServer func(t *testing.T) *RemoteReadServer, vmctlFlags []string) {
t := tc.T()
t.Helper()
@@ -84,14 +84,14 @@ func testRemoteReadProtocol(tc *at.TestCase, sut at.PrometheusWriteQuerier, newR
expectedResult := transformSeriesToQueryResult(rrs.storage.store)
cmpOpt := cmpopts.IgnoreFields(at.PrometheusAPIV1QueryResponse{}, "Status", "Data.ResultType")
cmpOpt := cmpopts.IgnoreFields(apptest.PrometheusAPIV1QueryResponse{}, "Status", "Data.ResultType")
// test for empty data request
got := sut.PrometheusAPIV1Query(t, `{__name__=~".*"}`, at.QueryOpts{
got := sut.PrometheusAPIV1Query(t, `{__name__=~".*"}`, apptest.QueryOpts{
Step: "5m",
Time: "2025-06-02T17:14:00Z",
})
want := at.NewPrometheusAPIV1QueryResponse(t, `{"data":{"result":[]}}`)
want := apptest.NewPrometheusAPIV1QueryResponse(t, `{"data":{"result":[]}}`)
if diff := cmp.Diff(want, got, cmpOpt); diff != "" {
t.Errorf("unexpected response (-want, +got):\n%s", diff)
}
@@ -101,12 +101,12 @@ func testRemoteReadProtocol(tc *at.TestCase, sut at.PrometheusWriteQuerier, newR
sut.ForceFlush(t)
tc.Assert(&at.AssertOptions{
tc.Assert(&apptest.AssertOptions{
// For cluster version, we need to wait longer for the metrics to be stored
Retries: 300,
Msg: `unexpected metrics stored on vmsingle via the prometheus protocol`,
Got: func() any {
expected := sut.PrometheusAPIV1Export(t, `{__name__=~".*"}`, at.QueryOpts{
expected := sut.PrometheusAPIV1Export(t, `{__name__=~".*"}`, apptest.QueryOpts{
Start: "2025-06-11T15:31:10Z",
End: "2025-06-11T15:32:20Z",
})
@@ -115,7 +115,7 @@ func testRemoteReadProtocol(tc *at.TestCase, sut at.PrometheusWriteQuerier, newR
},
Want: expectedResult,
CmpOpts: []cmp.Option{
cmpopts.IgnoreFields(at.PrometheusAPIV1QueryResponse{}, "Status", "Data.ResultType"),
cmpopts.IgnoreFields(apptest.PrometheusAPIV1QueryResponse{}, "Status", "Data.ResultType"),
},
})
}
@@ -140,18 +140,18 @@ func newRemoteReadStreamServer(t *testing.T) *RemoteReadServer {
return rrServer
}
func transformSeriesToQueryResult(series []*prompb.TimeSeries) []*at.QueryResult {
result := make([]*at.QueryResult, len(series))
func transformSeriesToQueryResult(series []*prompb.TimeSeries) []*apptest.QueryResult {
result := make([]*apptest.QueryResult, len(series))
for i, s := range series {
metric := make(map[string]string, len(s.Labels))
for _, label := range s.Labels {
metric[label.Name] = label.Value
}
samples := make([]*at.Sample, len(s.Samples))
samples := make([]*apptest.Sample, len(s.Samples))
for j, sample := range s.Samples {
samples[j] = &at.Sample{Timestamp: sample.Timestamp, Value: sample.Value}
samples[j] = &apptest.Sample{Timestamp: sample.Timestamp, Value: sample.Value}
}
result[i] = &at.QueryResult{Metric: metric, Samples: samples}
result[i] = &apptest.QueryResult{Metric: metric, Samples: samples}
}
return result
}

View File

@@ -10,7 +10,7 @@ import (
"github.com/golang/snappy"
pb "github.com/VictoriaMetrics/VictoriaMetrics/lib/prompbmarshal"
"github.com/VictoriaMetrics/VictoriaMetrics/lib/prompbmarshal"
)
// Vminsert holds the state of a vminsert app and provides vminsert-specific
@@ -198,11 +198,11 @@ func (app *Vminsert) OpenTSDBAPIPut(t *testing.T, records []string, opts QueryOp
// PrometheusAPIV1Write is a test helper function that inserts a
// collection of records in Prometheus remote-write format by sending a HTTP
// POST request to /prometheus/api/v1/write vminsert endpoint.
func (app *Vminsert) PrometheusAPIV1Write(t *testing.T, records []pb.TimeSeries, opts QueryOpts) {
func (app *Vminsert) PrometheusAPIV1Write(t *testing.T, records []prompbmarshal.TimeSeries, opts QueryOpts) {
t.Helper()
url := fmt.Sprintf("http://%s/insert/%s/prometheus/api/v1/write", app.httpListenAddr, opts.getTenant())
wr := pb.WriteRequest{Timeseries: records}
wr := prompbmarshal.WriteRequest{Timeseries: records}
data := snappy.Encode(nil, wr.MarshalProtobuf(nil))
app.sendBlocking(t, len(records), func() {
_, statusCode := app.cli.Post(t, url, "application/x-protobuf", data)

View File

@@ -12,7 +12,7 @@ import (
"github.com/golang/snappy"
pb "github.com/VictoriaMetrics/VictoriaMetrics/lib/prompbmarshal"
"github.com/VictoriaMetrics/VictoriaMetrics/lib/prompbmarshal"
)
// Vmsingle holds the state of a vmsingle app and provides vmsingle-specific
@@ -209,10 +209,10 @@ func (app *Vmsingle) OpenTSDBAPIPut(t *testing.T, records []string, opts QueryOp
// PrometheusAPIV1Write is a test helper function that inserts a
// collection of records in Prometheus remote-write format by sending a HTTP
// POST request to /prometheus/api/v1/write vmsingle endpoint.
func (app *Vmsingle) PrometheusAPIV1Write(t *testing.T, records []pb.TimeSeries, _ QueryOpts) {
func (app *Vmsingle) PrometheusAPIV1Write(t *testing.T, records []prompbmarshal.TimeSeries, _ QueryOpts) {
t.Helper()
wr := pb.WriteRequest{Timeseries: records}
wr := prompbmarshal.WriteRequest{Timeseries: records}
data := snappy.Encode(nil, wr.MarshalProtobuf(nil))
_, statusCode := app.cli.Post(t, app.prometheusAPIV1WriteURL, "application/x-protobuf", data)
if statusCode != http.StatusNoContent {