I have a scenario in K6 where I receive 2 values for the same timestamp from Datadog. The first value is an incorrect spike; the second is the correct value, which is the one I need.
I use a K6 Trend to collect the values from Datadog. It records them as 2 separate samples, so my max ends up incorrectly being one of those spikes. How can I collect the points so that, when the timestamp is the same, only the latest value is kept?
Current implementation:
index.js:
//thresholds and scenarios get loaded in K6
import { Trend } from "k6/metrics";

export const getPaymentIdRPS = new Trend("get_payment_id_rps");

export function setup() {
  //setup data
}

// This is the scenario with the issue: every point in the response is added
// to the Trend, including the incorrect first value (the spike) when a
// timestamp repeats. (`metrics`, `payments`, and `tags` are defined elsewhere.)
export function collectGetPaymentIdMetrics(data) {
  metrics.getPaymentIdRPS = payments.queryGetPaymentIdRPS();
  metrics.getPaymentIdRPS.points.forEach((val) => {
    getPaymentIdRPS.add(val[1], tags);
  });
}
In a different library, payments.js:
function queryGetPaymentIdRPS() {
  const queries = setQueryEnv();
  const response = datadog.metricsQuery(queries.getPaymentIdRPS, 10); // (query, duration of extraction in seconds)
  return parseMetricQuery(response); // both functions are defined towards the end of this question
}
The above returns:
INFO[0032] rps val= [1666798390000,54.5] source=console
INFO[0037] rps val= [1666798390000,15.571428571428573] source=console
INFO[0042] rps val= [1666798400000,19] source=console
INFO[0047] rps val= [1666798400000,5.428571428571429]
Hence max = 54.5, but I want max = 15.57 by storing only [1666798390000,15.571428571428573] and [1666798400000,5.428571428571429].
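For illustration, if all four points arrived in a single array, the behaviour I want is "last value per timestamp wins", something like the hypothetical helper below (dedupeKeepLast is not part of my code). The catch is that the duplicates arrive in separate iterations, as explained at the end of this question.

function dedupeKeepLast(points) {
  const byTs = new Map(); // Map preserves insertion order; set() overwrites earlier values
  points.forEach(([ts, value]) => byTs.set(ts, value));
  return [...byTs.entries()];
}

dedupeKeepLast([
  [1666798390000, 54.5],
  [1666798390000, 15.571428571428573],
  [1666798400000, 19],
  [1666798400000, 5.428571428571429],
]);
// => [[1666798390000, 15.571428571428573], [1666798400000, 5.428571428571429]]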
metricsQuery:
metricsQuery(query, seconds) {
  // Delay the query start/end timestamps to allow Datadog to process metrics
  const DATADOG_INGESTION_DELAY = 30;
  let start = Math.ceil(Date.now() / 1000) - seconds;
  let end = Math.ceil(Date.now() / 1000);
  start = start - DATADOG_INGESTION_DELAY;
  end = end - DATADOG_INGESTION_DELAY;
  // null body in 2nd param
  let res = this.queryClient.get(
    `/query?from=${start}&to=${end}&query=${query}`,
    null,
    { tags: { name: `Datadog /metric` } }
  );
  check(res, {
    "DD metric query OK": (r) => r.status === 200,
  }) || fail(JSON.stringify(res));
  check(res, {
    "DD metric query valid": (r) => r.json().status != "error",
  }) || fail(JSON.stringify(res.json().error));
  return res;
}
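To illustrate the window arithmetic: a call at t = 1666798430 (epoch seconds) with seconds = 10 produces from = 1666798430 - 10 - 30 = 1666798390 and to = 1666798430 - 30 = 1666798400, i.e. a 10-second window ending 30 seconds in the past. The log lines above are roughly 5 seconds apart, so consecutive iterations query overlapping windows; presumably that is why the same bucket timestamp comes back more than once, first with a not-yet-settled value.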
parseMetricQuery:
export function parseMetricQuery(response) {
  const responseBody = response.json();

  // Check for an invalid response
  if (responseBody.series === undefined || response.status != 200) {
    fail("Failed to get Datadog metric");
  }

  // Check for an empty response; map would fail on an empty array
  if (responseBody.series.length === 0) {
    return {
      sum: 0,
      length: 0,
      average: 0,
      points: [],
      url: response.request.url,
    };
  }

  const length = responseBody.series[0].length;
  const series = responseBody.series[0].pointlist;
  const sum = series
    .map((val) => val[1])
    .reduce((previousValue, currentValue) => previousValue + currentValue, 0);
  return {
    sum: sum,
    length: length,
    average: sum / length,
    points: series,
    url: response.request.url,
  };
}
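For reference, the parsed object the scenario consumes looks like this (values illustrative, taken from the first log line above; url elided):

{
  sum: 54.5,
  length: 1,
  average: 54.5,
  points: [[1666798390000, 54.5]],
  url: "...",
}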
Since each K6 iteration is effectively a fresh start, I can't keep a global map whose values stay intact between iterations to compare against timestamps already recorded. Hence I can't check whether a timestamp has already been recorded and replace its value, rather than appending another sample to the Trend.
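If the scenario ran all its iterations on a single VU, module-scope variables would persist for that VU's lifetime (K6 init-context state is per-VU, not shared across VUs), and then a buffer-and-flush approach like the sketch below might work. latestByTs is a hypothetical name, and the sketch assumes a timestamp is final once a newer bucket has appeared:

// Sketch only; assumes this scenario runs on a single VU, so this module-scope
// Map survives across that VU's iterations (it is NOT shared between VUs).
const latestByTs = new Map(); // hypothetical buffer, not in my current code

export function collectGetPaymentIdMetrics(data) {
  const m = payments.queryGetPaymentIdRPS();
  // Buffer the newest value seen for each timestamp; a later response
  // overwrites the partial value that Datadog returned first.
  m.points.forEach(([ts, value]) => latestByTs.set(ts, value));
  // Flush every timestamp that can no longer change, i.e. anything older
  // than the newest buffered timestamp, then drop it from the buffer.
  const newest = Math.max(...latestByTs.keys());
  for (const [ts, value] of latestByTs) {
    if (ts < newest) {
      getPaymentIdRPS.add(value, tags);
      latestByTs.delete(ts);
    }
  }
}

Even then, the newest timestamp stays buffered and is never flushed, so the last bucket of the test would be dropped, and with more than one VU running the scenario this breaks down entirely; hence my question.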