I have an application with a real-time chart. Every 10-15 seconds new data becomes available, and an AWS S3 URL is sent down to the browser over a WebSocket.

The callback receives the S3 URL of a CSV file, and PapaParse is used to download and parse the data. Watching Chrome DevTools, the JavaScript object count keeps climbing, and I'm assuming that has something to do with the browser crashing. Here's the callback:

Note: initialData is an array of default series configurations, each with an empty "data" property (a simplified sketch of its shape and the WebSocket wiring follows the callback below).
var shotCallback = function (data) {
    if (hchart.series.length) {
        // Series already exist: clear out the previous data set
        for (var i = 0; i <= hchart.series.length - 1; i++) {
            hchart.series[i].setData([]);
        }
        hchart.redraw();
    } else {
        // First call: create the series from the initialData templates
        initialData.forEach(function (item, i) {
            hchart.addSeries(angular.extend({}, initialData[i]), false);
        });
    }

    Papa.parse(data.url, {
        download: true,
        fastMode: true,
        worker: true,
        dynamicTyping: true,
        skipEmptyLines: true,
        step: function (results) {
            if (!firstRow) { // first row has column names
                results.data[0].forEach(function (item, i) {
                    // Column 0 is skipped; columns 1..n map to series 0..n-1
                    if (i > 0 && i <= initialData.length) {
                        hchart.series[i - 1].addPoint(item, false);
                    }
                });
            }
            firstRow = false;
        },
        complete: function () {
            hchart.redraw();
        }
    });
};
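For context, here is roughly how the callback is wired up and what initialData looks like. This is a simplified sketch: the endpoint, the payload shape ({ url: ... }), and the series names are placeholders, not my real values.

var initialData = [
    { name: 'series 1', data: [] },  // placeholder series options
    { name: 'series 2', data: [] }
];

var firstRow;

var ws = new WebSocket('wss://example.com/shots');  // placeholder endpoint
ws.onmessage = function (event) {
    // Each message carries the S3 URL of the latest CSV file.
    var payload = JSON.parse(event.data);
    firstRow = true;  // the new file's header row hasn't been seen yet
    shotCallback(payload);
};

(Resetting firstRow before each parse is how I'm assuming the flag should be managed; the real setup is a bit more involved.)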
This loads several thousand data points, but performance is quite good.

My question is: when I clear the previous data set, am I doing it correctly by calling .setData([]) on each series, or should I be doing it differently?
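For example, here's a rough sketch of what I mean by "differently": buffering the parsed rows and handing each series its new data in a single setData call from complete, instead of clearing with setData([]) up front and calling addPoint per row. The buffers variable is just illustrative.

var buffers = initialData.map(function () { return []; });  // one buffer per series

Papa.parse(data.url, {
    download: true,
    worker: true,
    dynamicTyping: true,
    skipEmptyLines: true,
    step: function (results) {
        if (!firstRow) {  // skip the header row
            results.data[0].forEach(function (item, i) {
                if (i > 0 && i <= initialData.length) {
                    buffers[i - 1].push(item);  // collect instead of addPoint
                }
            });
        }
        firstRow = false;
    },
    complete: function () {
        buffers.forEach(function (points, i) {
            // setData replaces the series' previous points in one call
            hchart.series[i].setData(points, false);
        });
        hchart.redraw();
    }
});

I'm not sure whether that would actually help with the growing object count, which is why I'm asking.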
The browser continues to crash and I'm not sure what to do next.
Any help is appreciated.