For my use case I've found that the shift/slice methods are stressing my CPU way too much as the array grows in size. In theory the array could be as big as 86400 items, although usually it would be much lower - around 10000 array elements.
I've tried to illustrate it with a simple example. Imagine this at a very large scale. It'll run decently up until a point, but generally it seems highly inefficient to remove the first (or first n) item(s) like this.
Hopefully somebody with more knowledge in "why that is", can fill out one or more of the 3 functions in the snippet below:
- add()
- removeFirst()
- removeFirstN(n)
Immutability won't work here - or rather, since we're after the optimal performance, copying a growing and quite large datastructure (array in this case) definitely won't work.
Any good suggestions? :-)
// Rolling buffer of {x: timestamp, y: random int} samples, oldest first.
let objArray = []
// Target size of objArray; the interval below shrinks it every 15 ticks.
let maxCount = 10;
// Tick counter driving the periodic maxCount decrement.
let i = 0;
/**
 * Appends one sample to the shared objArray buffer.
 * x: current epoch time in ms; y: random integer in [1, 10000].
 */
function add() {
  // Date.now() is the idiomatic (and faster) equivalent of `+ new Date()`.
  objArray.push({ x: Date.now(), y: Math.floor(Math.random() * 10000) + 1 });
  console.log("add");
}
/**
 * Drops the oldest sample (index 0) from the shared objArray buffer.
 * No-op when the buffer is empty.
 */
function removeFirst() {
  // splice(0, 1) removes exactly the head element in place.
  objArray.splice(0, 1);
  console.log("removeFirst");
}
/**
 * Drops the n oldest samples from the shared objArray buffer in one
 * in-place operation. Removing more than the buffer holds just empties it.
 * @param {number} n - how many leading elements to discard
 */
function removeFirstN(n) {
  const dropped = objArray.splice(0, n);
  console.log(`removeFirstN(${n})`);
}
// Every second an obj is added to the array; the oldest entries are trimmed
// first so that, after the add, the length never exceeds maxCount.
setInterval(function () {
  if (objArray.length === maxCount) {
    // At capacity: make room for exactly one new element.
    removeFirst();
  } else if (objArray.length > maxCount) { // this is possible since we're allowed to change maxCount
    // Remove enough so that after the add() below, length === maxCount.
    const diff = objArray.length + 1 - maxCount;
    removeFirstN(diff);
  }
  // Always add
  add();
  i++;
  if (i === 15) {
    // Shrink the cap every 15 ticks to exercise the removeFirstN path.
    maxCount--;
    i = 0;
  }
  // Fix: `[...objArray].length` copied the entire array just to read its
  // length; `objArray.length` is an O(1) property read.
  console.log(`length: ${objArray.length}`);
  console.log([...objArray]); // spread kept: logs a snapshot, not the live ref
}, 1000)