var arr = ['test0','test2','test0'];
As in the array above, there are two identical entries with the value "test0"; what is the most efficient way to check for this?
If you sort the array, the duplicates are next to each other so that they are easy to find:
arr.sort();
var last = arr[0];
for (var i = 1; i < arr.length; i++) {
    if (arr[i] == last) alert('Duplicate : ' + last);
    last = arr[i];
}
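Note that with no comparator, sort() compares elements as strings, which is fine for the string array in the question; for an array of numbers you would pass a comparator, e.g.:

// Hypothetical numeric case: sort numerically so 10 does not land before 2
arr.sort(function (a, b) { return a - b; });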
The approach below compares elements with strict equality (===). Sorting an array of objects to bring duplicates to the start requires knowledge of the types of objects being compared in order to decide how to compare them. This will do the job on any array and is probably about as optimized as possible for handling the general case (finding a duplicate in any possible array). For more specific cases (e.g. arrays containing only strings) you could do better than this.
function hasDuplicate(arr) {
    var i = arr.length, j, val;
    // Compare every element with every element before it
    while (i--) {
        val = arr[i];
        j = i;
        while (j--) {
            if (arr[j] === val) {
                return true;
            }
        }
    }
    return false;
}
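For the array from the question this returns true:

hasDuplicate(['test0','test2','test0']); // true
hasDuplicate(['test0','test2']);         // false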
There are lots of answers here but not all of them "feel" nice... So I'll throw my hat in.
If you are using lodash:
function containsDuplicates(array) {
    return _.uniq(array).length !== array.length;
}
If you can use ES6 Sets, it simply becomes:
function containsDuplicates(array) {
    return array.length !== new Set(array).size;
}
With vanilla javascript:
function containsDuplicates(array) {
    return array
        .sort()
        .some(function (item, i, items) {
            return item === items[i + 1];
        });
}
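Note that .sort() sorts the array in place, so this version mutates the caller's array. A minimal sketch of a non-mutating variant, copying with slice() first:

function containsDuplicates(array) {
    // slice() copies the array so the original order is left untouched
    return array
        .slice()
        .sort()
        .some(function (item, i, items) {
            return item === items[i + 1];
        });
}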
However, sometimes you may want to check if the items are duplicated on a certain field.
This is how I'd handle that:
containsDuplicates([{country: 'AU'}, {country: 'UK'}, {country: 'AU'}], 'country')
function containsDuplicates(array, attribute) {
    return array
        .map(function (item) { return item[attribute]; })
        .sort()
        .some(function (item, i, items) {
            return item === items[i + 1];
        });
}
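With the sample call above, this returns true because 'AU' appears twice.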
The loop stops as soon as the first duplicate is found:
function has_duplicates(arr) {
    var x = {}, len = arr.length;
    for (var i = 0; i < len; i++) {
        if (x[arr[i]]) {
            return true;
        }
        x[arr[i]] = true;
    }
    return false;
}
Edit (fix 'toString' issue):
function has_duplicates(arr) {
    var x = {}, len = arr.length;
    for (var i = 0; i < len; i++) {
        if (x[arr[i]] === true) {
            return true;
        }
        x[arr[i]] = true;
    }
    return false;
}
This corrects cases such as has_duplicates(['toString']), which would otherwise report a duplicate because x inherits a truthy toString property from Object.prototype. Note that has_duplicates([{a: 1}, {b: 2}]) is still true, since both members of the array become "[object Object]" when stored as keys in x.

Sorting is O(n log n), not O(n). Building a hash map is O(n). It costs more memory than an in-place sort, but you asked for the "fastest". (I'm positive this can be optimized, but it is optimal up to a constant factor.)
function hasDuplicate(arr) {
    var hash = {};
    var hasDuplicate = false;
    arr.forEach(function (val) {
        if (hash[val]) {
            hasDuplicate = true;
            return;
        }
        hash[val] = true;
    });
    return hasDuplicate;
}
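Note that return inside the forEach callback only skips to the next iteration, so this version still visits every element after a duplicate has been found. A minimal sketch of a short-circuiting variant using some(), which stops at the first duplicate:

function hasDuplicate(arr) {
    var hash = {};
    // some() stops iterating as soon as the callback returns true
    return arr.some(function (val) {
        if (hash[val]) {
            return true; // duplicate found
        }
        hash[val] = true;
        return false;
    });
}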
// Add strElement to myArray only if it is not already present
var index = myArray.indexOf(strElement);
if (index < 0) {
    myArray.push(strElement);
    console.log("Added Into Array " + strElement);
} else {
    console.log("Already Exists at " + index);
}
You can convert the array to a Set and compare the Set's size with the original array's length; if the Set is smaller, the array contains duplicates.
const hasDuplicates = (array) => {
    const uniqueItems = new Set(array);
    return array.length !== uniqueItems.size; // size is a property, not a method
};

console.log(`Has duplicates : ${hasDuplicates(['test0','test2','test0'])}`); // true
console.log(`Has duplicates : ${hasDuplicates(['test0','test2','test3'])}`); // false
It depends on the input array size. I've done some performance tests with Node.js performance hooks and found that for really small arrays (1,000 to 10,000 entries) the Set solution might be faster. But if your array is bigger (say 100,000 elements), the plain Object (i.e. hash) solution becomes faster. Here's the code so you can try it out for yourself:
const { performance } = require('perf_hooks');

function objectSolution(nums) {
    let testObj = {};
    for (var i = 0; i < nums.length; i++) {
        let aNum = nums[i];
        if (testObj[aNum]) {
            return true;
        } else {
            testObj[aNum] = true;
        }
    }
    return false;
}

function setSolution(nums) {
    let testSet = new Set(nums);
    return testSet.size !== nums.length;
}

function sortSomeSolution(nums) {
    return nums
        .sort()
        .some(function (item, i, items) {
            return item === items[i + 1];
        });
}

function runTest(testFunction, testArray) {
    console.log(' Running test:', testFunction.name);
    let start = performance.now();
    let result = testFunction(testArray);
    let end = performance.now();
    console.log(' Duration:', end - start, 'ms');
}

let arr = [];
let setSize = 100000;
for (var i = 0; i < setSize; i++) {
    arr.push(i);
}

console.log('Set size:', setSize);
runTest(objectSolution, arr);
runTest(setSolution, arr);
runTest(sortSomeSolution, arr);
On my Lenovo IdeaPad with an i3-8130U, Node.js v16.6.2 gives me the following results for an array of 1,000:

Results for an array of 100,000:
Assuming all you want is to detect how many times 'test0' occurs in the array, an easy way to do that is to use the join method to transform the array into a string, and then use the match method:
var arr = ['test0','test2','test0'];
var str = arr.join();
console.log(str); // "test0,test2,test0"

var duplicates = str.match(/test0/g);
var duplicateNumber = duplicates.length;
console.log(duplicateNumber); // 2
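Note that match() returns null when the pattern does not occur at all, in which case duplicates.length would throw; a small sketch of a guard with an empty-array fallback:

var count = (str.match(/test0/g) || []).length; // 0 when 'test0' never occurs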