You could use reduce
in combination with a Map
for keying your data:
// Split `data` into first-seen uniques (keyed by item.id) and later duplicates.
// The Map keeps the FIRST object seen for each item.id; every later object with
// the same item.id is collected into `dupes`.
const data = [ { "id": 1, "item": { "id": 1, "name": "itemA" } }, { "id": 2, "item": { "id": 1, "name": "itemA" } }, { "id": 3, "item": { "id": 2, "name": "itemB" } } ];
const map = new Map();
const dupes = [];
for (const obj of data) {
  if (map.has(obj.item.id)) {
    dupes.push(obj);          // item.id already seen → it's a duplicate
  } else {
    map.set(obj.item.id, obj); // first occurrence wins
  }
}
const uniques = [...map.values()];
console.log(uniques);
console.log('dupes:');
console.log(dupes);
After reading your comments, it seems you want to reject anything that has a duplicate — without keeping even the first occurrence. In that case the code could be:
// Group `data` by item.id, then treat ONLY singleton groups as unique;
// any item.id occurring more than once is rejected entirely (the "original"
// included) and every member of that group lands in `dupes`.
const data = [ { "id": 1, "item": { "id": 1, "name": "itemA" } }, { "id": 2, "item": { "id": 1, "name": "itemA" } }, { "id": 3, "item": { "id": 2, "name": "itemB" } } ];
const map = new Map(data.map(obj => [obj.item.id, []])); // one bucket per item.id
data.forEach(obj => map.get(obj.item.id).push(obj));
// Materialize the buckets once instead of spreading map.values() twice,
// and use .flat() rather than the [].concat(...spread) idiom (which can
// also hit the engine's argument-count limit on large inputs).
const groups = [...map.values()];
const uniques = groups.filter(arr => arr.length === 1).flat();
const dupes = groups.filter(arr => arr.length > 1).flat();
console.log(uniques);
console.log('dupes:');
console.log(dupes);