Closed as not planned
Description
What is the problem this feature will solve?
structuredClone
performance is quite slow compared to alternative object-copying approaches (while those also have their downsides in some capacity, the performance gap makes polyfilling worthwhile at the moment).
I also noticed in the source code there's already a TODO about performance, and improving it would presumably also benefit WebStreams' ReadableStream.
There was another discussion here that I found insightful: #34355 (comment)
Consider code like the following:
// Sample Discord-style presence payload used as the clone-benchmark fixture.
const x = {
  "userId": "134374683951759360",
  "guild": "86908735326281728",
  "status": "online",
  "activities": [
    {
      "name": "Twitch",
      "type": 1,
      "url": "https://www.twitch.tv/craggle25",
      "details": "Weekend dirt naps with @mart0k and Cat. Hunt Showdown",
      "state": "Hunt: Showdown",
      "applicationId": null,
      "timestamps": null,
      "party": null,
      "assets": {
        "largeText": null,
        "smallText": null,
        "largeImage": "twitch:craggle25",
        "smallImage": null,
      },
      "flags": 0,
      "emoji": null,
      "buttons": [],
      "createdTimestamp": 1697880668811,
    },
  ],
  "clientStatus": { "desktop": "online" },
};
/** Returns true when the given value is an array; thin wrapper over Array.isArray. */
function isArr(x) {
  const arrayLike = Array.isArray(x);
  return arrayLike;
}
/**
 * Returns true only for real objects (including arrays), excluding null.
 * Fix: the original returned true for null because `typeof null === 'object'`,
 * which made callers (e.g. shallowClone) misclassify null values as nested
 * objects and drop them.
 * @param {*} x - any value
 * @returns {boolean}
 */
function isObject(x) {
  return x !== null && typeof x === "object";
}
/**
 * Copies one level of `obj`: primitives and nulls are kept, arrays are cloned
 * element-by-element, and nested non-array object properties are deliberately
 * skipped (the original "shallow" trade-off this benchmark measures).
 *
 * Fixes vs. the original:
 *  - primitive array elements are copied as-is (previously every element was
 *    fed to shallowClone, turning primitives into empty objects),
 *  - nested arrays inside arrays are cloned as arrays (previously they became
 *    index-keyed plain objects),
 *  - null property values are preserved (previously dropped because
 *    `typeof null === 'object'`).
 * @param {object} obj - source object
 * @returns {object} new object
 */
function shallowClone(obj) {
  // Clone an array: primitives pass through, arrays recurse here,
  // plain objects go back through shallowClone.
  const cloneArray = (arr) =>
    arr.map((el) => {
      if (el === null || typeof el !== "object") {
        return el;
      }
      return Array.isArray(el) ? cloneArray(el) : shallowClone(el);
    });

  const clone = {};
  for (const key in obj) {
    const value = obj[key];
    if (value === null) {
      // typeof null === 'object'; keep nulls instead of dropping them.
      clone[key] = null;
    } else if (Array.isArray(value)) {
      clone[key] = cloneArray(value);
    } else if (typeof value !== "object") {
      clone[key] = value;
    }
    // Non-array object properties are intentionally skipped (shallow copy).
  }
  return clone;
}
// pulled this from https://github.com/nodejs/node/issues/34355#issuecomment-658394617
// Recursive deep clone with JSON-like semantics: undefined array slots become
// null and undefined object properties are omitted.
function deepClone(o) {
  // Non-objects (primitives, functions) and null pass straight through.
  if (o === null || typeof o !== "object") {
    return o;
  }
  // https://jsperf.com/deep-copy-vs-json-stringify-json-parse/25
  if (Array.isArray(o)) {
    const copy = new Array(o.length);
    for (let idx = 0; idx < o.length; idx += 1) {
      const item = o[idx];
      const cloned = item && typeof item === "object" ? deepClone(item) : item;
      // undefined array slots are normalized to null
      copy[idx] = cloned === undefined ? null : cloned;
    }
    return copy;
  }
  const copy = {};
  for (const key of Object.keys(o)) {
    const item = o[key];
    const cloned = item && typeof item === "object" ? deepClone(item) : item;
    // undefined properties are dropped
    if (cloned !== undefined) {
      copy[key] = cloned;
    }
  }
  return copy;
}
// Iteration counts for each profiling pass, smallest to largest.
const iterations = [100, 10000, 1000000, 10000000];
// Times `func` once per iteration at every count in the module-level
// `iterations` list, printing a console.time label for each pass.
function profile(name, func) {
  for (const iterationCount of iterations) {
    const label = `${name} at ${iterationCount} iterations`;
    console.time(label);
    let remaining = iterationCount;
    while (remaining-- > 0) {
      func();
    }
    console.timeEnd(label);
  }
  console.log("");
}
// Run every clone strategy through the same profiler, table-driven.
const benchmarks = [
  ["StructuredClone", () => { structuredClone(x); }],
  ["ParseStringify", () => { JSON.parse(JSON.stringify(x)); }],
  ["ShallowClone1", () => { shallowClone(x); }],
  ["DeepClone", () => { deepClone(x); }],
];
for (const [label, fn] of benchmarks) {
  profile(label, fn);
}
The output gives me:
StructuredClone at 100 iterations: 5.935ms
StructuredClone at 10000 iterations: 131.071ms
StructuredClone at 1000000 iterations: 9.968s
StructuredClone at 10000000 iterations: 1:45.852 (m:ss.mmm)
ParseStringify at 100 iterations: 1.362ms
ParseStringify at 10000 iterations: 87.677ms
ParseStringify at 1000000 iterations: 8.104s
ParseStringify at 10000000 iterations: 1:33.020 (m:ss.mmm)
ShallowClone1 at 100 iterations: 0.589ms
ShallowClone1 at 10000 iterations: 18.628ms
ShallowClone1 at 1000000 iterations: 739.982ms
ShallowClone1 at 10000000 iterations: 6.942s
DeepClone at 100 iterations: 1.079ms
DeepClone at 10000 iterations: 46.382ms
DeepClone at 1000000 iterations: 2.914s
DeepClone at 10000000 iterations: 26.039s
What is the feature you are proposing to solve the problem?
Increase throughput on structuredClone
calls
What alternatives have you considered?
Polyfilling my own object clone methods