Given a 2-dimensional array a:
let a = [
  [0, 0, 1, 0],
  [0, 1, 1, 1],
  [0, 0, 1, 0],
  [0, 0, 1, 1]
]
How can I scale it by a given factor? For example, array b is array a scaled by 4:
let b = [
  [0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 0, 0, 0, 0],
  [0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 0, 0, 0, 0],
  [0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 0, 0, 0, 0],
  [0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 0, 0, 0, 0],
  [0, 0, 0, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1],
  [0, 0, 0, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1],
  [0, 0, 0, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1],
  [0, 0, 0, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1],
  [0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 0, 0, 0, 0],
  [0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 0, 0, 0, 0],
  [0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 0, 0, 0, 0],
  [0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 0, 0, 0, 0],
  [0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 1, 1, 1, 1],
  [0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 1, 1, 1, 1],
  [0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 1, 1, 1, 1],
  [0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 1, 1, 1, 1]
]
This is the code I wrote to perform this operation, but it is slow (client browser: Chrome) when dealing with large arrays (200 x 200) and scaling by, let's say, a factor of 16.
// scale an array by a factor of 'scale'
const scaledMatrixArray = (arr, scale) => {
  let newArr = [];
  arr.forEach((el) => {
    let newArrRow = [];
    el.forEach((el) => {
      for (let j = 0; j < scale; j++) {
        newArrRow.push(el);
      }
    });
    for (let i = 0; i < scale; i++) {
      newArr.push(newArrRow);
    }
  });
  return newArr;
};
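One thing worth noting about this code (my observation, not part of the original question): the same newArrRow array is pushed scale times, so each block of repeated output rows is one shared object rather than scale independent copies. That is fine for read-only use and even saves memory, but a write to one row shows up in its siblings:

// Illustration of the shared-row behaviour of scaledMatrixArray above.
const small = scaledMatrixArray([[1, 2]], 2); // [[1, 1, 2, 2], [1, 1, 2, 2]]
small[0][0] = 9;
console.log(small[1][0]); // 9 - the "second" row is the same array object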
I understand my implementation is some variant of O(n^2) and is highly inefficient. I am looking for a better way to do this, or a library that does it better and faster. My end goal is to scale an N x N array (N > 200) up to something like 800 x 800 in the most efficient, fastest, and least memory-intensive way.
4 Answers
#1
Here's a very concise way, using Array().fill. It runs faster than the other answers, at least in my browser.
I added two versions, one using the spread operator and the other using .apply. I'm getting faster results with apply.
function scaleSpread(array, factor) {
  const scaled = [];
  for (const row of array) {
    let x = [];
    for (const item of row)
      x.push(...Array(factor).fill(item));
    scaled.push(...Array(factor).fill(x));
  }
  return scaled;
}

function scaleApply(array, factor) {
  const scaled = [];
  for (const row of array) {
    let x = [];
    for (const item of row)
      x.push.apply(x, Array(factor).fill(item));
    scaled.push.apply(scaled, Array(factor).fill(x));
  }
  return scaled;
}

function scaleConcat(array, factor) {
  let scaled = [];
  for (const row of array) {
    let x = [];
    for (const item of row)
      x = x.concat(Array(factor).fill(item));
    scaled = scaled.concat(Array(factor).fill(x));
  }
  return scaled;
}
var a = [ [0, 0, 1, 0], [0, 1, 1, 1], [0, 0, 1, 0], [0, 0, 1, 1] ]
console.time('spread');
scaleSpread(a, 10000);
console.timeEnd('spread');
console.time('apply');
scaleApply(a, 10000);
console.timeEnd('apply');
console.time('concat');
scaleConcat(a, 10000);
console.timeEnd('concat');
EDIT: Added a version using .concat, since apply and spread cause a "Maximum call stack size exceeded" error with very large arrays.
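For the question's target size, a quick sanity check of the .concat version might look like this (a sketch; the 200 x 200 test matrix and timing label are made up for illustration):

// Build a 200 x 200 matrix of random 0/1 values (illustrative input only).
const big = Array.from({ length: 200 }, () =>
  Array.from({ length: 200 }, () => Math.round(Math.random()))
);

console.time('concat 200x200 x16');
const bigScaled = scaleConcat(big, 16);
console.timeEnd('concat 200x200 x16');
console.log(bigScaled.length, bigScaled[0].length); // 3200 3200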
#2
This approach uses a for loop to iterate over the two-dimensional array and repeat each entry the decided number of times.
It uses the Array.prototype.splice method, grabbing the source value and inserting it back into the array at a certain index.
PS: The source array (which is a) is mutated here. But you can always clone the original array and create b for the result, as you wanted (see the sketch after the snippet below).
var a = [
    [0, 0, 1, 0],
    [0, 1, 1, 1],
    [0, 0, 1, 0],
    [0, 0, 1, 1]
  ],
  scale = 4,
  scaleTheArray = function (arrayToScale, nTimes) {
    for (var idx = 0, i = 0, len = arrayToScale.length * nTimes; i < len; i++) {
      var elem = arrayToScale[idx];
      /* Insert the element into (idx + 1) */
      arrayToScale.splice(idx + 1, 0, elem);
      /* Add idx for the next elements */
      if ((i + 1) % nTimes === 0) {
        idx += nTimes + 1;
      }
    }
  };

console.time('testScale');

/* 1. Expand each of the a[n] length */
for (var i = 0, len = a.length; i < len; i++) {
  var arr = a[i];
  scaleTheArray(arr, scale - 1);
}

/* 2. Expand each of the a length */
scaleTheArray(a, scale - 1);

console.timeEnd('testScale');
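If mutating the source is not acceptable, one option (a sketch of my own, reusing scaleTheArray from above; the names source and b are illustrative) is to copy each row first and scale the copy:

/* Sketch: keep 'source' untouched by scaling a per-row copy instead. */
var source = [
  [0, 0, 1, 0],
  [0, 1, 1, 1],
  [0, 0, 1, 0],
  [0, 0, 1, 1]
];
var b = source.map(function (row) { return row.slice(); }); // copy each row
for (var r = 0; r < b.length; r++) {
  scaleTheArray(b[r], scale - 1); /* widen every row */
}
scaleTheArray(b, scale - 1); /* then repeat the rows */
console.log(b.length, b[0].length); // 16 16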
#3
In general, fewer function calls = less overhead:
function scale1D(arr, n)
{
  for (var i = arr.length *= n; i; )
    arr[--i] = arr[i / n | 0]
}

function scale2D(arr, n)
{
  for (var i = arr.length; i; )
    scale1D(arr[--i], n)
  scale1D(arr, n)
}

var a = [ [0, 0, 1, 0], [0, 1, 1, 1], [0, 0, 1, 0], [0, 0, 1, 1] ]

console.time( 1e6 )
scale2D(a, 1e6)
console.timeEnd( 1e6 )

var b = [ [0, 0, 1, 0], [0, 1, 1, 1], [0, 0, 1, 0], [0, 0, 1, 1] ]
scale2D(b, 4)
console.log( JSON.stringify( b ).replace(/],/g, '],\n ') )
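To see what scale1D does (my annotation, not the answerer's): arr.length *= n grows the array in place, and each slot i is then filled from the original index i / n | 0, walking backwards so no source value is overwritten before it is read. A tiny trace on a single row:

// Sketch: scale1D stretches one row in place.
var row = [7, 8, 9];
scale1D(row, 2);
console.log(row); // [7, 7, 8, 8, 9, 9]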
#4
A bit of fun: you can do it lazily if you're not accessing many values. I haven't tested this code much, but it should work.
var a = [
    [0, 0, 1, 0],
    [0, 1, 1, 1],
    [0, 0, 1, 0],
    [0, 0, 1, 42]
  ],
  scale = 4;

for (var idx = 0; idx < a.length; idx++) {
  a[idx] = new Proxy(a[idx], {
    get: function(target, i) {
      return target[Math.floor(i / scale)];
    }
  });
}

a = new Proxy(a, {
  get: function(target, i) {
    return target[Math.floor(i / scale)];
  }
});

console.log(a[16 - 1][16 - 1])

for (var ii = 0; ii < 16; ii++) {
  for (var j = 0; j < 16; j++) {
    console.log(a[ii][j])
  }
}
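One caveat (my note, not the answerer's): the get trap receives every property key, so a.length and the usual array methods no longer reflect the scaled size; only direct index access behaves as shown. If a plain array is eventually needed, the lazy view can be materialized, e.g. with a hypothetical helper like this:

/* Sketch: copy the proxied view into a real scaled array.
   'size' (original length * scale) must be passed in by hand,
   because the proxy cannot report the scaled length itself. */
function materialize(view, size) {
  var out = new Array(size);
  for (var r = 0; r < size; r++) {
    out[r] = new Array(size);
    for (var c = 0; c < size; c++) {
      out[r][c] = view[r][c];
    }
  }
  return out;
}

var real = materialize(a, 16); // 4 rows * scale of 4
console.log(real[15][15]); // 42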