How to iterate over more than 100 items in MongoDB

Date: 2021-06-27 14:30:44

I am using Express and the mongodb npm module to insert data into a collection with more than 1300 items. I am extracting the data from a JSON file that also has over 1300 objects. With the following code, everything gets inserted correctly until I reach 100 items in the MongoDB collection. Is there a way around this without breaking things up into multiple collections?

I'm using the following code in Node:

    var MongoClient = require('mongodb').MongoClient;

    MongoClient.connect(url, function(err, db) {
      db.collection('players').find().forEach(function(myDoc) {
        for (var i = 0; i < jsonfile.length; i++) {
          if (myDoc.player_ID == jsonfile[i].playerID && myDoc.stint_ID == 1) {
            db.collection('players').updateOne(
              { 'player_ID': jsonfile[i].playerID },
              { $set: { 'strikeOut': jsonfile[i].SO } },
              function(err, result) {
                console.log(err);
                db.close();
              }
            );
          } else {
            return;
          }
        }
      });
    });

1 Answer

#1


Best to use the bulkWrite API here, which greatly improves performance since write operations are sent to the server in bulk rather than one at a time. Instead of firing a request for every single write (as the updateOne call inside the forEach() loop currently does), the method sends one request per batch of 1000 operations, making the updates far more efficient and quicker than they currently are:

    var MongoClient = require('mongodb').MongoClient,
        bulkUpdateOps = [];

    MongoClient.connect(url, function(err, db) {
        // Get the collection
        var col = db.collection('players');
        col.find().forEach(function(myDoc) {
            for (var i = 0; i < jsonfile.length; i++) {
                if (myDoc.player_ID == jsonfile[i].playerID && myDoc.stint_ID == 1) {
                    // Queue the update instead of sending it immediately
                    bulkUpdateOps.push({
                        "updateOne": {
                            "filter": { "player_ID": jsonfile[i].playerID },
                            "update": { "$set": { "strikeOut": jsonfile[i].SO } }
                        }
                    });
                    // Flush to the server once 1000 ops have accumulated
                    if (bulkUpdateOps.length === 1000) {
                        col.bulkWrite(bulkUpdateOps).then(function(r) {
                            // do something with the result
                            console.log(r);
                        });
                        bulkUpdateOps = [];
                    }
                }
            }
        }, function(err) {
            // End callback: the cursor is exhausted, so flush any remaining ops
            if (bulkUpdateOps.length > 0) {
                col.bulkWrite(bulkUpdateOps).then(function(r) {
                    console.log(r);
                    db.close();
                });
            } else {
                db.close();
            }
        });
    });
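
As a further simplification, the cursor scan can be dropped entirely: every update here is driven purely by values from the JSON file, so the ops can be built straight from that array, with the stint check moved into the update filter. Below is a minimal sketch of that idea; it assumes stint_ID is stored on the player documents and that the code runs inside the connect callback above:

    // Sketch only: assumes stint_ID is a field on the player documents,
    // so the stint check from the cursor loop moves into the filter.
    var ops = jsonfile.map(function(p) {
        return {
            "updateOne": {
                "filter": { "player_ID": p.playerID, "stint_ID": 1 },
                "update": { "$set": { "strikeOut": p.SO } }
            }
        };
    });

    // ordered: false lets the server apply independent updates in any
    // order, which is typically faster than an ordered batch
    col.bulkWrite(ops, { ordered: false }, function(err, r) {
        console.log(r);
        db.close();
    });

The driver also splits a large ops array into batches internally, so handing it all 1300 operations at once is fine.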
