我正在尝试解析一个大的 CSV 文件并将其插入到 MongoDB 中,但是当文件达到 100,000 行左右时,我从服务器得到了错误响应。我需要插入的文件通常超过 200,000 行。
我已经尝试了批量插入(insertMany)以及用 Babyparse(Papaparse)的流式方法逐行插入文件,但效果都不理想。
节点api:
router.post('/csv-upload/:id', multipartMiddleware, function(req, res) {
// Post vartiables
var fileId = req.params.id;
var csv = req.files.files.path;
// create a queue object with concurrency 5
var q = async.queue(function(row, callback) {
var entry = new Entry(row);
entry.save();
callback();
}, 5);
baby.parseFiles(csv, {
header: true, // Includes header in JSON
skipEmptyLines: true,
fastMode: true,
step: function(results, parser) {
results.data[0].id = fileId;
q.push(results.data[0], function (err) {
if (err) {throw err};
});
},
complete: function(results, file) {
console.log("Parsing complete:", results, file);