javascript - Node.js: large number of requests throwing errors
I am having trouble with an image downloader script. I have an array of image names (around 5000 elements), and I loop over the array, downloading each image with the request module on every iteration.
Everything works fine as long as the array is no bigger than roughly 500 elements.
If I run the script with 5000+ elements, the request module spams many errors (err or undefined response object), and the application finally fails with an empty file error. I think this is an async problem: Node.js doesn't cope with that many operations in flight at the same time.
Maybe I can solve this by splitting the 5000-element array into chunks of 300 items and not iterating over (and not calling fetchImage() for) the next chunk before the previous chunk has finished; see the sketch after the code below. Or maybe there is a nicer way to solve this problem?
var request = require('request');
var fs = require('fs');

products.map(function (product) {
    fetchImage(product.imageUrl, './static/' + product.fileName + '_big.', 0, 0);
    return;
});

function fetchImage(url, localPath, index, iteration) {
    var extensions = ['jpg', 'png', 'jpeg', 'bmp', ''];

    // all extensions tried for this iteration: retry the whole list up to 3 times
    if (iteration > 2 || index === extensions.length) {
        iteration++;
        if (iteration < 3) {
            setTimeout(function () {
                fetchImage(url, localPath, 0, iteration);
            }, 3000);
        } else {
            console.log('fetching ' + url + ' failed or no image exists');
            return;
        }
        return;
    }

    var fileExtension;
    if (extensions[index] === '') {
        fileExtension = extensions[0];
    } else {
        fileExtension = extensions[index];
    }

    request.get(url + extensions[index], function (err, response, body) {
        if (err || undefined === response) {
            // on error, retry the same URL after a 3 second timeout
            setTimeout(function () {
                console.log('error url : ' + url + extensions[index]);
                fetchImage(url, localPath, index, iteration);
            }, 3000);
            return;
        } else {
            if (response.statusCode === 200) {
                // note: this requests the same URL a second time to stream it to disk
                request(url + extensions[index])
                    .on('error', function (err) {
                        console.log('errrrrror ' + url + extensions[index] + ' ' + err);
                        setTimeout(function () {
                            console.log('error url : ' + url + extensions[index]);
                            fetchImage(url, localPath, index, iteration);
                        }, 3000);
                        return;
                    })
                    .pipe(fs.createWriteStream(localPath + fileExtension)); // write the image file
                console.log('successfully downloaded file ' + localPath + fileExtension);
                return;
            } else {
                // wrong extension: try the next one
                fetchImage(url, localPath, index + 1, iteration);
            }
        }
    });
}
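For reference, a minimal sketch of the chunking idea described above: download one chunk of 300 images at a time and only start the next chunk once every request in the current one has settled. It assumes the same request module and products array as the code above; downloadOne, downloadChunked and CHUNK_SIZE are hypothetical names introduced for illustration, and the extension-guessing retry logic from fetchImage is left out to keep the sketch short.

var request = require('request');
var fs = require('fs');

var CHUNK_SIZE = 300;

// hypothetical helper: download a single image and call done(err) exactly once
function downloadOne(imageUrl, localPath, done) {
    var settled = false;
    function settle(err) {
        if (settled) { return; }
        settled = true;
        done(err);
    }
    request(imageUrl)
        .on('error', settle)
        .pipe(fs.createWriteStream(localPath))
        .on('finish', function () { settle(null); })
        .on('error', settle);
}

// process the products array in chunks, one chunk at a time
function downloadChunked(items, done) {
    var chunk = items.slice(0, CHUNK_SIZE);
    var rest = items.slice(CHUNK_SIZE);
    if (chunk.length === 0) { return done(); }

    var pending = chunk.length;
    chunk.forEach(function (product) {
        // extension guessing omitted; '.jpg' is assumed here for brevity
        downloadOne(product.imageUrl, './static/' + product.fileName + '_big.jpg', function (err) {
            if (err) { console.log('failed: ' + product.imageUrl + ' ' + err); }
            pending--;
            if (pending === 0) {
                // only start the next chunk once every request in this one has settled
                downloadChunked(rest, done);
            }
        });
    });
}

downloadChunked(products, function () { console.log('all chunks done'); });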
Fixed it by using setTimeout between each request:

setTimeout(function () {
    fetchImage(imageUrl, './static/' + fileName + '_big.', 0, 0);
}, 300 * (i + 1)); // each call waits 300 ms longer than the previous one
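For context, a sketch of how that staggered setTimeout plugs into the original products loop; it assumes the same products array and fetchImage function as above, and the 300 ms step is the value from the fix:

products.forEach(function (product, i) {
    setTimeout(function () {
        fetchImage(product.imageUrl, './static/' + product.fileName + '_big.', 0, 0);
    }, 300 * (i + 1)); // spread the requests out so they are not all started at once
});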