Question

I am trying to copy several files with Node.js.

Here is an example of what I'm trying to do:

var fs = require('fs');
var request = require('request');

var photos = [{ 'url': 'http://xxxx.com/im1', 'name': 'name1' },
              { 'url': 'http://xxxx.com/im12', 'name': 'name2' }];

for (var i = 0; i < photos.length; i++) {
    request(photos[i].url).pipe(fs.createWriteStream(photos[i].name));
}

After maybe 1000 calls I get a socket hang up error.

Following @Timothy Strimple's advice, I decided to use the async module.

My code is now something like this:

async.whilst(function () { return !stop; },
                function (callback) {
                    console.log("get next 20 image");
                    JM.api('/' + album.id + '/photos', { after: next }, function (resf) {
                        if (!resf || resf.error) {
                            console.log(!resf ? 'error occurred' : resf.error);
                        }
                        console.log("albums" + album.id + " " + resf.data.length + " dir" + dir);

                        async.eachSeries(resf.data, function (photo, done) {

                            request(photo.source).pipe(fs.createWriteStream(dir + "/" + photo.name));
                            console.log("copy of image " + photo.name);
                            done();
                        }, function (err) {
                            if (err) {
                                console.log('An image failed to copy');
                            } else {
                                console.log('All 20 images have been copied successfully');
                            }
                            if (resf.paging && resf.paging.cursors) {
                                console.log("suite de l'album à venir");
                                next = resf.paging.cursors.after;
                                setTimeout(function () { callback(); }, 5000);
                            }
                            else {
                                console.log("Fin de l'album");
                                stop = true;
                                setTimeout(function () { callback(); }, 5000);
                            }
                        });
                    });
                },
                function (err) {
                    if (err) {
                        console.log('An image failed to process');
                        albumcallback();
                    } else {
                        console.log('All images in this group have been processed successfully');
                        albumcallback();
                    }
                }
            );// end while

I was still getting a crash after maybe 100 files copied. I'm sure that async.whilst and async.eachSeries are working well, because my logs show that each call runs in series. But I still have a crash. I temporarily solved the problem by adding a wait after each copy, like this:

request(photo.source).pipe(fs.createWriteStream(dir + "/" + photo.name));
console.log("copy of image " + photo.name);
setTimeout(function () { done(); }, 5000);

Is this a limitation of the request module? How can I change these few lines to make sure that each connection is closed before the program continues?
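For reference, one way to avoid the fixed 5-second wait is to call done only when the write stream has actually finished, and to report download errors instead of stalling. This is just a minimal sketch of that idea, reusing the photo, dir and done variables from the eachSeries iterator above:

var ws = fs.createWriteStream(dir + "/" + photo.name);
request(photo.source)
    .on('error', done)            // a failed download would otherwise stall the series
    .pipe(ws);
ws.on('finish', function () {     // the file is fully written to disk
    console.log("copy of image " + photo.name);
    done();
});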


Solution 2

The request call and the pipe call are asynchronous, so I have to rewrite this line: request(photos[i].url).pipe(fs.createWriteStream(photos[i].name));

See here: Downloading N number of remote files using Node.js synchronously
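Applied to the first snippet, that sequential approach looks roughly like this (only a sketch, assuming the photos array defined at the top): start one download, wait for its write stream to emit 'finish', then move on to the next.

var fs = require('fs');
var request = require('request');

function downloadNext(i) {
    if (i >= photos.length) {
        console.log('all photos copied');
        return;
    }
    request(photos[i].url)
        .pipe(fs.createWriteStream(photos[i].name))
        .on('finish', function () {   // file fully written, this connection is done
            downloadNext(i + 1);      // only now start the next download
        });
}

downloadNext(0);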

OTHER TIPS

You probably need to move to an asynchronous loop. Something like eachLimit from the async module would probably be ideal.

async.eachLimit(photos, 10, function(photo, done) {
    var r = request(photo.url).pipe(fs.createWriteStream(photo.name));
    r.on('finish', done);
}, function(err) {
    // All images done or there was an error
});

Now it will process all the items in your photos list, but it will only process 10 of them concurrently. This will prevent it from spinning up hundreds or thousands of concurrent outgoing connections.
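One caveat with that snippet (not part of the original answer): if a request fails, its write stream may never emit 'finish', so done is never called and the loop stalls on that item. A variant that also forwards download errors might look like this sketch:

async.eachLimit(photos, 10, function (photo, done) {
    request(photo.url)
        .on('error', done)                      // pass download errors on to async
        .pipe(fs.createWriteStream(photo.name))
        .on('finish', function () { done(); }); // call done only once the file is written
}, function (err) {
    if (err) {
        console.log('an image failed to copy: ' + err);
    } else {
        console.log('all images copied');
    }
});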

Licensed under: CC-BY-SA with attribution
Not affiliated with StackOverflow