SzymonPoltorak - 5 months ago
Node.js Question

nodejs multiple http requests in loop

I'm trying to make a simple feed reader in Node and I'm facing a problem with multiple requests in node.js.
For example, I have an array of URLs, something like:

urls = [
    "http://url1.com/rss.xml",
    "http://url2.com",
    "http://url3.com"
];


Now I want to get the contents of each URL. My first idea was to use
for (var i in urls)
but that's not a good idea. The best option would be to do it asynchronously, but I don't know how to do that.

Any ideas?

EDIT:

I got this code:

var data = [];
for (var i = 0; i < urls.length; i++) {
    http.get(urls[i], function(response) {
        console.log('Response: ', response.statusCode, ' from url: ', urls[i]);
        var body = '';
        response.on('data', function(chunk) {
            body += chunk;
        });

        response.on('end', function() {
            data.push(body);
        });
    }).on('error', function(e) {
        console.log('Error: ', e.message);
    });
}


The problem is that the http.get(...) line runs first for each element in the loop, and only afterwards do the response.on('data') and response.on('end') events fire. It makes a mess and I don't know how to handle it.

Answer

By default, Node's http requests are asynchronous. You can start them sequentially in your code and call a function that runs once all requests are done. You can either do it by hand (count the finished vs. started requests) or use async.js.

This is the no-dependency way (error checking omitted):

var http = require('http');
var urls = ["http://www.google.com", "http://www.example.com"];
var responses = [];
var completed_requests = 0;

for (var i = 0; i < urls.length; i++) {
    http.get(urls[i], function(res) {
        responses.push(res);
        completed_requests++;
        if (completed_requests == urls.length) {
            // All downloads done, process the responses array
            console.log(responses);
        }
    });
}
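
If you also want to collect each response body (as in the code from the question), you can combine the same counting idea with the 'data'/'end' handlers. This is only a sketch along the lines of the answer above, not the answer's exact code; it uses forEach so each callback keeps its own url instead of reading a loop variable that has already moved on:

var http = require('http');

var urls = ["http://www.google.com", "http://www.example.com"];
var bodies = {};             // body text keyed by URL
var completed_requests = 0;

urls.forEach(function(url) {
    http.get(url, function(res) {
        var body = '';
        res.on('data', function(chunk) {
            body += chunk;
        });
        res.on('end', function() {
            bodies[url] = body;
            completed_requests++;
            if (completed_requests === urls.length) {
                // Every request has finished; process the collected bodies
                console.log(bodies);
            }
        });
    }).on('error', function(e) {
        console.log('Error: ', e.message);
    });
});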
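
And here is roughly what the async.js route mentioned above could look like. This is just a sketch under my own naming, not code from the answer: fetchBody is a made-up helper, and async.map is the library function that runs an iteratee for every item and collects the results in the same order as the input:

var http = require('http');
var async = require('async');   // npm install async

var urls = ["http://www.google.com", "http://www.example.com"];

// Helper that downloads one URL and calls back with its body
function fetchBody(url, callback) {
    http.get(url, function(res) {
        var body = '';
        res.on('data', function(chunk) {
            body += chunk;
        });
        res.on('end', function() {
            callback(null, body);
        });
    }).on('error', callback);
}

// Run fetchBody for every URL and collect the bodies in order
async.map(urls, fetchBody, function(err, bodies) {
    if (err) return console.log('Error: ', err.message);
    console.log(bodies);
});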