Node.js file download gets truncated by "require" statement - node.js

I'm trying to download and require a .js file using a node.js script, but only part of the file is being downloaded.
Specifically, this is the part that seems to be causing the problem:
// Excerpt of the failing code: the 'data' event fires once per chunk,
// so this handler runs many times, and require() is invoked while the
// file on disk is still incomplete.
response.on('data', function (chunk) {
out.write(chunk);
var theModule = require(__dirname + "/" + filename);
//less than half of the file is downloaded when the above line is included.
});
Here is the full source code:
var http = require('http');
var fs = require('fs');
downloadModule("functionChecker.js");
// Downloads /svn/<filename> and attempts to require() it.
function downloadModule(filename) {
// NOTE(review): http.createClient is deprecated (see the answer below).
var google = http.createClient(80, 'www.google.com');
var request = google.request('GET', '/svn/' + filename, {
'host': 'javascript-modules.googlecode.com'
});
request.end();
// NOTE(review): `out` has no `var`, so it becomes an implicit global.
out = fs.createWriteStream(filename);
request.on('response', function (response) {
response.setEncoding('utf8');
response.on('data', function (chunk) {
out.write(chunk);
// This runs on EVERY chunk, while the file is still partially written;
// the module should only be loaded after the response has ended.
var theModule = require(__dirname + "/" + filename);
//less than half of the file is downloaded when the above line is included.
//If the import statement is omitted, then the file is downloaded normally.
});
});
}

The data event can be called multiple times. You need to wait until all of the data is written.
// Stream each chunk to disk as it arrives; defer loading the module
// until the response has fully ended and the stream has been closed.
response.setEncoding('utf8');
response.on('data', (chunk) => {
  out.write(chunk);
});
response.on('end', () => {
  out.end();
  const theModule = require(__dirname + "/" + filename);
});
Also, createClient is deprecated, as mentioned in the docs. I'd also recommend using pipe to simplify your logic.
/**
 * Downloads /svn/<filename> from javascript-modules.googlecode.com,
 * saves it next to the script, and require()s it once the write
 * stream has been fully flushed and closed.
 *
 * @param {string} filename - name of the remote .js file to fetch
 */
function downloadModule(filename) {
  http.get({
    hostname: 'javascript-modules.googlecode.com',
    path: '/svn/' + filename
  }, function (res) {
    var out = fs.createWriteStream(filename);
    // 'close' fires only after all data has been flushed to disk,
    // so the file is complete before we require() it.
    out.on('close', function () {
      var theModule = require(__dirname + "/" + filename);
    });
    // Surface file-system write failures instead of ignoring them.
    out.on('error', function (err) {
      console.error(err);
    });
    res.pipe(out);
  }).on('error', function (err) {
    // Request-level failures (DNS lookup, connection refused, …).
    console.error(err);
  });
}

Related

Dialogflow Node.js is not getting the value of the https.get inside app.intent

I am having trouble getting the value returned by my https.get function inside my Dialogflow intent to close the conversation. Regardless of whether I execute this call in the app.intent or pass it on to an external function, it fails. I am new to node.js but have used angular.js and JavaScript before; however, I am not having success in being able to close the conversation with a response. The Google Actions emulator gives me the error
MalformedResponse
expected_inputs[0].input_prompt.rich_initial_prompt: 'rich_response'
must contain at least one item.
Below is my code:
// Intent handler: looks up data for a US state via an HTTPS API and
// tries to close the conversation with the result.
app.intent('mywebhook', (conv, params) => {
const stateName = params['geo-state-us'];
console.log("My State is " + stateName);
var pathString = 'api path' + encodeURIComponent(stateName);
// NOTE(review): https.get is asynchronous — this handler returns before
// the 'end' event fires, so conv.close() arrives too late unless the
// handler returns a Promise that resolves after 'end'.
var request = https.get({
host: 'www.mydomainame.com',
path: pathString
}, function (response){
var json = "";
response.on('data', function(chunk) {
json += chunk;
});
response.on('end', function(){
var jsonData = JSON.parse(json);
var myfirstvar = jsonData[0].firstvar;
// BUG: `chat` is referenced in its own initializer (it is undefined at
// this point); presumably `myfirstvar` was intended here.
var chat = "the value of first var is " + chat;
console.log(chat); // this works fine
conv.close(chat);
});
}).on('error', (e) => {
console.error(e);
});
}
I even tried calling conv.close(chat) outside the handler and using JSON.stringify(request) to get the value of myfirstvar, but nothing worked. I spent a whole day trying different things, but to no avail.
Try the refactored code below ( and comment if that works or not ):
app.intent('mywebhook', myWebHookFunction);
/**
 * Webhook handler for the 'mywebhook' intent.
 *
 * Returning a Promise tells the Dialogflow SDK to wait for the async
 * HTTPS call before finalizing the response — without it the handler
 * returns early and the emulator reports the MalformedResponse /
 * empty rich_response error.
 *
 * @param {object} conv   - Dialogflow conversation object
 * @param {object} params - intent parameters (expects 'geo-state-us')
 * @returns {Promise<void>} resolves after conv.close() has been called
 */
function myWebHookFunction(conv, params) {
  return new Promise(function (resolve, reject) {
    const stateName = params['geo-state-us'];
    console.log("My State is " + stateName);
    var pathString = 'api path' + encodeURIComponent(stateName);
    https.get({
      host: 'www.mydomainame.com',
      path: pathString
    }, function (response) {
      var json = "";
      response.on('data', function (chunk) {
        json += chunk;
      });
      response.on('end', function () {
        try {
          var jsonData = JSON.parse(json);
          var myfirstvar = jsonData[0].firstvar;
          // FIX: interpolate myfirstvar — the original concatenated the
          // still-undefined `chat` variable into its own initializer.
          var chat = "the value of first var is " + myfirstvar;
          console.log(chat);
          conv.close(chat);
          resolve();
        } catch (e) {
          // Malformed JSON (or an unexpected payload shape) rejects the
          // Promise instead of throwing inside the event handler.
          reject(e);
        }
      });
    }).on('error', (e) => {
      console.error(e);
      // Propagate the failure so the SDK knows the webhook failed.
      reject(e);
    });
  });
}

Download many files asynchronously

I want to download all the mp3 files contained in this xml, so I created this code using Node.js and JavaScript:
var https = require('https');
var fs = require('fs');
var xml2js = require('xml2js');
var parser = new xml2js.Parser();
var request = require('request');
const xmlUrl = 'https://deejayreloadedpodcast.maxxer.it/podcast/pinocchio.xml';
// Downloads `url` to `dest`, invoking `callback` when the stream closes.
// NOTE(review): request.get() returns a stream, not a Promise, so the
// `await` below does not actually wait for the download to finish.
// NOTE(review): when `dest` already exists, `callback` is never invoked.
var download = async function(url, dest, callback) {
// download if only the file is not existing yet
if(!fs.existsSync(dest)) {
await request.get(url)
.on('error', function(err) {
console.log(err);
})
.pipe(fs.createWriteStream(dest))
.on('close', callback);
}
};
// Fetches the podcast XML feed, extracts every episode's title and mp3
// URL, then kicks off the downloads.
https.get(xmlUrl, function(res) {
var response_data = '';
res.setEncoding('utf8');
res.on('data', function(chunk) {
response_data += chunk;
});
res.on('end', function() {
parser.parseString(response_data, function(err, result) {
if(err) {
console.log('Got error: ' + err.message);
}
else {
// NOTE(review): `json` is never used after this line.
var json = JSON.stringify(result, null, 2);
var channels = result['rss']['channel'];
var items = channels[0]['item'];
var urlsTemp = [];
var namesTemp = [];
for(var elem in items) {
var obj = items[elem];
var name = obj['title'][0];
var url = obj['enclosure'][0]['$']['url'];
urlsTemp.push(url);
namesTemp.push(name);
}
var urls = [];
var names = [];
// NOTE(review): this second loop is a redundant element-by-element
// copy, and `i` has no `var`, so it leaks as an implicit global.
for(i in urlsTemp) {
urls.push(urlsTemp[i]);
names.push(namesTemp[i]);
}
// This loop starts every download at once; `filename` is var-scoped
// (function-wide), so all the completion callbacks share one binding.
for(var i = 10; i < 20/*urls.length*/; i++) {
var dirPath = './puntate/';
var filename = names[i] + '.mp3';
download(urls[i], dirPath + filename, function() {
console.log('Finished downloading \'' + filename);
});
}
}
});
});
res.on('error', function(err) {
console.log('Got error: ' + err.message);
});
});
This code takes the contents of the XML file, processes it by saving the links and file names in two arrays (urls and names) and then downloads the audio files.
The problem is that it only works if you download a few mp3s at a time (in the example, there are only 10).
If I let it loop from 0 to the full length of the array urls, the program no longer works. It does not generate errors but saves all mp3s with size 0 (ie empty).
Why? I thought the problem was asynchronous code, but I used async/await in the download method.
What's the problem?
Thank you
var i = 0;
var dirPath = './puntate/';
// NOTE(review): `filename` is computed once here, with i = 0, and is
// never recomputed inside fn — so every recursive call reuses the same
// file name.
var filename = names[i] + '.mp3';
var fn = function(i) {
console.log('(A)', i, urls.length);
download(urls[i], dirPath + filename, function() {
console.log('Finished downloading \'' + filename);
console.log('(B)', i, urls.length);
// NOTE(review): when i === urls.length - 1 this still increments and
// recurses, calling fn(urls.length) with an out-of-range index.
if(i < urls.length) {
i++;
console.log('(C)', i, urls.length);
fn(i);
}
});
}
fn(i);
and:
(A) 0 3095
Finished downloading 'Puntata del 17 Settembre 2018.mp3
(B) 0 3095
(C) 1 3095
(A) 1 3095
I suggest you to modify the for loop, since it gives a synchronous function:
// (Quoted from the question) This loop is synchronous: it starts all
// of the downloads at once instead of waiting for each one to finish.
for(var i = 10; i < 20/*urls.length*/; i++) {
var dirPath = './puntate/';
var filename = names[i] + '.mp3';
download(urls[i], dirPath + filename, function() {
console.log('Finished downloading \'' + filename);
});
}
to a continuous passing style:
var i = 0; /* i starts from 0 */
var dirPath = './puntate/';
// Continuation-passing download loop: each file starts only after the
// previous one has finished.
var fn = function (i) {
  // FIX: stop once every url has been processed. The original advanced
  // past the last index and recursed with i === urls.length, which made
  // names[i] undefined and called download(undefined, 'undefined.mp3').
  // This guard also handles an empty urls array.
  if (i >= urls.length) {
    return;
  }
  var filename = names[i] + '.mp3';
  download(urls[i], dirPath + filename, function () {
    console.log('Finished downloading \'' + filename);
    /* continue with the next link, if any */
    fn(i + 1);
  });
};
fn(i);
Here is enhanced code version:
Improvements:
deleted an unnecessary for loop
if a file already exists, skip it and move on to the next missing one, printing a message.
var https = require('https');
var fs = require('fs');
var xml2js = require('xml2js');
var parser = new xml2js.Parser();
var request = require('request');
var urls = [];
var names = [];
const xmlUrl = 'https://deejayreloadedpodcast.maxxer.it/podcast/pinocchio.xml';
// Download `url` to `dest`, then invoke `callback` once the write stream
// closes (i.e. the file is fully on disk). The original declared this
// `async`, but nothing is awaited: request.get() returns a stream.
var download = function (url, dest, callback) {
  request.get(url)
    .on('error', function (err) {
      console.log(err);
    })
    .pipe(fs.createWriteStream(dest))
    .on('close', callback);
};
https.get(xmlUrl, function (res) {
  var response_data = '';
  res.setEncoding('utf8');
  res.on('data', function (chunk) {
    response_data += chunk;
  });
  res.on('end', function () {
    parser.parseString(response_data, function (err, result) {
      if (err) {
        console.log('Got error: ' + err.message);
        return; // nothing further to do on a parse failure
      }
      // Push items directly into urls[] and names[]; no temp arrays.
      var items = result['rss']['channel'][0]['item'];
      for (var elem in items) {
        var obj = items[elem];
        names.push(obj['title'][0]);
        urls.push(obj['enclosure'][0]['$']['url']);
      }
      var dirPath = './puntate/';
      // Process one episode at a time: the next download starts only
      // after the previous one finishes, so thousands of sockets are
      // never opened simultaneously.
      var fn = function (i) {
        // FIX: the original incremented past the last element in BOTH
        // branches and recursed with i === urls.length, producing
        // names[i] === undefined and a download(undefined, ...) call.
        if (i >= urls.length) {
          return;
        }
        var filename = names[i] + '.mp3';
        if (fs.existsSync(dirPath + filename)) {
          // Already downloaded: skip straight to the next item.
          console.log('File exists', i, urls.length);
          fn(i + 1);
        } else {
          console.log('(A)', i, urls.length);
          download(urls[i], dirPath + filename, function () {
            console.log('Finished downloading \'' + filename);
            console.log('(B)', i, urls.length);
            fn(i + 1);
          });
        }
      };
      fn(0);
    });
  });
  res.on('error', function (err) {
    console.log('Got error: ' + err.message);
  });
});

Node.js native file upload form

i have a question: Is there any way to create native file upload system in node.js? Without modules like multer, busboy and others. I just want save it from file form. Like:
<form action="/files" method="post">
<input type="file" name="file1">
</form>
Is it possible to access this file natively in node.js? Maybe I'm wrong, but if these modules can do it, it must be possible, right?
It is possible. An example is below.
const http = require('http');
const fs = require('fs');
const filename = "logo.jpg";
const boundary = "MyBoundary12345";
// Reads a local file and POSTs it as a hand-built multipart/form-data
// body — no multer/busboy/form-data modules involved.
fs.readFile(filename, function (err, content) {
  if (err) {
    console.log(err);
    return;
  }
  // Build the part headers for the single "file1" field.
  // FIX: the original emitted TWO Content-Type headers for the same part
  // (image/jpeg and application/octet-stream); a part takes only one.
  let data = "";
  data += "--" + boundary + "\r\n";
  data += "Content-Disposition: form-data; name=\"file1\"; filename=\"" + filename + "\"\r\n";
  data += "Content-Type: application/octet-stream\r\n\r\n";
  const payload = Buffer.concat([
    Buffer.from(data, "utf8"),
    content, // already a Buffer from fs.readFile — no copy needed
    Buffer.from("\r\n--" + boundary + "--\r\n", "utf8"),
  ]);
  const options = {
    host: "localhost",
    port: 8080,
    path: "/upload",
    method: 'POST',
    headers: {
      "Content-Type": "multipart/form-data; boundary=" + boundary,
      // Let the server know exactly where the body ends.
      "Content-Length": payload.length,
    },
  };
  const chunks = [];
  const req = http.request(options, response => {
    response.on('data', (chunk) => chunks.push(chunk));
    response.on('end', () => console.log(Buffer.concat(chunks).toString()));
  });
  // Without this, a refused connection crashes the process.
  req.on('error', (e) => console.error(e));
  req.write(payload);
  req.end();
});
The question is interesting. I wonder why it is not already answered (4 years, 9 months).

node.js wait for file operation to complete

I am brand new at node.js and I need to write a web server that takes data and write it to a file. that file is then used in an external program.
my problem is that node.js executes the external program before the data is completely written to the file. How can I wait for it to complete before executing the external program
var http = require('http')
var map = require('through2-map')
var fs = require('fs')
var str = ""
var sh = require('execSync');
var tmp_filename = ""
// Returns a pseudo-random float in [low, high) — note: a float, so the
// temp file names contain a decimal point.
function random (low, high) {
return Math.random() * (high - low) + low;
}
// Runs the external ruby script synchronously on the temp file and
// returns its stdout.
function execute_program(){
var command = 'ruby ' + __dirname + '/check_content.rb --file '+ tmp_filename
var result = sh.exec(command);
console.log('return code ' + result.code);
console.log('stdout + stderr ' + result.stdout);
return result.stdout
}
// Appends one request chunk to the temp file (asynchronously).
function write_to_file (chunk){
str = chunk.toString();
fs.appendFile(tmp_filename, str, function (err){
if (err) throw err;
console.log('The "data to append" was appended to file!');
}
)
}
var server = http.createServer( function(req, res){
tmp_filename = '/tmp/tmp_template_' + random(1000, 99999) + '.template'
res.writeHead(200, {'Content-Type': 'text/plain'});
// NOTE(review): `message` has no `var` — an implicit global.
message = req.pipe(map(function (chunk, res) {
write_to_file(chunk);
}))
// NOTE(review): this runs immediately after STARTING the pipe, before
// the async appendFile calls have completed — this is the bug the
// question describes.
var out = execute_program();
res.write(out)
message.pipe(res)
}
)
server.listen(8000)
Note: I'm not sure what you're attempting with res.write(out) and then message.pipe(res). That will write the output from your ruby script to the response and then it looks like you're piping the request body in after that. As it is, the message.pipe(res) line won't work because you'll be trying to pipe from a writable stream, which you can't do. For the purpose of this answer I'm going to assume you don't want the message.pipe(res) line; please correct me if I'm wrong.
Simple / Quick Answer
The simplest answer is to listen for the end event on req and then execute your program, etc. when that event fires:
// Respond only after the request body has been fully consumed: pipe
// every chunk into the temp file, and run the external program when
// the request's 'end' event fires.
var server = http.createServer((req, res) => {
  tmp_filename = '/tmp/tmp_template_' + random(1000, 99999) + '.template';
  res.writeHead(200, { 'Content-Type': 'text/plain' });
  req.on('end', () => {
    var out = execute_program();
    res.write(out);
    res.end();
  });
  req.pipe(map((chunk, _unused) => {
    write_to_file(chunk);
  }));
});
Optimization: fs.createWriteStream()
You could also use fs.createWriteStream() instead of appending chunks with fs.appendFile(). That would look like:
// Same flow, but let Node stream the body straight to disk:
// fs.createWriteStream replaces the manual append-per-chunk logic.
var server = http.createServer((req, res) => {
  tmp_filename = '/tmp/tmp_template_' + random(1000, 99999) + '.template';
  res.writeHead(200, { 'Content-Type': 'text/plain' });
  req.on('end', () => {
    res.write(execute_program());
    res.end();
  });
  req.pipe(fs.createWriteStream(tmp_filename));
});
With that in place, you can remove your entire write_to_file function.
That will also prevent a problem you might have if you happen to get the same random number for your temp file; with your current code, you'll append to the existing file, which is probably not what you want. If for some reason you do want to append to the file in that case, you can just pass the a flag (for append) to fs.createWriteStream: fs.createWriteStream(tmp_filename, {flags: 'a'});.
Thanks Mike S. The message.pipe(res) and res.write(out) is redundant...but i did some research and used async.waterfall package and the following worked for me. but will take your suggestion for optimization
var http = require('http');
var map = require('through2-map');
var fs = require('fs');
var str = "";
var sh = require('execSync');
var async = require('async');
var tmp_filename = "";
// Pseudo-random float in [low, high), used to name the temp file.
function random (low, high) {
return Math.random() * (high - low) + low;
}
// Appends one request chunk to the temp file (asynchronously).
function write_to_file (chunk){
str = chunk.toString();
fs.appendFile(tmp_filename, str, function (err){
if (err) throw err;
console.log('The "data to append" was appended to file!');
}
)
}
// Runs the external ruby script synchronously and returns its stdout.
function execute_program(){
var command = 'ruby ' + __dirname + '/check_content --file ' + tmp_filename
var result = sh.exec(command);
console.log('return code ' + result.code);
console.log('stdout + stderr ' + result.stdout);
return result.stdout
}
// NOTE(review): async.waterfall expects each step's callback to be
// invoked once, but the first step calls it on EVERY incoming request —
// verify behavior for the second and subsequent requests.
async.waterfall([
function(callback){
var server = http.createServer(function(req, res){
callback(null, req, res)
}
).listen(8000);
},
function(req, res, callback){
tmp_filename = '/tmp/tmp_template_' + random(1000, 99999) + '.template'
var message = req.pipe(map(function (chunk) {
write_to_file(chunk);
}))
// NOTE(review): a fixed 666 ms delay is a race, not a completion
// signal — a slow upload or disk can still lose data; waiting for
// the request's 'end' event (as in the accepted answer) is reliable.
setTimeout(function () {
callback(null, req, res, message);
}, 666);
},
function(req, res, message, callback){
var out = execute_program()
res.write(out)
message.pipe(res)
callback(null, 'done');
}
])

How do I download a bunch of files from a remote server, and concatenate them in a certain order using Node.js?

I'm trying to read the contents of a bunch of javascript files on a server, and then concatenate them into a new local file. The files have to be concatenated in a specific order (specified in an array). Here's what I have so far:
var http = require('http');
var fs = require('fs');
var commonWebFiles = getCommonWebDependenciesInOrder();
var fileContents = [];
var path = '/folder/';
// NOTE(review): fs.mkdir is called without a callback; newer Node
// versions require one — verify against the targeted Node version.
fs.mkdir("target");
// Starts every download at once; completion order is not controlled.
for(var i = 0, l = commonWebFiles.length; i < l; ++i){
getFileContents(path, commonWebFiles[i]);
}
// Returns the dependency file names in the required concatenation order.
function getCommonWebDependenciesInOrder(){
//Hit manager to get an correctly ordered array of common web dependencies
//Stub
return [
'file1.js',
'file2.js',
'file3.js'
];
};
// Fetches one remote file and appends it to target/<filename>, also
// caching the body in fileContents keyed by filename.
function getFileContents(path, filename){
var contents = "";
var writeStream = fs.createWriteStream("target/" + filename, {'flags': 'a'});
var options = {
host: 'ahost.net',
port: 80,
path: path + filename
};
var req = http.get(options, function(res) {
res.on('data', function(chunk) {
// Buffer chunks are coerced to strings here; fine for .js text but
// would corrupt true binary payloads.
contents += chunk;
});
res.on('end', function() {
// NOTE(review): `encoding='binary'` is an assignment expression, not a
// named argument — it creates an implicit global named `encoding`.
writeStream.write(contents, encoding='binary');
writeStream.end();
fileContents[filename] = contents;
});
}).on('error', function(e) {
console.log("Got error: " + e.message);
});
};
This downloads the files and recreates them locally, but it seems a little clunky. When I tried to just write a single file directly from a looped set of requests, I got the chunks out of order....I feel like there must be an easier way....
Thanks in advance.
Use async and request:
var fs = require('fs'),
    async = require('async'),
    request = require('request');
// Adapts request's (err, response, body) callback to the standard
// (err, result) shape that async.map expects.
function simpleRequest(url, callback) {
  request(url, function (err, response, body) {
    callback(err, body);
  });
}
// Fetch every url in parallel. async.map returns `results` in the same
// order as `urls`, so the concatenation preserves the required order
// regardless of which download finishes first.
async.map(urls, simpleRequest, function (err, results) {
  if (err)
    return console.error(err);
  // FIX: fs.writeFile requires a completion callback (modern Node
  // throws without one), and write errors were silently dropped.
  fs.writeFile(outfile, results.join(''), function (err) {
    if (err) console.error(err);
  });
});

Resources