I'm trying to upload a file through curl --upload-file
For some reason, curl freezes and upload.backup gets created empty. Any ideas how this code should be changed to make it work? I must be misunderstanding the APIs.
// Broken upload server from the question, kept as-is for reference.
var http = require('http');
var fs = require('fs');
var server = http.createServer(function(request, response) {
// Destination for a backup copy of the uploaded body.
var fileBackup = fs.createWriteStream("upload.backup");
// Total expected size, taken from the Content-Length header (a string here).
var fileBytes = request.headers['content-length'];
var uploadedBytes = 0;
// NOTE(review): calling request.read() yourself consumes the stream's data,
// so the pipe() below never receives anything — that is why upload.backup
// ends up empty.
request.on('readable', function() {
var chunk = null;
while (null !== (chunk = request.read())) {
uploadedBytes += chunk.length;
var progress = uploadedBytes / fileBytes * 100;
response.write("progress: " + parseInt(progress, 10) + "%\n");
}
});
// NOTE(review): the response is never end()ed anywhere, which is why curl
// appears to freeze waiting for the server to finish.
request.pipe(fileBackup);
}).listen(8080);
One issue is that you're never ending your response, so curl waits forever. Secondly, by calling request.read() yourself you consume the incoming data before the pipe() can write it to the file.
Try something like this instead:
var http = require('http');
var fs = require('fs');

// Accept an uploaded file, mirror it to upload.backup, and stream progress
// lines back to the client while the body arrives.
var server = http.createServer(function(request, response) {
  var backup = fs.createWriteStream('upload.backup');
  var totalBytes = parseInt(request.headers['content-length'], 10);
  var receivedBytes = 0;

  // Persist the request body to disk; 'data' listeners below observe the
  // same chunks without interfering with the pipe.
  request.pipe(backup);

  // Only report progress when the client supplied a usable Content-Length.
  if (!isNaN(totalBytes)) {
    request.on('data', function(chunk) {
      receivedBytes += chunk.length;
      response.write('progress: ' + (receivedBytes / totalBytes * 100) + '%\n');
    });
  }

  // Close the response once the upload has fully arrived, so curl returns.
  request.on('end', function() {
    response.end();
  });
}).listen(8080);
Related
I want to download all the mp3 files contained in this xml, so I created this code using Node.js and JavaScript:
var https = require('https');
var fs = require('fs');
var xml2js = require('xml2js');
var parser = new xml2js.Parser();
var request = require('request');
const xmlUrl = 'https://deejayreloadedpodcast.maxxer.it/podcast/pinocchio.xml';
// Download `url` into `dest`, invoking `callback` once the file stream closes.
// NOTE(review): request's return value is a stream, not a Promise, so this
// `await` is a no-op — `async` here does not serialize anything.
var download = async function(url, dest, callback) {
// Download only if the file does not exist yet.
// NOTE(review): when the file already exists, `callback` is never invoked.
if(!fs.existsSync(dest)) {
await request.get(url)
.on('error', function(err) {
console.log(err);
})
.pipe(fs.createWriteStream(dest))
.on('close', callback);
}
};
// Fetch the podcast RSS feed, parse it, then download the mp3 enclosures.
https.get(xmlUrl, function(res) {
var response_data = '';
res.setEncoding('utf8');
// Accumulate the XML body as text.
res.on('data', function(chunk) {
response_data += chunk;
});
res.on('end', function() {
parser.parseString(response_data, function(err, result) {
if(err) {
console.log('Got error: ' + err.message);
}
else {
// NOTE(review): `json` is never used afterwards.
var json = JSON.stringify(result, null, 2);
var channels = result['rss']['channel'];
var items = channels[0]['item'];
var urlsTemp = [];
var namesTemp = [];
// NOTE(review): for...in over an array iterates indices as strings;
// a plain for or for...of would be the conventional choice.
for(var elem in items) {
var obj = items[elem];
var name = obj['title'][0];
var url = obj['enclosure'][0]['$']['url'];
urlsTemp.push(url);
namesTemp.push(name);
}
var urls = [];
var names = [];
// NOTE(review): this copy loop is redundant (urls/names duplicate the
// Temp arrays), and `i` has no declaration — an implicit global.
for(i in urlsTemp) {
urls.push(urlsTemp[i]);
names.push(namesTemp[i]);
}
// NOTE(review): this loop starts every download at once; looping over
// the full array launches thousands of simultaneous requests, which is
// the likely cause of the zero-byte files described in the question.
for(var i = 10; i < 20/*urls.length*/; i++) {
var dirPath = './puntate/';
// NOTE(review): `filename` is var-scoped, so every download callback
// below closes over the same (last) value and logs the wrong name.
var filename = names[i] + '.mp3';
download(urls[i], dirPath + filename, function() {
console.log('Finished downloading \'' + filename);
});
}
}
});
});
res.on('error', function(err) {
console.log('Got error: ' + err.message);
});
});
This code takes the contents of the XML file, processes it by saving the links and file names in two arrays (urls and names) and then downloads the audio files.
The problem is that it only works if you download a few mp3s at a time (in the example, there are only 10).
If I let it loop from 0 to the full length of the array urls, the program no longer works. It does not generate errors but saves all mp3s with size 0 (ie empty).
Why? I thought the problem was asynchronous code, but I used async/await in the download method.
What's the problem?
Thank you
var i = 0;
var dirPath = './puntate/';
// Sequential downloader: fn(i) downloads item i, then recurses with the next
// index from the completion callback, so only one transfer runs at a time.
var fn = function(i) {
  // Guard before doing any work — the original incremented first and checked
  // `i < urls.length` before the increment, so the final call invoked
  // download(urls[urls.length], ...) with an undefined url and name.
  if (i >= urls.length) {
    return;
  }
  console.log('(A)', i, urls.length);
  // Compute the name per item; the original computed it once outside fn,
  // so every callback logged the first file's name.
  var filename = names[i] + '.mp3';
  download(urls[i], dirPath + filename, function() {
    console.log('Finished downloading \'' + filename);
    console.log('(B)', i, urls.length);
    i++;
    console.log('(C)', i, urls.length);
    fn(i);
  });
};
fn(i);
and:
(A) 0 3095
Finished downloading 'Puntata del 17 Settembre 2018.mp3
(B) 0 3095
(C) 1 3095
(A) 1 3095
I suggest you modify the for loop, since it runs synchronously and fires off all the downloads at once:
// NOTE(review): this is the problematic version quoted from the question —
// the synchronous loop starts every download at once.
for(var i = 10; i < 20/*urls.length*/; i++) {
var dirPath = './puntate/';
// NOTE(review): var-scoped `filename` is shared by every callback below.
var filename = names[i] + '.mp3';
download(urls[i], dirPath + filename, function() {
console.log('Finished downloading \'' + filename);
});
}
to a continuous passing style:
var i = 0; /*i starts from 0*/
var dirPath = './puntate/';
// Continuation-passing style: download item i, then continue with i + 1 from
// the completion callback so only one transfer is in flight at a time.
var fn = function(i) {
  // Stop once past the last index. Checking before downloading also fixes the
  // original off-by-one, which recursed into fn(urls.length) and called
  // download() with an undefined url.
  if (i >= urls.length) {
    return;
  }
  var filename = names[i] + '.mp3';
  download(urls[i], dirPath + filename, function() {
    console.log('Finished downloading \'' + filename);
    /*continue with the next link, if any*/
    fn(i + 1);
  });
};
fn(i);
Here is enhanced code version:
Improvements:
deleted an unnecessary for loop
if a file already exists, skip it and move on until the next missing one, logging each skip.
var https = require('https');
var fs = require('fs');
var xml2js = require('xml2js');
var parser = new xml2js.Parser();
var request = require('request');
// Parsed feed data: download urls and the matching episode names
// (filled in by the feed parser below).
var urls = [];
var names = [];
// RSS feed listing the podcast episodes.
const xmlUrl = 'https://deejayreloadedpodcast.maxxer.it/podcast/pinocchio.xml';
// Stream `url` into `dest`, then invoke `callback` once the file stream is
// closed. Transport errors are logged; the file may be left partial.
// Note: the original marked this `async` although nothing was awaited — it is
// a plain callback-style helper, so the misleading keyword is removed.
var download = function(url, dest, callback) {
  request.get(url)
    .on('error', function(err) {
      console.log(err);
    })
    .pipe(fs.createWriteStream(dest))
    .on('close', callback);
};
// Fetch the podcast RSS feed, collect every episode's title and mp3 url,
// then download the files one at a time, skipping ones already on disk.
https.get(xmlUrl, function(res) {
  var response_data = '';
  res.setEncoding('utf8');
  res.on('data', function(chunk) {
    response_data += chunk;
  });
  res.on('end', function() {
    parser.parseString(response_data, function(err, result) {
      if (err) {
        console.log('Got error: ' + err.message);
        return;
      }
      var items = result['rss']['channel'][0]['item'];
      // Push titles and enclosure urls straight into the module-level arrays.
      for (var k = 0; k < items.length; k++) {
        urls.push(items[k]['enclosure'][0]['$']['url']);
        names.push(items[k]['title'][0]);
      }
      var dirPath = './puntate/';
      // Sequential downloader: handle item i, then recurse on i + 1.
      var fn = function(i) {
        // Bounds guard — the original recursed past the end both in the
        // file-exists branch and after the last download, reading
        // names[urls.length] (undefined) and downloading an undefined url.
        if (i >= urls.length) {
          return;
        }
        var filename = names[i] + '.mp3';
        // Skip files that were already downloaded on a previous run.
        if (fs.existsSync(dirPath + filename)) {
          console.log('File exists', i, urls.length);
          fn(i + 1);
        } else {
          console.log('(A)', i, urls.length);
          download(urls[i], dirPath + filename, function() {
            console.log('Finished downloading \'' + filename);
            console.log('(B)', i, urls.length);
            fn(i + 1);
          });
        }
      };
      fn(0);
    });
  });
  res.on('error', function(err) {
    console.log('Got error: ' + err.message);
  });
});
I am trying out NodeJS server app for getting AWS S3 bucket file (object) names list and return as HTTP response text.
below is the full code I wrote.
var AWS = require('aws-sdk');
var http = require("http");
// Handle each HTTP request by listing the S3 objects under a prefix and
// (intended) returning the file names as a comma-separated body.
function onRequest(request, response) {
response.writeHead(200, {
'Content-Type': 'application/json'
});
var credentials = new AWS.SharedIniFileCredentials();
AWS.config.credentials = credentials;
AWS.config.logger = console;
var str = "";
var s3 = new AWS.S3();
var params = {
Bucket: 'bucketname',
Delimiter: '/',
Prefix: 'path/to/the/folder'
}
//var data = s3.listObjectsV2(params); returns data as [object Object]
// NOTE(review): listObjects is asynchronous — this callback runs after the
// response below has already been written and ended. That is the bug being
// asked about.
s3.listObjects(params, function(err, data) {
// NOTE(review): `err` is never checked; on failure data.Contents throws.
console.log("Data : " + data);
for (var i = 0; i < data.Contents.length; i++) {
var tokens = data.Contents[i].Key.split("/");
str = str + tokens[tokens.length - 1];
// NOTE(review): `i != data.Contents.length` is always true inside the
// loop, so a comma is appended after every name, including the last.
if (i != data.Contents.length) {
str = str + ",";
}
}
console.log("Final text:" + str);
});
// NOTE(review): str is still "" here — the callback above has not run yet.
response.write(str);
response.end();
}
http.createServer(onRequest).listen(8081);
// Fixed: removed the stray trailing quote that made this line a syntax error.
console.log('Server running at http://127.0.0.1:8081/');
Problem: Due to asynchronous call to listObjects my HTTP response writing ends before I get callback value.
Can anyone help me to make it synchronous so that I can add str as part of the response body?
Thanks in advance.
You can just put res.write and res.end inside the callback that will work:
var AWS = require('aws-sdk'); var http = require("http");
// List the S3 objects under a prefix and answer with their file names as a
// comma-separated body. The response is written from inside the listObjects
// callback, because the call is asynchronous.
function onRequest(request, response) {
  response.writeHead(200, { 'Content-Type': 'application/json' });
  var credentials = new AWS.SharedIniFileCredentials();
  AWS.config.credentials = credentials;
  AWS.config.logger = console;
  var s3 = new AWS.S3();
  var params = { Bucket: 'bucketname', Delimiter: '/', Prefix: 'path/to/the/folder' };
  // s3.listObjectsV2(params) without a callback only builds a request object,
  // which stringifies as [object Object] — hence the callback form below.
  s3.listObjects(params, function(err, data) {
    if (err) {
      // Surface S3 failures instead of crashing on data.Contents.
      console.log("Error : " + err);
      response.end(JSON.stringify({ error: String(err) }));
      return;
    }
    console.log("Data : " + data);
    // Keep only the last path segment of each key, joined with commas.
    // join() also fixes the original's trailing comma: the old check
    // `i != data.Contents.length` was true on every iteration.
    var str = data.Contents.map(function(obj) {
      var tokens = obj.Key.split("/");
      return tokens[tokens.length - 1];
    }).join(",");
    console.log("Final text:" + str);
    // Write and end the response here, inside the callback.
    response.write(str);
    response.end();
  });
}
// Start the HTTP server on port 8081.
http.createServer(onRequest).listen(8081);
console.log('Server running at http://127.0.0.1:8081/');
You only need response.end(str), and it can go inside the callback. Node will keep the HTTP stream open until the S3 call returns.
When I created my first server for my first Node.js practice exercise, it showed this error:
_http_outgoing.js:458
throw new TypeError('First argument must be a string or Buffer');
I'd like to know how to deal with this.
var http = require('http');
var fs = require('fs');
var mime = require('mime');
var url = require('url');
// Serve index.html at '/', and any other path as a static file.
function serve(request,response) {
var urlobj = url.parse(request.url,true);
var pathname = urlobj.pathname;
if(pathname =='/'){
response.statusCode = 200;
response.setHeader('Content-Type','text/html;charset=utf-8');
response.setHeader('keyname','zfpx');
// NOTE(review): `err` is ignored — if index.html is missing, `data` is
// undefined and response.write(data) throws exactly the quoted error:
// "First argument must be a string or Buffer".
fs.readFile('index.html',function (err,data) {
response.write(data);
response.end();
});
}else {
static(pathname,response);
}
// NOTE(review): `static` is a reserved word in strict mode, and this
// parameter named `url` shadows the url module imported above.
function static(url,response) {
response.setHeader('Content-Type',mime.getType(url)+';charset=utf-8');
// NOTE(review): same unchecked `err` — a missing file crashes here too,
// e.g. the browser's automatic /favicon.ico request.
fs.readFile(url.slice(1),function (err,data) {
response.write(data);
response.end();
});
}
}
var server = http.createServer(serve);
server.listen(8080,'localhost');
Basically,
I first initiate socket.io like this:
var io = require('socket.io')(1337);
Then, after using http to get a POST request and check some info, I try this:
var updateArray = {timer:"start"};
// NOTE(review): jsonUpdate has no var/let/const — it becomes an implicit global.
jsonUpdate = JSON.stringify(updateArray);
io.emit('update', jsonUpdate);
But it doesn't send the sockets, and I really can't understand the socket.io documentation sadly, so I'd be happy if someone can help me out.
Server code:
var http = require('http');
var fs = require('fs');
// socket.io listening on its own port (1337), separate from the HTTP server.
var io = require('socket.io')(1337);
// Unix timestamp of the last "bomb planted" broadcast; enforces a cooldown.
var initialBomb = 0;
// Current time as whole seconds.
function now() {
return Math.floor(new Date() / 1000);
}
http.createServer(function (req, res) {
var body = "";
req.on('data', function (chunk) {
if (req.method == 'POST') {
body += chunk;
}
});
req.on('end', function () {
// NOTE(review): implicit global, and JSON.parse throws on an empty or
// malformed body (e.g. any non-POST request) — no try/catch here.
parsedBody = JSON.parse(body);
if (parsedBody.round["bomb"] == "planted") {
var rightNow = now();
// 41-second cooldown between broadcasts.
var initialCheck = initialBomb + 41;
if (rightNow > initialCheck) {
initialBomb = now();
var updateArray = {timer:"start"};
// NOTE(review): jsonUpdate is another implicit global.
jsonUpdate = JSON.stringify(updateArray);
// NOTE(review): the emit itself looks correct; clients must already be
// connected to port 1337 when this fires — TODO confirm with the client.
io.emit('update', jsonUpdate);
console.log(jsonUpdate);
}
}
// NOTE(review): `res` is never ended, so the POSTing client hangs.
});
}).listen(3000);
Client Code:
<script>
// Connect to the socket.io server on port 1337 (not the HTTP server's 3000).
var socket = io('87.98.219.48:1337');
socket.on('update', function(payload) {
// payload is the JSON string produced by the server's JSON.stringify.
var data = JSON.parse(payload);
console.log(payload);
if (data['timer'] == 'start') {
// NOTE(review): initTick, timerNow, tick and delay are defined elsewhere
// on the page — not visible in this snippet.
initTick = timerNow();
setTimeout(tick, delay);
}
});
</script>
I am new to Node.js world, kind of stuck in situation.
below code is for reference:
var http = require('http');
var step = require('step');
var request = require('request');
// Fetch a remote image into a Buffer and send it back as the HTTP response.
exports.readimage2 = function(req, res){
//res.send(200,'OK');
//var image_url = 'http://www.letsgodigital.org/images/artikelen/39/k20d-image.jpg'; //--- 10mb
//var image_url = 'http://upload.wikimedia.org/wikipedia/commons/2/2d/Snake_River_(5mb).jpg';
//var image_url = 'http://www.sandia.gov/images2005/f4_image1.jpg'; //--- 2mb
var image_url = 'http://www.fas.org/nuke/guide/pakistan/pakistan.gif'; // --- some KB
http.get(image_url,
function(responseData) {
// NOTE(review): new Buffer(size) is deprecated (use Buffer.alloc). Also, if
// the server omits Content-Length (chunked transfer), parseInt yields NaN
// and this constructor throws — presumably why larger downloads fail; verify.
var data = new Buffer(parseInt(responseData.headers['content-length'],10));
var pos = 0;
responseData.on('data', function(chunk) {
chunk.copy(data, pos);
pos += chunk.length;
});
responseData.on('end', function () {
res.send(200, data);
});
});
};
The above code fails for large files when I use it with the step module.
Can anyone suggest how to do this properly with step?
Here is how I did it using step. The request module can do the same image-buffer download — thanks to a post on Stack Overflow I learned you just need to set encoding to null in the request options so it yields a Buffer response.
// Draw the downloaded image onto a node-canvas and respond with a data URL.
// NOTE(review): Canvas, request, step and res come from the enclosing scope,
// which is not shown in this snippet.
var canvas = new Canvas(3000, 3000),
ctx = canvas.getContext('2d'),
Image = Canvas.Image;
var image_url = "http://www.a2hosting.com/images/uploads/landing_images/node.js-hosting.png";
//var image_url = 'http://upload.wikimedia.org/wikipedia/commons/1/16/AsterNovi-belgii-flower-1mb.jpg';
step(
function() {
// encoding: null makes request deliver `body` as a raw Buffer instead of
// a decoded string — required for binary image data.
// `this` is step's continuation for the next function in the chain.
request.get({
url: image_url,
encoding: null
}, this);
},
function(err, response, body) {
// NOTE(review): `err` is not checked before using `body`.
var img = new Image;
img.src = body;
ctx.drawImage(img, 0, 0, img.width, img.height);
//res.send(200, data);
res.send(200, '<img src="' + canvas.toDataURL() + '" />');
}
);
Below is the code working for simple http module of node.
var http = require('http');
var step = require('step');
var request = require('request');
exports.imagedownload = function(req, res){
step(
function(){
console.log('*********** image download start ***********');
fndownload(this);
},
function(err, result){
if(err) {
}
console.log('*********** image download end ***********');
res.send(200, result);
}
);
};
// Download an image over HTTP and hand the callback a Buffer with its bytes:
// callback(err) on failure, callback(null, buffer) on success.
// `url` is optional and defaults to the original sample image, so existing
// callers are unaffected.
function fndownload(callback, url) {
  var image_url = url || 'http://upload.wikimedia.org/wikipedia/commons/2/2d/Snake_River_(5mb).jpg';
  http.get(image_url,
    function(responseData) {
      // Collect chunks and concat at the end. Unlike the original
      // new Buffer(content-length) approach, this avoids the deprecated and
      // unsafe Buffer constructor and never throws when the Content-Length
      // header is absent (e.g. chunked responses), which made large
      // downloads fail.
      var chunks = [];
      responseData.on('data', function(chunk) {
        chunks.push(chunk);
      });
      responseData.on('end', function () {
        callback(null, Buffer.concat(chunks));
      });
      responseData.on('error', function(err) {
        // Mid-stream failures.
        callback(err);
      });
    }).on('error', function(err) {
      // Connection-level failures (DNS, refused connection, ...).
      callback(err);
    });
};