I have some files on a CDN that I want to bring into a local project. I was trying to automate this in my gulpfile by using npm's request module.
But I'm having a difficult time getting it to work with a glob of files:
var core = request({
  url: `${cdn}/core/**/*.scss`,
  headers: requestHeaders
}).pipe(gulp.dest(`${local}`));
Is it just not possible to do this with request? It's not a mandatory requirement, but it sure would be convenient.
Using gulp-remote-src
var gulp = require('gulp');
var remoteSrc = require('gulp-remote-src');

gulp.task('download', function() {
  // returning the stream lets gulp know when the download has finished
  return remoteSrc(['file.scss'], {
    base: 'http://domain.com/path/'
  })
  .pipe(gulp.dest('.'));
});
Downloads http://domain.com/path/file.scss into the current directory.
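gulp-remote-src also accepts an array of paths, so if the file names on the CDN are known up front they can be fetched in one task; the names below are just placeholders:

var gulp = require('gulp');
var remoteSrc = require('gulp-remote-src');

gulp.task('download-all', function() {
  // hypothetical file names, relative to the base URL
  return remoteSrc(['base.scss', 'mixins.scss'], {
    base: 'http://domain.com/path/'
  })
  .pipe(gulp.dest('.'));
});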
Using fs
var gulp = require('gulp');
var request = require('request');
var fs = require('fs');

gulp.task('download', function() {
  return request
    .get('http://domain.com/path/file.scss')
    .on('error', function (error) {
      console.log("Error: ", error);
    })
    .pipe(fs.createWriteStream('f.scss'));
});
Downloads http://domain.com/path/file.scss into ./f.scss.
Using vinyl-source-stream
var gulp = require('gulp');
var request = require('request');
var source = require('vinyl-source-stream');

gulp.task('download', function() {
  return request
    .get('http://domain.com/path/file.scss')
    .on('error', function (error) {
      console.log("Error: ", error);
    })
    .pipe(source('f.scss'))   // source() takes the file name
    .pipe(gulp.dest('.'));    // gulp.dest() takes the directory
});
Downloads http://domain.com/path/file.scss into ./f.scss.
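To come back to the original question: an HTTP server can't expand a glob like core/**/*.scss for you, so the file list has to be known up front. Here is an untested sketch that applies the vinyl-source-stream approach above to such a list and uses the merge-stream module so gulp can wait for every download; the cdn, local, and file paths are placeholders.

var gulp = require('gulp');
var request = require('request');
var source = require('vinyl-source-stream');
var merge = require('merge-stream');

var cdn = 'http://domain.com';                       // placeholder CDN host
var local = './local';                               // placeholder destination
var files = ['core/base.scss', 'core/mixins.scss'];  // hypothetical file list

gulp.task('download', function () {
  var streams = files.map(function (file) {
    return request
      .get(cdn + '/' + file)
      .on('error', function (error) {
        console.log('Error: ', error);
      })
      .pipe(source(file))        // keeps the relative path for gulp.dest
      .pipe(gulp.dest(local));
  });
  return merge(streams);         // gulp waits for the merged stream to end
});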
Related
I am trying to automate downloading a PDF by its URL, and my code is:
var http = require('http');
var fs = require('fs');
browser.getAllWindowHandles().then(function(handles) {
  browser.switchTo().window(handles[1]);
  console.log(handles[1]);
  browser.driver.getCurrentUrl().then(function(url) {
    console.log(url);
    var file = fs.createWriteStream(url);
    console.log(file);
    var request = http.get(url, function(response) {
      response.pipe(file);
    });
  });
});
I am getting this error:
Protocol "blob:" not supported. Expected "http:".
Can anyone help with this issue?
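For reference, the usual pattern for saving an ordinary http: URL to disk looks like the sketch below (the URL and file name are hypothetical). A blob: URL, however, only exists inside the browser page that created it, so Node's http module cannot fetch it directly.

var http = require('http');
var fs = require('fs');

var url = 'http://example.com/reports/some.pdf';  // hypothetical http: URL
var file = fs.createWriteStream('report.pdf');    // local file name, not the URL

http.get(url, function(response) {
  response.pipe(file);
  file.on('finish', function() {
    console.log('download finished');
  });
}).on('error', function(err) {
  console.error(err);
});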
UPDATE
Thanks to @robertklep and @vallo for pointing out that I was not parsing the multipart request properly.
Here's the updated server code with some re-worked sample code from Busboy:
'use strict';
// Require
var http = require('http');
var Busboy = require('busboy');
var fs = require('fs');
// Server
var server = http.createServer(function(request, response) {
  if (request.method === 'POST') {
    var busboy = new Busboy({ headers: request.headers });

    busboy.on('file', function(fieldname, file, filename, encoding, mimetype) {
      file.pipe(fs.createWriteStream(`../db/images/${filename}`));
    });

    busboy.on('finish', function() {
      response.writeHead(200, { 'Connection': 'close' });
      response.end("That's all folks!");
    });

    return request.pipe(busboy);
  }

  response.writeHead(404);
  response.end();
});
server.listen(8000, '192.168.7.25', () => {});
I am trying to post a jpg to an endpoint, but the resulting image cannot be opened:
The file “image_copy.jpg” could not be opened. It may be damaged or
use a file format that Preview doesn’t recognize.
Some background:
Everything (servers, storage) is being hosted locally
I have decided to only use native Node modules like http and fs, due to storage constraints on a microcontroller board
I'm using form-data on the client, as it eases the pain of multipart forms and uploads and sets the correct request headers
Here is some sample code broken into two scripts:
Server
'use strict';
// Require
var http = require('http');
// Server
var server = http.createServer((request, response) => {
  var body = [];

  request.on('data', function(chunk) {
    body.push(chunk);
  });

  request.on('end', function() {
    saveImage(Buffer.concat(body), null);
    response.statusCode = 200;
    response.end('thanks');
  });
});

server.listen(8000, '192.168.7.25', () => {});

// Process
function saveImage(data, callback) {
  var fs = require('fs');
  fs.writeFile('../db/images/image_copy.jpg', data, function(err) {});
}
Client
'use strict';
// Require
var FormData = require('form-data');
var fs = require('fs');
var http = require('http');
// Vars
var form = new FormData();
// Process
form.append('my_file', fs.createReadStream('/temp/1.jpg'));
var request = http.request({
  hostname: '192.168.7.25',
  port: 8000,
  path: '/api/premises/v1/image',
  method: 'POST',
  headers: form.getHeaders()
});

form.pipe(request);

request.on('response', function(res) {
  console.log(res.statusCode);
});
After executing, the jpg is uploaded and saved to the correct file location (and also has the same file size as the source jpg) but the new image can't be opened.
Even if I encode the incoming chunk as binary and set the encoding on fs.writeFile to binary, I get a similar result.
What am I doing wrong? Thank you!
The client is uploading in multipart/form-data format, which is a format that can contain, among other things, file data.
However, this means that the server should parse this format to extract the file data. Right now, the server is just taking the request body verbatim and writing it to a file.
The multiparty module can help you, and one of its usage examples shows you how to hook it up with http.Server: https://github.com/pillarjs/multiparty#usage
var multiparty = require('multiparty');
var http = require('http');
var util = require('util');
http.createServer(function(req, res) {
  // parse a file upload
  var form = new multiparty.Form();

  form.parse(req, function(err, fields, files) {
    res.writeHead(200, {'content-type': 'text/plain'});
    res.write('received upload:\n\n');
    res.end(util.inspect({fields: fields, files: files}));
  });
}).listen(8000);
Using that, you can extract the file data from (I think) files.my_file and write it to the file.
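Building on that, a rough sketch of the saving step could look like the following, assuming the field is still named my_file and relying on multiparty writing each upload to a temporary file whose location is exposed as path:

var multiparty = require('multiparty');
var http = require('http');
var fs = require('fs');

http.createServer(function(req, res) {
  var form = new multiparty.Form();

  form.parse(req, function(err, fields, files) {
    if (err) {
      res.writeHead(500);
      return res.end(err.message);
    }

    // 'my_file' matches the field name used by the form-data client above
    var upload = files.my_file[0];

    // copy the temp file multiparty created into the images folder
    fs.createReadStream(upload.path)
      .pipe(fs.createWriteStream(`../db/images/${upload.originalFilename}`))
      .on('finish', function() {
        res.writeHead(200, { 'content-type': 'text/plain' });
        res.end('saved ' + upload.originalFilename);
      });
  });
}).listen(8000);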
I'm in the process of making my own Discord bot and I have no previous experience with JS. I'm making a feature that downloads and posts an image from this website: http://random.dog
I have successfully downloaded images from elsewhere when I know the exact link to the picture. Here's the relevant part of my code:
var download = function(uri, filename, callback){
  request(uri).pipe(fs.createWriteStream(filename)).on('close', callback);
};

download('http://random.dog/14769-27888-18622.jpg', 'dog.png', function() {
  // ...
});
I'm using the request module (https://www.npmjs.com/package/request). Is it possible to get images without knowing the exact URL, using that module?
This is how you can fetch the image URL dynamically and download the picture with the name you need. It uses two extra modules: request-promise and cheerio.
var app = require('express')();
var fs = require('fs'),
    request = require('request');

// save image
var download = function(uri, filename, callback){
  request.head(uri, function(err, res, body){
    request(uri).pipe(fs.createWriteStream(filename)).on('close', callback);
  });
};

// get URL
var rp = require('request-promise'),
    cheerio = require('cheerio'),
    url = require('url'),
    base = 'http://random.dog/';

var options = {
  uri: base,
  method: 'GET',
  resolveWithFullResponse: true
};

rp(options)
  .then(function (response) {
    var $ = cheerio.load(response.body);
    var relativeLinks = $("img");

    relativeLinks.each(function() {
      var link = $(this).attr('src');
      var fullImagePath = url.resolve(base, link); // should be absolute

      // pass to download(): fullImagePath is the image URL, link is the file name
      download(fullImagePath, link, function() {
        console.log("done downloading " + link);
      });
    });
  });

app.listen(3000);
console.log("server started");

app.get('/', function(req, res){
  res.sendFile(__dirname + '/a.html');
});
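One possible refinement: random.dog sometimes serves video files as well, so if only static images are wanted, the src values can be filtered by extension before downloading, e.g. by replacing the relativeLinks.each block above with something like this (the extension list is just a guess at what the site serves):

relativeLinks.each(function() {
  var link = $(this).attr('src');

  // skip .mp4/.webm and anything else that isn't a plain image
  if (!/\.(jpe?g|png|gif)$/i.test(link)) return;

  download(url.resolve(base, link), link, function() {
    console.log("done downloading " + link);
  });
});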
I have a file on the D: drive of my local system. I need to download the file onto the E: drive. How do I do this using node.js and an http request? I am a beginner in node.js. Please give me valuable suggestions.
Note: The file may be of any type.
Here is an example:
// upload.js
var fs = require('fs')
var newPath = "E:\\newPath";
var oldPath = "D:\\oldPath";
exports.uploadFile = function (req, res) {
  fs.readFile(oldPath, function(err, data) {
    fs.writeFile(newPath, data, function(err) {
      fs.unlink(oldPath, function(err) {
        if (err) throw err;
        res.send("File uploaded to: " + newPath);
      });
    });
  });
};
// app.js
var express = require('express'),  // fetch express js library
    upload = require('./upload');  // fetch upload.js you have just written
var app = express();
app.get('/upload', upload.uploadFile);
app.listen(3000);
Basically there are two parts: one does the copying from one drive to another, and the other triggers it. Once you run app.js and make a GET request to localhost:3000/upload, it will copy the file from oldPath to newPath. For further information, have a look at expressjs and fs.
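As a side note, if the goal is only to copy a file between drives on the same machine, HTTP isn't strictly needed. Here is a small sketch that streams the file instead of reading it fully into memory (the paths are the same placeholders as above):

var fs = require('fs');

function copyFile(src, dest, callback) {
  var read = fs.createReadStream(src);
  var write = fs.createWriteStream(dest);

  read.on('error', callback);
  write.on('error', callback);
  write.on('finish', callback);   // fires once everything has been flushed to disk

  read.pipe(write);
}

copyFile("D:\\oldPath", "E:\\newPath", function(err) {
  if (err) return console.error(err);
  console.log('copy finished');
});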
Assuming it's a text file, you could write two node.js scripts.
The first would answer (all or specific, your choice) http GET requests with the content of the file; the other would make a GET request and download the file.
server.js: will only work for text files
var http = require('http'),
    fs = require('fs'),
    server = http.createServer(function (req, res) {
      res.writeHead(200, {'content-type': 'text/plain'});
      fs.readFile('D:/path/to/file.txt', function (err, data) {
        res.write('' + data);
        res.end();
      });
    }).listen(8080);
client.js
var http = require('http'),
    fs = require('fs'),
    file = fs.createWriteStream('E:/path/to/new.txt', {flags: 'w'});

http.get('http://localhost:8080', function (res) {
  res.pipe(file, {end: false});
  res.on('end', function() {
    file.end();
  });
});
EDIT:
The only advantage over anvarik's solution is that I don't use express...
When I try to run the code below:
var express = require('express');
var app = express();
var port = process.env.PORT || 5000;
var request = require('request');
var zlib = require('zlib');
app.listen(port, function() {
  console.log("Listening on " + port);
  makeRequest();
});

function makeRequest(){
  var url = 'https://api.stackexchange.com/2.1/search?pagesize=5&order=desc&sort=activity&intitle=ios development&site=stackoverflow';
  var headers = {'Accept-Encoding': 'gzip'};
  var response = request(url, headers);
  gunzipJSON(response);
}

function gunzipJSON(response){
  var gunzip = zlib.createGunzip();
  var json = "";

  gunzip.on('data', function(data){
    json += data.toString();
  });

  gunzip.on('end', function(){
    parseJSON(json);
  });

  response.pipe(gunzip);
}

function parseJSON(json){
  var json = JSON.parse(json);

  if(json.items.length){
    for(var i in json.items){
      console.log(json.items[i].title + '\n' + json.items[i].link);
    }
  }
}
I get an error saying
Error: incorrect header check
at Zlib._binding.onerror (zlib.js:286:17)
I am passing my own gzipped url in the code.
Any help will be really useful.
Thanks
Old question (and request library is now deprecated), but better late than never:
Interestingly, the code in question does work for me on Node.js version 15.13.0, but not on 14.16.0 (keeping the version of request the same, which is the latest 2.88.2).
However, just using the following simple code works on 14.16.0, but not on 15.13.0:
request(url, function (error, response, body) {
  console.log(JSON.parse(body));
});
This means that for some reason, on 14.16.0 the response body is automatically unzipped (hence the above snippet works), while on 15.13.0 the response body is kept compressed, so it has to be decompressed explicitly.
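One way to avoid depending on that version-specific behaviour, while staying on the (now deprecated) request library, is to pass gzip: true so that request sends the Accept-Encoding header and decompresses the response body itself:

var request = require('request');

var url = 'https://api.stackexchange.com/2.1/search?pagesize=5&order=desc&sort=activity&intitle=ios development&site=stackoverflow';

// gzip: true asks for a compressed response and transparently decompresses it
request({ url: url, gzip: true }, function(error, response, body) {
  if (error) return console.error(error);

  var json = JSON.parse(body);
  json.items.forEach(function(item) {
    console.log(item.title + '\n' + item.link);
  });
});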