I would like to download the contents of a .docx file from a URL to S3 using Node.js. Is there a suggested library for doing this? I tried downloading it locally using something like the code below, but the document contents turn out to be gibberish. Is there anything I am missing here?
const axios = require("axios");
const fs = require("fs");

(async function () {
  let article;
  try {
    const httpResponse = await axios.get("https://<url>/Go_Lang.docx?raw=true",
      { responseType: 'blob', headers: { 'Accept': "application/vnd.openxmlformats-officedocument.wordprocessingml.document" } });
    fs.writeFileSync("./temp.docx", Buffer.from(httpResponse.data), function (err) {
      if (err) console.log(err);
      else console.log("done");
    });
  } catch (err) {
    console.log(err);
  }
})();
Change responseType to arraybuffer; then you don't need to convert the data to a Buffer yourself.
Also, fs.writeFileSync does not take a callback, so use fs.writeFile instead.
Try this:
(async function () {
  let article;
  try {
    const httpResponse = await axios.get("https://<url>/Go_Lang.docx?raw=true", {
      responseType: 'arraybuffer',
      headers: {
        'Accept': "application/vnd.openxmlformats-officedocument.wordprocessingml.document"
      }
    });
    fs.writeFile("./temp.docx", httpResponse.data, function (err) {
      if (err) console.log(err);
      else console.log("done");
    });
  } catch (err) {
    console.log(err);
  }
})();
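To cover the S3 part of the question: once the response data is a Buffer, you can hand it straight to the AWS SDK instead of writing it to disk first. Here is a minimal sketch using the v3 client (@aws-sdk/client-s3); the region, bucket name, and key below are placeholders:

const { S3Client, PutObjectCommand } = require("@aws-sdk/client-s3");

const s3 = new S3Client({ region: "us-east-1" });

async function uploadDocxToS3(buffer) {
  // Uploads the buffer to S3 under the given key.
  await s3.send(new PutObjectCommand({
    Bucket: "my-bucket",
    Key: "Go_Lang.docx",
    Body: buffer,
    ContentType: "application/vnd.openxmlformats-officedocument.wordprocessingml.document"
  }));
}

You would call uploadDocxToS3(httpResponse.data) in place of the fs.writeFile call.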
I'm trying to send a file from my NodeJS Lambda function to Dailymotion, and I'm using the following code:
// Download the original file to the `tmp` folder of Lambda.
await axios.get('https://cdn.mysite.com/source.mp4', {
  responseType: 'stream'
})
.then(response => {
  response.data.pipe(fs.createWriteStream('/tmp/video.mp4'));
})
.catch(error => {
  console.log(error);
});

const form = new FormData();
form.append('file', fs.createReadStream('/tmp/video.mp4'));

// Post the file to Dailymotion API.
axios.post('https://upload-xx.xxx.dailymotion.com/upload?uuid=xxxxx&seal=xxxxx&extra=xxxxx', form, {
  headers: {
    ...form.getHeaders,
    'Content-Type': 'multipart/formdata',
    'Content-Length': fs.statSync('/tmp/video.mp4').size
  },
})
.then(response => {
  console.log(response);
})
.catch(error => {
  console.log(error);
});
I can use the following URL to check the upload progress of the file: https://upload-xx.xxx.dailymotion.com/progress?uuid=xxxxx, but it seems that it uploads only the first chunk of the file and then stops, and I'm not getting any error or response.
Did I miss anything here?
When you use await, what you get back is not a promise but the resolved response, so chaining old-style .then and .catch onto it makes no sense. The stream itself lives on response.data, and you need to wait until the file has finished writing before you start the upload.
Try the following.
try {
  const response = await axios.get('https://cdn.mysite.com/source.mp4', {
    responseType: 'stream'
  });
  // response.data is the readable stream; pipe it to disk and wait for
  // the write stream to close before starting the upload.
  const writer = fs.createWriteStream('/tmp/video.mp4');
  response.data.pipe(writer);
  writer.on('error', (err) => console.log({ err }));
  writer.on('close', async () => {
    try {
      const form = new FormData();
      form.append('file', fs.createReadStream('/tmp/video.mp4'));
      // Post the file to Dailymotion API. Note that form.getHeaders() must be
      // called; it already sets the multipart/form-data Content-Type and boundary.
      const postThis = await axios.post('https://upload-xx.xxx.dailymotion.com/upload?uuid=xxxxx&seal=xxxxx&extra=xxxxx', form, {
        headers: {
          ...form.getHeaders()
        },
      });
      console.log({ postThis });
    } catch (err) { console.log({ err }); }
  });
} catch (err) { console.log({ err }); }
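As a side note, if your Lambda runtime has Node's stream/promises module (Node 15+), the download-and-wait step can be written without event listeners. This is only a sketch of that part, using the same URL and /tmp path:

const { pipeline } = require('stream/promises');

// Resolves only once the whole file has been flushed to /tmp/video.mp4.
const response = await axios.get('https://cdn.mysite.com/source.mp4', {
  responseType: 'stream'
});
await pipeline(response.data, fs.createWriteStream('/tmp/video.mp4'));

After that await, it is safe to create the read stream for the upload.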
I am using Node's fs module.
When I run the following code:
return new Promise((resolve, reject) => {
  if (!fs.existsSync(`./${imageDescription}`)) {
    axios.get(imageUrl).then((images) => {
      fs.writeFile(`./${imageDescription}`, images.data['hits'][0]['largeImageURL'], function (err, data) {
        if (err) {
          return console.log(err);
        }
        resolve(data);
      });
    });
  } else {
    fs.readFile(`./${imageDescription}`, (err, data) => {
      if (err) {
        console.error(err);
        return;
      }
      resolve(data);
    });
  }
});
Only the image URL (which starts with https:// and ends with .png) is saved to the file, but I want the image itself to be saved there.
Below you can find a piece of code that fetches an image from a URL and saves it with a random name and its original extension.
const fs = require('fs');
const { randomUUID } = require('crypto');
const axios = require('axios');

const imgUrl = 'https://i.stack.imgur.com/ILTQq.png';
const randomId = randomUUID();
const fileExtension = imgUrl.split('.').pop();

axios
  .get(imgUrl, { responseType: 'stream' })
  .then((response) => {
    response.data.pipe(fs.createWriteStream(`${randomId}.${fileExtension}`));
  })
  .catch((error) => {
    console.log(error);
  });
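Note that the pipe finishes asynchronously, so the file may not be complete the moment the .then callback returns. If you need the image right away, one option (a sketch reusing the same variables) is to wait for the write stream's finish event:

axios
  .get(imgUrl, { responseType: 'stream' })
  .then((response) => {
    const writer = fs.createWriteStream(`${randomId}.${fileExtension}`);
    response.data.pipe(writer);
    // 'finish' fires once the image has been fully flushed to disk.
    writer.on('finish', () => console.log('saved'));
    writer.on('error', (error) => console.log(error));
  })
  .catch((error) => {
    console.log(error);
  });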
As the title says, how do you make a direct download link for a file stored in MongoDB (GridFSBucket) using Express?
The file should be downloadable from memory, as I don't want to save it temporarily on the server.
I have this method:
async function downloadFileFromDB(fileId) {
  var gridfsbucket = new mongoose.mongo.GridFSBucket(mongoose.connection.db, {
    chunkSizeBytes: 1024,
    bucketName: 'filesBucket'
  });
  try {
    const stream = gridfsbucket.openDownloadStream(fileId)
    const fileBuffer = Buffer.from(stream)
    return fileBuffer
  } catch (err) {
    stream.on('error', () => {
      console.log("Some error occurred in download:" + error);
    })
    console.log(err);
  }
}
And this route:
router.get('/download-file', async (req, res) => {
  const fileId = req.query.fileId
  const ObjectFileId = new ObjectId(fileId)
  const fileBuffer = await fileFacade.downloadFileFromDB(ObjectFileId)
  res.download(fileBuffer)
})
But res.download wants a path, not a buffer. I'm also not sure I can build a buffer directly from the openDownloadStream method.
Can anyone help?
I believe you need to write the data to your res object. I accomplished this like so:
const readStream = gridfs.openDownloadStreamByName(filename);

readStream.on("data", (chunk) => {
  res.write(chunk);
});

readStream.on("end", () => {
  res.status(200).end();
  mongoClient.close();
});

readStream.on("error", (err) => {
  console.log(err);
  res.status(500).send(err);
});
So, you may just have to do:
res.write(fileBuffer);
res.end();
//// Instead of doing:
// res.download(fileBuffer);
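Alternatively, since openDownloadStream already gives you a readable stream, you can skip the buffer entirely and pipe it straight into the response. A sketch of the route written that way, reusing the bucket options from the question (the Content-Disposition header is what makes the browser treat it as a download):

router.get('/download-file', (req, res) => {
  const fileId = new ObjectId(req.query.fileId);
  const gridfsbucket = new mongoose.mongo.GridFSBucket(mongoose.connection.db, {
    bucketName: 'filesBucket'
  });

  res.set('Content-Disposition', 'attachment');
  gridfsbucket.openDownloadStream(fileId)
    .on('error', (err) => res.status(500).send(err))
    .pipe(res);
});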
Hello, I am taking JSON, passing it through zlib for compression, storing it as a buffer in a flat-file database, and then reading it back and sending it out.
My problem is that the data comes out as all sorts of crazy characters. I've tried .toString(); I've tried Node's official StringDecoder. I've tried a lot of things, but I can't get it into any format other than the .toJSON-readable JSON buffer, when what I actually need is the final output JSON.
Ideas?
WRITING TO FLAT FILE DB
export const writeCollection = (index, timespan, data) => {
  zlib.gzip(JSON.stringify(data), (err, result) => {
    if (err) {
      console.log({ err });
    } else {
      const keyName = dbCollections.gzip[index].add({
        result
      });
      collectionKeys.gzip[index] = keyName;
      writeLogging(timespan, keyName, index, "gzip");
    }
  });
  zlib.brotliCompress(JSON.stringify(data), (err, result) => {
    if (err) {
      console.log({ err });
    } else {
      const keyName = dbCollections.brotli[index].add({
        result
      });
      collectionKeys.brotli[index] = keyName;
      writeLogging(timespan, keyName, index, "brotli");
    }
  });
};
READING FROM FLAT FILE DB
export const readCollection = (index, encoding) => {
  const encodedRead = encoding.includes("br")
    ? dbCollections.brotli[index].all()
    : dbCollections.gzip[index].all();
  return encodedRead[0].result;
};
TRYING TO CONVERT TO JSON
export const testGetQuakeData = (req, res) => {
  const encoding = req.headers["accept-encoding"];
  try {
    const data = readCollection(0, encoding);
    console.log(data);
    const json = decoder.write(Buffer.from(data));
    console.log(json);
    // res.set({
    //   'Content-Type': 'application/json',
    //   'Content-Encoding': encoding.includes('br') ? 'br' : "gzip",
    // })
    res.send(json);
  } catch (err) {
    console.log({ err });
    res.status(500).send(err);
  }
};
FORGOT TO UNZIP WITH ZLIB!
export const testGetQuakeData = (req, res) => {
  const encoding = req.headers["accept-encoding"];
  try {
    const data = readCollection(0, encoding);
    encoding.includes("br")
      ? zlib.brotliDecompress(data, (err, result) => {
          err ? res.status(500).send(err) : res.send(decoder.write(result));
        })
      : zlib.unzip(data, (err, result) => {
          err ? res.status(500).send(err) : res.send(decoder.write(result));
        });
  } catch (err) {
    console.log({ err });
    res.status(500).send(err);
  }
};
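As a side note, if you prefer async/await over the nested callbacks, zlib's callback functions can be wrapped with util.promisify. A sketch doing the same decompression, assuming the readCollection helper above:

import util from "util";
import zlib from "zlib";

const brotliDecompress = util.promisify(zlib.brotliDecompress);
const unzip = util.promisify(zlib.unzip);

export const testGetQuakeDataAsync = async (req, res) => {
  const encoding = req.headers["accept-encoding"];
  try {
    const data = readCollection(0, encoding);
    // Decompress with whichever codec the blob was stored with.
    const result = encoding.includes("br")
      ? await brotliDecompress(data)
      : await unzip(data);
    res.send(result.toString());
  } catch (err) {
    console.log({ err });
    res.status(500).send(err);
  }
};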
HERE'S THE FUTURE PROD FUNCTION!
export const testGetCompressedQuakeData = (req, res) => {
  const encoding = req.headers["accept-encoding"];
  try {
    const data = readCollection(0, encoding);
    encoding.includes("br")
      ? res.writeHead(200, {
          "Content-Type": "application/json",
          "Content-Encoding": "br",
          "Content-Length": data.length,
        })
      : res.writeHead(200, {
          "Content-Type": "application/json",
          "Content-Encoding": "gzip",
          "Content-Length": data.length,
        });
    res.end(data);
  } catch (err) {
    console.log({ err });
    res.status(500).send(err);
  }
};
Content-Length: 6319 (Brotli) vs 63148 (JSON)
Brotli Compression for the win!
Hi guys, I'm trying to download a PDF file and save it to my disk. The API sends me a string, but the following code is not working.
axios.get('https://myapi.com/download', config).then((res) => {
  var buff = Buffer.from(res.data, 'binary');
  fs.writeFile('file.pdf', buff, function (err) {
    if (err) throw err;
    console.log('Saved!');
  });
}).catch((e) => {
  console.log(e);
});
I've tried this, and it works...
fs.readFile('./download.pdf', 'binary', function (err, data) {
  var str = data.toString();
  var buff = Buffer.from(str, 'binary');
  fs.writeFile('novopdf.pdf', buff, () => {
    console.log('ok');
  });
});
You need to configure the axios GET request as follows:
const response = await Axios({
  method: 'GET',
  url: url,
  responseType: 'stream'
})

response.data.pipe(Fs.createWriteStream(path)) // path is the location where you want to write the file.
Then listen for the end event on response.data (or the finish event on the write stream) to know when the download has completed.
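If you also want to know when the file has actually finished writing, one common pattern (a sketch using the same Axios/Fs names, with url and path as placeholders) is to resolve a promise on the write stream's finish event:

async function downloadPdf(url, path) {
  const response = await Axios({
    method: 'GET',
    url: url,
    responseType: 'stream'
  });
  return new Promise((resolve, reject) => {
    const writer = Fs.createWriteStream(path);
    response.data.pipe(writer);
    // Resolve only after the PDF has been fully flushed to disk.
    writer.on('finish', resolve);
    writer.on('error', reject);
  });
}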