I can't understand why this function exits before the image is downloaded and saved.
I need to wait until the image has been saved before exiting.
function downloadImagefromRemote(url_immagine, filename) {
    console.log('[2] Salvo l\'immagine remota in locale');

    const downloadFile = async (fileUrl, downloadFolder, filename) => {
        // Get the file name
        const fileName = path.basename(fileUrl);
        // The path of the downloaded file on our machine
        const localFilePath = path.resolve(__dirname, downloadFolder, filename);
        try {
            const response = await axios({ // <----- here jump out
                method: "GET",
                url: fileUrl,
                responseType: "stream",
            });
            await response.data.pipe(fs.createWriteStream(localFilePath));
            console.log("Successfully downloaded file!");
        } catch (err) {
            throw new Error(err);
        }
    };

    const IMAGE_URL = 'https://www.kindacode.com/wp-content/uploads/2021/01/test.jpg';
    let rel_path = __dirname + '/../../public/images/';
    downloadFile(IMAGE_URL, rel_path, filename);
}
The main problem is that when you call
axios({...})
you need to specify the request URL as
url: imgUrl
More generally speaking:
avoid unnecessary nested functions
clean up your unused and unnecessary params
Here is a working example:
'use strict'

const fs = require('fs')
const path = require('path')
const axios = require('axios')

async function downloadImage(imgUrl, saveRelPath, saveName) {
    const _path = path.resolve(__dirname, saveRelPath, saveName)
    const writer = fs.createWriteStream(_path)

    const response = await axios({
        url: imgUrl,
        method: 'GET',
        responseType: 'stream',
    })

    response.data.pipe(writer)

    return new Promise((resolve, reject) => {
        writer.on('finish', resolve)
        writer.on('error', reject)
    })
}

async function anotherFunction() {
    const IMG_URL = 'https://www.kindacode.com/wp-content/uploads/2021/01/test.jpg'
    const REL_PATH = '.'
    const NAME = 'test.jpg'

    console.log('Get and save image')
    await downloadImage(IMG_URL, REL_PATH, NAME)
    console.log('Image saving done, other stuff here')
}

anotherFunction()
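One optional usage note: anotherFunction() returns a promise, so if the download can fail (unreachable URL, unwritable path) you may want to catch at the call site instead of the bare call above, for example:

// Optional: surface download failures instead of leaving an unhandled rejection
anotherFunction().catch(err => console.error('Download failed:', err))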
EDIT: in response to
I can't understand why this function exit before image is downloaded and saved
the stream piping doesn't return a promise, so you can't await it. You pipe a stream into a write stream, which is an EventEmitter, so you should listen for its events instead.
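For example, a minimal sketch of that listener approach (writeStreamToFile is just an illustrative name; this is essentially what the first example above does with writer.on('finish', ...)):

const fs = require('fs')

// Sketch: resolve only once the write stream has flushed the file to disk
function writeStreamToFile(readable, localFilePath) {
    return new Promise((resolve, reject) => {
        const writer = fs.createWriteStream(localFilePath)
        readable.pipe(writer)
        writer.on('finish', resolve)
        writer.on('error', reject)
    })
}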
You can try the version below; it should work. For an explanation of pipeline(), see this answer: Promise completed before file is written. I promisified your flow so it resolves as soon as the stream processing ends (note that I don't know about the other parts of your code; for example, if axios does not return a stream, this will not work).
const { pipeline } = require('stream')

async function downloadImagefromRemote(url_immagine, filename) {
    console.log('[2] Salvo l\'immagine remota in locale');

    const downloadFile = (fileUrl, downloadFolder, filename) => {
        return new Promise(async (resolve, reject) => {
            // Get the file name
            const fileName = path.basename(fileUrl);
            // The path of the downloaded file on our machine
            const localFilePath = path.resolve(__dirname, downloadFolder, filename);
            try {
                const response = await axios({ // <----- here jump out
                    method: "GET",
                    url: fileUrl,
                    responseType: "stream",
                });
                pipeline(
                    response.data,
                    fs.createWriteStream(localFilePath),
                    e => {
                        if (e) reject(e)
                        else resolve("Successfully downloaded file!")
                    }
                )
            } catch (err) {
                reject(Error(err))
            }
        })
    };

    const IMAGE_URL = 'https://www.kindacode.com/wp-content/uploads/2021/01/test.jpg';
    let rel_path = __dirname + '/../../public/images/';

    try {
        const confirmDownload = await downloadFile(IMAGE_URL, rel_path, filename);
        console.log(confirmDownload)
    } catch (e) {
        console.log('catched err:', e)
    }
}
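Since downloadImagefromRemote is itself async now, the caller also has to await it; a hypothetical call site, just to illustrate:

// Hypothetical caller: nothing after the await runs until the file is on disk
(async () => {
    await downloadImagefromRemote('https://www.kindacode.com/wp-content/uploads/2021/01/test.jpg', 'test.jpg');
    console.log('Image saved, continue with the rest of the flow');
})();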
Related
I need to download a file from a private GitLab server and I need the method to be synchronous. This was my previous async code and it works fine because I was using promises, but I'm having trouble converting it to synchronous. The other posts I've seen on SO either ended up using async code or didn't have options for headers.
const https = require('https');
const fs = require('fs');

const gitlabUrl = 'https://gitlab.custom.private.com';
const gitlabAccessToken = 'xmyPrivateTokenx';
const gLfilePath = '/api/v4/projects/1234/repository/files/FolderOne%2Ftest.txt/raw?ref=main';
const gLfileName = 'test.txt';
function downloadFileFromGitlab(filePath, fileName) {
    return new Promise((resolve, reject) => {
        var options = {
            path: filePath,
            headers: {
                'PRIVATE-TOKEN': gitlabAccessToken
            }
        };
        var url = gitlabUrl
        var file = fs.createWriteStream(fileName);

        const request = https.get(url, options, (response) => {
            response.pipe(file);
            file.on('finish', () => {
                file.close();
                resolve();
            });
            file.on('error', (err) => {
                file.close();
                reject(err);
            });
        });

        request.on('error', error => {
            throw console.error(error);
        });
    });
}

downloadFileFromGitlab(gLfilePath, gLfileName);
I was able to figure it out using curl
const child_process = require('child_process');
const fse = require('fs-extra'); // assuming fse refers to fs-extra

function downloadFileFromGitlab(filePath, fileName) {
    let curlCommand = "curl -s " + gitlabUrl + filePath + " -H 'PRIVATE-TOKEN:" + gitlabAccessToken + "'";
    let file = child_process.execSync(curlCommand);
    fse.writeFileSync(fileName, file);
}
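A variant of the same idea, assuming curl is available on the machine, is execFileSync, which passes the URL and token as separate arguments instead of interpolating them into a shell string (the function name here is just illustrative):

const child_process = require('child_process');
const fs = require('fs');

function downloadFileFromGitlabSync(filePath, fileName) {
    // curl writes the raw file to stdout; execFileSync returns it as a Buffer
    const file = child_process.execFileSync('curl', [
        '-s',
        gitlabUrl + filePath,
        '-H', 'PRIVATE-TOKEN: ' + gitlabAccessToken,
    ]);
    fs.writeFileSync(fileName, file);
}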
I have built an AWS Lambda function with a custom container image. I am trying to convert an Excel file to PDF with LibreOffice: getting the file from S3, saving it to a local file, converting it to PDF, and then uploading it back to S3.
Here is the code.
const fs = require('fs');
const getStream = require('get-stream');
const { Readable } = require('stream')
const { S3Client, GetObjectCommand, PutObjectCommand } = require("@aws-sdk/client-s3");
const libre = require('libreoffice-convert');
const path = require('path');

exports.handler = async (event) => {
    const bucket = event.queryStringParameters.bucket;
    const file = event.queryStringParameters.file;
    const convertedFile = event.queryStringParameters.convertedFile;

    if (event.queryStringParameters['warmup'] !== undefined) {
        return {
            result: true,
            message: 'warmed up'
        }
    }

    const client = new S3Client({ region: "ap-south-1" });
    const command = new GetObjectCommand({ Bucket: bucket, Key: file });
    const response = await client.send(command);
    const objectData = response.Body;

    const writeStream = fs.createWriteStream("/tmp/sample.xlsx");
    objectData.pipe(writeStream);

    var end = new Promise((resolve, reject) => {
        objectData.on('close', resolve(true));
        objectData.on('end', resolve(true));
        objectData.on('error', reject(false));
    });

    let completed = await end;

    if (completed) {
        const extend = '.pdf'
        const outputPath = `/tmp/sample${extend}`;
        const enterPath = '/tmp/sample.xlsx';

        var readingFile = new Promise((resolve, reject) => {
            fs.readFile(enterPath, (err, data) => {
                if (err) {
                    reject(false);
                }
                resolve(data);
            });
        });
        var fileData = await readingFile;

        var converting = new Promise((resolve, reject) => {
            libre.convert(fileData, extend, undefined, (err, done) => {
                if (err) {
                    reject(false)
                }
                fs.writeFileSync(outputPath, done);
                resolve(true)
            });
        })
        var converted = await converting;

        if (converted) {
            var convertedFileStream = fs.createReadStream(outputPath);
            const uploadCommand = new PutObjectCommand({ Bucket: bucket, Key: convertedFile, Body: convertedFileStream });
            const lastResponse = await client.send(uploadCommand);

            const returnResponse = {
                result: true,
                message: 'success',
                bucket: event.queryStringParameters.bucket,
                file: event.queryStringParameters.file,
                convertedFile: event.queryStringParameters.convertedFile
            };

            if (event.queryStringParameters['returnEvent'] !== undefined) {
                returnResponse['returnEvent'] = event;
            }

            return returnResponse;
        }
    }

    return completed;
};
However, I am getting this error at times. Sometimes it succeeds, but sometimes it throws this error.
{
    "errorType": "Error",
    "errorMessage": "false",
    "stack": [
        "Error: false",
        " at _homogeneousError (/function/node_modules/aws-lambda-ric/lib/Runtime/CallbackContext.js:56:16)",
        " at postError (/function/node_modules/aws-lambda-ric/lib/Runtime/CallbackContext.js:72:34)",
        " at done (/function/node_modules/aws-lambda-ric/lib/Runtime/CallbackContext.js:99:13)",
        " at fail (/function/node_modules/aws-lambda-ric/lib/Runtime/CallbackContext.js:113:13)",
        " at /function/node_modules/aws-lambda-ric/lib/Runtime/CallbackContext.js:148:24",
        " at processTicksAndRejections (internal/process/task_queues.js:97:5)"
    ]
}
I don't know Node.js in great detail, so I think the code may not be written the correct way. Any ideas what I am doing wrong here?
Like @hoangdv, when I logged errors I came to know that saving the file to disk was not working correctly (the promise resolved immediately, because resolve(true) was being called while the listeners were registered rather than when the stream finished). So I changed the part of the code that saves the file to the following, and then it worked.
const client = new S3Client({ region: "ap-south-1" });
const command = new GetObjectCommand({ Bucket: bucket, Key: file });
const { Body } = await client.send(command);

await new Promise((resolve, reject) => {
    Body.pipe(fs.createWriteStream(filePath))
        .on('error', err => reject(err))
        .on('close', () => resolve())
})

const excelFile = fs.readFileSync(filePath);
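On Node 15+ the same wait can also be written with pipeline from stream/promises, which rejects if either the S3 body stream or the write stream errors (a sketch, reusing Body and filePath from above):

const { pipeline } = require('stream/promises');

// Equivalent to the manual Promise above; pipeline resolves once the file is fully written
await pipeline(Body, fs.createWriteStream(filePath));
const excelFile = fs.readFileSync(filePath);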
I have a function which gets data from a URL as a stream and then pipes that data into a file using fs.createWriteStream. I want it to return something once the file has been created. My current implementation does not wait for the file creation and returns immediately. Here is the snippet:
const fs = require('fs');
const path = require('path');
const axios = require("axios");

const FileUploadPath = "./uploads/";

const downloadFile = async (url, filePath) => {
    const writer = fs.createWriteStream(filePath)

    const response = await axios({
        url,
        method: 'GET',
        responseType: 'stream'
    })

    response.data.pipe(writer)

    return new Promise((resolve, reject) => {
        writer.on('finish', async function (err) {
            console.log(err);
            console.log("file downloaded")
            resolve(filePath)
        })
        writer.on('error', reject(null))
    });
}
const file_resolvers = {
    Query: {
        async file_download(parent, args, context, info) {
            const fileUrl = // some file url
            var filePath = // path where downloaded file will be saved
            console.log("Downloading File " + fileUrl);
            const result = await downloadFile(fileUrl, filePath);
            console.log("Result received: " + result);
        },
    },
};

module.exports = {
    file_resolvers,
}
I am getting "file downloaded" message after "Result received: null". Which means it does not wait for "finish" to get complete and returns with null immediately.
I am aware that similar questions have been asked before. Still, I am not able to solve the problem I have. I need to load a bunch of images before executing another part of the code.
(async () => {
    const urls = <array of urls>
    await urls.map(url => {
        const filename = path.basename(url);
        const localPath = imagesPath + '/' + filename;
        return loadImageToPath(url, localPath);
    });
    console.log('done');
})();

async function loadImageToPath(url, localPath) {
    const file = fs.createWriteStream(localPath);
    return await http.get(url, function (response) {
        console.log('Image loaded: ' + localPath);
        response.pipe(file);
    });
}
Can someone please shed some light on this?
Thanks a lot.
map() returns an array of promises; to wait for all of them to resolve, use Promise.all() (see the MDN reference).
(async () => {
    const urls = <array of urls>
    const promises = urls.map(url => {
        const filename = path.basename(url);
        const localPath = imagesPath + '/' + filename;
        return loadImageToPath(url, localPath);
    });
    const responses = await Promise.all(promises) // this line waits for all promises to resolve
    console.log('done');
})();
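If individual downloads are allowed to fail without aborting the whole batch, Promise.allSettled (Node 12.9+) is an alternative to Promise.all, for example:

const results = await Promise.allSettled(promises)
// Each entry is { status: 'fulfilled', value } or { status: 'rejected', reason }
results.forEach((result, i) => {
    if (result.status === 'rejected') console.log('Failed:', urls[i], result.reason)
})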
I made some changes to the code and it is working now. I had thought that http.get returned a promise by itself; with a wrapper that returns a promise, it works now.
(async () => {
    const urls = <array of urls>
    await urls.map(url => {
        const filename = path.basename(url);
        const localPath = imagesPath + '/' + filename;
        return loadImageToPath(url, localPath);
    });
    console.log('done');
})();

async function loadImageToPath(url, localPath) {
    const file = fs.createWriteStream(localPath);
    return new Promise((resolve, reject) => {
        http.get(url, function (response) {
            console.log('Image loaded: ' + localPath);
            response.pipe(file);
            resolve();
        });
    });
}
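One caveat with this version (not part of the original post): resolve() fires as soon as the response starts arriving, not when the file has been fully written, and the await urls.map(...) still doesn't wait without Promise.all from the previous answer. If later code needs the complete files on disk, a sketch that resolves on the write stream's 'finish' event instead would be:

async function loadImageToPath(url, localPath) {
    const file = fs.createWriteStream(localPath);
    return new Promise((resolve, reject) => {
        http.get(url, function (response) {
            response.pipe(file);
            // Resolve only once the file has been flushed to disk
            file.on('finish', () => {
                console.log('Image loaded: ' + localPath);
                resolve(localPath);
            });
            file.on('error', reject);
        }).on('error', reject);
    });
}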
I'm using fs and phantomJS
const phantom = require('phantom');
const fs = require('fs');
I have 4 routes (urls) that get opened from phantom JS. When opened, the page content is read and then node.fs will write out that content into it's own html files.
const routes = [
    'about',
    'home',
    'todo',
    'lazy',
]
Question:
How do I loop over this async function for every value in const routes, in parallel?
(async function() {
    const instance = await phantom.create();
    const page = await instance.createPage();
    const status = await page.open(`http://localhost:3000/${routes}`);
    const content = await page.property('content');

    await fsPromise(`${routes}.html`, content);
    await instance.exit();
}());

const fsPromise = (file, str) => {
    return new Promise((resolve, reject) => {
        fs.writeFile(file, str, function (err) {
            if (err) return reject(err);
            resolve(`${routes} > ${routes}.html`);
        });
    })
};
It took me a while to get this actually up and running in an environment that supports await and async. It turns out Node v7.5.0 supports them - way simpler than fighting with babel! The only other thorn in this investigation was that request-promise, which I was using to test, doesn't seem to fail gracefully when the promise isn't built properly. I saw a lot of errors like this when I tried to use await with it:
return await request.get(options).map(json => json.full_name + ' ' + json.stargazers_count);
^^^^^^^
SyntaxError: Unexpected identifier
In the end though, I realized that your promise function doesn't actually use async/await (which is why mine errored), so the premise should be the same. Here's the test that I got working — it's very similar to yours. The key is in the synchronous for() iteration:
var request = require('request-promise')

var headers = { 'User-Agent': 'YOUR_GITHUB_USERID' }

var repos = [
    'brandonscript/usergrid-nodejs',
    'facebook/react',
    'moment/moment',
    'nodejs/node',
    'lodash/lodash'
]

function requestPromise(options) {
    return new Promise((resolve, reject) => {
        request.get(options).then(json => resolve(json.full_name + ' ' + json.stargazers_count))
    })
}

(async function() {
    for (let repo of repos) {
        let options = {
            url: 'https://api.github.com/repos/' + repo,
            headers: headers,
            qs: {}, // or you can put client_id / client secret here
            json: true
        };
        let info = await requestPromise(options)
        console.log(info)
    }
})()
And while I can't test it, I'm pretty sure this will work:
const routes = [
    'about',
    'home',
    'todo',
    'lazy',
]

(async function() {
    for (let route of routes) {
        const instance = await phantom.create();
        const page = await instance.createPage();
        const status = await page.open(`http://localhost:3000/${route}`);
        const content = await page.property('content');

        await fsPromise(`${route}.html`, content);
        await instance.exit();
    }
}())
Since you're using ES7 syntax, you should also be able to get the fsPromise() function to perform without declaring a promise:
const fsPromise = async (file, str) => {
    // fs.promises.writeFile already returns a promise, so no manual Promise wrapper is needed
    return await fs.promises.writeFile(file, str)
}
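And if the routes really should be processed in parallel rather than one after another, a sketch along these lines (one phantom instance, one page per route; I haven't verified how well phantom handles concurrent pages) should be close:

(async function() {
    const instance = await phantom.create();
    // Start all routes at once and wait for every page to be saved
    await Promise.all(routes.map(async route => {
        const page = await instance.createPage();
        await page.open(`http://localhost:3000/${route}`);
        const content = await page.property('content');
        await fsPromise(`${route}.html`, content);
        await page.close();
    }));
    await instance.exit();
}())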