I am getting this error while uploading a file from Postman:
(node:13648) [DEP0135] DeprecationWarning: ReadStream.prototype.open() is deprecated
My Node version is 15.0, and I'm using apollo-server-express. This is my code:
export const processUpload = async (file) => {
  const {
    createReadStream, mimetype, encoding, filename
  } = await file;
  const path = `uploads/${uuid()}${filename}`;
  const stream = createReadStream();
  return new Promise((resolve, reject) => {
    stream
      .pipe(fs.createWriteStream(path))
      .on('finish', () => {
        resolve({
          success: true,
          message: 'Successfully Uploaded',
          mimetype,
          filename,
          encoding,
          location: path
        });
      })
      .on('error', (err) => {
        console.log('Error Event Emitted', err);
        reject(err); // reject so the caller isn't left hanging when the write fails
      });
  });
};
Your Node version is newer than what the graphql-upload version bundled with apollo-server-express supports. Please add this to your package.json file (the resolutions field is honored by Yarn):
"resolutions": {
  "**/**/fs-capacitor": "^6.2.0",
  "**/graphql-upload": "^11.0.0"
}
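If you prefer not to rely on resolutions (it is a Yarn feature), another option that is often suggested is to disable the upload handling bundled with apollo-server-express and wire up graphql-upload ^11 yourself. A minimal sketch, assuming Apollo Server 2.x and that you already have typeDefs and resolvers defined:
const express = require('express');
const { ApolloServer } = require('apollo-server-express');
const { graphqlUploadExpress } = require('graphql-upload');

const app = express();

// Disable the outdated graphql-upload bundled with apollo-server-express
const server = new ApolloServer({ typeDefs, resolvers, uploads: false });

// Use the standalone graphql-upload middleware, which works on newer Node versions
app.use(graphqlUploadExpress({ maxFileSize: 10000000, maxFiles: 5 }));

server.applyMiddleware({ app });
app.listen(4000);
Your processUpload resolver code can stay the same; only the middleware that parses the multipart request changes.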
I read Pipe a stream to s3.upload(), but I'm having difficulty with it; I'm not sure it actually solves my problem, although I have tried it.
What I am doing is a GET call to www.example.com. This returns a stream, and I want to upload that stream to S3.
Here's my attempt:
fetch('https://www.example.com', {
  method: 'GET',
  headers: {
    'Authorization': "Bearer " + myAccessToken,
  },
})
  .then(function(response) {
    return response.text();
  })
  .then(function(data) {
    uploadToS3(data)
  });
const uploadToS3 = (data) => {
  // Setting up S3 upload parameters
  const params = {
    Bucket: myBucket,
    Key: "fileName",
    Body: data
  };
  // Uploading files to the bucket
  s3.upload(params, function(err, data) {
    if (err) {
      throw err;
    }
    console.log(`File uploaded successfully. ${data.Location}`);
  });
};
Output: File uploaded successfully. https://exampleBucket.s3.amazonaws.com/fileName.pdf
However, the uploaded file is blank.
I figured it out, but I did not keep using fetch. Instead, I download the file to disk, then upload it, and then delete the local file.
function getNewFilesFromExampleDotCom(myAccessToken, fileName, fileKey) {
  let url2 = 'https://example.com' + fileKey;
  axios
    .get(url2, {
      headers: { 'Authorization': "Bearer " + myAccessToken },
      responseType: 'stream',
    })
    .then(response => {
      let file = fileName;
      response.data.pipe(fs.createWriteStream(file))
      let myFileInfo = [];
      if (myFileInfo.length > 0) {
        myFileInfo.splice(0, myFileInfo.length)
      }
      myFileInfo.push(file)
      processArray(myFileInfo)
      console.log(file + " saved")
    })
    .catch(error => console.log(error));
}
async function processArray(array) {
  for (const item of array) {
    await delayedLog(item);
  }
  console.log('Downloaded!');
  console.log('Uploading to s3!');
}
function delay() {
  return new Promise(resolve => setTimeout(resolve, 300));
}
async function delayedLog(item) {
  await delay();
  uploadFiles(item)
}
async function uploadFiles(file) {
  uploadToS3List(file)
  await new Promise((resolve, reject) => setTimeout(resolve, 1000));
  deleteMyFiles(file)
}
const uploadToS3List = (fileName) => {
  // Read content from the file
  const fileContent = fs.readFileSync(fileName);
  // Setting up S3 upload parameters
  const params = {
    Bucket: "myBucketName",
    Key: fileName,
    Body: fileContent
  };
  // Uploading files to the bucket
  s3.upload(params, function(err, data) {
    if (err) {
      throw err;
    }
    console.log(`File uploaded successfully. ${data.Location}`);
  });
};
function deleteMyFiles(path) {
  fs.unlink(path, (err) => {
    if (err) {
      console.error(err)
      return
    }
    // only report the deletion once we know it succeeded
    console.log(path + " has been deleted")
  })
}
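For what it's worth, the temporary file can usually be avoided entirely: with the AWS SDK v2, s3.upload() accepts a readable stream as Body, so the axios response stream can be passed straight through. A minimal sketch, assuming the same axios instance, s3 client, and access token as above (bucket name is a placeholder):
function streamExampleFileToS3(myAccessToken, fileKey, bucket) {
  return axios
    .get('https://example.com' + fileKey, {
      headers: { 'Authorization': "Bearer " + myAccessToken },
      responseType: 'stream',
    })
    .then((response) =>
      // s3.upload() handles multipart uploads for streams of unknown length
      s3.upload({ Bucket: bucket, Key: fileKey, Body: response.data }).promise()
    )
    .then((data) => console.log(`File uploaded successfully. ${data.Location}`));
}
This skips the download/delay/delete steps, since the bytes flow from the HTTP response directly into the S3 upload.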
I am using Node's fs module.
When I run the following code:
return new Promise((resolve, reject) => {
  if (!fs.existsSync(`./${imageDescription}`)) {
    axios.get(imageUrl).then((images) => {
      fs.writeFile(`./${imageDescription}`, images.data['hits'][0]['largeImageURL'], function (err, data) {
        if (err) {
          return console.log(err);
        }
        resolve(data);
      });
    });
  } else {
    fs.readFile(`./${imageDescription}`, (err, data) => {
      if (err) {
        console.error(err);
        return;
      }
      resolve(data);
    });
  }
});
Only the image URL (which starts with https:// and ends in .png) is saved to the file, but I want the image itself to be saved there.
Below you can find a piece of code that fetches an image from a URL and saves it with a random name and its extension.
const fs = require('fs');
const { randomUUID } = require('crypto');
const axios = require('axios');

const imgUrl = 'https://i.stack.imgur.com/ILTQq.png';
const randomId = randomUUID();
const fileExtension = imgUrl.split('.').pop();

axios
  .get(imgUrl, { responseType: 'stream' })
  .then((response) => {
    response.data.pipe(fs.createWriteStream(`${randomId}.${fileExtension}`));
  })
  .catch((error) => {
    console.log(error);
  });
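If you also need to know when the file has been fully written (for example, to resolve a Promise as in the question), you can wait for the write stream's finish event. A small sketch building on the snippet above, with a hypothetical saveImage helper:
const saveImage = (url, filePath) =>
  axios.get(url, { responseType: 'stream' }).then(
    (response) =>
      new Promise((resolve, reject) => {
        const writer = fs.createWriteStream(filePath);
        response.data.pipe(writer);
        // Resolve only once the bytes have actually been flushed to disk
        writer.on('finish', () => resolve(filePath));
        writer.on('error', reject);
      })
  );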
I'm working with a legacy project where MP4 files are uploaded one by one from a React app with axios to a NestJS API with busboy on an EC2 instance. Then, the file is uploaded to an S3 bucket.
When the AWS S3 library tries to load the file, an apparently random error is sometimes raised:
warn
module: videoservice
method: saveTostream
message: Error on Upload Success callback
{ [Error: ENOENT: no such file or directory, open 'file.mp4'] errno: -2, code: 'ENOENT', syscall: 'open', path: 'file.mp4' }
(node:26886) UnhandledPromiseRejectionWarning: Error: ENOENT: no such file or directory, open 'file.mp4'
(node:26886) UnhandledPromiseRejectionWarning: Unhandled promise rejection. This error originated either by throwing inside of an async function without a catch block, or by rejecting a promise which was not handled with .catch(). (rejection id: 20)
Here are the snippets:
API upload endpoint
@Post('upload/:id')
@ApiOperation({ ... })
@ApiResponse({ status: 201, type: Video })
@ApiConsumes('multipart/form-data')
@ApiImplicitFile({ name: 'file', required: true })
@ApiCreatedResponse({ type: UploadResponseDto })
async upload(
  @Decoded() decoded: any,
  @Param('id') videoId: number,
  @Query('fileSize') fileSize: number,
  @Req() req: Request,
  @Res() res: Response,
  @Body() uploadDto: UploadDto,
): Promise<any> {
  try {
    const busboy = new Busboy({
      headers: req.headers,
    });
    const data = new Map();
    const writtenFiles = [];
    let upload;
    busboy.on('field', (fieldname, val) => {
      data.set(fieldname, val);
    });
    busboy.on(
      'file',
      async (fieldname, file, filename, encoding, mimetype) => {
        try {
          data.set('filename', filename);
          data.set('mimetype', mimetype);
          data.set('encoding', encoding);
          const filepath = path.join(fieldname + path.extname(filename));
          upload = filepath;
          const writeStream = fs.createWriteStream(filepath);
          file.pipe(writeStream);
          const promise = new Promise((resolve, reject) => {
            file.on('end', () => {
              writeStream.end();
            });
            writeStream.on('finish', resolve);
            writeStream.on('error', reject);
          });
          writtenFiles.push(promise);
        } catch (err) {
          this.logger.error(log('busboy on file', err.message));
          return res.status(HttpStatus.INTERNAL_SERVER_ERROR).send({
            statusCode: HttpStatus.INTERNAL_SERVER_ERROR,
            message: err.message,
          });
        }
      },
    );
    busboy.on('error', err => {
      this.logger.warn(log('busboy on error', err.message));
    });
    busboy.on('finish', async () => {
      await Promise.all(writtenFiles);
      res.status(HttpStatus.CREATED).send('OK');
      await this.bucketService.saveStream( // Next snippet
        fs.createReadStream(upload),
        data.get('filename'),
        data.get('mimetype'),
        data.get('fileSize'),
        +data.get('programId'),
        +data.get('videoId')
      );
      return;
      fs.unlinkSync(upload);
    });
    req.pipe(busboy);
  } catch (err) {
    this.logger.error(log('catch', err.message));
    return res.status(HttpStatus.INTERNAL_SERVER_ERROR).send({
      statusCode: HttpStatus.INTERNAL_SERVER_ERROR,
      message: err.message,
    });
  }
}
BucketService saveStream method
public async saveStream(
  body: any,
  filename: string,
  mimeType: string,
  fileSize: number,
  programId: number,
  videoId: number
): Promise<any> {
  try {
    const callback = async (err: any, data: any) => {
      if (err) {
        this.logger.warn(log('Error on Upload Success callback', err));
        throw err; // Here is where the error is raised
      }
    };
    return this.s3
      .upload(
        this.setParams(body, filename, mimeType, programId, videoId),
        (err, data) => callback(err, data),
      );
  } catch (err) {
    this.logger.error(log('Error on S3 Upload', err));
    throw err;
  }
}
private setParams(
  file: any,
  filename: string,
  mimeType: string,
  programId: number,
  videoId: number
): any {
  return {
    ...awsBucketConfig,
    Key: `${AWS_UPLOAD_DIR}${programId}/${videoId}/${Date.now()}${path.extname(
      filename,
    )}`,
    Body: file,
    ContentType: mimeType,
  };
}
At some point I thought this happened because the name of the temporary file on EC2 is always the same, file.mp4: when two files are uploaded at the same time, the first one to finish removes the file (fs.unlinkSync(upload); in the endpoint), leaving the other ongoing process without it, so when that process tries to upload, it won't find the file. But that is not the cause, because I ran tests where I made sure the files were uploaded one by one. I also ensured the name was always different by changing this line in the controller:
const filepath = path.join(fieldname + path.extname(filename));
to this:
const filepath = path.join(Math.floor(Math.random() * 10000) + path.extname(filename));
but the error still happens. Another odd thing: on my machine I can see the file (with ls) while it is being uploaded, but on EC2 I cannot.
Facts:
EC2: t2.xlarge (about 4 GB free)
OS : Ubuntu 18
Node version: 10.21.0
Average file size: 2GB
Dependencies
"archiver": "^3.1.1",
"async-busboy": "^0.7.0",
"aws-sdk": "^2.553.0",
"axios": "^0.19.0",
"body-parser": "^1.19.0",
"busboy": "^0.3.1",
I have an API built with Node.js. In that API there is a process that downloads a large file using the request-promise module, makes a new buffer from it, and uploads that buffer to MinIO. The problem is that my API always crashes if the file is above 80-100 MB, and the Node.js process gets killed on the server. How do I handle this?
This function downloads the file and converts it into a buffer:
const convertLink = async link => {
  const options = {
    uri: link,
    encoding: null,
    headers: {
      'Content-type': 'application/octet-stream'
    }
  };
  const res = rp.get(options)
    .then((body) => {
      console.log(body)
      const a = Buffer.from(body);
      return a;
    })
    .catch(err => {
      console.log(err)
      return err;
    });
  return res;
};
This is the function that uploads the file to MinIO using the MinIO SDK:
const streamUpload = async (bucketName, objectName, newBuffer) => {
  try {
    const isUploaded = await minioClient.putObject(bucketName, objectName, newBuffer);
    if (isUploaded) {
      return isUploaded;
    }
  } catch (err) {
    return err;
  }
};
I think the issue here is that you are downloading the file, keeping it all in memory, and then uploading it to your minioClient, which is not recommended for large files. You should download the file as a stream and then upload it as a stream too. Keeping large files in memory is likely the reason your Node.js process gets killed.
You can try the following example, in which I use the request npm library to download the file, save it as a stream to a temporary location, and then read the file back from that temporary location for the upload:
Downloading the file:
const fs = require('fs');
const request = require('request');

const downloadFile = async (url) => {
  try {
    let tempLocation = "./temp";
    let fileName = "myfile";
    return new Promise((resolve, reject) => {
      request
        .get(url)
        .on('response', function (response) {
          console.log(response.statusCode) // 200
          console.log(response.headers['content-type'])
        })
        .on('error', function (error) {
          console.log('downloading error', error)
          reject()
        })
        .on('end', async function () {
          console.log("download finished")
          resolve();
        })
        .pipe(fs.createWriteStream(tempLocation + '/' + fileName))
    });
  } catch (error) {
    console.log("error in downloadFile", error)
    throw error;
  }
}
Now you can upload the file to your minioClient as a stream. You can use fs.createReadStream(file) to read the file as stream data from that temporary location.
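A minimal sketch of that upload step, assuming the same minioClient from the question and a hypothetical uploadFromTemp helper; the MinIO JS SDK's putObject accepts a readable stream, and passing the size lets it avoid buffering the whole file:
const fs = require('fs');

const uploadFromTemp = (bucketName, objectName, filePath) => {
  return new Promise((resolve, reject) => {
    // Read the downloaded file as a stream instead of holding it in memory
    const fileStream = fs.createReadStream(filePath);
    const stat = fs.statSync(filePath);
    minioClient.putObject(bucketName, objectName, fileStream, stat.size, (err, etag) => {
      if (err) return reject(err);
      resolve(etag);
    });
  });
};
This way neither the download nor the upload ever holds more than a small chunk of the file in memory at once.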
I'm trying to get an ID JPEG that I have stored in Firebase Storage and upload it to Stripe in order to allow my customers to verify their identity. However, I get several errors and the function stops.
import * as https from 'https'
import * as fs from 'fs'
import { IncomingMessage } from 'http'

const stripe = Stripe('sk_test');

function getidFile(url: string): Promise<any> {
  return new Promise((resolve, reject) => {
    const file = fs.createWriteStream("test.jpg");
    https.get(
      url,
      function (response: IncomingMessage) {
        console.log('Response ', response);
        response.pipe(file);
        file.on('finish', function () {
          file.close();
          resolve(file);
        });
      }
    ).on('error', function (err) { // Handle errors
      reject(err);
    });
  });
}
function uploadFileToStripe(fileUrl: string, fileType: string, fileName: string): Promise<any> {
  return getidFile(
    fileUrl
  ).then(
    (data: any) => {
      console.log('File as blob ', data);
      const upload: Promise<any> =
        stripe.files.create(
          {
            file: {
              data: data,
              name: fileName,
              type: 'image/jpeg',
            },
            purpose: 'identity_document',
          }
        );
      return upload.then(
        (document: any) => {
          console.log('Document upload ', document);
          return document;
        }
      ).catch(
        (_error: any) => {
          console.log('Error uploading document ', _error);
          return _error;
        }
      )
    }
  ).catch(
    (_error: any) => {
      console.log('Error with response ', _error);
      return _error;
    }
  );
}
Error: EROFS: read-only file system, open 'test.jpg'
and then my function stops.
Don't download the file through its URL. Use admin.storage() to access the file instead. In Cloud Functions the deployed filesystem is read-only except for os.tmpdir(), which is why writing test.jpg to the current directory fails with EROFS.
export const uploadFileToStripe = functions.https.onCall(async (data, context) => {
  const bucket = admin.storage().bucket('xyz.appspot.com');
  const path = require('path');
  const os = require('os');
  const fs = require('fs');

  const fileName = data['fileName'];
  // os.tmpdir() is the only writable location inside Cloud Functions
  const tempFilePath = path.join(os.tmpdir(), fileName);
  await bucket.file(fileName).download({ destination: tempFilePath });
  console.log('File created at', tempFilePath);

  return await stripe.files.create({
    purpose: 'identity_document',
    file: {
      data: fs.readFileSync(tempFilePath),
      name: 'fileUploadDoc.png',
      type: 'application/octet-stream',
    },
  });
});
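From the client, a callable function like this would be invoked with the file name as its data payload. A hypothetical call using the Firebase v8 web SDK (the fileName value is a placeholder):
const uploadFileToStripe = firebase.functions().httpsCallable('uploadFileToStripe');

uploadFileToStripe({ fileName: 'id-photos/user123.jpg' })
  .then((result) => console.log('Stripe file id:', result.data.id))
  .catch((error) => console.error('Upload failed:', error));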