Node.js file upload doesn't work on remote server

On my local dev machine, accessing localhost, the following code works beautifully, even with network settings changed to "Slow 3G." However, when running on my VPS, it fails to process the file on the server. Here are two different code blocks I tried (again, both work without issue on my local dev machine accessing localhost):
profilePicUpload: async (parent, args) => {
  const file = await args.file;
  const fileName = `user-${nanoid(3)}.jpg`;
  const tmpFilePath = path.join(__dirname, `../../tmp/${fileName}`);
  file
    .createReadStream()
    .pipe(createWriteStream(tmpFilePath))
    .on('finish', () => {
      jimp
        .read(`tmp/${fileName}`)
        .then(image => {
          image.cover(300, 300).quality(60);
          image.writeAsync(`static/uploads/users/${fileName}`, jimp.AUTO);
        })
        .catch(error => {
          throw new Error(error);
        });
    });
}
It seems like this code block doesn't wait long enough for the file upload to finish, since when I check the storage location on the VPS I see only a partially written file.
I also tried the following with no luck:
profilePicUpload: async (parent, args) => {
  const { createReadStream } = await args.file;
  let data = '';
  const fileStream = await createReadStream();
  fileStream.setEncoding('binary');

  // UPDATE: 11-2
  let i = 0;
  fileStream.on('data', chunk => {
    console.log(i);
    i++;
    data += chunk;
  });
  fileStream.on('error', err => {
    console.log(err);
  });
  // END UPDATE

  fileStream.on('end', () => {
    const file = Buffer.from(data, 'binary');
    jimp
      .read(file)
      .then(image => {
        image.cover(300, 300).quality(60);
        image.writeAsync(`static/uploads/users/${fileName}`, jimp.AUTO);
      })
      .catch(error => {
        throw new Error(error);
      });
  });
}
With this code, I don't even get a partial file.
jimp is a JS library for image manipulation.
If anyone has any hints to get this working properly, I'd appreciate it very much. Please let me know if I'm missing some info.

I was able to figure out a solution by referring to this article: https://nodesource.com/blog/understanding-streams-in-nodejs/
Here is my final, working code:
const { createWriteStream, unlink } = require('fs');
const path = require('path');
const { once } = require('events');
const { promisify } = require('util');
const stream = require('stream');
const jimp = require('jimp');
const { nanoid } = require('nanoid');

profilePicUpload: async (parent, args) => {
  // have to wait while the file is uploaded
  const { createReadStream } = await args.file;
  const fileStream = createReadStream();
  const fileName = `user-${args.uid}-${nanoid(3)}.jpg`;
  const tmpFilePath = path.join(__dirname, `../../tmp/${fileName}`);
  const tmpFileStream = createWriteStream(tmpFilePath, {
    encoding: 'binary'
  });
  const finished = promisify(stream.finished);

  fileStream.setEncoding('binary');
  // apparently async iterators are the way to go
  for await (const chunk of fileStream) {
    // respect back-pressure: wait for 'drain' when the write buffer is full
    if (!tmpFileStream.write(chunk)) {
      await once(tmpFileStream, 'drain');
    }
  }

  tmpFileStream.end(() => {
    jimp
      .read(tmpFilePath)
      .then(image => {
        image.cover(300, 300).quality(60);
        // return the promise so the temp file is only unlinked
        // after the resized image has been written
        return image.writeAsync(`static/uploads/users/${fileName}`, jimp.AUTO);
      })
      .then(() => {
        unlink(tmpFilePath, error => {
          if (error) console.log(error);
        });
      })
      .catch(error => {
        console.log(error);
      });
  });
  await finished(tmpFileStream);
}
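As a side note, the manual write/drain loop can be replaced with stream.pipeline, which wires up back-pressure and error propagation for you. A minimal sketch, assuming the same fileStream and tmpFileStream as above (stream.pipeline exists since Node 10):
const { promisify } = require('util');
const stream = require('stream');
const pipeline = promisify(stream.pipeline);

// pipes the upload into the temp file; resolves once the write stream
// has finished, rejects if either stream errors
await pipeline(fileStream, tmpFileStream);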

Related

Node PubSub: publish many messages without batching

I have a GCF (Google Cloud Function) that publishes messages from a newline-delimited JSON file in GCS:
const { PubSub } = require('@google-cloud/pubsub')
const { Storage } = require('@google-cloud/storage')
const readline = require('readline')

const pubSubClient = new PubSub()

const publish = async (topic, rowData) => {
  const dataBuffer = Buffer.from(JSON.stringify(JSON.parse(rowData)))
  try {
    await topic.publishMessage({ data: dataBuffer })
  } catch (error) {
    console.error(`Received error while publishing: ${error.message}`)
  }
}

exports['gcs-to-pubsub'] = async fileRef => {
  console.log(`processing ${fileRef.name}...`)
  const storage = new Storage()
  const myBucket = storage.bucket(fileRef.bucket)
  const file = myBucket.file(fileRef.name)
  const topicName = fileRef.name.split('.')[0]
  console.log(`publishing to ${topicName}`)
  const topicKey = `projects/${process.env.GCP_PROJECT}/topics/${topicName}`
  const topic = await pubSubClient.topic(topicKey)
  return await new Promise(resolve => {
    const stream = file
      .createReadStream()
      .on('error', error => {
        throw Error(error)
      })
      .on('end', () => {
        console.log(`Parsed all rows`)
        file.delete()
        console.log('file deleted')
        resolve()
      })
    const rl = readline.createInterface({
      input: stream,
      crlfDelay: Infinity
    });
    rl.on('line', (line) => {
      publish(topic, line)
    })
  })
}
The problem is that when a file contains over 100k rows, the publishing part starts throwing errors:
Received error while publishing: Total timeout of API google.pubsub.v1.Publisher exceeded 60000 milliseconds before any response was received.
I see where the problem is, but I'm not sure how to solve it.
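One way to bound the number of in-flight publishes is to read the lines through readline's async iterator and await each publish, instead of firing them all from the 'line' event. A minimal sketch under that assumption, reusing the publish helper and imports from above (readline async iteration exists since Node 11.4; error handling is elided):
exports['gcs-to-pubsub'] = async fileRef => {
  const storage = new Storage()
  const file = storage.bucket(fileRef.bucket).file(fileRef.name)
  const topicName = fileRef.name.split('.')[0]
  const topic = pubSubClient.topic(
    `projects/${process.env.GCP_PROJECT}/topics/${topicName}`
  )
  const rl = readline.createInterface({
    input: file.createReadStream(),
    crlfDelay: Infinity
  })
  // one message in flight at a time, so no publish can time out
  // waiting behind 100k queued requests
  for await (const line of rl) {
    await publish(topic, line)
  }
  await file.delete()
}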

Upload image array to Firebase in React Native

It uploads to Firebase Cloud Storage, but the download URL is not returned to the array. I want to return, or set, the download URLs into an array.
How can I solve this problem?
const uploadImage = () => {
  var promises = uploadUrl.map(async (image, index) => {
    let filename = image.substring(image.lastIndexOf('/') + 1);
    const task = storage().ref(`complaintPhotos/${filename}`).putFile(image);
    // promises.push(task);
    task.on('state_changed', taskSnapshot => {
      console.log(
        `${taskSnapshot.bytesTransferred} transferred out of ${taskSnapshot.totalBytes}`,
      );
    });
    try {
      await task;
      await storage()
        .ref(`complaintPhotos/${filename}`)
        .getDownloadURL()
        .then(url => {
          setUploadUri(prevState => [...prevState, url]);
        });
    } catch (e) {
      console.log(e);
    }
  });
};
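A possible fix, sketched under the assumption that uploadUrl holds local file URIs as above: await all the upload promises with Promise.all, collect the URLs, and update state once, so every download URL lands in the array:
const uploadImage = async () => {
  const urls = await Promise.all(
    uploadUrl.map(async image => {
      const filename = image.substring(image.lastIndexOf('/') + 1);
      const ref = storage().ref(`complaintPhotos/${filename}`);
      await ref.putFile(image);
      return ref.getDownloadURL();
    }),
  );
  // a single state update once every URL has been resolved
  setUploadUri(prevState => [...prevState, ...urls]);
};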

How to Process Uploaded Image with Graphql Apollo with SharpJS in NodeJS?

I have a GraphQL mutation that gets an image from the frontend, which is then processed and optimized on my server.
But I can't figure out how to pass my image to sharp.
Here is my code:
const Mutation = {
  createImage: async (_, { data }) => {
    const { file } = data
    const image = await file
    console.log(image)
    const sharpImage = sharp(image)
  }
}
I know the code doesn't work, and sharp throws an error saying that the input is invalid. So how can I work with createReadStream to create an instance of sharp?
When I console.log(image), here is what I see:
image {
  filename: 'image.png',
  mimetype: 'image/png',
  encoding: '7bit',
  createReadStream: [Function: createReadStream]
}
Thanks a lot in advance!
So I figured out the solution to my question.
First, I found out that I needed to add scalar Upload to typeDefs.
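For reference, the schema side only needs the scalar declaration. A minimal sketch, where the gql import and the mutation/input shapes are assumptions based on the resolver below:
const { gql } = require('apollo-server');

const typeDefs = gql`
  scalar Upload

  input ImageInput {
    file: Upload!
  }

  type Mutation {
    createImage(data: ImageInput!): Boolean
  }
`;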
Then, I needed to add a resolver for Upload like this:
const { GraphQLUpload } = require('graphql-upload');

const server = new ApolloServer({
  resolvers: {
    Upload: GraphQLUpload,
  }
})
Then in my resolver, here is what I had to do:
// utility function to promisify the stream and collect the image into a
// buffer, which is then passed to sharp
const streamToString = (stream) => {
  const chunks = [];
  return new Promise((resolve, reject) => {
    stream.on('data', (chunk) => chunks.push(Buffer.from(chunk)));
    stream.on('error', (err) => reject(err));
    stream.on('end', () => resolve(Buffer.concat(chunks)));
  })
}

const Mutation = {
  createImage: async (_, { data }) => {
    const { file } = data
    const { createReadStream } = await file
    const image = await streamToString(createReadStream())
    const sharpImage = sharp(image)
  }
}
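From there, the sharp instance can be used as usual. A small sketch of what the processing might look like; the resize parameters and output path here are assumptions, not part of the original question:
// continuing from sharpImage above: crop-to-cover at 300x300 and write a
// 60%-quality JPEG to disk
await sharpImage
  .resize(300, 300, { fit: 'cover' })
  .jpeg({ quality: 60 })
  .toFile('static/uploads/image.jpg')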

PDF download fails using PDFMake in NodeJS

I added an endpoint to my NodeJS API to generate and download a PDF using the PDFMake library. I'm able to generate and save the PDF on the server side, but on the browser side the downloaded PDF fails with 0 KB, and I'm stuck trying to find a solution.
I understand that the file is being downloaded before the write stream finishes writing it, but adding an extra handler like this:
pdf.on("finish", async () => {
res.download(pdf);
res.send("PDF generated");
});
This didn't help, and it added an extra error:
UnhandledPromiseRejectionWarning: RangeError [ERR_HTTP_INVALID_STATUS_CODE]: Invalid status code: undefined
I have no idea how to solve this and hope for help. Most probably it's something I'm missing.
The code responsible for this functionality is:
Router
router.get("/pdf/all", async (req, res) => {
const movies = await movie.getMovies();
try {
const pdf = await generatePdf(movies, "all");
pdf.on("finish", async () => {
res.download(pdf);
res.send("PDF generated");
});
} catch (err) {
// Errors
res.status(err.status).json({ message: err.message });
throw new Error(error.message);
}
});
generatePdf
const generatePdf = (movies, name) => {
  return new Promise((resolve, reject) => {
    try {
      let fonts = {
        Roboto: {
          normal: "Helvetica",
          bold: "Helvetica-Bold",
          italics: "Helvetica-Oblique",
          bolditalics: "Helvetica-BoldOblique"
        }
      };
      let printer = new pdfMaker(fonts);
      const pdfTemplate = template(movies);
      const pdfStream = printer.createPdfKitDocument(pdfTemplate, {});
      const filePath = path.join(
        __dirname,
        uploads + "/" + pdfDir + `${name}.pdf`
      );
      console.log(filePath);
      pdfStream.pipe(writeStream(filePath));
      pdfStream.end();
      resolve(filePath);
    } catch (err) {
      console.log(err);
      reject(err);
    }
  });
};
I would suggest you try the following code for generatePdf:
const generatePdf = (movies, name) => {
  return new Promise((resolve, reject) => {
    try {
      let fonts = {
        Roboto: {
          normal: "Helvetica",
          bold: "Helvetica-Bold",
          italics: "Helvetica-Oblique",
          bolditalics: "Helvetica-BoldOblique"
        }
      };
      let printer = new pdfMaker(fonts);
      const pdfTemplate = template(movies);
      const pdfStream = printer.createPdfKitDocument(pdfTemplate, {});
      const filePath = path.join(
        __dirname,
        uploads + "/" + pdfDir + `${name}.pdf`
      );
      console.log(filePath);
      let stream = pdfStream.pipe(writeStream(filePath));
      // resolve only once the file has been fully flushed to disk
      stream.on('finish', function () {
        resolve(filePath);
      });
      // end the document after the handler is attached so 'finish' can fire
      pdfStream.end();
    } catch (err) {
      console.log(err);
      reject(err);
    }
  });
};
This way the code waits for the piped write stream to emit 'finish' before resolving the filePath; note that pdfStream.end() must still be called, otherwise the document is never flushed and 'finish' never fires.
Also, you cannot use res.send after res.download: the download sets the response to the file, and you can no longer send another response to the client.
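Putting both points together, the router might then look like this; a sketch assuming generatePdf resolves the file path as above:
router.get("/pdf/all", async (req, res) => {
  try {
    const movies = await movie.getMovies();
    const filePath = await generatePdf(movies, "all");
    // res.download streams the finished file and ends the response
    // itself, so no res.send afterwards
    res.download(filePath);
  } catch (err) {
    res.status(500).json({ message: err.message });
  }
});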

Using the built-in streamWriter from the xmlbuilder package in Node.js

I want to create XML files, and for this I am using the xmlbuilder package.
The initial code, which looks like the following, works fine:
const xmlStructure = {
  // xml structure
};
const root = builder.create(xmlStructure, { encoding: 'utf-8' });
const xml = root.end({ pretty: true });

const readableStream = new Readable();
const writableStream = file.createWriteStream();
writableStream
  .on('error', error => {
    throw error;
  })
  .on('finish', async () => {
    // do something in the DB
  });
readableStream._read = () => {
  readableStream.push(xml);
  readableStream.push(null);
};
readableStream.pipe(writableStream);
but I saw in the package's wiki that there is a built-in streamWriter.
I would like to use it and remove some of the custom code.
I tried the following approach:
const xmlStructure = {
  // xml structure
};
const writableStream = file.createWriteStream();
const writer = xmlbuilder.streamWriter(writableStream);
writableStream
  .on('error', error => {
    throw error;
  })
  .on('finish', async () => {
    // do something in the DB
  });
const root = builder.create(xmlStructure, { encoding: 'utf-8' });
const xml = root.end(writer);

const readableStream = new Readable();
readableStream.pipe(writableStream);
Unfortunately this didn't work. What am I doing wrong or forgetting?
The missing line of code was the following:
writableStream.end();
So the end result looks like this:
const xmlStructure = {
  // xml structure
};
const root = builder.create(xmlStructure, { encoding: 'utf-8' });
const writableStream = file.createWriteStream();
const writer = xmlbuilder.streamWriter(writableStream);
const xml = root.end(writer);
writableStream.end();
writableStream
  .on('error', error => {
    throw error;
  })
  .on('finish', async () => {
    // do something in the DB
  });
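If the DB work should only run after a successful write, one alternative is to await the stream with events.once instead of inline handlers. A minimal sketch, assuming Node 11.13+ where events.once exists (it rejects if the stream emits 'error'):
const { once } = require('events');

const root = builder.create(xmlStructure, { encoding: 'utf-8' });
const writableStream = file.createWriteStream();
root.end(xmlbuilder.streamWriter(writableStream));
writableStream.end();

// resolves on 'finish', rejects on 'error'
await once(writableStream, 'finish');
// do something in the DB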