Node PubSub: publish many messages without batching

I have a GCF (Google Cloud Function) that publishes messages from a newline-delimited JSON file in GCS:
const { PubSub } = require('@google-cloud/pubsub')
const { Storage } = require('@google-cloud/storage')
const readline = require('readline')

const pubSubClient = new PubSub()

const publish = async (topic, rowData) => {
  const dataBuffer = Buffer.from(JSON.stringify(JSON.parse(rowData)))
  try {
    await topic.publishMessage({ data: dataBuffer })
  } catch (error) {
    console.error(`Received error while publishing: ${error.message}`)
  }
}

exports['gcs-to-pubsub'] = async fileRef => {
  console.log(`processing ${fileRef.name}...`)
  const storage = new Storage()
  const myBucket = storage.bucket(fileRef.bucket)
  const file = myBucket.file(fileRef.name)
  const topicName = fileRef.name.split('.')[0]
  console.log(`publishing to ${topicName}`)
  const topicKey = `projects/${process.env.GCP_PROJECT}/topics/${topicName}`
  const topic = await pubSubClient.topic(topicKey)
  return await new Promise(resolve => {
    const stream = file
      .createReadStream()
      .on('error', error => {
        throw Error(error)
      })
      .on('end', () => {
        console.log(`Parsed all rows`)
        file.delete()
        console.log('file deleted')
        resolve()
      })
    const rl = readline.createInterface({
      input: stream,
      crlfDelay: Infinity
    });
    rl.on('line', (line) => {
      publish(topic, line)
    })
  })
}
The problem is that when a file contains over 100k rows, the publishing part starts throwing errors:
Received error while publishing: Total timeout of API google.pubsub.v1.Publisher exceeded 60000 milliseconds before any response was received.
I see where the problem is, but I'm not sure how to solve it.
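For reference, the usual way out is to bound how many publishes are in flight at once instead of firing one per 'line' event. A minimal sketch under that assumption, reusing the publish helper above; the chunk size is a hypothetical tuning value, not from the original post:

// Sketch: collect the lines first, then publish in bounded chunks so the
// client never holds tens of thousands of unresolved publish calls at once.
const publishAll = async (topic, rl) => {
  const lines = []
  for await (const line of rl) lines.push(line) // readline supports async iteration (Node 11+)

  const CHUNK_SIZE = 500 // hypothetical tuning value
  for (let i = 0; i < lines.length; i += CHUNK_SIZE) {
    await Promise.all(lines.slice(i, i + CHUNK_SIZE).map(line => publish(topic, line)))
  }
}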

Related

How to work with the response object in Node.js streams, exceljs and worker threads

I am using worker threads and streams at the same time in a Node.js project. Initially I was not able to pass the res object from the main process to the worker thread. I read many Stack Overflow questions and answers and wrote a solution that works well: I create a Readable stream in the main thread and a Writable stream in the worker thread. The job itself is a heavy computation over more than 10 tables, and exporting the data takes nearly 1 minute.
code:
router.get("/downloadAll", (req, res) => {
  new Promise((resolve, reject) => {
    const promise = [];
    promise.push(Dashboard.DUser());
    promise.push(Dashboard.DDUser());
    promise.push(Dashboard.DDLUser());
    promise.push(Dashboard.Din());
    promise.push(Dashboard.Str());
    promise.push(Dashboard.R());
    promise.push(Dashboard.Q());
    Promise.all(promise).catch(err => err)
      .then(results => {
        const worker = new Worker(`${process.cwd()}/src/route/modules/dashboard/worker.js`, {
          workerData: { results }
        });
        const fileHeaders = [
          {
            name: "Content-Type",
            value: "application/vnd.openxmlformats-officedocument.spreadsheetml.sheet"
          }
        ];
        res.setHeader("Content-Disposition", `attachment; filename="Stream.xlsx"`);
        fileHeaders.forEach(header => res.setHeader(header.name, header.value));
        const readStream = new Readable({
          read() {}
        });
        readStream.pipe(res);
        worker.on("message", message => {
          readStream.push(message);
        });
        worker.on("exit", code => {
          console.log("exit", code);
          resolve(true);
          // if (code !== 0) reject(new Error(`stopped with ${code} exit code`));
        });
      });
  })
    .then(() => res.end())
    .catch(err => console.log(err));
});
WORKER THREAD:
const { workerData, parentPort } = require("worker_threads");
const { Writable } = require("stream");
const Excel = require("exceljs");

const writableStream = new Writable();
// writableStream.on("message", () => {});
writableStream._write = (chunk, encoding, next) => {
  parentPort.postMessage(chunk);
  next();
};

const createWorkbook = () => {
  const workbook = new Excel.stream.xlsx.WorkbookWriter({
    stream: writableStream, // stream to server response
    useStyles: true // not sure about this one, check with it turned off.
  });
  workbook.title = "Serious";
  workbook.creator = "SS";
  workbook.created = new Date();
  return workbook;
};

const createSheet = workbook => {
  workerData.results.forEach((result, index) => {
    const worksheet = workbook.addWorksheet(result.title, {
      properties: { outlineLevelCol: 1 }
    });
    worksheet.columns = Object.keys(result.data[0]).map(item => {
      return { header: item, key: item };
    });
    result.data.forEach(row => worksheet.addRow(row).commit());
  });
};

const workbook = createWorkbook();
createSheet(workbook);
workbook.commit();
The above code works fine and is fast for small computations. But for a heavy computation the browser shows "processing" for about a minute before the xlsx file downloads, so I moved the data fetching into the worker and updated the code to:
router.get("/downloadAll", (req, res) => {
  const worker = new Worker(`${process.cwd()}/src/worker/worker.js`);
  const fileHeaders = [
    {
      name: "Content-Type",
      value: "application/vnd.openxmlformats-officedocument.spreadsheetml.sheet"
    }
  ];
  const today = new Date();
  res.setHeader(
    "Content-Disposition",
    `attachment; filename=Q-${today.getFullYear()}${String(today.getMonth() + 1).padStart(2, "0")}${String(
      today.getDate()
    ).padStart(2, "0")}.xlsx`
  );
  fileHeaders.forEach(header => res.setHeader(header.name, header.value));
  const readStream = new Readable({
    read() {}
  });
  readStream.pipe(res);
  worker.on("message", message => {
    readStream.push(message);
  });
  worker.on("exit", code => {
    console.log("exit", code);
    res.end();
    // if (code !== 0) reject(new Error(`stopped with ${code} exit code`));
  });
});
and worker thread code:
const { workerData, parentPort } = require("worker_threads");
const { Writable } = require("stream");
const Excel = require("exceljs");
const { resolve } = require("path");
const db = require(`${process.cwd()}/src/modules/db.module`);
const Dashboard = require(`${process.cwd()}/src/route/modules/dashboard.model`);

const promise = [];
promise.push(Dashboard.DUser());
promise.push(Dashboard.DDUser());
promise.push(Dashboard.DDLUser());
promise.push(Dashboard.Din());
promise.push(Dashboard.Str());
promise.push(Dashboard.R());
promise.push(Dashboard.Q());

Promise.all(promise).catch(err => err)
  .then(results => {
    const writableStream = new Writable();
    // writableStream.on("message", () => {});
    writableStream._write = (chunk, encoding, next) => {
      console.log(chunk.toString());
      parentPort.postMessage(chunk);
      next();
    };

    const createWorkbook = () => {
      const workbook = new Excel.stream.xlsx.WorkbookWriter({
        stream: writableStream, // stream to server response
        useStyles: true // not sure about this one, check with it turned off.
      });
      workbook.creator = "ss";
      workbook.created = new Date();
      return workbook;
    };

    const createSheet = workbook => {
      results.forEach((result, index) => {
        // console.log(result);
        const worksheet = workbook.addWorksheet(result.title, {
          properties: { outlineLevelCol: 1 }
        });
        worksheet.columns = Object.keys(result.data[0]).map(item => {
          return { header: item, key: item };
        });
        result.data.forEach(row => worksheet.addRow(row).commit());
      });
    };

    const workbook = createWorkbook();
    createSheet(workbook);
    workbook.commit();
  });
The above code does not work correctly. I can get the data in the promise callback, but when downloading, the browser shows 300 KB, then 200 B, then 1 byte, and ends at 0, and the file never downloads.
If I try to move the promise inside createSheet, then I get this error:
Error [ERR_UNHANDLED_ERROR]: Unhandled error. ({ message: 'queue closed', code: 'QUEUECLOSED', data: undefined })
code:
const createSheet = workbook => {
  let promise = [];
  /**
   * get count of all the user list
   */
  promise.push(Dashboard.DDPro());
  Promise.all(promise)
    .then(results => {
      results.forEach((result, index) => {
        console.log(result);
        const worksheet = workbook.addWorksheet(result.title, {
          properties: { outlineLevelCol: 1 }
        });
        worksheet.columns = Object.keys(result.data[0]).map(item => {
          return { header: item, key: item };
        });
        result.data.forEach(row => worksheet.addRow(row).commit());
      });
    })
    .catch(err => console.log(err));
};
Can anybody help me solve this problem?
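A note on the QUEUECLOSED error: it is consistent with workbook.commit() running before the asynchronous sheet creation has finished, since committing closes the WorkbookWriter's internal queue and any rows added afterwards are rejected. A minimal sketch of one possible ordering, assuming the same createWorkbook and Dashboard calls as above:

// Sketch: make createSheet awaitable so the workbook is committed only
// after every sheet and row has been added.
const createSheet = async workbook => {
  const results = await Promise.all([Dashboard.DDPro()]);
  results.forEach(result => {
    const worksheet = workbook.addWorksheet(result.title, {
      properties: { outlineLevelCol: 1 }
    });
    worksheet.columns = Object.keys(result.data[0]).map(item => ({ header: item, key: item }));
    result.data.forEach(row => worksheet.addRow(row).commit());
  });
};

(async () => {
  const workbook = createWorkbook();
  await createSheet(workbook);
  await workbook.commit(); // WorkbookWriter.commit() returns a promise
})();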

One of my friends is trying to automate a process in which the bot will post Instagram stories as videos from a specific folder

Below is the working code, which can post images, but is there any way I can also share videos as an Instagram story?
The error I get when I try to post a video instead of an image is:
PS D:\Softwares\programming\Insta Bot\story> node index.js
18:45:11 - info: Dry Run Activated
18:45:11 - info: Post() called! ======================
18:45:11 - debug: 1 files found in ./images/
18:45:11 - warn: Record file not found, saying yes to D:\Softwares\programming\Insta Bot\story\images\meme.mp4
18:45:11 - debug: Read File Success
18:45:11 - error: undefined
(MAIN CODE)
index.js
const logger = require("./logger.js")
const { random, sleep } = require('./utils')
require('dotenv').config();
const { IgApiClient, IgLoginTwoFactorRequiredError } = require("instagram-private-api");
const ig = new IgApiClient();
const Bluebird = require('bluebird');
const inquirer = require('inquirer');
const { CronJob } = require('cron');
const path = require("path");
const fs = require("fs");
const fsp = fs.promises;
const sharp = require("sharp");
//==================================================================================
const statePath = "./etc/state.conf";
const recordPath = "./etc/usedfiles.jsonl";
const imgFolderPath = "./images/";
const dryrun = true;
const runOnStart = true;
//==================================================================================
(async () => { // FOR AWAIT
  // LOGIN TO INSTAGRAM
  if (!dryrun) {
    await login();
    logger.info("Log In Successful");
  } else {
    logger.info("Dry Run Activated");
  }
  // SCHEDULER
  // logger.silly("I'm a schedule, and I'm running!! :)");
  const job = new CronJob('38 43 * * * *', post, null, true); // https://crontab.guru/
  if (!runOnStart) logger.info(`Next few posts scheduled for: \n${job.nextDates(3).join("\n")}\n`);
  else post();
  // MAIN POST COMMAND
  async function post() {
    logger.info("Post() called! ======================");
    let postPromise = fsp.readdir(imgFolderPath)
      .then(filenames => {
        if (filenames.length < 1) throw new Error(`Folder ${imgFolderPath} is empty...`)
        logger.debug(`${filenames.length} files found in ${imgFolderPath}`);
        return filenames;
      })
      .then(filenames => filenames.map(file => path.resolve(imgFolderPath + file)))
      .then(filenames => pickUnusedFileFrom(filenames, filenames.length))
      .then(filename => {
        if (!dryrun) registerFileUsed(filename)
        return filename
      })
      .then(fsp.readFile)
      .then(async buffer => {
        logger.debug("Read File Success "); // TODO move this to previous then?
        return sharp(buffer).jpeg().toBuffer()
          .then(file => {
            logger.debug("Sharp JPEG Success");
            return file
          })
      })
      .then(async file => {
        if (!dryrun) {
          // await sleep(random(1000, 60000)) // TODO is this necessary?
          return ig.publish.story({ file })
            .then(fb => logger.info("Posting successful!?"))
        }
        else return logger.info("Data not sent, dryrun = true")
      })
      .then(() => logger.info(`Next post scheduled for ${job.nextDates()}\n`))
      .catch(logger.error)
  }
})();
//=================================================================================
async function login() {
  ig.state.generateDevice(process.env.IG_USERNAME);
  // ig.state.proxyUrl = process.env.IG_PROXY;
  // register callback?
  ig.request.end$.subscribe(async () => {
    const serialized = await ig.state.serialize();
    delete serialized.constants; // this deletes the version info, so you'll always use the version provided by the library
    await stateSave(serialized);
  });
  if (await stateExists()) {
    // import state accepts both a string as well as an object
    // the string should be a JSON object
    const stateObj = await stateLoad();
    await ig.state.deserialize(stateObj)
      .catch(err => logger.debug("deserialize: " + err));
  } else {
    let standardLogin = async function() {
      // login like normal
      await ig.simulate.preLoginFlow();
      logger.debug("preLoginFlow finished");
      await ig.account.login(process.env.IG_USERNAME, process.env.IG_PASSWORD);
      logger.info("Logged in as " + process.env.IG_USERNAME);
      process.nextTick(async () => await ig.simulate.postLoginFlow());
      logger.debug("postLoginFlow finished");
    }
    // Perform usual login
    // If 2FA is enabled, IgLoginTwoFactorRequiredError will be thrown
    return Bluebird.try(standardLogin)
      .catch(
        IgLoginTwoFactorRequiredError,
        async err => {
          logger.info("Two Factor Auth Required");
          const { username, totp_two_factor_on, two_factor_identifier } = err.response.body.two_factor_info;
          // decide which method to use
          const verificationMethod = totp_two_factor_on ? '0' : '1'; // default to 1 for SMS
          // At this point a code should have been sent
          // Get the code
          const { code } = await inquirer.prompt([
            {
              type: 'input',
              name: 'code',
              message: `Enter code received via ${verificationMethod === '1' ? 'SMS' : 'TOTP'}`,
            },
          ]);
          // Use the code to finish the login process
          return ig.account.twoFactorLogin({
            username,
            verificationCode: code,
            twoFactorIdentifier: two_factor_identifier,
            verificationMethod, // '1' = SMS (default), '0' = TOTP (google auth for example)
            trustThisDevice: '1', // Can be omitted as '1' is used by default
          });
        },
      )
      .catch(e => logger.error('An error occurred while processing two factor auth', e, e.stack));
  }
  return

  //================================================================================
  async function stateSave(data) {
    // here you would save it to a file/database etc.
    await fsp.mkdir(path.dirname(statePath), { recursive: true }).catch(logger.error);
    return fsp.writeFile(statePath, JSON.stringify(data))
      // .then(() => logger.info('state saved, daddy-o'))
      .catch(err => logger.error("Write error" + err));
  }
  async function stateExists() {
    return fsp.access(statePath, fs.constants.F_OK)
      .then(() => {
        logger.debug('Can access state info')
        return true
      })
      .catch(() => {
        logger.warn('Cannot access state info')
        return false
      });
  }
  async function stateLoad() {
    // here you would load the data
    return fsp.readFile(statePath, 'utf-8')
      .then(data => JSON.parse(data))
      .then(data => {
        logger.info("State load successful");
        return data
      })
      .catch(logger.error)
  }
}
async function registerFileUsed(filepath) {
  let data = JSON.stringify({
    path: filepath,
    time: new Date().toISOString()
  }) + '\n';
  return fsp.appendFile(recordPath, data, { encoding: 'utf8', flag: 'a+' })
    .then(() => {
      logger.debug("Writing filename to record file");
      return filepath
    })
}

function pickUnusedFileFrom(filenames, iMax = 1000) {
  return new Promise((resolve, reject) => {
    let checkFileUsed = async function (filepath) {
      return fsp.readFile(recordPath, 'utf8')
        .then(data => data.split('\n'))
        .then(arr => arr.filter(Boolean))
        .then(arr => arr.map(JSON.parse))
        .then(arr => arr.some(entry => entry.path === filepath))
    };
    // Named IIFE so the recursive retry call (trythis) can refer to itself.
    (function trythis(iMax, i = 1) {
      let file = random(filenames);
      checkFileUsed(file)
        .then(async used => {
          if (!used) {
            logger.info(`Unused file found! ${file}`);
            resolve(file);
          } else if (i < iMax) {
            logger.debug(`Try #${i}: File ${file} used already`);
            await sleep(50);
            trythis(iMax, ++i)
          } else {
            reject(`I tried ${iMax} times and all the files I tried were previously used`)
          }
        })
        .catch(err => {
          logger.warn("Record file not found, saying yes to " + file);
          resolve(file);
        })
    })(iMax);
  })
}
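A hedged sketch for the video question itself: sharp only processes images, so sharp(buffer).jpeg() cannot handle an .mp4 buffer, which is consistent with the failure right after "Read File Success". One way to branch on the file type is below; note that the { video, coverImage } story signature is an assumption based on instagram-private-api's video publishing options, not something verified here:

// Hypothetical branch inside post(), after pickUnusedFileFrom() yields `filename`.
const ext = path.extname(filename).toLowerCase();
if (ext === '.mp4') {
  const video = await fsp.readFile(filename);
  // Assumption: video stories need a cover frame; this path is a placeholder.
  const coverImage = await fsp.readFile('./images/cover.jpg');
  await ig.publish.story({ video, coverImage }); // signature assumed, not verified
} else {
  const buffer = await fsp.readFile(filename);
  const file = await sharp(buffer).jpeg().toBuffer(); // JPEG conversion only makes sense for images
  await ig.publish.story({ file });
}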

AWS Lambda Custom Nodejs Container Shows Runtime Error

I have built an AWS Lambda function with a custom container image. I am trying to convert an Excel file to PDF with LibreOffice: getting the file from S3, saving it to disk, converting it to PDF, and then uploading it back to S3.
Here is the code.
const fs = require('fs');
const getStream = require('get-stream');
const { Readable } = require('stream')
const { S3Client, GetObjectCommand, PutObjectCommand } = require("@aws-sdk/client-s3");
const libre = require('libreoffice-convert');
const path = require('path');

exports.handler = async (event) => {
  const bucket = event.queryStringParameters.bucket;
  const file = event.queryStringParameters.file;
  const convertedFile = event.queryStringParameters.convertedFile;
  if (event.queryStringParameters['warmup'] !== undefined) {
    return {
      result: true,
      message: 'warmed up'
    }
  }
  const client = new S3Client({ region: "ap-south-1" });
  const command = new GetObjectCommand({ Bucket: bucket, Key: file });
  const response = await client.send(command);
  const objectData = response.Body;
  const writeStream = fs.createWriteStream("/tmp/sample.xlsx");
  objectData.pipe(writeStream);
  var end = new Promise((resolve, reject) => {
    objectData.on('close', resolve(true));
    objectData.on('end', resolve(true));
    objectData.on('error', reject(false));
  });
  let completed = await end;
  if (completed) {
    const extend = '.pdf'
    const outputPath = `/tmp/sample${extend}`;
    const enterPath = '/tmp/sample.xlsx';
    var readingFile = new Promise((resolve, reject) => {
      fs.readFile(enterPath, (err, data) => {
        if (err) {
          reject(false);
        }
        resolve(data);
      });
    });
    var fileData = await readingFile;
    var converting = new Promise((resolve, reject) => {
      libre.convert(fileData, extend, undefined, (err, done) => {
        if (err) {
          reject(false)
        }
        fs.writeFileSync(outputPath, done);
        resolve(true)
      });
    })
    var converted = await converting;
    if (converted) {
      var convertedFileStream = fs.createReadStream(outputPath);
      const uploadCommand = new PutObjectCommand({ Bucket: bucket, Key: convertedFile, Body: convertedFileStream });
      const lastResponse = await client.send(uploadCommand);
      const returnResponse = {
        result: true,
        message: 'success',
        bucket: event.queryStringParameters.bucket,
        file: event.queryStringParameters.file,
        convertedFile: event.queryStringParameters.convertedFile
      };
      if (event.queryStringParameters['returnEvent'] !== undefined) {
        returnResponse['returnEvent'] = event;
      }
      return returnResponse;
    }
  }
  return completed;
};
However, I am intermittently getting this error. Sometimes the function succeeds, but sometimes it throws:
{
  "errorType": "Error",
  "errorMessage": "false",
  "stack": [
    "Error: false",
    "    at _homogeneousError (/function/node_modules/aws-lambda-ric/lib/Runtime/CallbackContext.js:56:16)",
    "    at postError (/function/node_modules/aws-lambda-ric/lib/Runtime/CallbackContext.js:72:34)",
    "    at done (/function/node_modules/aws-lambda-ric/lib/Runtime/CallbackContext.js:99:13)",
    "    at fail (/function/node_modules/aws-lambda-ric/lib/Runtime/CallbackContext.js:113:13)",
    "    at /function/node_modules/aws-lambda-ric/lib/Runtime/CallbackContext.js:148:24",
    "    at processTicksAndRejections (internal/process/task_queues.js:97:5)"
  ]
}
I don't know Node.js in great depth, so I suspect the code is not written the right way. Any ideas what I am doing wrong here?
Like @hoangdv suggested, when I logged the errors I found that saving the file to disk was not happening correctly. So I changed the part of the code that saves the file to the following, and then it worked:
const client = new S3Client({ region: "ap-south-1" });
const command = new GetObjectCommand({ Bucket: bucket, Key: file });
const { Body } = await client.send(command);
await new Promise((resolve, reject) => {
  Body.pipe(fs.createWriteStream(filePath))
    .on('error', err => reject(err))
    .on('close', () => resolve())
})
const excelFile = fs.readFileSync(filePath);
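The root cause, for anyone hitting the same intermittent failure, is how the event listeners were registered in the original handler. A short illustration of the difference:

// Wrong: resolve(true) executes immediately and its return value (undefined)
// is registered as the listener, so `await end` continues while the S3
// download is still in flight and the converter may read a partial file.
objectData.on('close', resolve(true));

// Right: pass a function, so resolve only runs when 'close' actually fires.
objectData.on('close', () => resolve(true));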

Trying to upload a zip file to Azure Storage via Node.js

Trying to upload a file to an Azure Storage blob using Node.js, but facing a promise issue. Below is my code; it has a problem while uploading a zip file to Azure Blob Storage, and I need to convert it to use a callback or a .then function.
var re = /\.zip/;
fs.readdir("/tmp/", function(err, files) {
  if (err) {
    console.log("Could not list the directory.", err)
    process.exit(1)
  }
  console.log("files", files)
  var matches = files.filter(function(text) { return re.test(text) })
  console.log("These are the files you have", matches)
  var numFiles = matches.length
  console.log(numFiles);
  if (numFiles) {
    // Read in the file, convert it to base64, store to S3
    for (let i = 0; i < numFiles; i++) {
      uploadFileToBlob(matches[i])
        .then((result: any) => {
          console.log('result');
          console.log(result);
        })
        .catch((error: any) => {
          console.log(error);
        });
    }
  }
})
const uploadFileToBlob = async (file: any) => {
  const { AbortController } = require("@azure/abort-controller");
  const { AnonymousCredential, BlobServiceClient, newPipeline } = require("@azure/storage-blob");
  const account = 'string';
  const accountSas = "sastoken";
  const localFilePath = file;
  const pipeline = newPipeline(new AnonymousCredential(), {
    // httpClient: MyHTTPClient, // A customized HTTP client implementing IHttpClient interface
    retryOptions: { maxTries: 4 }, // Retry options
    userAgentOptions: { userAgentPrefix: "AdvancedSample V1.0.0" }, // Customized telemetry string
    keepAliveOptions: {
      // Keep alive is enabled by default, disable keep alive by setting false
      enable: false
    }
  });
  const blobServiceClient = new BlobServiceClient(
    `https://${account}.blob.core.windows.net${accountSas}`,
    pipeline
  );
  const containerName = 'quickstart';
  const containerClient = blobServiceClient.getContainerClient(containerName);
  const blobName = file;
  const blockBlobClient = containerClient.getBlockBlobClient(blobName);
  console.log("blockBlobClient", blockBlobClient);
  try {
    const result = await blockBlobClient.uploadFile(localFilePath, {
      blockSize: 4 * 1024 * 1024, // 4MB block size
      concurrency: 20, // 20 concurrency
      onProgress: (ev: any) => console.log(ev)
    });
    console.log("uploadFile succeeds");
    return result;
  } catch (err) {
    console.log(
      `uploadFile failed, requestId - ${err.details.requestId}, statusCode - ${err.statusCode}, errorCode - ${err.details.errorCode}`
    );
    throw err;
  }
}
I am getting this output:
Promise { <pending> }
I tried writing the code with a callback and with .then, but I get the same result, and the same code works on my local machine.
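For context, Promise { <pending> } is not an error: it is simply what Node prints when the promise object itself is logged before it settles, for example:

// Logging the promise object prints its state, not its eventual value:
console.log(uploadFileToBlob('filename.zip')); // Promise { <pending> }

// Waiting on it yields the actual result:
uploadFileToBlob('filename.zip').then(result => console.log(result));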
Please change the following lines of code:
await blockBlobClient.uploadFile(localFilePath, {
  blockSize: 4 * 1024 * 1024, // 4MB block size
  concurrency: 20, // 20 concurrency
  onProgress: (ev) => console.log(ev)
});
console.log("uploadFile succeeds");
to
const result = await blockBlobClient.uploadFile(localFilePath, {
  blockSize: 4 * 1024 * 1024, // 4MB block size
  concurrency: 20, // 20 concurrency
  onProgress: (ev) => console.log(ev)
});
console.log("uploadFile succeeds");
return result;
And then call your uploadFileToBlob method like:
uploadFileToBlob('filename.zip')
  .then((result) => {
    console.log('result');
    console.log(result);
  })
  .catch((error) => {
    console.log(error);
  });
UPDATE
Here's the complete code I used. I was able to upload the file successfully:
const uploadFileToBlob = async (file) => {
  const { AbortController } = require("@azure/abort-controller");
  const { AnonymousCredential, BlobServiceClient, newPipeline } = require("@azure/storage-blob");
  const account = 'account-name';
  const accountSas = '?sv=2020-04-08&ss=b&srt=sco&se=2021-03-08T18%3A30%3A00Z&sp=rwdxftlacup&sig=pT1d1NJQdu3bcnPA37voTCyQ6jKnmNVYgo1wyiYspDc%3D';
  const localFilePath = file;
  const pipeline = newPipeline(new AnonymousCredential(), {
    // httpClient: MyHTTPClient, // A customized HTTP client implementing IHttpClient interface
    retryOptions: { maxTries: 4 }, // Retry options
    userAgentOptions: { userAgentPrefix: "AdvancedSample V1.0.0" }, // Customized telemetry string
    keepAliveOptions: {
      // Keep alive is enabled by default, disable keep alive by setting false
      enable: false
    }
  });
  const blobServiceClient = new BlobServiceClient(
    `https://${account}.blob.core.windows.net${accountSas}`,
    pipeline
  );
  const containerName = 'container-name';
  const containerClient = blobServiceClient.getContainerClient(containerName);
  const blobName = file;
  const blockBlobClient = containerClient.getBlockBlobClient(blobName);
  console.log("blockBlobClient", blockBlobClient);
  try {
    const result = await blockBlobClient.uploadFile(localFilePath, {
      blockSize: 4 * 1024 * 1024, // 4MB block size
      concurrency: 20, // 20 concurrency
      onProgress: (ev) => console.log(ev)
    });
    console.log("uploadFile succeeds");
    return result;
  } catch (err) {
    console.log(
      `uploadFile failed, requestId - ${err.details.requestId}, statusCode - ${err.statusCode}, errorCode - ${err.details.errorCode}`
    );
    throw err;
  }
}
uploadFileToBlob('filename.zip')
  .then((result) => {
    console.log('result');
    console.log(result);
  })
  .catch((error) => {
    console.log(error);
  });

Node.js file upload doesn't work on remote server

On my local dev machine accessing localhost, the following code works beautifully, even with network throttling set to "Slow 3G." However, when running on my VPS it fails to process the file on the server. Here are two different code blocks I tried (again, both work without issue on my local dev machine accessing localhost):
profilePicUpload: async (parent, args) => {
  const file = await args.file;
  const fileName = `user-${nanoid(3)}.jpg`;
  const tmpFilePath = path.join(__dirname, `../../tmp/${fileName}`);
  file
    .createReadStream()
    .pipe(createWriteStream(tmpFilePath))
    .on('finish', () => {
      jimp
        .read(`tmp/${fileName}`)
        .then(image => {
          image.cover(300, 300).quality(60);
          image.writeAsync(`static/uploads/users/${fileName}`, jimp.AUTO);
        })
        .catch(error => {
          throw new Error(error);
        });
    });
}
It seems like this code block doesn't wait long enough for the file upload to finish, since if I check the storage location on the VPS I see only a partially written file.
I also tried the following, with no luck:
profilePicUpload: async (parent, args) => {
  const { createReadStream } = await args.file;
  let data = '';
  const fileStream = await createReadStream();
  fileStream.setEncoding('binary');
  // UPDATE: 11-2
  let i = 0;
  fileStream.on('data', chunk => {
    console.log(i);
    i++;
    data += chunk;
  });
  fileStream.on('error', err => {
    console.log(err);
  });
  // END UPDATE
  fileStream.on('end', () => {
    const file = Buffer.from(data, 'binary');
    jimp
      .read(file)
      .then(image => {
        image.cover(300, 300).quality(60);
        image.writeAsync(`static/uploads/users/${fileName}`, jimp.AUTO);
      })
      .catch(error => {
        throw new Error(error);
      });
  });
}
With this code, I don't even get a partial file.
jimp is a JS library for image manipulation.
If anyone has any hints to get this working properly, I'd appreciate it very much. Please let me know if I'm missing some info.
I was able to figure out a solution by referring to this article: https://nodesource.com/blog/understanding-streams-in-nodejs/
Here is my final, working code:
const { createWriteStream, unlink } = require('fs');
const path = require('path');
const { once } = require('events');
const { promisify } = require('util');
const stream = require('stream');
const jimp = require('jimp');
const { nanoid } = require('nanoid'); // assumed import for the nanoid(3) call below

profilePicUpload: async (parent, args) => {
  // have to wait while file is uploaded
  const { createReadStream } = await args.file;
  const fileStream = createReadStream();
  const fileName = `user-${args.uid}-${nanoid(3)}.jpg`;
  const tmpFilePath = path.join(__dirname, `../../tmp/${fileName}`);
  const tmpFileStream = createWriteStream(tmpFilePath, {
    encoding: 'binary'
  });
  const finished = promisify(stream.finished);
  fileStream.setEncoding('binary');
  // apparently async iterators is the way to go
  for await (const chunk of fileStream) {
    if (!tmpFileStream.write(chunk)) {
      await once(tmpFileStream, 'drain');
    }
  }
  tmpFileStream.end(() => {
    jimp
      .read(`tmp/${fileName}`)
      .then(image => {
        image.cover(300, 300).quality(60);
        image.writeAsync(`static/uploads/users/${fileName}`, jimp.AUTO);
      })
      .then(() => {
        unlink(tmpFilePath, error => {
          console.log(error);
        });
      })
      .catch(error => {
        console.log(error);
      });
  });
  await finished(tmpFileStream);
}
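As a side note, on newer Node versions (15+) the manual write/drain loop can be replaced with stream.pipeline, which handles backpressure and error propagation internally. A minimal sketch of just the copy step, assuming the same createReadStream and tmpFilePath as above:

const { pipeline } = require('stream/promises'); // Node 15+

// Copy the upload to the temp file; pipeline applies backpressure and
// rejects if either stream errors.
await pipeline(createReadStream(), createWriteStream(tmpFilePath));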
