Delete and recreate WriteStream - node.js

In Node.js how can you delete the file a WriteStream was writing to and then recreate the stream without error?
With this code
// Ends the current append-mode stream, deletes the underlying file once the
// stream has fully closed, then reopens a fresh stream on the same path.
// Resolves only after the replacement stream has been created.
clear(){
return new Promise((resolve, reject) => {
// Wait for 'close' so the OS file handle is released before unlinking;
// on Windows, unlinking or reopening while a handle is still open is
// what raises the EPERM error discussed in this question.
this.stream.once('close', () => {
Fs.unlinkSync(this.filepath);
// Recreate in append mode so subsequent writes go to a fresh file.
this.stream = Fs.createWriteStream(this.filepath, {flags: 'a'});
resolve();
});
// Trigger flush + close; the handler above completes the swap.
this.stream.end();
});
}
This error occurs. I've tried everything.
Uncaught Error: EPERM: operation not permitted, open 'C:\test\test.log'
The calling code
it('erases the log file', async () => {
const file = new File("C:\test\test.log");
await file.clear();
const exists = Fs.existsSync(testfile);
Assert.strictEqual(exists, false);
});
I can only avoid the error if I remove the this.stream = Fs.createWriteStream(this.filepath, {flags: 'a'}); in clear()

When I try to duplicate your issue here, I only get that error if you are not properly waiting for the clear() method to finish before you attempt to use the stream again.
For example, this code works:
const fs = require('fs');
class MyFile {
    // Append-only log file wrapper around an fs.WriteStream.
    constructor(filepath) {
        this.filepath = filepath;
        this.open();
    }

    // (Re)creates the append-mode stream and wires up error logging.
    open() {
        this.stream = fs.createWriteStream(this.filepath, { flags: 'a' });
        this.stream.on('error', (err) => console.log(err));
    }

    // Appends a string to the log (fire-and-forget write).
    log(str) {
        this.stream.write(str);
    }

    // Closes the current stream, removes the file once the handle is
    // released, then reopens a fresh stream. Resolves only after the
    // replacement stream is in place, so callers MUST await this.
    clear() {
        return new Promise((done) => {
            this.stream.once('close', () => {
                console.log("got clear close event");
                try {
                    fs.unlinkSync(this.filepath);
                } catch (e) {
                    console.log(e);
                }
                this.open();
                done();
            });
            this.stream.end();
        });
    }

    // Flushes and closes the stream for good; resolves on 'close'.
    close() {
        return new Promise((done) => {
            this.stream.once('close', () => {
                console.log("got final close event");
                done();
            });
            this.stream.end();
        });
    }
}
// Demo driver: writes two lines, clears the log, writes two more, closes.
async function run() {
const f = new MyFile("./temp.xxx");
f.log("hello\n");
f.log("goodbye\n");
// Must await: clear() swaps this.stream asynchronously; writing before the
// swap completes targets an ended stream and triggers the EPERM error.
await f.clear();
f.log("hello\n");
f.log("goodbye\n");
await f.close();
}
run().then(result => {
console.log("done");
}).catch(err => {
console.log(err);
});
But, if I remove the await in front of await f.clear(), then I get your exact error because the code proceeds trying to use the stream after you've called this.stream.end(), but before the new stream is in place which is an EPERM error.
If this doesn't illustrate for you what your problem is, then you need to show us all the rest of the code that uses this class and calls the clear() method. The problem is likely in that code.

Related

In node.js, why is my data not getting passed back after a asynchronous file read using a Promise

I know for sure that my pullData module is getting the data back from the file read but the function calling it, though it has an await, is not getting the data.
This is the module (./initialise.js) that reads the data:
const fs = require('fs');
// Reads ./Sybernika.txt as UTF-8 and resolves with its contents,
// rejecting with the fs error on failure.
const getData = () =>
    new Promise((resolve, reject) => {
        fs.readFile('./Sybernika.txt', { encoding: 'utf8', flag: 'r' }, (err, data) => {
            if (err) {
                reject(err);
            } else {
                resolve(data);
            }
        });
    });
module.exports = {getData};
And this is where it gets called (app.js):
const init = require('./initialise');
// BUG (the asker's problem): the then/catch chain below is never returned,
// so pullData's promise resolves with undefined regardless of the file read.
const pullData = async () => {
init.getData().then((data) => {
return data;
}).catch((err) => {
console.log(err);
});
};
const start = async() => {
// data is undefined here because pullData() resolved with no value.
let data = await pullData();
console.log(data);
}
start();
Putting 'console.log(data)' just before return data in the resolve part of the call shows the data, so I know it's being read OK. However, that final console.log shows my data variable as undefined.
Any suggestions?
It's either
// Fixed version: returning the then/catch chain makes pullData's promise
// settle with the file data (or undefined after logging an error).
const pullData = async () => {
return init.getData().then((data) => {
return data;
}).catch((err) => {
console.log(err);
});
};
or
// Equivalent fix using an arrow function with a concise (expression) body:
// the chain is the arrow's expression, so it is returned implicitly.
const pullData = async () =>
init.getData().then((data) => {
return data;
}).catch((err) => {
console.log(err);
});
Both versions make sure a promise returned by then/catch is passed down to the caller.

Graphql "Network Error : Failed to Fetch" when upload images by taking image from direct camera using Oppo

I have a problem when upload images by taking direct from OPPO camera. I tried this from other smartphone is working as expected.
This is my code
const form = new FormData();
// Streams every image into `form` and resolves true once ALL appends
// have completed; rejects with the first error encountered.
//
// Fix: the previous forEach-based version resolved as soon as the *last*
// image in the array finished, even though earlier (slower) downloads could
// still be in flight — the form could be posted half-filled, and late
// rejections fired after resolve. Promise.all waits for every append and
// propagates the first failure as a rejection.
function processImages() {
    const appends = images.map(async (image) => {
        const data = await getBufferFromStreams(image);
        form.append('images', data.fileBuffer, data.filename);
    });
    return Promise.all(appends).then(() => true);
}
const success = await processImages();
const response = await this.post(`${url}`, form, {});
// Drains a (possibly promise-wrapped) upload file object into a Buffer.
// Resolves with { fileBuffer, filename }; rejects on stream errors or when
// no file is given.
//
// Fix: the original did `return new Error('file is undefined')`, which
// RESOLVES the promise with an Error object instead of rejecting it —
// awaiting callers would silently receive the Error as a value. Throwing
// inside an async function rejects the returned promise as intended.
export const getBufferFromStreams = async (file) => {
    if (!file) {
        throw new Error('file is undefined');
    }
    const { stream, filename } = await file;
    const chunks = [];
    return new Promise((resolve, reject) => {
        stream.on('data', (chunk) => {
            chunks.push(chunk);
        });
        stream.on('error', (error) => {
            reject(error);
        });
        stream.on('end', () => {
            resolve({ fileBuffer: Buffer.concat(chunks), filename });
        });
    });
};
I always get error "Network Error: Failed to Fetch" when uploading images when taking picture from camera only on OPPO smartphone.
Have you encountered the same issue, and if so, how did you solve it?

How to properly deal with errors while using NodeJS's readline module

I am attempting to read and process a file line by line. I would like to use try / catch async pattern to do this. Below is an example lifted directly from NodeJS docs on how to use readline module.
const { once } = require('events');
const { createReadStream } = require('fs');
const { createInterface } = require('readline');
// Canonical readline example from the Node.js docs: stream a file line by
// line, then wait for the interface to close.
(async function processLineByLine() {
try {
const rl = createInterface({
input: createReadStream('big-file.txt'),
crlfDelay: Infinity
});
rl.on('line', (line) => {
// Process the line.
});
// events.once() resolves on 'close'. Note: errors thrown inside the 'line'
// handler above happen in a later event-loop turn and do NOT propagate to
// this try/catch — which is exactly the asker's problem.
await once(rl, 'close');
console.log('File processed.');
} catch (err) {
console.error(err);
}
})();
The await once part is throwing me through a loop I think. What I want to do if I encounter an error while parsing the line:
rl.on('line', (line) => {
try {
// Process the line. maybe error from parsing?
JSON.parse(line)
} catch ( error ) {
throw new Error("error while attempting to process json.")
}
});
Is have access to that newly thrown error in the outer try / catch block like:
console.log('File processed.');
} catch (err) {
console.error(err);
// should see "error while attempting to process json."
}
})();
So far the firebase function crashes without ever reaching the outer try / catch block. I've tried adding error event listeners to the readline stream like:
rl.on("error", () => { // throw error here })
with no success.
try/catch only catches synchronous errors, so it won't catch anything thrown from inside rl.on(). Doing await once() just waits for the rl stream before executing console.log('File processed.'); but by then the try{}catch(e){} body has already run, so the error can't be caught.
rl.on('error', () => {}) will only catch errors from the rl stream itself, so an error occurring in createReadStream('big-file.txt') won't be caught (and since it is asynchronous, the final catch(e) won't catch it either).
To catch any error that occurs in rl.on('line', ...), one solution is to reject with the error. A rejected error inside an async/await function will be caught just as in a synchronous flow.
An example:
// Answer example: surface 'line'-handler errors to the outer catch by
// converting them into a promise rejection that the awaited flow observes.
async function processLineByLine() {
try{
async function run() {
const rs = createReadStream(__filename)
// otherwise createReadStream() err are not handled
rs.on('error', () => {
console.log('HandleReadStreanErr')
})
const rl = createInterface({
input: rs,
crlfDelay: Infinity
})
return new Promise((resolve, reject) => {
rl.on('line', (line) => {
try {
// Deliberate demo error: thrown BEFORE resolve, so the next line is
// unreachable and the rejection reaches the outer catch via `await run()`.
throw new Error("error while attempting to process json.")
resolve(console.log(line.toString()))
} catch(e) {
reject(e)
}
})
})
// NOTE(review): run() exits at the `return new Promise(...)` above, so the
// three lines below are unreachable as written — presumably intended to be
// registered before the return; verify against the original answer.
// handle specificaly the rl stream error
rl.on('error', () => console.log('errr rl stream'))
await once(rl, 'close');
console.log('File processed.');
}
// await the overall execution for the catch() to wait
await run()
} catch(e) {
// only rejected err reach here or the one happening synchronously
console.error('eeeeeee')
}
}
processLineByLine()
I personally like to handle each error close to where it occurs, but some people prefer to handle them all in a single place with an error handler. In that case we can wrap the overall execution in a promise:
// Variant: single central error handler by wrapping the whole flow in one
// promise. NOTE(review): passing an async executor to `new Promise` is an
// anti-pattern — an exception thrown after its first await rejects nothing
// automatically; this only works because every failure path here calls
// reject() explicitly.
async function processLineByLine() {
try{
async function run() {
return new Promise(async (resolve, reject) => {
const rs = createReadStream('khg.jk')
rs.on('error', () => {
reject('HandleReadStreanErr')
})
const rl = createInterface({
input: rs,
crlfDelay: Infinity
})
rl.on('line', (line) => {
try {
// uncomment following err
// throw new Error("error while attempting to process json.")
resolve(console.log(line.toString()))
} catch(e) {
reject(e)
}
})
rl.on('error', () => reject('errr rl stream'))
await once(rl, 'close');
console.log('File processed.');
})
}
await run()
} catch(e) {
console.error('set error handler: ', e)
}
}
processLineByLine()

Nodejs triggers function before previous function finish

What I need to do is,
to check if fileFullPath exist
if not, in the end of successfull file download, to call saveInfo.
When I execute the application, what I observe is, it calls saveInfo before finishing file write operation. And I get error message:
(node:20224) UnhandledPromiseRejectionWarning: Error: BatchCluster has ended, cannot enqueue -charset
What am I doing wrong?
// Question code. BUG: `await` on res.pipe(...) does not wait for the file to
// finish writing — pipe() returns the destination stream immediately. So
// saveInfo() runs before the download is flushed to disk, and the final
// `return true` runs before any of the event handlers fire.
async function dl(url, path, data = null) {
await request.get({
url: url,
})
.on("error", async function (error) {
console.log(error);
// NOTE(review): returning false from an event handler does not make dl()
// return false — the value is discarded.
return false;
})
.on('response', async function (res) {
var fileExt = res.headers['content-type'].split('/')[1];
var fileFullPath = `${path}.${fileExt}`;
// Awaiting pipe() awaits a stream object, which resolves immediately.
await res.pipe(fs.createWriteStream(fileFullPath));
console.log("file downloaded");
if (data) {
await saveInfo(fileFullPath, data);
}
});
return true;
}
// Writes metadata tags to the downloaded file, then shuts down the exiftool
// batch process. NOTE(review): exiftool.end() here means saveInfo can only
// run once per process — a later call hits the ended BatchCluster, producing
// the "BatchCluster has ended, cannot enqueue" error quoted in the question.
async function saveInfo(filePath, data) {
await exiftool.write(filePath, {
Keywords: data.keywords,
Copyright: data.copyright,
});
console.log("Tags are saved");
exiftool.end();
}
OK, I found a way to do this. piping to streams is not very friendly to promises so I ended up doing some manual promise manipulations. I think better promise support for streams is coming to node.js as we already have some async iterators. Anyway, here's a way to make things work by watching for the right events on your streams:
// Answer: wrap the whole download in one promise and settle it from stream
// events instead of awaiting stream methods (which resolve immediately).
function dl(url, path, data = null) {
return new Promise((resolve, reject) => {
request.get({
url: url,
}).on("error", function (error) {
console.log(error);
reject(error);
}).on('response', function (res) {
let fileExt = res.headers['content-type'].split('/')[1];
let fileFullPath = `${path}.${fileExt}`;
let writeStream = fs.createWriteStream(fileFullPath);
// set up event handlers to monitor the writeStream for error or completion
// 'close' fires only after all downloaded bytes have been flushed, so
// saveInfo() sees the complete file on disk.
writeStream.on('error', reject).on('close', async () => {
if (data) {
try {
await saveInfo(fileFullPath, data);
} catch(e) {
reject(e);
return;
}
}
console.log("file downloaded");
resolve(true);
});
// send the response stream to our file
res.pipe(writeStream).on('error', reject);
});
});
}
// Writes Keywords/Copyright tags to the file via exiftool, logs success,
// then shuts down the exiftool batch process.
async function saveInfo(filePath, data) {
    const { keywords, copyright } = data;
    await exiftool.write(filePath, {
        Keywords: keywords,
        Copyright: copyright,
    });
    console.log("Tags are saved");
    exiftool.end();
}

What's wrong with this use of async await?

I am trying to download tracks via the soundcloud API, and then launch a callback once an indeterminant amount of tracks is downloaded. When I run the below code, I see "All done" being console logged before anything else, even though I intend for it to be the last thing... What am I doing wrong?
// Deps
import fs from 'fs'
import SC from 'node-soundcloud'
import request from 'request'
// Write mp3 function
// Downloads one track to ./data/temp. NOTE(review): several bugs as written —
// 'finish' is listened for on the request stream rather than the file write
// stream; the handler wraps resolve in an inner arrow `() => resolve(...)`
// that is never invoked, so this promise can never settle; and the reject is
// commented out, so download errors are silently swallowed.
function writeMP3(track) {
return new Promise((resolve, reject) => {
console.log('Starting download: ', track.title)
request.get(track.download_url)
.on('error', err => {
// reject('Download error: ', err)
})
.on('finish', () => {
() => resolve('Download complete')
})
.pipe(fs.createWriteStream(`./data/temp/${track.title}_${track.user.username}.mp3`))
})
}
// Thin async wrapper: resolves with writeMP3's result for the given track.
async function asyncTrackFetch(track) {
    const result = await writeMP3(track);
    return result;
}
// Array of promises to callback upon
const trackActions = []
SC.init({
id: 'MY_ID',
secret: 'MY_SECRET'
})
// SC.get is callback-based: trackActions is only populated inside this
// callback, some time AFTER the synchronous code below has already run.
SC.get('/tracks', (err, tracks) => {
if (err) {
throw new Error(err)
} else {
console.log('Tracks fetched: ', tracks.length)
tracks.map(track => {
if (track.downloadable) {
console.log('downloadable')
trackActions.push(asyncTrackFetch(track))
}
})
}
})
// Perform requests async
// BUG (the asker's problem): this runs immediately while trackActions is
// still empty, so Promise.all([]) resolves at once and 'All done' prints
// before any download starts.
Promise.all(trackActions).then(() => {
console.log('All done')
console.log(fs.readdirSync('./data/temp'))
})
Promise.all(trackActions) waits on whatever promises are in trackActions, but trackActions is empty at the time you make the call. You're only adding promises to the array after your SC.get callback gets called.
Try putting your Promise.all... block inside the SC.get callback like this:
// Fix: move Promise.all inside the callback so it runs only after every
// asyncTrackFetch promise has been pushed into trackActions.
SC.get('/tracks', (err, tracks) => {
if (err) {
// NOTE(review): thrown from an async callback — no caller up the stack can
// catch this, so it will crash the process (as the answer notes below).
throw new Error(err)
} else {
console.log('Tracks fetched: ', tracks.length)
tracks.map(track => {
if (track.downloadable) {
console.log('downloadable')
trackActions.push(asyncTrackFetch(track))
}
})
// Now trackActions is fully populated before we wait on it.
Promise.all(trackActions).then(() => {
console.log('All done')
console.log(fs.readdirSync('./data/temp'))
})
}
})
It's worth mentioning as well that your line throw new Error(err) will crash the program since there's nowhere for that error to be caught.
As Antonio Val mentioned, there are better ways to do this. If you promisify the node-soundcloud library then the last part of your code could look like this:
SC.get('/tracks').then(tracks => {
// No need for trackedActions array.
return Promise.all(tracks.filter(track => track.downloadable)
.map(track => asyncTrackFetch(track)))
}).then(fetchedTracks => {
console.log('All done fetching tracks', fetchedTracks)
}).catch(err => {
// Handle error.
})
Or inside an async function,
try {
const tracks = await SC.get('/tracks')
const fetchPromises = tracks
.filter(track => track.downloadable)
.map(track => asyncTrackFetch(track))
const fetchedTracks = await Promise.all(fetchPromises)
console('All done fetching tracks.', fetchedTracks)
} catch (err) {
// Handle error
}
I think the easiest way would be to move Promise.all after tracks.map loop finished.
A more elegant solution would be to promisify SC.get as well and use async await along all your code.
UPDATE:
Couldn't test it so not sure if it works, but it would be something like this:
import fs from 'fs'
import SC from 'node-soundcloud'
import request from 'request'
// Downloads one track to ./data/temp and resolves 'Download complete' once
// the file has been fully written; rejects on download or write errors.
//
// Fixes over the answer's untested version:
//  - 'finish' must be observed on the file WriteStream — the request stream
//    is readable and signals completion to the destination, not via its own
//    'finish' event;
//  - the old handler wrapped resolve in an inner arrow `() => resolve(...)`
//    that was never invoked, so the promise never settled;
//  - stream errors now reject instead of being silently swallowed.
function writeMP3(track) {
    return new Promise((resolve, reject) => {
        console.log('Starting download: ', track.title)
        const out = fs.createWriteStream(`./data/temp/${track.title}_${track.user.username}.mp3`)
        out.on('finish', () => resolve('Download complete'))
        out.on('error', reject)
        request.get(track.download_url)
            .on('error', reject)
            .pipe(out)
    })
}
// Promisified wrapper around the callback-based SC.get('/tracks'):
// resolves with the track list, rejects with the API error.
function getTracks() {
    return new Promise((resolve, reject) => {
        SC.get('/tracks', (err, tracks) => {
            if (err) {
                reject(err)
            } else {
                console.log('Tracks fetched: ', tracks.length)
                resolve(tracks)
            }
        })
    })
}
SC.init({
id: 'MY_ID',
secret: 'MY_SECRET'
})
With async await:
// Fetches the track list, then downloads each track one at a time,
// strictly in order (each await completes before the next begins).
async function start() {
    const tracks = await getTracks();
    let i = 0;
    while (i < tracks.length) {
        await writeMP3(tracks[i]);
        i += 1;
    }
}
start()
.then(() => {
console.log('All done')
console.log(fs.readdirSync('./data/temp'))
})
.catch((err) => {
// insert error handler here
})
If you just want to use Promises:
// Promise-only variant of the flow: fetch the tracks, download them all in
// parallel, then report. Fix: getTracks is a function and must be CALLED —
// the original `getTracks.then(...)` throws a TypeError because a function
// object has no .then method.
getTracks()
    .then((tracks) => {
        const promiseArray = tracks.map((track) => {
            return writeMP3(track)
        })
        return Promise.all(promiseArray)
    })
    .then(() => {
        console.log('All done')
        console.log(fs.readdirSync('./data/temp'))
    })
    .catch((err) => {
        // insert error handler here
    })

Resources