When I read more than one file, the process gets stuck and hangs my PC, and I have to restart it.
With one file, or even five files, it works perfectly, but not with more than five.
If anyone knows what causes this issue, please let me know.
const ffmpegPath = require('@ffmpeg-installer/ffmpeg').path
const ffmpeg = require('fluent-ffmpeg')
ffmpeg.setFfmpegPath(ffmpegPath)
const testFolder = './videos/';
const fs = require('fs');

fs.readdir(testFolder, async (err, files) => {
  try {
    for (let i = 0; i < 10; i++) {
      if (files[i] != '1 Surah Fatiha Dr Israr Ahmed Urdu - 81of81.mp4') {
        let converter = await ffmpeg(`./videos/${files[i]}`)
        await converter.setStartTime('00:00:00').setDuration('30').output(`./outputfolder/${files[i]}`).on('end', function(err) {
          if (err) {
            console.log(`err during conversion \n ${err}`)
          } else {
            console.log(`Done ${files[i]}`);
          }
        }).on('error', function(err) {
          console.log(`error: ${files[i]}`, err)
        }).run()
      }
    }
  } catch (error) {
    console.log(error)
  }
});
Your computer is crashing because fluent-ffmpeg does not return a Promise, so the `await` has no effect: the loop kicks off ffmpeg for every file at once instead of waiting for the previous video to finish processing, consequently consuming all your processing power.
I created an asynchronous function called renderVideo and changed the for loop to a for...of loop, so each file is awaited before the next one starts.
The code looked like this:
const ffmpegPath = require('@ffmpeg-installer/ffmpeg').path
const ffmpeg = require('fluent-ffmpeg')
ffmpeg.setFfmpegPath(ffmpegPath)
const fs = require('fs');
const testFolder = './videos/';

fs.readdir(testFolder, async (err, videos) => {
  try {
    for (let video of videos) {
      if (video != '1 Surah Fatiha Dr Israr Ahmed Urdu - 81of81.mp4') {
        await renderVideo(video)
      }
    }
  } catch (error) {
    console.log(error)
  }

  function renderVideo(video) {
    return new Promise((resolve, reject) => {
      let converter = ffmpeg(`./videos/${video}`)
      converter
        .setStartTime('00:00:00')
        .setDuration('30')
        .output(`./outputfolder/${video}`)
        .on('end', (done) => {
          resolve(done)
        })
        .on('error', (err) => {
          reject(`The video ${video} return with error: ${err}`)
        })
        .run()
    })
  }
})
I also changed the names of some variables to make sense in the current code.
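If strictly sequential processing turns out to be too slow, a middle ground is to convert a small batch at a time, so only a couple of ffmpeg processes are alive at once. This is just a sketch, not part of the original answer; it assumes the renderVideo helper above is defined where this callback can reach it, and the batch size of 2 is an arbitrary choice:

const CONCURRENCY = 2

fs.readdir(testFolder, async (err, videos) => {
  const queue = videos.filter(
    video => video != '1 Surah Fatiha Dr Israr Ahmed Urdu - 81of81.mp4'
  )
  // Convert each small batch together, then move on to the next batch
  for (let i = 0; i < queue.length; i += CONCURRENCY) {
    await Promise.all(queue.slice(i, i + CONCURRENCY).map(video => renderVideo(video)))
  }
})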
I am trying to create a read stream, pipe the contents of a Word document's XML file to a write stream, and then read from that finished write stream. The problem I am running into is that on the first read-then-write-then-read sequence I get an [Error: ENOENT: no such file or directory, open] error. However, once the file has been created by that first attempt, the code runs smoothly and returns the pageCount value as expected.
I have tried to read from the completed file and then return the pageCount value inside the 'finish' event, but that just leaves me with an undefined return value. As such, I am not sure what to do.
Any help would be appreciated for this struggling junior.
Update, the following code worked for me.
console.log("unzipping");
const createWordOutput = new Promise((resolve, reject) => {
console.log("end is firing");
fs.createReadStream(data)
.pipe(unzipper.Parse())
.on("entry", async function (entry) {
const fileName = entry.path;
const type = entry.type;
const size = entry.vars.uncompressedSize;
//docProps has the meta data of the document, the word/document.xml is the actual document
if (fileName === "docProps/app.xml") {
// console.log(fileName);
entry.pipe(fs.createWriteStream("./wordOutput")).on("finish", () => {
console.log("finished writing the file");
console.log("resolving");
return resolve();
});
//once the piping is completed and the XML structure is fully writen a 'finish' event is emitted. This event accepts a callback. Here I put the cb and call readTheFile on the ./output file. This successfully reads the metadata of each file
} else {
entry.autodrain();
}
});
});
await createWordOutput;
const pageCount = await readWordFile("./wordOutput");
if (pageCount === undefined) {
console.log("PAGECOUNT IS UNDEFINED");
}
console.log("logging page count in unzip the file");
console.log(pageCount);
return pageCount;
};```
The error is coming from readWordFile, because it runs before the stream is done.
You need to move the read into the 'finish' handler.
Try this:
console.log("unzipping");
let pageCount = "";
fs.createReadStream(data)
.pipe(unzipper.Parse())
.on("entry", function(entry) {
const fileName = entry.path;
const type = entry.type;
const size = entry.vars.uncompressedSize;
if (fileName === "docProps/app.xml") {
// console.log(fileName);
entry.pipe(fs.createWriteStream("./wordOutput")).on("finish", async() => {
console.log("finished writing the file");
// finished writing, do everything here, and return
pageCount = await readWordFile("./wordOutput");
if (pageCount === undefined) {
console.log("PAGECOUNT IS UNDEFINED");
}
console.log("logging page count in unzip the file");
console.log(pageCount);
return pageCount;
});
} else {
entry.autodrain();
}
});
};
const readWordFile = (data) => {
  return new Promise(async (resolve, reject) => {
    console.log("this is the data that readWordFile received");
    console.log(data);
    console.log("reading word file");
    const XMLData = await fsp.readFile(data, {
      encoding: "utf-8"
    });
    console.log(XMLData);
    const pageCount = XMLData.split("<Pages>")
      .join(",")
      .split("</Pages>")
      .join(",")
      .split(",")[1];
    console.log("getting page count from read word file");
    console.log(pageCount);
    resolve(pageCount);
  });
};
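As a side note (my suggestion, not part of the original answer), the split/join chain that extracts the page count could also be written as a regular expression match, which may be easier to follow:

// Pull the number between <Pages> and </Pages>; undefined if the tag is missing
const match = XMLData.match(/<Pages>(\d+)<\/Pages>/);
const pageCount = match ? match[1] : undefined;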
Thanks in advance.
I'm creating an Electron-Create-React-App using electron-forge on Windows 10 Pro and am stuck with using async functions with execFile and readFile().
I want to achieve the following:-
main process - Receive a buffer of a screen capture (video) from the renderer process.
Create a temporary file and write the buffer to a .mp4 file.
Crop the video (based on x:y:width:height) using ffmpeg (installed in Electron as a binary).
Output = .mp4 file in temporary directory
Read the cropped .mp4 file using fs.readFile() (as a base64 encoded buffer)
Send the buffer to another renderer screen.
Delete temp file.
Q: I've managed to do most of it but cannot access the cropped .mp4 file in the temp directory.
I've tried the following:-
Electron main process
const fs = require('fs').promises
const path = require('path')

ipcMain.on("capture:buffer", async (video_object) => {
  const {x_pos, y_pos, window_width, window_height, buffer} = video_object
  try {
    const dir = await fs.mkdtemp(await fs.realpath(os.tmpdir()) + path.sep)
    const captured_video_file_path = path.join(dir, "screen_capture_video.mp4")

    // This works
    await fs.writeFile(captured_video_file_path, buffer, (error, stdout, stderr) => {
      if (error) {
        console.log(error)
      }
      console.log("Screen Capture File written")
    })

    // This also works
    execFile(`${ffmpeg.path}`,
      ['-i', `${captured_video_file_path}`, '-vf',
        `crop=${window_width}:${window_height}:${x_pos}:${y_pos}`,
        `${path.join(dir, 'cropped_video.mp4')}`],
      (error, stdout, stderr) => {
        if (error) {
          console.log(error.message)
        }
        if (stderr) {
          console.log(stderr)
        }
        console.log("Cropped File created")
      })

    // This code onwards doesn't work
    await fs.readFile(path.join(dir, "cropped_video.mp4"), 'base64', (error, data) => {
      if (error) {
        console.log(error)
      }
      // To renderer
      mainWindow.webContents.send("main:video_buffer", Buffer.from(data))
    })
  } catch (error) {
    console.log(error)
  } finally {
    fs.rmdir(dir, {recursive: true})
  }
})
When trying to read the file I get the following error:
[Error: ENOENT: no such file or directory, open 'C:\Users\XXXX\XXXXX\XXXXX\temp\temp_eYGMCR\cropped_video.mp4']
I've checked that the correct path exists with console.log.
I suspect it is a 'simple' issue with using async / execFile() properly but don't know exactly where I am making a silly mistake.
Any help would be appreciated.
Thanks.
That's because at the time fs.readFile is called, execFile may not be done yet.
Untested, but you may want to create a promise and wait for execFile to be completed before proceeding and see whether it works.
await new Promise(resolve => {
  execFile(`${ffmpeg.path}`,
    ['-i', `${captured_video_file_path}`, '-vf',
      `crop=${window_width}:${window_height}:${x_pos}:${y_pos}`,
      `${path.join(dir, 'cropped_video.mp4')}`],
    (error, stdout, stderr) => {
      if (error) {
        console.log(error.message)
      }
      if (stderr) {
        console.log(stderr)
      }
      console.log("Cropped File created")
      resolve() // this tells `await` it's ready to move on
    })
})
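Alternatively (a suggestion of mine, not from the original answer), child_process.execFile can be promisified with Node's util module, which gives the same wait-for-exit behaviour without a hand-rolled Promise:

const util = require('util')
const { execFile } = require('child_process')
const execFileAsync = util.promisify(execFile)

// Inside the async ipcMain handler: resolves once ffmpeg exits,
// rejects if it exits with a non-zero code.
const { stdout, stderr } = await execFileAsync(ffmpeg.path, [
  '-i', captured_video_file_path,
  '-vf', `crop=${window_width}:${window_height}:${x_pos}:${y_pos}`,
  path.join(dir, 'cropped_video.mp4')
])
if (stderr) console.log(stderr) // ffmpeg logs its progress to stderr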
Thanks for the pointers guys.
Here's the solution I found.
Another big problem with safely creating and removing temporary directories in Electron is that fs.rmdir() doesn't work when using Electron Forge / Builder, due to an issue with ASAR files.
(ASAR files are used to package Electron apps).
const fsPromises = require('fs').promises
const fs = require('fs')

ipcMain.on("capture:buffer", async (video_object) => {
  const {x_pos, y_pos, window_width, window_height, buffer} = video_object
  const temp_dir = await fsPromises.mkdtemp(await fsPromises.realpath(os.tmpdir()) + path.sep)
  const captured_video_file_path = path.join(temp_dir, "screen_capture_video.mp4")

  try {
    await fsPromises.writeFile(captured_video_file_path, buffer)
  } catch (error) {
    console.error(error)
  }

  // note no callback req'd as per jfriend's advice
  let child_object =
    execFile(`${ffmpeg.path}`,
      ['-i', `${captured_video_file_path}`, '-vf',
        `crop=${window_width}:${window_height}:${x_pos}:${y_pos}`,
        `${path.join(temp_dir, 'cropped_video.mp4')}`],
      (error, stdout, stderr) => {
        if (error) {
          console.log(error.message)
        }
        if (stderr) {
          console.log(stderr)
        }
        console.log("Cropped File created")
      })

  child_object.on("close", async () => {
    try {
      const video_buffer = await fsPromises.readFile(path.join(temp_dir, "cropped_video.mp4"))
      // To renderer
      mainWindow.webContents.send("main:video_buffer", video_buffer)
    } catch (error) {
      console.log(error)
    } finally {
      process.noAsar = true
      fs.rmdir(temp_dir, {recursive: true}, (error) => { if (error) { console.log(error) } })
      console.log("Done !!!")
      process.noAsar = false
    }
  })
})
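One small follow-up (my note, not from the original post): on current Node versions the recursive form of fs.rmdir is deprecated, so the cleanup could also be written with fsPromises.rm:

// Removes the temp directory and its contents; 'force' ignores a missing path
await fsPromises.rm(temp_dir, { recursive: true, force: true })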
My issue is this:
I have made a call to someone's web service. I get back the file name, extension, and the "bytes". The bytes actually come in as an array, and at position 0, "Bytes[0]", is the following string:
JVBERi0xLjYKJeLjz9MKMSAwIG9iago8PC9EZWNvZGVQYXJtczw8L0sgLTEvQ29sdW1ucyAyNTUwL1Jvd3MgMzMwMD4+L1R5cGUvWE9iamVjdC9CaXRzUGVyQ29tcG9uZW50IDEvU3VidHlwZS9JbWFnZS9XaWR0aCAyNTUwL0NvbG9yU3BhY2UvRGV2aWNlR3JheS9GaWx0ZXIvQ0NJVFRGYXhEZWNvZGUvTGVuZ3RoIDI4Mzc0L0hlaWdodCAzMzAwPj5zdHJlYW0K////////y2IZ+M8+zOPM/HzLhzkT1NAjCCoEY0CMJNAjCR4c8HigRhBAi1iZ0eGth61tHhraTFbraRaYgQ8zMFyGyGM8ZQZDI8MjMI8M6enp9W6enp+sadIMIIEYwy/ggU0wwgwjWzSBUmwWOt/rY63fraTVNu6C7R7pN6+v///20v6I70vdBaPjptK8HUQfX9/17D/TMet+l06T//0v3/S9v+r98V0nH///7Ff+Ed3/v16X9XX/S/KP0vSb//W88ksdW18lzBEJVpPXT0k9b71///...
The string example above has been cut off for readability.
How do I take that string and save it as a readable file?
In this case it's a PDF.
let pdfBytes = '{String shown above in example}'
You can use the Node.js File System Module to save the received buffer.
Assuming the encoding of your data is base64:
const fs = require('fs');

let pdfBytes = 'JVBERi0xLjYKJeLjz9...'
let writeStream = fs.createWriteStream('filename.pdf');
writeStream.write(pdfBytes, 'base64');
writeStream.on('finish', () => {
  console.log('saved');
});
writeStream.end();
I am using the fs module here to create and save the file. I use a lot of try/catch in case anything goes wrong. This example shows how you could pass the data to a function that then creates the file for you.
const util = require('util');
const fs = require('fs');

const fsOpen = util.promisify(fs.open);
const fsWriteFile = util.promisify(fs.writeFile);
const fsClose = util.promisify(fs.close);

function saveNewFile(path, data) {
  return new Promise(async (resolve, reject) => {
    let fileToCreate;

    // Open the file for writing
    try {
      fileToCreate = await fsOpen(path, 'wx');
    } catch (err) {
      reject('Could not create new file, it may already exist');
      return;
    }

    // Write the new data to the file
    try {
      await fsWriteFile(fileToCreate, data);
    } catch (err) {
      reject('Error writing to new file');
      return;
    }

    // Close the file
    try {
      await fsClose(fileToCreate);
    } catch (err) {
      reject('Error closing new file');
      return;
    }

    resolve('File created');
  });
};

// Data we want to use to create the file.
let pdfBytes = 'JVBERi0xLjYKJeLj...'

saveNewFile('./filename.pdf', pdfBytes);
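One caveat worth flagging (my observation, not part of the original answer): written this way the file will contain the base64 text itself. If the incoming data is a base64 string, as in the question, you would decode it into a Buffer before passing it in:

// Decode the base64 string so the PDF is written as binary data
saveNewFile('./filename.pdf', Buffer.from(pdfBytes, 'base64'));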
I've been using node-oracledb for a few months and I've managed to achieve what I have needed to so far.
I'm currently working on a search app that could potentially return about 2m rows of data from a single call. To ensure I don't get a disconnect from the browser and the server, I thought I would try queryStream so that there is a constant flow of data back to the client.
I implemented the queryStream example as-is, and this worked fine for a few hundred thousand rows. However, when the number of returned rows is greater than one million, Node runs out of memory. By logging and watching both client and server log events, I can see that the client is way behind the server in terms of rows sent and received. So, it looks like Node is falling over because it's buffering so much data.
It's worth noting that at this point, my queryStream implementation is within a req/res function called via Express.
To return the data, I do something like....
stream.on('data', function (data) {
  rowcount++;
  let obj = new myObjectConstructor(data);
  res.write(JSON.stringify(obj.getJson()));
});
I've been reading about how streams and pipe can help with flow, so what I'd like to do is pipe the results from the query to a) help with flow and b) be able to pipe the results through other functions before sending them back to the client.
E.g.
function getData(req, res) {
  var stream = myQueryStream(connection, query);

  stream
    .pipe(toSomeOtherFunction)
    .pipe(yetAnotherFunction)
    .pipe(res);
}
I've spent a few hours trying to find a solution or example that allows me to pipe results, but I'm stuck and need some help.
Apologies if I'm missing something obvious, but I'm still getting to grips with Node and especially streams.
Thanks in advance.
There's a bit of an impedance mismatch here. The queryStream API emits rows of JavaScript objects, but what you want to stream to the client is a JSON array. You basically have to add an open bracket to the beginning, a comma after each row, and a close bracket to the end.
I'll show you how to do this in a controller that uses the driver directly as you have done, instead of using separate database modules as I advocate in this series.
const oracledb = require('oracledb');

async function get(req, res, next) {
  try {
    const conn = await oracledb.getConnection();
    const stream = await conn.queryStream('select * from employees', [], {outFormat: oracledb.OBJECT});

    res.writeHead(200, {'Content-Type': 'application/json'});
    res.write('[');

    stream.on('data', (row) => {
      res.write(JSON.stringify(row));
      res.write(',');
    });

    stream.on('end', () => {
      res.end(']');
    });

    stream.on('close', async () => {
      try {
        await conn.close();
      } catch (err) {
        console.log(err);
      }
    });

    stream.on('error', async (err) => {
      next(err);

      try {
        await conn.close();
      } catch (err) {
        console.log(err);
      }
    });
  } catch (err) {
    next(err);
  }
}

module.exports.get = get;
Once you get the concepts, you can simplify things a bit with a reusable Transform class which allows you to use pipe in the controller logic:
const oracledb = require('oracledb');
const { Transform } = require('stream');

class ToJSONArray extends Transform {
  constructor() {
    super({objectMode: true});
    this.push('[');
  }

  _transform(row, encoding, callback) {
    if (this._prevRow) {
      this.push(JSON.stringify(this._prevRow));
      this.push(',');
    }

    this._prevRow = row;
    callback(null);
  }

  _flush(done) {
    if (this._prevRow) {
      this.push(JSON.stringify(this._prevRow));
    }

    this.push(']');
    delete this._prevRow;
    done();
  }
}

async function get(req, res, next) {
  try {
    const toJSONArray = new ToJSONArray();
    const conn = await oracledb.getConnection();
    const stream = await conn.queryStream('select * from employees', [], {outFormat: oracledb.OBJECT});

    res.writeHead(200, {'Content-Type': 'application/json'});

    stream.pipe(toJSONArray).pipe(res);

    stream.on('close', async () => {
      try {
        await conn.close();
      } catch (err) {
        console.log(err);
      }
    });

    stream.on('error', async (err) => {
      next(err);

      try {
        await conn.close();
      } catch (err) {
        console.log(err);
      }
    });
  } catch (err) {
    next(err);
  }
}

module.exports.get = get;
Rather than writing your own logic to create a JSON stream, you can use JSONStream to convert an object stream to (stringified) JSON before piping it to its destination (res, process.stdout, etc.). This saves the need to muck around with .on('data', ...) events.
In the example below, I've used pipeline from Node's stream module rather than the .pipe method: the effect is similar (with better error handling, I think). To get objects from oracledb.queryStream, you can specify the option {outFormat: oracledb.OUT_FORMAT_OBJECT} (docs). Then you can make arbitrary modifications to the stream of objects produced. This can be done using a transform stream, made perhaps using through2-map, or, if you need to drop or split rows, through2. Below, the stream is sent to process.stdout after being stringified as JSON, but you could equally send it to express's res.
require('dotenv').config() // config from .env file
const JSONStream = require('JSONStream')
const oracledb = require('oracledb')
const { pipeline } = require('stream')
const map = require('through2-map') // see https://www.npmjs.com/package/through2-map

oracledb.getConnection({
  user: process.env.DB_USER,
  password: process.env.DB_PASSWORD,
  connectString: process.env.CONNECT_STRING
}).then(connection => {
  pipeline(
    connection.queryStream(`
      select dual.*, 'test' as col1 from dual
      union select dual.*, :someboundvalue as col1 from dual
      `
      , {"someboundvalue": "test5"} // binds
      , {
        prefetchRows: 150,   // for tuning
        fetchArraySize: 150, // for tuning
        outFormat: oracledb.OUT_FORMAT_OBJECT
      }
    )
    , map.obj((row, index) => {
      row.arbitraryModification = index
      return row
    })
    , JSONStream.stringify() // false gives ndjson
    , process.stdout // or send to express's res
    , (err) => { if (err) console.error(err) }
  )
})
// [
// {"DUMMY":"X","COL1":"test","arbitraryModification":0}
// ,
// {"DUMMY":"X","COL1":"test5","arbitraryModification":1}
// ]
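For the case mentioned above where you need to drop rows rather than just map them, a through2 object-mode transform could sit in the same pipeline. This is a sketch of mine (not from the original answer), and the SALARY column is an assumed example field on the row objects:

const through2 = require('through2')

// Pass through only rows meeting a condition; calling the callback
// without pushing a chunk drops that row from the stream.
const dropLowSalaries = through2.obj(function (row, _enc, callback) {
  if (row.SALARY >= 10000) {
    this.push(row)
  }
  callback()
})

// Example: insert it between the query stream and JSONStream.stringify()
// pipeline(connection.queryStream(...), dropLowSalaries, JSONStream.stringify(), process.stdout, cb)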
I need to combine two streams in a parallel way. I want to use these streams to write some generated information into two files.
I have done the first stream (stream1). It looks like this:
'use strict';
var fs = require('fs');

var stream1 = fs.createWriteStream("random1.txt");

let ln = 1000000;
let j = 0;

function generateRandom() {
  var reslt = true;
  do {
    ln--;
    j++;
    if (ln === 0) {
      stream1.write(...write random number...);
      stream1.end();
    } else {
      reslt = stream1.write(...write random number...);
    }
  } while (ln > 0 && reslt);

  if (ln > 0) {
    stream1.once('drain', generateRandom);
  }
}

stream1.on('error', (err) => { throw err; });

var stream1Promise = new Promise((resolve, reject) => {
  try {
    generateRandom();
  } catch (error) {
    reject(error);
  }
});

stream1Promise
  .then(function (callback) {
    if (typeof callback === 'function') {
      callback();
    }
  }, function (error) {
    console.log("Error occurred: ", error);
    stream1.end();
  });
But I don't understand how to add another stream, stream2, so that it writes different random information to another file.
I've tried to use process.nextTick in the callback of stream1.write to switch between my streams, but every time I get a "FATAL ERROR: CALL_AND_RETRY_LAST Allocation failed - process out of memory" error.
Could anyone help me?
Thanks.
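One way this could be approached (a sketch of mine, not an answer from the original thread) is to turn the generator into a function that takes its own stream and counter and resolves a Promise when that stream finishes. The two writers can then run side by side with Promise.all, each yielding on 'drain' exactly as the single-stream version does, so neither one buffers unboundedly:

'use strict';
const fs = require('fs');

// Wrap the original drain-driven generator so it works for any stream.
function writeRandomNumbers(stream, count) {
  return new Promise((resolve, reject) => {
    stream.on('error', reject);
    function generate() {
      let ok = true;
      do {
        count--;
        const line = `${Math.random()}\n`; // stand-in for "...write random number..."
        if (count === 0) {
          stream.end(line, resolve); // write the last chunk, resolve when finished
        } else {
          ok = stream.write(line);
        }
      } while (count > 0 && ok);
      if (count > 0) {
        stream.once('drain', generate); // resume once the internal buffer empties
      }
    }
    generate();
  });
}

// Both generators run in parallel; each pauses on 'drain', so the two
// writers interleave without exhausting memory.
Promise.all([
  writeRandomNumbers(fs.createWriteStream('random1.txt'), 1000000),
  writeRandomNumbers(fs.createWriteStream('random2.txt'), 1000000),
]).then(() => console.log('both files written'))
  .catch((err) => console.error('Error occurred:', err));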