Send a file using axios and the PassThrough stream module in Node.js - node.js

Imports
const ffmpegPath = require('@ffmpeg-installer/ffmpeg').path;
const FfmpegCommand = require('fluent-ffmpeg');
const fs = require('fs');
const path = require('path');
const streamNode = require('stream');
const FormData = require('form-data');
const axios = require('axios').default;
Code
async function audios() {
  let stream = fs.createReadStream(path.join(__dirname, '../videos/video.mp4'));
  // note: writeStream and outputFile are declared but never used below
  let writeStream = fs.createWriteStream(path.join(__dirname, '../response/audios/' + +new Date() + '.wav'));
  let pass = new streamNode.PassThrough();
  let outputFile = path.join(__dirname, '../response/audios/' + +new Date() + '.wav');

  // input is the read stream created above (the original had `file`, which is not defined anywhere)
  const ffmpeg = FfmpegCommand(stream);
  ffmpeg
    .setFfmpegPath(ffmpegPath)
    .format('mp4')
    .toFormat('wav')
    .on('end', function () {
      console.log('file has been converted successfully');
    })
    .on('error', function (err, stdout, stderr) {
      console.log('an error happened: ' + err.message);
      console.log('ffmpeg stdout: ' + stdout);
      console.log('ffmpeg stderr: ' + stderr);
    })
    .on('end', function () {
      console.log('Processing finished !');
    })
    .stream(pass, { end: false });

  var bodyFormData = new FormData();
  bodyFormData.append('file', pass);
  let headers = bodyFormData.getHeaders();

  try {
    const jdata = await axios.post('http://localhost:4080/video', bodyFormData, {
      maxContentLength: Infinity,
      maxBodyLength: Infinity,
      validateStatus: (status) => true,
      headers: headers
    });
    console.log(jdata.data);
  } catch (error) {
    console.log("error", error.message);
  }
}
I am getting errors when sending the PassThrough stream through FormData.
The issue is that ffmpeg does not create a read stream for me, so I created a PassThrough stream from its output and passed that to FormData, but it is not working right now.
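For reference, here is a minimal sketch of one way this pipeline is commonly wired up (untested against your server; the endpoint URL and the 'file' field name come from the question, while the filename and contentType passed to form-data are assumptions). The key detail is letting ffmpeg end the PassThrough, which is the default behaviour; { end: false } keeps the multipart part open so the request never completes:

const ffmpegPath = require('@ffmpeg-installer/ffmpeg').path;
const FfmpegCommand = require('fluent-ffmpeg');
const { PassThrough } = require('stream');
const FormData = require('form-data');
const axios = require('axios').default;
const fs = require('fs');
const path = require('path');

async function convertAndUpload() {
  const input = fs.createReadStream(path.join(__dirname, '../videos/video.mp4'));
  const pass = new PassThrough();

  FfmpegCommand(input)
    .setFfmpegPath(ffmpegPath)
    .toFormat('wav')
    .on('error', (err) => console.error('ffmpeg error:', err.message))
    // Default pipe options end the target stream when the conversion finishes,
    // which is what tells form-data that this part is complete.
    .stream(pass);

  const form = new FormData();
  // filename/contentType are assumptions; they help servers that expect a file field.
  form.append('file', pass, { filename: 'audio.wav', contentType: 'audio/wav' });

  const res = await axios.post('http://localhost:4080/video', form, {
    headers: form.getHeaders(),
    maxContentLength: Infinity,
    maxBodyLength: Infinity,
  });
  console.log(res.data);
}

Because the PassThrough has no known length, the request is sent without a Content-Length header (chunked), which most servers accept for multipart uploads.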

Related

How can I use Readable() constructor to read from a text file and write it in a Writable() constructor?

I am trying to read from a text file using the new Readable() constructor, but I don't know how to make the code receive it as a file path. I would like the result to be displayed in the console using the new Writable() constructor.
const fs = require('fs');
const { Readable } = require('stream');
const { Writable } = require('stream');

const userData = __dirname + '/files/data.txt';

const rStream = new Readable({
  read() {}
});

const wStream = new Writable({
  write(chunk, encoding, callback) {
    console.log("Readable data: ", chunk.toString());
    callback();
  }
});

rStream.pipe(wStream);
rStream.push(userData);

rStream.on('close', () => wStream.end());
wStream.on('close', () => console.log('No more data in file...'));
rStream.on('error', (err) => console.log('Readable error!', err.message));
wStream.on('error', (err) => console.log('Writable error!', err.message));

rStream.destroy();
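A push-based Readable only emits whatever you push into it, so pushing userData just prints the path string. Below is one sketch (assuming the files/data.txt path from the question exists) that pushes the file's contents instead and signals end-of-stream with null:

const fs = require('fs');
const { Readable, Writable } = require('stream');

const userData = __dirname + '/files/data.txt';

const rStream = new Readable({ read() {} });
const wStream = new Writable({
  write(chunk, encoding, callback) {
    console.log('Readable data: ', chunk.toString());
    callback();
  }
});

rStream.pipe(wStream);

// Push the file's contents (not its path) into the Readable, then signal EOF with null.
fs.readFile(userData, (err, contents) => {
  if (err) {
    rStream.destroy(err);
    return;
  }
  rStream.push(contents);
  rStream.push(null);
});

In practice, fs.createReadStream(userData).pipe(wStream) achieves the same result without a hand-rolled Readable.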

How to download file from gitlab synchronously using NodeJS

I need to download a file from a private GitLab server and I need the method to be synchronous. This was my previous async code and it works fine because I was using promises. But I'm having trouble converting it to synchronous. The other posts I've seen on SO either ended up using async code or didn't have options for headers.
const https = require('https');
const fs = require('fs');

const gitlabUrl = 'https://gitlab.custom.private.com';
const gitlabAccessToken = 'xmyPrivateTokenx';
const gLfilePath = '/api/v4/projects/1234/repository/files/FolderOne%2Ftest.txt/raw?ref=main';
const gLfileName = 'test.txt';

function downloadFileFromGitlab(filePath, fileName) {
  return new Promise((resolve, reject) => {
    var options = {
      path: filePath,
      headers: {
        'PRIVATE-TOKEN': gitlabAccessToken
      }
    };
    var url = gitlabUrl;
    var file = fs.createWriteStream(fileName);
    const request = https.get(url, options, (response) => {
      response.pipe(file);
      file.on('finish', () => {
        file.close();
        resolve();
      });
      file.on('error', (err) => {
        file.close();
        reject(err);
      });
    });
    request.on('error', error => {
      throw console.error(error);
    });
  });
}

downloadFileFromGitlab(gLfilePath, gLfileName);
I was able to figure it out using curl
const child_process = require('child_process');
const fse = require('fs-extra');

function downloadFileFromGitlab(filePath, fileName) {
  let curlCommand = "curl -s " + gitlabUrl + filePath + " -H 'PRIVATE-TOKEN:" + gitlabAccessToken + "'";
  let file = child_process.execSync(curlCommand);
  fse.writeFileSync(fileName, file);
}
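If you would rather avoid building a shell string (a token or path containing quotes or spaces would break it), the same synchronous call can be made with execFileSync and an argument array. A sketch, reusing the gitlabUrl and gitlabAccessToken variables from the question:

const { execFileSync } = require('child_process');
const fs = require('fs');

function downloadFileFromGitlabSync(filePath, fileName) {
  // Arguments are passed straight to curl, so no shell quoting is needed.
  const file = execFileSync('curl', [
    '-s',
    '--fail', // make curl exit non-zero on HTTP errors instead of saving the error page
    gitlabUrl + filePath,
    '-H', 'PRIVATE-TOKEN: ' + gitlabAccessToken
  ]);
  fs.writeFileSync(fileName, file);
}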

How to return prematurely from lambda

I'm trying to trigger a Lambda when I drop a new file in a bucket.
This code is working, in that it detects the file and sends the info to my API.
I'm also trying to ignore every file not named "text.txt", but I can't figure out how to return from the Lambda inside that includes block.
const http = require('http');

exports.handler = async (event, context) => {
  return new Promise((resolve, reject) => {
    const srcRegion = event.Records[0].awsRegion;
    const srcEventTime = event.Records[0].eventTime;
    const srcEventName = event.Records[0].eventName;
    const srcIP = event.Records[0].requestParameters.sourceIPAddress;
    const srcBucket = event.Records[0].s3.bucket.name;
    const srcKey = decodeURIComponent(event.Records[0].s3.object.key.replace(/\+/g, " "));
    const srcETag = event.Records[0].s3.object.eTag;

    if (!srcKey.includes('text.txt')) {
      resolve('Not original file');
    }

    const data = JSON.stringify({
      lambda: {
        src_bucket: srcBucket,
        src_key: srcKey,
        src_region: srcRegion,
        src_event_time: srcEventTime,
        src_event_name: srcEventName,
        src_ip: srcIP,
        src_etag: srcETag
      }
    });

    const options = {
      host: '*****************',
      path: '*****************',
      port: '*****************',
      method: '*****************',
      headers: '*****************'
    };

    const req = http.request(options, (res) => {
      res.on('data', function(d) {
        console.log(d);
      });
    });

    req.on('error', (e) => {
      // reject(e.message);
      resolve('Error');
    });

    // send the request
    req.write(data);
    req.end();
    resolve('Success');
  });
};
Try this inside the includes block: context.done(undefined, 'Done.')
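Note that resolve() by itself does not stop the rest of the executor from running, so the HTTP request is still built and sent. A small sketch of an early return inside the includes block, as an alternative:

// Inside the Promise executor: returning right after resolve()
// skips the rest of the handler for non-matching keys.
if (!srcKey.includes('text.txt')) {
  return resolve('Not original file');
}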

How to generate a content-length

I'm using the node module form-data to send multipart/form-data to a server, but I'm not sure how to calculate the content-length correctly for the request. The form-data GitHub documentation seems a bit hand-wavy about it, and the server I'm sending to requires the Content-Length header to be included.
const FormData = require('form-data');
const fs = require('fs');
const https = require('https');

class Platform {
  _postKey(key, serviceName) {
    const options = this._getOptions();
    const keyName = (key.search(/(PUBLIC)/) !== -1) ? 'publicKey' : 'privateKey';
    const extension = (keyName == 'publicKey') ? 'pub' : '';
    const filePath = (extension.length > 0) ? __dirname + '/data/' + serviceName + '.' + extension : __dirname + '/data/' + serviceName;

    const file = fs.createWriteStream(filePath, { flags: 'wx' });
    file.write(key);
    file.end();

    const form = new FormData();
    form.append('file', fs.createReadStream(filePath));
    form.append('Name', keyName);
    form.append('MimeMajor', 'application');
    form.append('MimeMinor', 'x-pem-file');
    form.append('Extension', extension);
    form.append('FileClass', 'MFS::File');

    options.headers = form.getHeaders();
    options.headers.Authorization = 'Basic ' + Buffer.from(this.FILESTORE_USERNAME + ':' + this.FILESTORE_PASSWORD).toString('base64');
    // options.headers['Content-Length'] = key.length;
    console.log(options);

    return new Promise((resolve, reject) => {
      let post = https.request(options, (res) => {
        let data = '';
        if (res.statusCode < 200 || res.statusCode > 299) {
          reject(new Error('File Storage API returned a status code outside of acceptable range: ' + res.statusCode));
        } else {
          res.setEncoding('utf8');
          res.on('data', (chunk) => {
            data += chunk;
          });
          res.on('error', (err) => {
            reject(err);
          });
          res.on('end', () => {
            console.log(data);
            if (data) {
              resolve(JSON.parse(data));
            } else {
              resolve();
            }
          });
        }
      });
      post.on('error', reject);
      form.pipe(post);
    });
  }
}
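form-data can compute the total multipart body length for you, boundaries and appended file stream included, via form.getLength(). A sketch of how the header could be set before issuing the request, reusing the form and options objects from the code above (getLength is asynchronous because it stats the appended file stream):

form.getLength((err, length) => {
  if (err) {
    console.error('Could not determine form length:', err);
    return;
  }
  options.headers['Content-Length'] = length;

  const post = https.request(options, (res) => {
    // ... same response handling as in the question
  });
  post.on('error', console.error);
  form.pipe(post);
});

Note that key.length (the commented-out line above) only covers the file contents, not the multipart boundaries and field headers, which is why the server rejects it.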

How to merge two csv files rows in node js

I have 2 CSV files which have different data but the same header.
E.g. FILE1 data is
"CODE","NAME","SUB_USER","SCORE"
"01","TEST","1","5"
"01","TEST","2","6"
The other file, FILE2, has data like this
"CODE","NAME","SUB_USER","SCORE"
"02","TEST2","3","5"
"02","TEST2","4","6"
So I want to merge both files to create FILE3 with output like this
"CODE","NAME","SUB_USER","SCORE"
"01","TEST","1","5"
"01","TEST","2","6"
"02","TEST2","3","5"
"02","TEST2","4","6"
I have tried the code below
var express = require('express');
var router = express.Router();
var fs = require('fs');
var parse = require('csv-parse');
var async = require('async');
var csv = require("fast-csv");

var file1 = appRoot + '\\csvFiles\\details1.csv';
var file2 = appRoot + '\\csvFiles\\details2.csv';

var stream = fs.createReadStream(file1);
var stream2 = fs.createReadStream(file2);

var fileData1 = [],
  fileData2 = [];

csv
  .fromStream(stream)
  .on("data", function(data) {
    fileData1.push(data);
  })
  .on("end", function() {
    console.log("done");
  });

csv
  .fromStream(stream2)
  .on("data", function(data) {
    fileData2.push(data);
  })
  .on("end", function() {
    console.log("done");
  });

var fileData3 = fileData1.concat(fileData2);
csv.writeToPath("outputfile.csv", fileData3).on("finish", function() {
  console.log("END");
});
But it's not working and I don't know why. Please help me.
///**********************************************************************//
Thanks for the help, but I've got a new problem here.
After some changes the above code started working:
var file1 = appRoot + '\\csvFiles\\details1.csv';
var file2 = appRoot + '\\csvFiles\\idetails2.csv';

var stream = fs.createReadStream(file1);
var stream2 = fs.createReadStream(file2);

var fileData1 = [],
  fileData2 = [],
  i = 0;

csv.fromStream(stream).on("data", function(data) {
  fileData1.push(data);
}).on("end", function() {
  csv.fromStream(stream2).on("data", function(data) {
    if (i != 0) {
      fileData2.push(data);
    }
    i++;
  }).on("end", function() {
    console.log("done");
    var fileData3 = fileData1.concat(fileData2);
    csv.writeToPath("outputfile.csv", fileData3).on("finish", function() {
      res.send('Done merge');
    });
  });
});
But the problem is: what if the number of files increases? How will I handle that?
The biggest problem here is a quite common one: you do async tasks but you don't wait for them to finish before using their result.
You concat the file data before the "end" callback for each task has been called.
The solution is to wait for every callback to be called and THEN work with the data.
I created a small example using Promises:
const file1 = 'one.csv';
const file2 = 'two.csv';

const stream = fs.createReadStream(file1);
const stream2 = fs.createReadStream(file2);

const fileData1 = [];
const fileData2 = [];

const file1Promise = new Promise((resolve) => {
  csv
    .parseFile(file1, { headers: true })
    .on('data', function(data) {
      fileData1.push(data);
    })
    .on('end', function() {
      console.log('done');
      resolve();
    });
});

const file2Promise = new Promise((resolve) => {
  csv
    .parseFile(file2, { headers: true })
    .on('data', function(data) {
      fileData2.push(data);
    })
    .on('end', function() {
      console.log('done');
      resolve();
    });
});

Promise.all([
  file1Promise,
  file2Promise,
])
  .then(() => {
    const fileData3 = fileData1.concat(fileData2);
    console.log(fileData3);

    const csvStream = csv.format({ headers: true });
    const writableStream = fs.createWriteStream('outputfile.csv');

    writableStream.on('finish', function() {
      console.log('DONE!');
    });

    csvStream.pipe(writableStream);
    fileData3.forEach((data) => {
      csvStream.write(data);
    });
    csvStream.end();
  });
I created a function with which you can easily merge multiple files:
function concatCSVAndOutput(csvFilePaths, outputFilePath) {
  const promises = csvFilePaths.map((path) => {
    return new Promise((resolve) => {
      const dataArray = [];
      return csv
        .parseFile(path, { headers: true })
        .on('data', function(data) {
          dataArray.push(data);
        })
        .on('end', function() {
          resolve(dataArray);
        });
    });
  });

  return Promise.all(promises)
    .then((results) => {
      const csvStream = csv.format({ headers: true });
      const writableStream = fs.createWriteStream(outputFilePath);

      writableStream.on('finish', function() {
        console.log('DONE!');
      });

      csvStream.pipe(writableStream);
      results.forEach((result) => {
        result.forEach((data) => {
          csvStream.write(data);
        });
      });
      csvStream.end();
    });
}
example usage
concatCSVAndOutput(['one.csv', 'two.csv'], 'outputfile.csv')
.then(() => ...doStuff);
