Node.js: Error while reading files using fs.createReadStream

I am trying to add a feature to my web app where you can upload CSV files and insert their data into PostgreSQL. I have created my endpoint and written some code:
const router = require('express').Router()
const uploadMid = require('./fileUpMid')
const pool = require('./db')
const fs = require("fs");
const fastcsv = require("fast-csv");

const upload = async (req, res) => {
  if (req.files === null) {
    return res.status(400).json({ msg: 'No file uploaded' });
  }
  const file = req.files.file;
  file.mv(`${__dirname}/uploads/${file.name}`, err => {
    if (err) {
      console.error(err);
      return res.status(500).send(err);
    }
    res.json({ fileName: file.name, filePath: `/uploads/${file.name}` });
  });
  let persons = [];
  let path = __dirname + "/uploads/" + file.name;
  fs.createReadStream(path)
    .pipe(fastcsv.parse({ headers: true }))
    .on("error", (error) => {
      console.error(error.message);
    })
    .on("data", (row) => {
      persons.push(row);
    })
    .on("end", () => {
      // remove header
      persons.shift();
      const q = "some query here";
      pool.connect((err, client, done) => {
        if (err) throw err;
        try {
          persons.forEach(row => {
            console.log(typeof row)
            var obj = JSON.parse(JSON.stringify(row));
            var values = Object.keys(obj).map(function (key) { return obj[key]; });
            console.log(values)
            client.query(q, values, (err, res) => {
              if (err) {
                console.log(err.stack);
              } else {
                console.log("inserted " + res.rowCount + " row:", row);
              }
            });
          });
        } finally {
          done();
        }
      });
    })
  // fs.unlinkSync(path)
}

router.post('/file', uploadMid.single("file"), upload)

module.exports = router
Everything seemed to work fine, but when I try to upload a second file I always get an error in the terminal:
Error: ENOENT: no such file or directory, open 'filename here with full path'
Emitted 'error' event on ReadStream instance at:
    at internal/fs/streams.js:126:14
    at FSReqCallback.oncomplete (fs.js:180:23) {
  errno: -4058,
  code: 'ENOENT',
  syscall: 'open',
  path: 'filename here with full path'
}
I know this is neither a safe nor a secure way to upload data, but this app is only meant to run locally. Even when the first file is uploaded successfully, the DevTools console logs
GET http://localhost:3000/uploads/filename [HTTP/1.1 404 Not Found 8ms]
But the file is created, with all its content, in the uploads directory.
Any tips on what to look for?
Thank you in advance :)

Judging by the error (Error: ENOENT: no such file or directory, open 'filename here with full path'), the path is likely being built incorrectly. The suggested way of defining paths in Node.js apps is to use the path module:
const path = require('path');

// Inside the `upload` middleware
const filePath = path.join(__dirname, 'uploads', file.name);
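Beyond path construction, note that file.mv is asynchronous, so in the code above fs.createReadStream can fire before the file has finished moving, which would also produce ENOENT. A minimal sketch (assuming the same express-fileupload file.mv API as in the question) that only starts reading once the move has completed:

const fs = require('fs');
const path = require('path');
const fastcsv = require('fast-csv');

const upload = (req, res) => {
  if (!req.files || !req.files.file) {
    return res.status(400).json({ msg: 'No file uploaded' });
  }
  const file = req.files.file;
  const filePath = path.join(__dirname, 'uploads', file.name);
  const persons = [];

  file.mv(filePath, err => {
    if (err) {
      return res.status(500).send(err);
    }
    // Only open the file once the move has finished.
    fs.createReadStream(filePath)
      .pipe(fastcsv.parse({ headers: true }))
      .on('error', error => console.error(error.message))
      .on('data', row => persons.push(row))
      .on('end', () => {
        // ...insert `persons` into Postgres as in the original code...
        res.json({ fileName: file.name, filePath: `/uploads/${file.name}` });
      });
  });
};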

Related

Nodejs: ENOENT on Twitter Image Bot

I'm a beginner-level programmer, working from some online guides plus my starter knowledge. I'm attempting to create a bot that posts to Twitter every hour. I keep getting the error ENOENT, which, as I understand it, means that it can't find the directory. Here is the error I'm getting (all I censored was personal information; it's not censored in the actual code or error):
opening an image...
15.jpg
internal/fs/utils.js:269
    throw err;
    ^

Error: ENOENT: no such file or directory, open 'C:\Users\####\Desktop\####\bot\images15.jpg'
    at Object.openSync (fs.js:462:3)
    at Object.readFileSync (fs.js:364:35)
    at C:\Users\####\Desktop\####\bot\server.js:32:29
    at FSReqCallback.oncomplete (fs.js:156:23) {
  errno: -4058,
  syscall: 'open',
  code: 'ENOENT',
  path: 'C:\\Users\\#####\\Desktop\\####\\bot\\images15.jpg'
}
It looks like it's changing the name of the file to have "images" in front. I can't figure out why it's doing this, though. Does anyone know what I'm doing wrong? Here's my code:
const fs = require('fs'),
  path = require('path'),
  Twit = require('twit'),
  config = require(path.join(__dirname, 'config.js')),
  images = require(path.join(__dirname, 'images.js'));

const T = new Twit(config);

function randomFromArray(images) {
  return images[Math.floor(Math.random() * images.length)];
}

function tweetRandomImage() {
  fs.readdir(__dirname + '/images', function (err, files) {
    if (err) {
      console.log('error:', err);
    }
    else {
      let images = [];
      files.forEach(function (f) {
        images.push(f);
      });
      console.log('opening an image...');
      const image = randomFromArray(images);
      console.log(image);
      const imagePath = path.join(__dirname, '/images' + image);
      const imageSource = image.source
      b64content = fs.readFileSync(imagePath, { encoding: 'base64' });
      console.log('uploading an image...');
      T.post('media/upload', { media_data: b64content }, function (err, data, response) {
        if (err) {
          console.log('error:', err);
        }
        else {
          console.log('image uploaded, now tweeting it...');
          T.post('statuses/update', {
            media_ids: new Array(data.media_id_string)
          },
          function (err, data, response) {
            if (err) {
              console.log('error:', err);
            }
            else {
              console.log('posted an image!');
            }
          });
        }
      });
    }
  });
}

setInterval(function () {
  tweetRandomImage();
}, 10000);
Any help would be appreciated, thank you!
Lesson of the day: always watch the syntax!

The only change needed is the line that builds imagePath. The problem was that the folder and the file name were concatenated as one segment, unseparated by "/" or "\\", so the computer (and I, reading it at first) looked for a file called "images15.jpg" instead of "images/15.jpg":

// Before: path.join(__dirname, '/images' + image) resolves to '...\bot\images15.jpg'
const imagePath = path.join(__dirname, '/images' + image);

// After: the separator makes it '...\bot\images\15.jpg'
const imagePath = path.join(__dirname, '/images/' + image);

Everything else in the code stays exactly the same.
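As a side note, path.join is most reliable when each folder and the file name are passed as separate arguments, since it then inserts the platform-appropriate separator for you:

const imagePath = path.join(__dirname, 'images', image);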

Chunk uploading with Filepond to NodeJS running Express

I need to upload large files (2GB+) using Filepond to a NodeJS server running Express. These files need to be streamed (because of their size) to two locations: a local folder and a remote folder.
My research led me to a solution using tus-node-server: https://github.com/pqina/filepond/issues/48
I've implemented the solution as seen below, but I don't know how to:
1. Change the name of the file on upload (it uses a UID, I'd like to use the filename from the "metadata" property)
2. Execute a function on the server during or after the upload.
Any help?
Client.js
Filepond.setOptions({
  server: {
    process: (fieldName, file, metadata, load, error, progress, abort) => {
      var upload = new tus.Upload(file, {
        endpoint: `/upload`,
        retryDelays: [0, 1000, 3000, 5000],
        metadata: {
          filename: file.name,
          filetype: file.type
        },
        onError: function(err) {
          console.log("Failed because: " + err);
          error(err)
        },
        onProgress: function(bytesUploaded, bytesTotal) {
          progress(true, bytesUploaded, bytesTotal)
        },
        onSuccess: function() {
          load(upload.url.split('/').pop())
        }
      })
      // Start the upload
      upload.start();
      return {
        abort: () => {
          upload.abort()
          abort()
        }
      }
    }
  }
})
Server.js
const express = require('express');
const app = express();
const uploadApp = express();
const Client = require('ftp');
const path = require('path');
const tus = require('tus-node-server');

var tusServer = new tus.Server();
tusServer.datastore = new tus.FileStore({
  path: '/public/files'
});

uploadApp.all('*', tusServer.handle.bind(tusServer));
app.use('/public', express.static(path.join(__dirname, "/public")));
app.use('/upload', uploadApp);
app.listen(3000);
Before trying this method, I was using app.post to receive the upload and perform further actions:
app.post('/upload', (req, res, next) => {
  let uploadFile = req.files.file,
    fileName = req.files.file.name,
    ftp = new Client();
  uploadFile.mv(`${__dirname}/public/files/${fileName}`, err => {
    if (err) { return res.status(500).send(err) };
    res.json({ file: `public/${req.files.file.name}` })
  });
  ftp.on('ready', () => {
    ftp.put(fileName, `/ftppath/${fileName}`, err => {
      if (err) throw err;
      ftp.end();
    })
  })
  ftp.connect(ftpOptions)
});
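One possible direction for both questions, sketched under the assumption that the tus-node-server version in use exposes FileStore's namingFunction option and the EVENTS.EVENT_UPLOAD_COMPLETE event (both appear in some 0.x releases, but check your version's README before relying on either):

const tus = require('tus-node-server');
const EVENTS = require('tus-node-server').EVENTS;

const tusServer = new tus.Server();
tusServer.datastore = new tus.FileStore({
  path: '/public/files',
  // 1. Assumed option: derive the stored name from the request instead of a UID.
  //    The client's filename sits base64-encoded in the Upload-Metadata header.
  namingFunction: (req) => {
    const meta = req.headers['upload-metadata'] || '';
    const pair = meta.split(',').find(p => p.trim().startsWith('filename'));
    return pair
      ? Buffer.from(pair.trim().split(' ')[1], 'base64').toString('utf8')
      : Date.now().toString();
  }
});

// 2. Assumed event: run server-side work once an upload finishes.
tusServer.on(EVENTS.EVENT_UPLOAD_COMPLETE, (event) => {
  console.log('Upload complete:', event.file.id);
  // e.g. stream the finished file to the remote FTP folder here
});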

Cannot find file with ffmpeg

I am programming a server in Node.js to process an audio file.
I save the file with this code:
app.use(upload());

app.get("/", function (req, res) {
  res.sendFile("index.html")
});

// Uploaded files will be saved
app.post("/", function (req, res) {
  if (req.files) {
    var file = req.files.filename,
      filename = file.name;
    file.mv("./upload/" + filename, function (err) {
      if (err) {
        console.log("err");
      }
      else {
        res.send("Done");
        console.log(filename);
        convert(filename);
      }
    })
  }
})
I save the file in my upload directory. Everything works great there, but now comes the problem.
I convert the file with ffmpeg:
function convert(filename) {
  var realName = "./upload/" + filename;
  var newName = "./output/" + filename.substr(0, filename.length - 4) + ".flac";
  ffmpegN = ffmpeg(realName);
  ffmpegN.audioBitrate(16).audioFrequency(16000).withAudioCodec('flac').format("flac").save(outputFile);
  ffmpeg(realName)
    .toFormat('flac')
    .on('error', (err) => {
      console.log('An error occurred: ' + err.message);
    })
    .on('progress', (progress) => {
      // console.log(JSON.stringify(progress));
      console.log('Processing: ' + progress.targetSize + ' KB converted');
    })
    .on('end', () => {
      console.log('Processing finished !');
    })
    .save(newName); // path where you want to save your file
  SpeechToText(newName);
}
Then I want to pass this file to the Google Speech API, but I get an error that the file is not found.
Here is the code for the Speech API:
function SpeechToText(path) {
  // The name of the audio file to transcribe
  const fileName = path;

  // Reads a local audio file and converts it to base64
  const file = fs.readFileSync(fileName);
  const audioBytes = file.toString('base64');

  // The audio file's encoding, sample rate in hertz, and BCP-47 language code
  const audio = {
    content: audioBytes,
  };
  const config = {
    encoding: 'FLAC',
    languageCode: 'de-DE',
  };
  const request = {
    audio: audio,
    config: config,
  };

  // Detects speech in the audio file
  client
    .recognize(request)
    .then(data => {
      const response = data[0];
      const transcription = response.results
        .map(result => result.alternatives[0].transcript)
        .join('\n');
      console.log(`Transcription: ${transcription}`);
    })
    .catch(err => {
      console.error('ERROR:', err);
    });
}
The thing is that if I upload a file, everything works. But if I try it a second time, the error occurs:
fs.js:646
    return binding.open(pathModule._makeLong(path), stringToFlags(flags), mode);
                   ^

Error: ENOENT: no such file or directory, open 'C:\Users\paulu\desktop\API\output\sample.flac'
    at Object.fs.openSync (fs.js:646:18)
    at Object.fs.readFileSync (fs.js:551:33)
    at SpeechToText (C:\Users\paulu\desktop\API\server.js:68:21)
    at convert (C:\Users\paulu\desktop\API\server.js:121:5)
    at C:\Users\paulu\desktop\API\server.js:50:17
    at doMove (C:\Users\paulu\desktop\API\node_modules\express-fileupload\lib\index.js:152:17)
    at WriteStream.<anonymous> (C:\Users\paulu\desktop\API\node_modules\express-fileupload\lib\index.js:182:15)
    at emitNone (events.js:106:13)
    at WriteStream.emit (events.js:208:7)
    at fs.close (fs.js:2094:12)
Thank you for all answers !
I fixed it.
The problem was that the ffmpeg process had not finished before SpeechToText tried to read the output file. Here is the solution:
function convert(filename) {
  var realName = "./upload/" + filename;
  var newName = "./output/" + filename.substr(0, filename.length - 4) + ".flac";
  ffmpeg(realName)
    .toFormat('flac')
    .on('error', (err) => {
      console.log('An error occurred: ' + err.message);
    })
    .on('progress', (progress) => {
      // console.log(JSON.stringify(progress));
      console.log('Processing: ' + progress.targetSize + ' KB converted');
    })
    .on('end', () => {
      console.log('Processing finished!');
      SpeechToText(newName); // Just move this statement to here!
    })
    .save(newName); // path where you want to save your file
}
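The same idea generalizes: any work that depends on the converted file belongs after the 'end' event. A minimal sketch (assuming the same fluent-ffmpeg-style API as above) that wraps the conversion in a Promise, so callers can await it instead of hard-coding the follow-up inside the handler:

function convertToFlac(realName, newName) {
  return new Promise((resolve, reject) => {
    ffmpeg(realName)
      .toFormat('flac')
      .on('error', reject)
      // Resolve only once ffmpeg has fully written the output file.
      .on('end', () => resolve(newName))
      .save(newName);
  });
}

// Usage: the file is guaranteed to exist when SpeechToText runs.
convertToFlac("./upload/sample.wav", "./output/sample.flac")
  .then(SpeechToText)
  .catch(err => console.log('An error occurred: ' + err.message));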

Uploading a buffer to Google Cloud Storage

I'm trying to save a Buffer (of a file uploaded from a form) to Google Cloud storage, but it seems like the Google Node SDK only allows files with a given path to be uploaded (Read / Write streams).
This is what I have used for AWS (S3). Is there anything similar in the Google Node SDK?
var fileContents = new Buffer('buffer');
var params = {
  Bucket: // bucket name
  Key: // file name
  ContentType: // Set mimetype
  Body: fileContents
};
s3.putObject(params, function(err, data) {
  // Do something
});
The only way that I have found to do it so far is to write the buffer to disk, upload the file using the SDK (specifying the path to the new file), and then delete the file once it's uploaded successfully. The downside is that the whole process is significantly slower, to the point where using Google storage seems unfeasible. Is there any workaround / way to upload a buffer?
.save to save the day! Below is some code where I save a "pdf" that I created.
https://googleapis.dev/nodejs/storage/latest/File.html#save
const { Storage } = require("@google-cloud/storage");

const gc = new Storage({
  keyFilename: path.join(__dirname, "./path to your service account .json"),
  projectId: "your project id",
});

const file = gc.bucket(bucketName).file("tester.pdf");
file.save(pdf, (err) => {
  if (!err) {
    console.log("cool");
  } else {
    console.log("error " + err);
  }
});
This is actually easy:
const stream = require('stream'); // `bucket` and `callback` come from the surrounding code

let remotePath = 'some/key/to/store.json';
let localReadStream = new stream.PassThrough();
localReadStream.end(JSON.stringify(someObject, null, ' '));

let remoteWriteStream = bucket.file(remotePath).createWriteStream({
  metadata: {
    contentType: 'application/json'
  }
});

localReadStream.pipe(remoteWriteStream)
  .on('error', err => {
    return callback(err);
  })
  .on('finish', () => {
    return callback();
  });
We have an issue about supporting this more easily: https://github.com/GoogleCloudPlatform/gcloud-node/issues/1179
But for now, you can try:
file.createWriteStream()
  .on('error', function(err) {})
  .on('finish', function() {})
  .end(fileContents);
The following snippet is from a Google example. The example assumes you have used multer, or something similar, and can access the file at req.file. You can stream the file to Cloud Storage using middleware that resembles the following:
function sendUploadToGCS (req, res, next) {
  if (!req.file) {
    return next();
  }

  const gcsname = Date.now() + req.file.originalname;
  const file = bucket.file(gcsname);

  const stream = file.createWriteStream({
    metadata: {
      contentType: req.file.mimetype
    },
    resumable: false
  });

  stream.on('error', (err) => {
    req.file.cloudStorageError = err;
    next(err);
  });

  stream.on('finish', () => {
    req.file.cloudStorageObject = gcsname;
    file.makePublic().then(() => {
      req.file.cloudStoragePublicUrl = getPublicUrl(gcsname);
      next();
    });
  });

  stream.end(req.file.buffer);
}
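For context, a sketch of how such middleware is typically wired up with multer's memory storage so that req.file.buffer is populated (the route name here is illustrative, not from the original answer):

const Multer = require('multer');
const multer = Multer({ storage: Multer.memoryStorage() });

// Hypothetical route wiring; `app`, `bucket`, and `getPublicUrl` come from the surrounding code.
app.post('/upload', multer.single('file'), sendUploadToGCS, (req, res) => {
  res.json({ url: req.file.cloudStoragePublicUrl });
});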
I have this approach working for me:
const destFileName = `someFolder/${file.name}`;
const fileCloud = this.storage.bucket(bucketName).file(destFileName);
fileCloud.save(file.buffer, {
  contentType: file.mimetype
}, (err) => {
  if (err) {
    console.log("error");
  }
});
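If no callback is passed, file.save returns a Promise, so the same upload can be awaited (a small sketch, assuming the same storage, bucketName, and file variables as above):

async function uploadBuffer(storage, bucketName, file) {
  const destFileName = `someFolder/${file.name}`;
  // save() resolves once the buffer has been written to the bucket.
  await storage.bucket(bucketName).file(destFileName).save(file.buffer, {
    contentType: file.mimetype,
  });
  return destFileName;
}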

Downloading multiple files from an FTP site using Node.js

I am trying to download each file on the FTP server from the root folder.
What I did is this:
ftpClient.ls(".", function(err, res) {
  res.forEach(function(file) {
    console.log(file.name);
    ftpClient.get("./" + file.name, 'D:/styleinc/ftp/' + file.name, function(hadErr) {
      if (hadErr)
        console.log(hadErr);
      else
        console.log('File copied successfully!');
    });
  });
});
But on running it, I get this error:
{ [Error: connect ECONNREFUSED]
  code: 'ECONNREFUSED',
  errno: 'ECONNREFUSED',
  syscall: 'connect',
  msg: 'Probably trying a PASV operation while one is in progress'
}
I have already successfully logged in and authenticated myself on the FTP site.
I don't know what to do; please guide me.
This is the chunk of code I used with async.mapLimit to make it work with only one concurrent connection:
'use strict'

var JSFtp = require('jsftp');
var inspect = require('util').inspect;
var fs = require('fs');
var async = require('async');

var ftp = new JSFtp(require('./util/ftp'))
var local = 'EDI/mohawk/OUTBOX/'
var remote = 'OUTBOX'

var gatherFiles = function(dir){
  return new Promise(function(resolve, reject){
    ftp.ls(dir + '/*', function(err, res) {
      if (err) reject(err)
      console.log(res)
      var files = [];
      res.forEach(function(file){
        files.push(file.name)
      });
      resolve(files)
    })
  })
}

gatherFiles(remote).then(function(files){
  console.log(files)
  async.mapLimit(files, 1, function(file, callback){
    console.log('attempting: ' + remote + file + '->' + local + file)
    ftp.get(remote + '/' + file, local + '/' + file, function(err){
      if(err){
        console.log('Error getting ' + file)
        callback(err)
      }else{
        console.log('Got ' + file)
        callback()
      }
    })
  }, function(err, res){
    if(err){
      console.log(err)
    }
    console.log('updates complete' + res)
  })
})
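The key point is the concurrency limit of 1: the error message itself ("Probably trying a PASV operation while one is in progress") hints that the client can only run one PASV transfer at a time. On newer Node versions the same sequential behaviour can be had without async, e.g. with util.promisify and a for...of loop (a sketch assuming the same jsftp client as above):

const { promisify } = require('util');

const ls = promisify(ftp.ls.bind(ftp));
const get = promisify(ftp.get.bind(ftp));

async function downloadAll(remote, local) {
  const files = await ls(remote + '/*');
  // Awaiting each get() keeps a single transfer in flight at a time.
  for (const file of files) {
    await get(remote + '/' + file.name, local + '/' + file.name);
    console.log('Got ' + file.name);
  }
}

downloadAll('OUTBOX', 'EDI/mohawk/OUTBOX').catch(console.error);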
