I am trying to zip a single file using the Archiver npm package, located at https://www.npmjs.com/package/archiver
I have been able to use the following to zip a directory:
archive.directory(folderName, false);
But when I try either of the following, nothing seems to happen (i.e. no zip is generated and the file never finishes zipping):
archive.file(folderName, { name: 'file4.txt' });
archive.file(fs.createReadStream(path.resolve(file)), {name: 'File' + singleFileCheck});
Has anyone run into this issue before? Please let me know what I am doing wrong. Thank you in advance!
edit:
module.exports = async function zipper(user, pass, orgid, s4url, apiToken, newOrgName, file) {
    const s4 = require('../testcli/s4');
    const fs = require('fs');
    const archiver = require('archiver');
    const path = require('path');
    var parentDirect;
    if (file == "./") {
        parentDirect = "..";
    } else {
        parentDirect = path.basename(path.dirname(file));
    }
    const newZipFile = parentDirect + '/s4.zip';
    var folderName = file;
    //Checks for existence of infinite loop
    if (path.resolve(parentDirect).length > path.resolve(folderName).length) {
        console.log(folderName.search(parentDirect));
        console.error('\x1b[36m%s\x1b[0m', 'ERROR!!!! : Please adjust where your console is pointed, this will result in an infinite loop. Exiting.');
        return;
    }
    var P = ['\\', '|', '/', '-'];
    var x = 0;
    var output = fs.createWriteStream(newZipFile);
    var archive = archiver('zip');
    scansdisplayinterval = setInterval(function () {
        twrl();
    }, 250);
    // listen for all archive data to be written
    output.on('close', function () {
        console.log('\x1b[36m%s\x1b[0m', archive.pointer() + ' total bytes');
        console.log('\x1b[36m%s\x1b[0m', 'archiver has been finalized and the output file descriptor has closed.');
        try {
            process.stdout.write(newZipFile);
            clearInterval(scansdisplayinterval);
            s4(user, pass, newZipFile, orgid, s4url, apiToken, newOrgName);
        } catch (e) {
            console.log(e);
        }
    });
    // good practice to catch this error explicitly
    archive.on('error', function (err) {
        throw err;
    });
    // good practice to catch warnings (ie stat failures and other non-blocking errors)
    archive.on('warning', function (err) {
        throw err;
    });
    // This event is fired when the data source is drained no matter what was the data source.
    output.on('end', function () {
        console.log('\x1b[36m%s\x1b[0m', 'Data has been drained');
    });
    // pipe archive data to the file
    archive.pipe(output);
    //Checks -f for file extension
    let singleFileCheck = path.extname(file);
    //If file has extension
    if (singleFileCheck.length <= 4 && singleFileCheck != '') {
        //Append single file
        console.log('singleFile', path.resolve(file));
        archive.file(path.resolve(file), { name: 'file4.txt' });
        // archive.append(fs.createReadStream(path.resolve(file)), {name: 'File' + singleFileCheck});
        //Else = folder
    } else {
        // append files from a sub-directory, putting its contents at the root of archive
        archive.directory(folderName, false);
    }
    // archive.directory(folderName, false);
    console.log('\x1b[36m%s\x1b[0m', "Zipping: " + folderName + " To: " + newZipFile);
    console.log('\x1b[36m%s\x1b[0m', "Zipping To: " + path.resolve(newZipFile));
    archive.finalize();
    function twrl() {
        process.stdout.write('\rZipping Folder ... ' + P[x++]);
        x &= 3;
    }
    return (newZipFile);
};
The issue came from how I was defining the parentDirect var.
Solution:
let singleFileCheck = path.extname(file);
if (file == "./" || (singleFileCheck.length <= 4 && singleFileCheck != '')) {
    parentDirect = "..";
} else {
    parentDirect = path.basename(path.dirname(file));
}
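For reference, here is a stripped-down sketch of zipping a single file with archiver, independent of the function above (the input and output paths in the usage line are placeholders):
const fs = require('fs');
const path = require('path');
const archiver = require('archiver');

// Zip one file; resolves with the number of bytes written to the archive.
function zipSingleFile(inputFile, outputZip) {
    return new Promise((resolve, reject) => {
        const output = fs.createWriteStream(outputZip);
        const archive = archiver('zip');

        output.on('close', () => resolve(archive.pointer()));
        archive.on('error', reject);

        archive.pipe(output);
        // "name" is the entry name inside the zip, not the source path
        archive.file(path.resolve(inputFile), { name: path.basename(inputFile) });
        archive.finalize();
    });
}

// usage (placeholder paths):
// zipSingleFile('./notes.txt', './notes.zip').then(bytes => console.log(bytes + ' total bytes'));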
I have Vue (axios) making a GET call to an Express route, which triggers a child_process of ffmpeg in an infinite loop: ffmpeg streams one file over UDP, and on close it calls itself again and streams another file.
I'd like to be able to kill this process from a button on a web page, but can't seem to work it out.
This is my express route code
router.get('/test', function(req, res) {
    const childProcess = require('child_process');
    const fs = require('fs');
    const path = require('path');
    //Grabs a random index between 0 and length
    function randomIndex(length) {
        return Math.floor(Math.random() * (length));
    }
    function Stream() {
        const FILE_SRC = '/path/to/file';
        //Read the directory and get the files
        const dirs = fs.readdirSync(FILE_SRC)
            .map(file => {
                return path.join(FILE_SRC, file);
            });
        const srcs_dup = [];
        const hashCheck = {}; //used to check if the file was already added to srcs_dup
        var numberOfFiles = dirs.length - 1; //OR whatever # you want
        console.log(numberOfFiles);
        //While we haven't got the number of files we want. Loop.
        while (srcs_dup.length < numberOfFiles) {
            var fileIndex = randomIndex(dirs.length - 1);
            //Check if the file was already added to the array
            if (hashCheck[fileIndex] == true) {
                continue; //Already have that file. Skip it
            }
            //Add the file to the array and object
            srcs_dup.push(dirs[fileIndex]);
            hashCheck[fileIndex] = true;
        }
        var chosen = "'" + srcs_dup[0] + "'";
        var call = "ffmpeg -re -i " + chosen + " -content_type audio/mpeg -f mp3 udp://224.1.2.3:1234";
        const stop = childProcess.exec(call, { shell: true });
        stop.stdout.on('data', function (data) {
            console.log('stdout: ' + data.toString());
        });
        stop.stderr.on('data', (data) => {
            console.log(`stderr: ${data}`);
        });
        stop.on('close', (code) => {
            console.log('child exited with code ' + code);
            Stream();
        });
        stop.on('error', function(err) {
            console.log('sh error' + err);
        });
    }
    Stream();
});
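One way to approach this (a sketch only; the /stop route and the currentChild/stopping names are assumptions, not part of the original code) is to keep a reference to the running child and a stop flag in module scope, skip the restart in the close handler once stopping is set, and expose a second route that kills the child:
const express = require('express');
const childProcess = require('child_process');
const router = express.Router();

let currentChild = null;
let stopping = false;

function stream(file) {
    // the random file selection from the original Stream() is elided; "file" is a placeholder path
    const call = "ffmpeg -re -i '" + file + "' -content_type audio/mpeg -f mp3 udp://224.1.2.3:1234";
    currentChild = childProcess.exec(call, { shell: true });
    currentChild.on('close', (code) => {
        console.log('child exited with code ' + code);
        if (!stopping) stream(file); // only restart while we have not been asked to stop
    });
}

router.get('/test', (req, res) => {
    stopping = false;
    stream('/path/to/file.mp3');
    res.send('streaming started');
});

router.get('/stop', (req, res) => {
    stopping = true;                                // prevent the close handler from restarting
    if (currentChild) currentChild.kill('SIGKILL'); // terminate the running ffmpeg
    res.send('streaming stopped');
});

module.exports = router;
Note that exec runs the command through a shell, so depending on the platform the signal may hit the shell rather than ffmpeg itself; spawning ffmpeg directly with childProcess.spawn avoids that ambiguity.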
I'm trying to populate a database with picture paths and names using Node.js.
What I am trying to do is the following:
- One function sends a list of pictures as Base64 strings.
- Another function receives this list, loops through it, converts each entry back into an image file and gets its path.
I'm pretty new to Node.js so I might be doing something really stupid.
Here is the reception code:
app.post('/chatBot/moreinfo/create', function (req, res) {
    returnList = '';
    res.set('Content-Type', 'application/json');
    //IF LIST IS NOT EMPTY
    if (req.body.imgList.length !== 0) {
        const imagePath = '/var/lib/SMImageBank/';
        const regex = /^data:image\/(.*);.*$/i;
        const listePicture = req.body.imgList;
        // LOOPING INTO THE LIST
        req.body.imgList.map(function (element) {
            const file = element;
            const filetype = file.match(regex)[1];
            var picLink2 = '';
            const base64data = file.replace(/^data:image\/.*;base64,/, "");
            const latin1data = new Buffer(base64data, 'base64').toString('latin1');
            const filename = new Date().getTime() + '' + new Date().getMilliseconds() + "." + filetype;
            fs.mkdir(imagePath, () => {
                fs.writeFile(imagePath + filename, latin1data, "latin1", function (err, content) {
                    if (err) {
                        routerLog(req, {'type': 'error', 'content': err});
                        res.sendStatus(500);
                    }
                    else {
                        if (process.env.NODE_ENV === "production")
                            picLink2 = 'http://****.fr/image/' + filename;
                        else if (process.env.NODE_ENV === "test")
                            picLink2 = 'http://dev.****.fr:8010/image/' + filename;
                        else if (process.env.NODE_ENV === "master")
                            picLink2 = 'http://dev.****.fr:8008/image/' + filename;
                        else {
                            picLink2 = 'http://*****.com:8008/image/' + filename;
                        }
                    }
                });
            })
            console.log(picLink2);
            returnList = returnList + ";" + picLink2;
        });
    }
    MoreInfo.create(req.body, function (ret) {
        res.send(ret);
        routerLog(req);
    })
});
What I want to do is access the variable picLink2 from outside the writeFile and mkdir callbacks so I can populate my returnList on each iteration. Obviously, since Node.js is asynchronous, I can't access picLink2's content from outside the fs.writeFile() callback. I know there have been a ton of questions about this, and a lot of the answers say to put the code inside the writeFile()/readFile() callback, but I don't see how I can do that here since the writeFile() call is inside a map that is iterating over a list.
I'm new to the asynchronous world and I don't see how I can solve this problem.
Use writeFileSync (and mkdirSync) for a synchronous operation, if that doesn't hurt performance; the link is then available right after the call, in the same loop iteration.
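A sketch of that idea applied to the route above (reusing imagePath, MoreInfo and the regexes from the question; the public base URL is a placeholder and error handling is kept minimal):
const fs = require('fs');

app.post('/chatBot/moreinfo/create', function (req, res) {
    res.set('Content-Type', 'application/json');
    let returnList = '';

    if (req.body.imgList.length !== 0) {
        const imagePath = '/var/lib/SMImageBank/';
        fs.mkdirSync(imagePath, { recursive: true }); // create the folder once, up front

        req.body.imgList.forEach(function (element) {
            const filetype = element.match(/^data:image\/(.*);.*$/i)[1];
            const base64data = element.replace(/^data:image\/.*;base64,/, '');
            const latin1data = Buffer.from(base64data, 'base64').toString('latin1');
            const filename = Date.now() + '' + new Date().getMilliseconds() + '.' + filetype;

            // synchronous write: the file exists as soon as this call returns
            fs.writeFileSync(imagePath + filename, latin1data, 'latin1');

            const picLink2 = 'http://example.com/image/' + filename; // placeholder base URL
            returnList = returnList + ';' + picLink2;
        });
    }

    MoreInfo.create(req.body, function (ret) {
        res.send(ret);
    });
});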
I am using LoopBack for storing images to the server.
I want to modify the file name of the file before it gets saved to the server.
I also want to convert it to a thumbnail form before it gets saved.
Here is how I am doing it.
At client side
Upload.upload({
    url: '/api/containers/container_name/upload',
    file: file,
    fileName: "demoImage.jpg",
    //Additional data with file
    params: {
        orderId: 1,
        customerId: 1
    }
});
On the server side I am receiving the query "params", but not the "fileName".
My storage model name is container.
Container.beforeRemote('upload', function(ctx, modelInstance, next) {
    //OUTPUTS: {orderId: 1, customerId: 1}
    console.log(ctx.req.query);
    //Now I want to change the file name of the file,
    //but I am not getting how to do that.
    next();
})
How to change the File name of the File getting saved at the server?
I figured it out.
We have to define a custom getFilename function in boot/configure-storage.js.
Suppose my datasource for loopback-component-storage is presImage.
server/boot/configure-storage.js
module.exports = function(app) {
    //Custom file name for uploads, with a check on the file type..
    app.dataSources.presImage.connector.getFilename = function(file, req, res) {
        //First check the file type..
        var pattern = /^image\/.+$/;
        var value = pattern.test(file.type);
        if (value) {
            var fileExtension = file.name.split('.').pop();
            var container = file.container;
            var time = new Date().getTime();
            var query = req.query;
            var customerId = query.customerId;
            var orderId = query.orderId;
            //Now prepare the file name:
            //customerId_time_orderId.extension
            var NewFileName = '' + customerId + '_' + time + '_' + orderId + '.' + fileExtension;
            //And the file will be saved with the name defined here.
            return NewFileName;
        } else {
            throw "FileTypeError: Only files of image type are accepted.";
        }
    };
}
common/models/container.js
Now suppose my container model is container.
module.exports = function(Container) {
    Container.afterRemote('upload', function(ctx, modelInstance, next) {
        var files = ctx.result.result.files.file;
        for (var i = 0; i < files.length; i++) {
            var ModifiedfileName = files[i].name;
            console.log(ModifiedfileName); //outputs the modified file name
        }
        next();
    }); //afterRemote
};
Now, to convert the images to thumbnail size, download the quickthumb package.
Here is how to use it with LoopBack.
This code is copied directly from Loopback thumbnail view.
common/models/container.js
module.exports = function(Container) {
    var qt = require('quickthumb');
    Container.afterRemote('upload', function(ctx, res, next) {
        var file = res.result.files.file[0];
        var file_path = "./server/storage/" + file.container + "/" + file.name;
        var file_thumb_path = "./server/storage/" + file.container + "/thumb/" + file.name;
        qt.convert({
            src: file_path,
            dst: file_thumb_path,
            width: 100
        }, function (err, path) {
        });
        next();
    });
};
Piggybacking on the answer above, this configure-storage enables the file name to be set explicitly via req.params.filename and to default to the existing name if none is provided.
configure-storage.js
module.exports = function(app) {
    //Use the requested file name if provided, otherwise keep the original name.
    app.dataSources.storage.connector.getFilename = function(file, req, ignoreRes) {
        if (!req.params.filename) {
            return file.name;
        }
        var fileExtension = file.name.split('.').pop();
        return req.params.filename + '.' + fileExtension;
    };
}
I am trying to find the most recently created file in a directory using Node.js and cannot seem to find a solution. The following code seemed to be doing the trick on one machine but on another it was just pulling a random file from the directory - as I figured it might. Basically, I need to find the newest file and ONLY that file.
var fs = require('fs'); //File System
var audioFilePath = 'C:/scanner/audio/'; //Location of recorded audio files
var audioFile = fs.readdirSync(audioFilePath)
.slice(-1)[0]
.replace('.wav', '.mp3');
Many thanks!
Assuming availability of underscore (http://underscorejs.org/) and taking a synchronous approach (which doesn't play to Node.js's strengths, but is easier to grasp):
var fs = require('fs'),
path = require('path'),
_ = require('underscore');
// Return only base file name without dir
function getMostRecentFileName(dir) {
var files = fs.readdirSync(dir);
// use underscore for max()
return _.max(files, function (f) {
var fullpath = path.join(dir, f);
// ctime = creation time is used
// replace with mtime for modification time
return fs.statSync(fullpath).ctime;
});
}
While not the most efficient approach, this should be conceptually straightforward:
var fs = require('fs'); //File System
var audioFilePath = 'C:/scanner/audio/'; //Location of recorded audio files
fs.readdir(audioFilePath, function(err, files) {
if (err) { throw err; }
var audioFile = getNewestFile(files, audioFilePath).replace('.wav', '.mp3');
//process audioFile here or pass it to a function...
console.log(audioFile);
});
function getNewestFile(files, path) {
var out = [];
files.forEach(function(file) {
var stats = fs.statSync(path + "/" +file);
if(stats.isFile()) {
out.push({"file":file, "mtime": stats.mtime.getTime()});
}
});
out.sort(function(a,b) {
return b.mtime - a.mtime;
})
return (out.length>0) ? out[0].file : "";
}
BTW, there is no obvious reason in the original post to use synchronous file listing.
Another approach:
const fs = require('fs')
const glob = require('glob')
const newestFile = glob.sync('input/*xlsx')
.map(name => ({name, ctime: fs.statSync(name).ctime}))
.sort((a, b) => b.ctime - a.ctime)[0].name
A more functional version might look like:
import { readdirSync, lstatSync } from "fs";
const orderReccentFiles = (dir: string) =>
readdirSync(dir)
.filter(f => lstatSync(f).isFile())
.map(file => ({ file, mtime: lstatSync(file).mtime }))
.sort((a, b) => b.mtime.getTime() - a.mtime.getTime());
const getMostRecentFile = (dir: string) => {
const files = orderReccentFiles(dir);
return files.length ? files[0] : undefined;
};
First, you need to order the files (newest at the beginning).
Then, take the first element of the array for the most recent file.
I have modified the code from @mikeysee to avoid the path exception, by using the full paths.
The snippets of the two functions are shown below.
const fs = require('fs');
const path = require('path');
const getMostRecentFile = (dir) => {
const files = orderReccentFiles(dir);
return files.length ? files[0] : undefined;
};
const orderReccentFiles = (dir) => {
return fs.readdirSync(dir)
.filter(file => fs.lstatSync(path.join(dir, file)).isFile())
.map(file => ({ file, mtime: fs.lstatSync(path.join(dir, file)).mtime }))
.sort((a, b) => b.mtime.getTime() - a.mtime.getTime());
};
const dirPath = '<PATH>';
getMostRecentFile(dirPath)
this should do the trick ("dir" is the directory you use fs.readdir over to get the "files" array):
var fs = require('fs');

function getNewestFile(dir, files, callback) {
    if (!callback) return;
    if (!files || files.length === 0) {
        callback();
        return;
    }
    if (files.length === 1) {
        callback(files[0]);
        return;
    }
    var newest = { file: files[0] };
    var checked = 0;
    fs.stat(dir + newest.file, function(err, stats) {
        newest.mtime = stats.mtime;
        for (var i = 0; i < files.length; i++) {
            (function(file) {
                fs.stat(dir + file, function(err, stats) {
                    ++checked;
                    if (stats.mtime.getTime() > newest.mtime.getTime()) {
                        newest = { file: file, mtime: stats.mtime };
                    }
                    if (checked == files.length) {
                        callback(newest);
                    }
                });
            })(files[i]);
        }
    });
}
[Extended umair's answer to correct a bug with current working directory]
function getNewestFile(dir, regexp) {
    var fs = require("fs"),
        path = require('path'),
        newest = null,
        files = fs.readdirSync(dir),
        one_matched = 0,
        i, f1_time, f2_time;
    for (i = 0; i < files.length; i++) {
        if (regexp.test(files[i]) == false)
            continue;
        else if (one_matched == 0) {
            newest = files[i];
            one_matched = 1;
            continue;
        }
        f1_time = fs.statSync(path.join(dir, files[i])).mtime.getTime();
        f2_time = fs.statSync(path.join(dir, newest)).mtime.getTime();
        if (f1_time > f2_time)
            newest = files[i];
    }
    if (newest != null)
        return path.join(dir, newest);
    return null;
}
Using pure JavaScript and an easy-to-understand structure:
const fs = require('fs');
const path = require('path');

function getLatestFile(dirpath) {
    // Check that dirpath exists before calling this
    let latest;
    const files = fs.readdirSync(dirpath);
    files.forEach(filename => {
        // Get the stat
        const stat = fs.lstatSync(path.join(dirpath, filename));
        // Skip directories
        if (stat.isDirectory())
            return;
        // latest defaults to the first file
        if (!latest) {
            latest = { filename, mtime: stat.mtime };
            return;
        }
        // update latest if mtime is greater than the current latest
        if (stat.mtime > latest.mtime) {
            latest.filename = filename;
            latest.mtime = stat.mtime;
        }
    });
    return latest && latest.filename;
}
Unfortunately, I don't think the files are guaranteed to be in any particular order.
Instead, you'll need to call fs.stat (or fs.statSync) on each file to get the date it was last modified, then select the newest one once you have all of the dates.
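For example, a minimal sketch of that idea (the function name is illustrative; it does not filter out subdirectories and returns undefined for an empty directory):
const fs = require('fs');
const path = require('path');

// Stat each entry and remember the one with the largest modification time.
function newestFileIn(dir) {
    const names = fs.readdirSync(dir);
    let newest = null;
    for (const name of names) {
        const mtime = fs.statSync(path.join(dir, name)).mtime;
        if (!newest || mtime > newest.mtime) {
            newest = { name, mtime };
        }
    }
    return newest && newest.name;
}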
Surprisingly, there is no example in this question that explicitly uses Array functions and functional programming.
Here is my take on getting the latest file of a directory in Node.js. By default, it gets the latest file with any extension. When the extension property is passed, the function returns the latest file for that extension.
The advantage of this code is that it is declarative, modular, and easy to understand, as opposed to using "logic branching/control flows", given of course that you understand how these array functions work 😀
const fs = require('fs');
const path = require('path');
function getLatestFile({directory, extension}, callback){
fs.readdir(directory, (_ , dirlist)=>{
const latest = dirlist.map(_path => ({stat:fs.lstatSync(path.join(directory, _path)), dir:_path}))
.filter(_path => _path.stat.isFile())
.filter(_path => extension ? _path.dir.endsWith(`.${extension}`) : 1)
.sort((a, b) => b.stat.mtime - a.stat.mtime)
.map(_path => _path.dir);
callback(latest[0]);
});
}
getLatestFile({directory:process.cwd(), extension:'mp3'}, (filename=null)=>{
console.log(filename);
});
With the synchronous versions of reading a directory (fs.readdirSync) and getting file status (fs.statSync):
function getNewestFile(dir, regexp) {
newest = null
files = fs.readdirSync(dir)
one_matched = 0
for (i = 0; i < files.length; i++) {
if (regexp.test(files[i]) == false)
continue
else if (one_matched == 0) {
newest = files[i]
one_matched = 1
continue
}
f1_time = fs.statSync(files[i]).mtime.getTime()
f2_time = fs.statSync(newest).mtime.getTime()
if (f1_time > f2_time)
newest[i] = files[i]
}
if (newest != null)
return (dir + newest)
return null
}
You can call this function as follows:
var f = getNewestFile("./", new RegExp('.*\\.mp3'))
An async version of #pguardiario's functional answer (I did this myself then found theirs halfway down the page when I went to add this).
import {promises as fs} from 'fs';
import {promisify} from 'util';
import _glob from 'glob';
const glob = promisify(_glob);

const newestFile = (await Promise.all(
    (await glob(YOUR_GLOB)).map(async (file) => (
        {file, mtime: (await fs.stat(file)).mtime}
    ))
))
    .sort(({mtime: a}, {mtime: b}) => ((a < b) ? 1 : -1))
    [0]
    .file;
This is a commonplace need: write files out to a temp dir and automatically open the most recent one.
The following works with node version 16:
#!/usr/bin/env node
const fs = require('fs');
const path = require('path');
const cp = require('child_process');
const fsPromises = fs.promises;
process.on('exit', code => {
console.log('exiting with code:', code);
});
const folder = path.join(process.env.HOME, 'publications/temp');
const c = cp.spawn('sh');
const files = fs.readdirSync(folder).map(v => path.resolve(folder + '/' + v));
const openFile = (file) => {
c.stdin.end(`(open "${file}" &> /dev/null) &> /dev/null &`);
};
if(files.length > 500) {
console.error('too many files, clean this folder up lol');
process.exit(1);
}
const newest = {file: null, mtime: null};
Promise.all(files.map(f => {
return fsPromises.stat(f).then(stats => {
if (!newest.file || (stats.mtime.getTime() > newest.mtime.getTime())) {
newest.file= f;
newest.mtime= stats.mtime;
}
});
})).then(v => {
if(!newest.file){
console.error('could not find the newest file?!');
return;
}
openFile(newest.file);
});
You may want to check for folders instead of files, and you could add something like this towards the beginning:
if (files.length === 1) {
    if (fs.statSync(files[0]).isFile()) {
        openFile(files[0]);
        process.exit(0);
    } else {
        console.error('folder or symlink where file should be?');
        process.exit(1);
    }
}