How to zip a single file with Archiver - node.js

I am trying to zip a single file using the Archiver npm package, located at https://www.npmjs.com/package/archiver.
I have been able to use the following to zip a directory:
archive.directory(folderName, false);
But when I try to use either of these, nothing seems to happen (i.e. no zip is generated and the file never finishes zipping):
archive.file(folderName, { name: 'file4.txt' });
archive.file(fs.createReadStream(path.resolve(file)), {name: 'File' + singleFileCheck});
Has anyone run into this issue before? Please let me know what I am doing wrong. Thank you in advance!
Edit:
module.exports = async function zipper(user, pass, orgid, s4url, apiToken, newOrgName, file) {
    const s4 = require('../testcli/s4');
    const fs = require('fs');
    const archiver = require('archiver');
    const path = require('path');
    var parentDirect;
    if (file == "./") {
        parentDirect = "..";
    } else {
        parentDirect = path.basename(path.dirname(file));
    }
    const newZipFile = parentDirect + '/s4.zip';
    var folderName = file;
    // Guard against an infinite loop (the output zip landing inside the folder being zipped)
    if (path.resolve(parentDirect).length > path.resolve(folderName).length) {
        console.log(folderName.search(parentDirect));
        console.error('\x1b[36m%s\x1b[0m', 'ERROR!!!! : Please adjust where your console is pointed, this will result in an infinite loop. Exiting.');
        return;
    }
    var P = ['\\', '|', '/', '-'];
    var x = 0;
    var output = fs.createWriteStream(newZipFile);
    var archive = archiver('zip');
    var scansdisplayinterval = setInterval(function () {
        twrl();
    }, 250);
    // listen for all archive data to be written
    output.on('close', function () {
        console.log('\x1b[36m%s\x1b[0m', archive.pointer() + ' total bytes');
        console.log('\x1b[36m%s\x1b[0m', 'archiver has been finalized and the output file descriptor has closed.');
        try {
            process.stdout.write(newZipFile);
            clearInterval(scansdisplayinterval);
            s4(user, pass, newZipFile, orgid, s4url, apiToken, newOrgName);
        } catch (e) {
            console.log(e);
        }
    });
    // good practice to catch this error explicitly
    archive.on('error', function (err) {
        throw err;
    });
    // good practice to catch warnings (i.e. stat failures and other non-blocking errors)
    archive.on('warning', function (err) {
        throw err;
    });
    // This event fires when the data source has been drained, regardless of what the source was.
    output.on('end', function () {
        console.log('\x1b[36m%s\x1b[0m', 'Data has been drained');
    });
    // pipe archive data to the file
    archive.pipe(output);
    // Check -f for a file extension
    let singleFileCheck = path.extname(file);
    // If the file has an extension
    if (singleFileCheck.length <= 4 && singleFileCheck != '') {
        // Append the single file
        console.log('singleFile', path.resolve(file));
        archive.file(path.resolve(file), { name: 'file4.txt' });
        // archive.append(fs.createReadStream(path.resolve(file)), { name: 'File' + singleFileCheck });
    } else {
        // append files from a sub-directory, putting its contents at the root of the archive
        archive.directory(folderName, false);
    }
    // archive.directory(folderName, false);
    console.log('\x1b[36m%s\x1b[0m', "Zipping: " + folderName + " To: " + newZipFile);
    console.log('\x1b[36m%s\x1b[0m', "Zipping To: " + path.resolve(newZipFile));
    archive.finalize();
    function twrl() {
        process.stdout.write('\rZipping Folder ... ' + P[x++]);
        x &= 3;
    }
    return newZipFile;
};

The issue came from how I was defining the parentDirect var.
Solution:
let singleFileCheck = path.extname(file);
if (file == "./" || (singleFileCheck.length <= 4 && singleFileCheck != '')) {
    parentDirect = "..";
} else {
    parentDirect = path.basename(path.dirname(file));
}

Related

Stop nodejs child_process with browser api call

I have vue (axios) making a GET call to an Express route, which triggers a child_process of ffmpeg in an infinite loop. ffmpeg streams one file over UDP; on close, it calls itself again and streams another file.
I'd like to be able to kill this process from a button on a web page, but I can't seem to work it out.
This is my express route code
router.get('/test', function (req, res) {
    const childProcess = require('child_process');
    const fs = require('fs');
    const path = require('path');
    // Grabs a random index between 0 and length
    function randomIndex(length) {
        return Math.floor(Math.random() * (length));
    }
    function Stream() {
        const FILE_SRC = '/path/to/file';
        // Read the directory and get the files
        const dirs = fs.readdirSync(FILE_SRC)
            .map(file => {
                return path.join(FILE_SRC, file);
            });
        const srcs_dup = [];
        const hashCheck = {}; // used to check if the file was already added to srcs_dup
        var numberOfFiles = dirs.length - 1; // OR whatever # you want
        console.log(numberOfFiles);
        // While we haven't got the number of files we want, loop.
        while (srcs_dup.length < numberOfFiles) {
            var fileIndex = randomIndex(dirs.length - 1);
            // Check if the file was already added to the array
            if (hashCheck[fileIndex] == true) {
                continue; // Already have that file. Skip it
            }
            // Add the file to the array and object
            srcs_dup.push(dirs[fileIndex]);
            hashCheck[fileIndex] = true;
        }
        var chosen = "'" + srcs_dup[0] + "'";
        var call = "ffmpeg -re -i " + chosen + " -content_type audio/mpeg -f mp3 udp://224.1.2.3:1234";
        const stop = childProcess.exec(call, { shell: true });
        stop.stdout.on('data', function (data) {
            console.log('stdout: ' + data.toString());
        });
        stop.stderr.on('data', (data) => {
            console.log(`stderr: ${data}`);
        });
        stop.on('close', (code) => {
            console.log('child exited with code ' + code);
            Stream();
        });
        stop.on('error', function (err) {
            console.log('sh error' + err);
        });
    }
    Stream(); // kick off the loop (closing lines missing from the original snippet)
    res.send('streaming');
});
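One possible approach (a sketch, untested against this exact setup): keep the child process reference and a stop flag at module scope, expose a second route that sets the flag and kills the process, and only respawn in the 'close' handler while the flag is unset. Note that childProcess.exec runs the command through a shell, so killing the returned handle may only kill the shell and leave ffmpeg running; childProcess.spawn with an argument array avoids that. The '/stop' route below is a made-up name for illustration:
var currentChild = null;
var stopRequested = false;

router.get('/stop', function (req, res) {
    stopRequested = true;
    if (currentChild) {
        currentChild.kill(); // sends SIGTERM by default
    }
    res.send('stopped');
});

// inside Stream(), wire the process up like this instead:
// currentChild = childProcess.spawn('ffmpeg', ['-re', '-i', srcs_dup[0],
//     '-content_type', 'audio/mpeg', '-f', 'mp3', 'udp://224.1.2.3:1234']);
// currentChild.on('close', function (code) {
//     if (!stopRequested) Stream(); // only loop while not stopped
// });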

Gulp: Abnormal behavior of program

I'm new to Gulp and I'm having a problem with it. Here is what I want done:
- Look up a file that has a .storyboard extension (this is already DONE).
- Perform a task whenever that file's content changes.
- Watch the file, and when something in it changes, rewrite its content, removing all the content that was already in the file.
When I make changes to the file with the .storyboard extension, it just keeps displaying the messages "done" and "file has been saved".
Here is my code:
// fs to read and write files, while path is for iterating directories
var fs = require('fs'),
    path = require('path');
// DOMParser to parse XML
var DOMParser = new (require('xmldom')).DOMParser({ normalizeTags: { default: false } });
// Gulp for detecting changes
var gulp = require('gulp');
var mainStoryBoardFile;
function crawl(dir) {
    // console.log('[+]', dir);
    var files = fs.readdirSync(dir);
    for (var file in files) {
        var next = path.join(dir, files[file]);
        // iterate through files to check whether next is a file or directory
        if (fs.lstatSync(next).isDirectory()) {
            // if it's a directory, dive into it
            crawl(next);
        } else if (next.indexOf('.storyboard') >= 0) {
            // if it's a file, check whether it is a .storyboard file
            mainStoryBoardFile = next;
            mainStoryBoardFile = mainStoryBoardFile.replace(/\\/g, "/");
        }
    }
}
// calling function
crawl(__dirname);
var newFilePath = './data.xml';
var document;
var dataFound;
// What to do
gulp.task('read', function (done) {
    dataFound = fs.readFileSync(mainStoryBoardFile, "utf-8");
    document = DOMParser.parseFromString(
        dataFound.toString()
    );
    done();
});
gulp.task('write', function (done) {
    fs.writeFile(mainStoryBoardFile, '', function () { console.log('done'); });
    fs.writeFile(mainStoryBoardFile, document, (err) => {
        if (err) throw err;
        console.log('The file has been saved!');
    });
    done();
});
gulp.task('watch', function (done) {
    gulp.watch(mainStoryBoardFile, gulp.series('read', 'write'));
});
Here is a solution to this problem. You can watch changes on a single file, and you can also run some function whenever the file changes. In the XML case, you can watch a file and, when it changes, add new properties or attributes or create new elements in the XML file.
// Dependencies
// fs to read and write files, while path is for iterating directories
var fs = require('fs'),
    path = require('path'),
    DOMParser = new (require('xmldom')).DOMParser({ normalizeTags: { default: false } }),
    gulp = require('gulp'),
    arrayOfControls = require('./object.json'),
    RandExp = require('randexp');
console.log("GulpService has been Started\n");
function crawl(dir) {
    var files = fs.readdirSync(dir);
    for (var file in files) {
        var next = path.join(dir, files[file]);
        // iterate through files to check whether next is a file or directory
        if (fs.lstatSync(next).isDirectory()) {
            // if it's a directory, dive into it
            crawl(next);
        } else if (next.indexOf('.storyboard') >= 0) {
            // if it's a file, check whether it is a .storyboard file
            mainStoryBoardFile = next;
            mainStoryBoardFile = mainStoryBoardFile.replace(/\\/g, "/");
        }
    }
}
// calling function
crawl(__dirname);
var mainStoryBoardFile;
var document, dataFound;
function readWrite() {
    crawl(__dirname);
    dataFound = fs.readFileSync(mainStoryBoardFile, "utf-8");
    document = DOMParser.parseFromString(
        dataFound.toString()
    );
    // writeFileSync is synchronous and takes no callback, so the original callbacks were dropped
    fs.writeFileSync(mainStoryBoardFile, '');
    fs.writeFileSync(mainStoryBoardFile, String(document)); // serialize the parsed document back to text
    console.log('The file has been saved!');
}
var watcher = gulp.watch(mainStoryBoardFile);
watcher.on('change', function (path, stats) {
    readWrite();
    console.log('File ' + path + ' was changed');
    // detach and re-attach the watcher so our own write does not retrigger it
    watcher.unwatch(mainStoryBoardFile);
    watcher.add(mainStoryBoardFile);
});
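A note on the unwatch/add dance above: rewriting the watched file from inside the change handler would itself fire another change event and loop forever, so the watcher is detached for the duration of the write. An alternative (my own sketch, not from the original answer) is to skip the write when the serialized document matches what is already on disk:
function readWriteIfChanged() {
    crawl(__dirname);
    var current = fs.readFileSync(mainStoryBoardFile, 'utf-8');
    var serialized = String(DOMParser.parseFromString(current));
    if (serialized !== current) { // only write when the content actually differs
        fs.writeFileSync(mainStoryBoardFile, serialized);
    }
}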

Saving blobs as a single webm file

I'm recording the user's screen via WebRTC, then posting video blobs every x seconds using MediaStreamRecorder. On the server side I have an action set up in Sails which saves the blob as a webm file.
The problem is that I can't get it to append the data and create one large webm file. When it appends, the file size increases as expected, so the data is appending, but when I go to play the file it will either play the first second, not play at all, or play without showing the video.
It would be possible to merge the files with ffmpeg, but I'd rather avoid this if at all possible.
Here's the code on the client:
'use strict';
// Polyfill in Firefox.
// See https://blog.mozilla.org/webrtc/getdisplaymedia-now-available-in-adapter-js/
if (typeof adapter != 'undefined' && adapter.browserDetails.browser == 'firefox') {
    adapter.browserShim.shimGetDisplayMedia(window, 'screen');
}
io.socket.post('/processvideo', function (resData) {
    console.log("Response: " + resData);
});
function handleSuccess(stream) {
    const video = document.querySelector('video');
    video.srcObject = stream;
    var mediaRecorder = new MediaStreamRecorder(stream);
    mediaRecorder.mimeType = 'video/webm';
    mediaRecorder.ondataavailable = function (blob) {
        console.log("Sending Data");
        //var rawIO = io.socket._raw;
        //rawIO.emit('some:event', "using native socket.io");
        io.socket.post('/processvideo', { "vidblob": blob }, function (resData) {
            console.log("Response: " + resData);
        });
    };
    mediaRecorder.start(3000);
}
function handleError(error) {
    errorMsg(`getDisplayMedia error: ${error.name}`, error);
}
function errorMsg(msg, error) {
    const errorElement = document.querySelector('#errorMsg');
    errorElement.innerHTML += `<p>${msg}</p>`;
    if (typeof error !== 'undefined') {
        console.error(error);
    }
}
if ('getDisplayMedia' in navigator) {
    navigator.getDisplayMedia({ video: true })
        .then(handleSuccess)
        .catch(handleError);
} else {
    errorMsg('getDisplayMedia is not supported');
}
Code on the server:
module.exports = async function processVideo(req, res) {
    var fs = require('fs'),
        path = require('path'),
        upload_dir = './assets/media/uploads',
        output_dir = './assets/media/outputs',
        temp_dir = './assets/media/temp';
    var params = req.allParams();
    if (req.isSocket && req.method === 'POST') {
        _upload(params.vidblob, "test.webm");
        return res.send("Hi There");
    } else {
        return res.send("Unknown Error");
    }
    function _upload(file_content, file_name) {
        var fileRootName = file_name.split('.').shift(),
            fileExtension = file_name.split('.').pop(),
            filePathBase = upload_dir + '/',
            fileRootNameWithBase = filePathBase + fileRootName,
            filePath = fileRootNameWithBase + '.' + fileExtension,
            fileID = 2;
        /* Save all of the files as different files. */
        /*
        while (fs.existsSync(filePath)) {
            filePath = fileRootNameWithBase + fileID + '.' + fileExtension;
            fileID += 1;
        }
        fs.writeFileSync(filePath, file_content);
        */
        /* Appends the binary data like you'd expect, but it's not playable. */
        fs.appendFileSync(upload_dir + '/' + 'test.file', file_content);
    }
}
Any help would be greatly appreciated!
I decided this would be difficult to develop and wouldn't really fit the project's requirements, so I decided to build an Electron app instead. Just posting this so I can resolve the question.
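For what it's worth, a likely cause of the unplayable output: each blob produced by the MediaStreamRecorder library is a self-contained WebM file with its own header, so appending them byte-for-byte does not form one valid container. The native MediaRecorder API, when started with a timeslice, emits continuation chunks after the first one, and those can be appended server-side into a single playable file. A hedged client-side sketch using the native API instead of the library:
const mediaRecorder = new MediaRecorder(stream, { mimeType: 'video/webm' });
mediaRecorder.ondataavailable = function (event) {
    // after the first chunk these are continuation data, not standalone files,
    // so appending them in order on the server yields one valid WebM stream
    io.socket.post('/processvideo', { "vidblob": event.data }, function (resData) {
        console.log("Response: " + resData);
    });
};
mediaRecorder.start(3000); // emit a chunk every 3 seconds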

List files in a directory, find the file with the maximum size, and insert the largest file into MongoDB

I have a directory with a list of files. I have to list the files, find the file with the maximum size, and insert the largest file into MongoDB. I have found the maximum file size, but I am unable to print it to the console: it displays undefined.
const path = require('path');
const fs = require('fs');
var sleep = require('system-sleep');
var fsCompare = require('fs-compare');
var statssize = 0;
var foundFile;
var directorypath = path.join(__dirname, 'directory');
var fsfiles = fs.readdir(directorypath, function (err, data) {
    var temp = [];
    if (err) {
        return console.error(err);
    }
    data.forEach(function (file) {
        var fileSizeInMegabytes;
        var pathtofiles = path.join('directory', file); // path.join instead of a hard-coded '\\' separator
        fs.stat(pathtofiles, function (err, stats) {
            // sleep(5000);
            if (err) {
                return console.error(err);
            }
            if (statssize < stats.size) {
                statssize = stats.size;
                foundFile = pathtofiles;
            }
            fileSizeInMegabytes = statssize / 1000000.0;
        });
    });
    console.log(foundFile + "foundfile");
    console.log(statssize + "stats of file");
});
The code is asynchronous, i.e. your console.log calls execute before the fs.stat() callbacks return any value.
console.log(foundFile + "foundfile");
console.log(statssize + "stats of file");
These should be written inside the callback function of fs.stat(), i.e. right below this line:
fileSizeInMegabytes = statssize / 1000000.0;
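For illustration, here is a sketch of the same task using the Promise-based fs API, which sidesteps the callback-ordering problem entirely (the MongoDB insert is left out, as in the original):
const path = require('path');
const fs = require('fs').promises;

async function findLargestFile(dir) {
    const names = await fs.readdir(dir);
    let largest = { file: null, size: 0 };
    for (const name of names) {
        const fullPath = path.join(dir, name);
        const stats = await fs.stat(fullPath); // stats resolve before the comparison runs
        if (stats.isFile() && stats.size > largest.size) {
            largest = { file: fullPath, size: stats.size };
        }
    }
    return largest;
}

findLargestFile(path.join(__dirname, 'directory'))
    .then(function (result) { console.log(result.file, result.size + ' bytes'); })
    .catch(console.error);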

Node.js How to delete first line in file

I'm making a simple Node.js app and I need to delete the first line of a file. Is there any way to do this? I think it should be possible with fs.write, but how?
Here is a streamed version of removing the first line from a file.
Because it uses streams, you don't need to load the whole file into memory, so it is far more efficient and fast, and it works on very large files without exhausting the memory on your hardware.
var Transform = require('stream').Transform;
var util = require('util');
// Transform streamer to remove first line
function RemoveFirstLine(args) {
    if (!(this instanceof RemoveFirstLine)) {
        return new RemoveFirstLine(args);
    }
    Transform.call(this, args);
    this._buff = '';
    this._removed = false;
}
util.inherits(RemoveFirstLine, Transform);
RemoveFirstLine.prototype._transform = function (chunk, encoding, done) {
    if (this._removed) { // if already removed
        this.push(chunk); // just push through buffer
    } else {
        // collect string into buffer
        this._buff += chunk.toString();
        // check if string has newline symbol
        if (this._buff.indexOf('\n') !== -1) {
            // push to stream, skipping the first line (+1 skips the newline itself; the original +2 dropped a character)
            this.push(this._buff.slice(this._buff.indexOf('\n') + 1));
            // clear string buffer
            this._buff = null;
            // mark as removed
            this._removed = true;
        }
    }
    done();
};
And use it like so:
var fs = require('fs');
var input = fs.createReadStream('test.txt'); // read file
var output = fs.createWriteStream('test_.txt'); // write file
input // take input
    .pipe(RemoveFirstLine()) // pipe through line remover
    .pipe(output); // save to file
Another way, which is not recommended: if your files are not large and you don't mind loading them into memory, you can load the file, remove the line, and save the file. It is slower and won't work well on large files.
var fs = require('fs');
var filePath = './test.txt'; // path to file
fs.readFile(filePath, function (err, data) { // read file into memory
    if (!err) {
        data = data.toString(); // stringify buffer
        var position = data.indexOf('\n'); // find position of the first newline
        if (position != -1) { // if a newline was found
            data = data.substr(position + 1); // drop everything up to and including it
            fs.writeFile(filePath, data, function (err) { // write file
                if (err) { // if error, report
                    console.log(err);
                }
            });
        } else {
            console.log('no lines found');
        }
    } else {
        console.log(err);
    }
});
Here is another way:
const fs = require('fs');
const filePath = './table.csv';
let csvContent = fs.readFileSync(filePath).toString().split('\n'); // read file and convert to array by line break
csvContent.shift(); // remove the first element (line) from the array
csvContent = csvContent.join('\n'); // convert array back to string
fs.writeFileSync(filePath, csvContent);
Thanks to @Lilleman's comment, I've amended the original solution; the amended version requires the 3rd-party module "line-by-line" and avoids memory overflow and race conditions when processing very large files.
const fs = require('fs');
const LineReader = require('line-by-line');
const removeLines = function (srcPath, destPath, count, cb) {
    if (count <= 0) {
        return cb();
    }
    var reader = new LineReader(srcPath);
    var output = fs.createWriteStream(destPath);
    var linesRemoved = 0;
    var isFirstLine = true;
    reader.on('line', (line) => {
        if (linesRemoved < count) {
            linesRemoved++;
            return;
        }
        reader.pause();
        var newLine;
        if (isFirstLine) {
            newLine = line;
            isFirstLine = false;
        } else {
            newLine = '\n' + line;
        }
        output.write(newLine, () => {
            reader.resume();
        });
    })
    .on('error', (err) => {
        reader.pause();
        return cb(err);
    })
    .on('close', () => {
        return cb();
    });
};
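Usage might look like this (the paths are placeholders); note that it writes to a separate destination file rather than editing in place:
removeLines('./input.txt', './output.txt', 3, function (err) {
    if (err) {
        return console.error(err);
    }
    console.log('first 3 lines removed');
});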
---------------- original solution below ----------------
Inspired by another answer, here is a revised stream version:
const fs = require('fs');
const readline = require('readline');
const removeFirstLine = function (srcPath, destPath, done) {
    var rl = readline.createInterface({
        input: fs.createReadStream(srcPath)
    });
    var output = fs.createWriteStream(destPath);
    var firstRemoved = false;
    rl.on('line', (line) => {
        if (!firstRemoved) {
            firstRemoved = true;
            return;
        }
        output.write(line + '\n');
    }).on('close', () => {
        return done();
    });
};
and it can easily be modified to remove a certain number of lines, by changing 'firstRemoved' into a counter:
var linesRemoved = 0;
...
if (linesRemoved < LINES_TO_BE_REMOVED) {
    linesRemoved++;
    return;
}
...
Here is a naive solution using the Promise-based file system APIs.
const fs = require('node:fs/promises')
const os = require('node:os')
async function removeLines(path, numLinesToRemove) {
    const data = await fs.readFile(path, { encoding: 'utf-8' })
    const newData = data
        .split(os.EOL) // split data into array of strings
        .slice(numLinesToRemove) // remove first N lines of array
        .join(os.EOL) // join array into a single string
    // overwrite original file with new data
    return fs.writeFile(path, newData)
}
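Usage would look something like this (the path and line count are placeholders):
removeLines('./table.csv', 1)
    .then(() => console.log('first line removed'))
    .catch(console.error)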
