I need to check whether a file is still being uploaded to an FTP server. I have no control over the server side (I'd just love a file.temp rename), so my best option (to my knowledge of FTP) is to ask for the file size or last-modified time again after an interval. My problem is that I would need this to be synchronous.
function isStillUploading(ftp, filePath) {
    var startFileSize = -1;
    var nextFileSize = -2;
    ftp.size(filePath,
        function sizeReturned(error, size) {
            if (!error) startFileSize = size;
            else startFileSize = -3;
        });
    // CPU-melting style delay.
    var start = Date.now();
    while (Date.now() - start < 500) { }
    ftp.size(filePath,
        function sizeReturned(error, size) {
            if (!error) nextFileSize = size;
            else nextFileSize = -4;
        });
    // This would be better, but I have no way of having
    // the root function wait for this and return a correct boolean.
    //setTimeout(ftp.size(filePath,
    //    function sizeReturned(error, size) {
    //        if (!error) nextFileSize = size;
    //        else nextFileSize = -4;
    //    }),
    //    500);
    // TODO: add checks for -1 -2 -3 -4
    console.log("File size change: " + startFileSize + ":" + nextFileSize);
    return (startFileSize != nextFileSize);
}
Writing this in a callback style would still imply a loop somewhere to re-check the file's size, or (IMO) nasty callback nesting I really don't like. As far as code readability goes, I think just making it synchronous would be so much easier.
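For reference, this is roughly the shape I'd hope for, sketched with promises (just a sketch: ftpSize and delay are hypothetical wrappers I'd have to write, assuming ftp.size can be promisified):

function ftpSize(ftp, filePath) {
    // Wrap the callback-style ftp.size in a promise (hypothetical helper).
    return new Promise(function (resolve, reject) {
        ftp.size(filePath, function (error, size) {
            if (error) reject(error);
            else resolve(size);
        });
    });
}

function delay(ms) {
    // Non-blocking wait (hypothetical helper).
    return new Promise(function (resolve) { setTimeout(resolve, ms); });
}

async function isStillUploading(ftp, filePath) {
    var startFileSize = await ftpSize(ftp, filePath);
    await delay(500);
    var nextFileSize = await ftpSize(ftp, filePath);
    return startFileSize != nextFileSize;
}

The caller would of course have to await isStillUploading too, so it isn't synchronous in the strict sense, but at least it reads top to bottom without melting a CPU core.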
Is there a simple way of doing this or should I re-write with callbacks and events?
Thank you for your help.
Related
I have a for loop that checks the aspect ratio of multiple uploaded images. After the loop completes, I want to check the ratios in an if/else condition to redirect the user. The problem is that the conditions are checked before the loop finishes; I need the loop to be completed before checking the conditions. I found out that async.whilst might be suitable here, but I'm confused about the best approach for implementation. Can anyone give me a workaround to make the code run sequentially?
//check image ratio
var validImageRatio = true;
for (i = 0; i < req.files.propertyPhoto.length; i++) {
    var tempFile = req.files.propertyPhoto[i].tempFilePath.replace(/\\/g, '/');
    var ratio; var width; var height;
    var acceptedRatio = 3;
    //get image ratio
    sizeOf(tempFile, function (err, dimensions) {
        width = dimensions.width;
        height = dimensions.height;
        ratio = width / height;
    });
    if (ratio < (acceptedRatio - 0.1) || ratio > (acceptedRatio + 0.1)) {
        validImageRatio = false;
        break;
    }
}
//if ratio invalid, redirect
if (!validImageRatio) {
    ...
}
//if ratio valid, upload
else {
    ...
}
Since you're doing the check asynchronously, the synchronous code runs first. If you use async/await inside the for loop, each iteration will wait for the previous check to finish, which makes the whole thing slower. The approach you can go for is to use Promise.all to run the checks concurrently.
const promises = req.files.propertyPhoto.map(prop => new Promise(resolve => {
    const tempFile = prop.tempFilePath.replace(/\\/g, '/');
    const acceptedRatio = 3;
    // get image ratio
    sizeOf(tempFile, function (err, dimensions) {
        if (err) return resolve(false); // treat unreadable images as invalid
        const width = dimensions.width;
        const height = dimensions.height;
        const ratio = width / height;
        if (ratio < (acceptedRatio - 0.1) || ratio > (acceptedRatio + 0.1)) {
            return resolve(false);
        }
        resolve(true);
    });
}));
const result = await Promise.all(promises);
if (result.some(r => r === false)) {
    // if any of the ratios is invalid, redirect
} else {
    // else upload
}
I'm guessing at what you mean, but a for loop would complete before the conditions at the bottom are checked, except that you include a break statement. The break statement makes the for loop stop executing and move on.
I have a log file with about 14,000 aircraft position datapoints captured from a system called Flarm. It looks like this:
{"addr":"A","time":1531919658.578100,"dist":902.98,"alt":385,"vs":-8}
{"addr":"A","time":1531919658.987861,"dist":914.47,"alt":384,"vs":-7}
{"addr":"A","time":1531919660.217471,"dist":925.26,"alt":383,"vs":-7}
{"addr":"A","time":1531919660.623466,"dist":925.26,"alt":383,"vs":-7}
What I need to do is find a way to 'play' this file back in real time (as if it were occurring right now, even though it's pre-recorded) and emit an event whenever a log entry 'occurs'. The file is not being added to; it's pre-recorded, and the playback would occur at a later stage.
The reason for doing this is that I don't have access to the receiving equipment when I'm developing.
The only way I can think to do it is to set a timeout for every log entry, but that doesn't seem like the right way to do it. Also, this process would have to scale to longer recordings (this one was only an hour long).
Are there other ways of doing this?
If you want to "play them back" with the actual time difference, a setTimeout is pretty much what you have to do.
const processEntry = (entry, index) => {
    index++;
    const nextEntry = getEntry(index);
    if (nextEntry == null) return;
    // Log times are in seconds; setTimeout expects milliseconds
    const timeDiff = (nextEntry.time - entry.time) * 1000;
    emitEntryEvent(entry);
    setTimeout(processEntry, timeDiff, nextEntry, index);
};
processEntry(getEntry(0), 0);
This emits the current entry and then sets a timeout based on the difference until the next entry.
getEntry could either fetch lines from a prefilled array or fetch lines individually based on the index. In the latter case, only two lines of data would be in memory at the same time.
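For instance, the prefilled-array variant of getEntry could look something like this (just a sketch; it loads the whole file up front, and the file path is illustrative):

// Sketch: getEntry backed by a prefilled array of parsed log lines.
const fs = require('fs');
const lines = fs.readFileSync('./data/2018-07-18_1509log.json', 'utf8')
    .split('\n')
    .filter(line => line.length > 0);
const getEntry = index => (index < lines.length ? JSON.parse(lines[index]) : null);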
Got it working in the end! setTimeout turned out to be the answer, and, combined with the input of Lucas S., this is what I ended up with:
const EventEmitter = require('events');
const fs = require('fs');
const readable = fs.createReadStream("./data/2018-07-18_1509log.json", {
    encoding: 'utf8',
    fd: null
});
function read_next_line() {
    var chunk;
    var line = '';
    // Consume the stream one character at a time
    while ((chunk = readable.read(1)) !== null) {
        // If the chunk is a newline character, parse and return the line
        if (chunk === '\n') {
            return JSON.parse(line);
        } else {
            line += chunk;
        }
    }
    // No more complete lines available
    return false;
}
var lines = [];
var nextline;
const processEntry = () => {
    // If lines is empty, read a line
    if (lines.length === 0) lines.push(read_next_line());
    // Quit here if we've reached the last line
    if ((nextline = read_next_line()) === false) return true;
    // Else push the just-read line into our array
    lines.push(nextline);
    // Get the time difference in milliseconds
    var delay = Number(lines[1].time - lines[0].time) * 1000;
    // Remove the first line
    lines.shift();
    module.exports.emit('data', lines[0]);
    // Repeat after the calculated delay
    setTimeout(processEntry, delay);
}
var ready_to_start = false;
// When the stream becomes readable, allow starting
readable.on('readable', function() {
    ready_to_start = true;
});
module.exports = new EventEmitter();
module.exports.start = function() {
    if (!ready_to_start) return false;
    processEntry();
}
Assuming you want to visualize the flight logs, you can use fs.watch as below to watch the log file for changes:
fs.watch('somefile', function (event, filename) {
    console.log('event is: ' + event);
    if (filename) {
        console.log('filename provided: ' + filename);
    } else {
        console.log('filename not provided');
    }
});
The code excerpt is from here; for more information on fs.watch(), check out here.
Then, for seamless updates on the frontend, you can set up a WebSocket to your server, watch the log file there, and send each newly added row over that socket to the frontend.
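A minimal sketch of that server side, assuming the ws package (the file name, port, and read-new-bytes logic are illustrative):

// Sketch: push newly appended log data to WebSocket clients (assumes `ws`).
const fs = require('fs');
const WebSocket = require('ws');

const wss = new WebSocket.Server({ port: 8080 });
let lastSize = fs.statSync('somefile').size;

fs.watch('somefile', function () {
    const size = fs.statSync('somefile').size;
    if (size <= lastSize) return; // nothing new was appended
    // Read only the bytes added since the last event
    const stream = fs.createReadStream('somefile', {
        start: lastSize,
        end: size - 1,
        encoding: 'utf8'
    });
    lastSize = size;
    stream.on('data', chunk => {
        // Broadcast the appended rows to every connected client
        wss.clients.forEach(client => {
            if (client.readyState === WebSocket.OPEN) client.send(chunk);
        });
    });
});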
After you get the data on the frontend, you can visualize it there. While I haven't done any flight visualization project before, I've used D3.js to visualize other things (sound, numerical data, metric analysis, etc.) a couple of times, and it did the job every time.
I have created a little maze with a robot, and I use Blockly to generate code that tries to solve it. I can move the robot using JavaScript commands which are Blockly blocks. So far so good.
I am currently stuck on the arguments of if statements and while loops. Mainly, I have tried two things:
Blockly maze
Create a variable, not_goal_reached, which says whether or not the robot has reached the goal position (cross). Code:
function not_done() {
    var goal_location = get_goal_position();
    var goal_x = goal_location[0];
    var goal_y = goal_location[1];
    console.log('in not done');
    //console.log(player.x != goal_x || player.y != goal_y)
    return (player.x != goal_x || player.y != goal_y);
};
Blockly.Blocks['not_goal_reached'] = {
    init: function() {
        this.appendDummyInput()
            .appendField("not at goal");
        this.setOutput(true, "Boolean");
        this.setColour(230);
        this.setTooltip('');
        this.setHelpUrl('');
    }
};
Blockly.JavaScript['not_goal_reached'] = function(block) {
    var code = 'not_done()';
    // TODO: Change ORDER_NONE to the correct strength.
    //console.log(code)
    return [code, Blockly.JavaScript.ORDER_ATOMIC];
};
However, when using this block in an if or while statement, I always get a JavaScript error that does not help me find the solution:
TypeError: Cannot read property 'toBoolean' of undefined
at Interpreter.stepConditionalExpression (acorn_interpreter.js:148)
at Interpreter.step (acorn_interpreter.js:45)
at nextStep (index.html:79)
I use the Acorn js interpreter:
window.LoopTrap = 2000;
//Blockly.JavaScript.INFINITE_LOOP_TRAP = 'if(--window.LoopTrap == 0) throw "Infinite loop.";\n';
var code = Blockly.JavaScript.workspaceToCode(workspace);
console.log(code);
var myInterpreter = new Interpreter(code, initInterpreter);
//Blockly.JavaScript.INFINITE_LOOP_TRAP = null;
var counter = 0;
function nextStep() {
    try {
        if (myInterpreter.step()) {
            counter += 1;
            console.log(counter);
            if (counter < window.LoopTrap) {
                window.setTimeout(nextStep, 30);
            } else {
                throw "Infinite Loop!";
            }
        }
    } catch (e) {
        //alert(e);
        console.log(e);
    }
}
nextStep();
Problem: a JavaScript error I cannot solve :(
I created my own While block that does not require an input. This While block checks internally whether or not the robot has reached the goal and then processes the DO statements:
Blockly.Blocks['repeat_forever'] = {
    init: function() {
        this.appendDummyInput()
            .appendField("While not at goal");
        this.appendStatementInput("DO")
            .appendField("Do");
        this.setPreviousStatement(true);
        this.setColour(230);
        this.setTooltip('');
        this.setHelpUrl('');
    }
};
Blockly.JavaScript['repeat_forever'] = function(block) {
    var branch = Blockly.JavaScript.statementToCode(block, 'DO');
    // TODO: Assemble JavaScript into code variable.
    //if (Blockly.JavaScript.INFINITE_LOOP_TRAP) {
    //    branch = Blockly.JavaScript.INFINITE_LOOP_TRAP.replace(/%1/g,
    //        '\'block_id_' + block.id + '\'') + branch;
    //    console.log(branch);
    //}
    var code = 'while (' + not_done() + ') {' + branch + '}';
    console.log(code);
    return [code, Blockly.JavaScript.ORDER_ATOMIC];
};
This works, BUT here I have the problem that my internal function not_done is evaluated only once (at code generation), so the condition becomes while (true) (since at generation time the robot is of course not at the goal location yet). This block correctly applies the DO code but does not halt (since while (true)). If I add quotes around 'not_done()', the call ends up in the generated code instead of its result, but then I receive the same JavaScript error as above (Cannot read property 'toBoolean' of undefined).
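For completeness, this is the direction I am experimenting with now (just a sketch, and I may well be wrong: keep not_done() as text in the generated code, and expose the real function to the interpreter, e.g. via its createNativeFunction):

Blockly.JavaScript['repeat_forever'] = function(block) {
    var branch = Blockly.JavaScript.statementToCode(block, 'DO');
    // Leave not_done() as text so the sandbox evaluates it on every pass;
    // statement blocks return a plain string, not [code, order].
    return 'while (not_done()) {\n' + branch + '}\n';
};

// ...and make the real not_done visible inside the interpreter sandbox:
function initInterpreter(interpreter, globalObject) {
    interpreter.setProperty(globalObject, 'not_done',
        interpreter.createNativeFunction(not_done));
}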
Am I missing something here? Thanks a lot for your time!
Greetings
K
It seems that your setTimeout callback cannot run while the while loop is executing.
I'm kinda new to NodeJS and I'm working on a simple file encoder.
I planned to change the very first 20kb of a file and just copy the rest of it.
So I used the following code, but it changed some bytes in the rest of the file.
Here is my code:
var fs = require('fs');
var config = require('./config');
fs.open(config.encodeOutput, 'w', function(err, fw) {
    if (err) {
        console.log(err);
    } else {
        fs.readFile(config.source, function(err, data) {
            var start = 0;
            var buff = readChunk(data, start);
            while (buff.length) {
                if (start < config.encodeSize) {
                    var buffer = makeSomeChanges(buff);
                    writeChunk(fw, buffer);
                } else {
                    writeChunk(fw, buff);
                }
                start += config.ENCODE_BUFFER_SIZE;
                buff = readChunk(data, start);
            }
        });
    }
});
function readChunk(buffer, start) {
    return buffer.slice(start, start + config.ENCODE_BUFFER_SIZE);
}
function writeChunk(fd, chunk) {
    fs.writeFile(fd, chunk, {encoding: 'binary', flag: 'a'});
}
I opened the encoded file and compared it with the original file.
I even commented out these parts:
//if(start < config.encodeSize) {
//    var buffer = makeSomeChanges(buff);
//    writeChunk(fw, buffer);
//} else {
    writeChunk(fw, buff);
//}
So my program just copies the file, but it still changes some bytes.
What is wrong?
So I checked the pattern, realized some bytes were not in the right place, and guessed that it was because I was using the async write function.
I changed fs.writeFile() to fs.writeFileSync() and everything is working fine now.
Since you were using asynchronous IO, you should have been waiting on a queue of operations: multiple writes happening at the same time are likely to end up corrupting your file. This explains why your issue is solved by using synchronous IO; that way, a further write cannot start before the previous one has completed.
However, using synchronous APIs when asynchronous ones are available is a poor choice, because your program is blocked while it writes to the file. You should go for async and build a queue of writes to wait on.
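A minimal sketch of that idea, assuming Node's fs/promises API (makeSomeChanges and the config names are taken from your question):

// Sketch: sequential async writes; each await keeps the chunks in order.
const fs = require('fs/promises');
const config = require('./config');

async function encodeFile() {
    const data = await fs.readFile(config.source);
    const out = await fs.open(config.encodeOutput, 'w');
    try {
        for (let start = 0; start < data.length; start += config.ENCODE_BUFFER_SIZE) {
            let chunk = data.slice(start, start + config.ENCODE_BUFFER_SIZE);
            if (start < config.encodeSize) {
                chunk = makeSomeChanges(chunk); // your transform, unchanged
            }
            await out.write(chunk); // waits for the previous write to finish
        }
    } finally {
        await out.close();
    }
}

This keeps the event loop free between chunks while still guaranteeing write order.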
Please look at the following image, from http://mongoexplorer.com/:
I've been trying to work through GridFS, referencing https://github.com/jamescarr/nodejs-mongodb-streaming. The files I uploaded come back nicely, and the stream returned by the following get function looks right.
var gridfs = (function () {
    function gridfs() { }
    gridfs.get = function (id, fn) {
        var db, store;
        db = mongoose.connection.db;
        id = new ObjectID(id);
        store = new GridStore(db, id, "r", {
            root: "fs"
        });
        return store.open(function (err, store) {
            if (err) {
                return fn(err);
            }
            return fn(null, store);
        });
    };
    return gridfs;
})();
Using http://mongoexplorer.com/ I uploaded files into GridFS to test with, but they seem broken when I use the node code above to retrieve them.
That is when I noticed the filename / fileName thing. Looking at /node_modules/mongodb/lib/mongodb/gridfs/gridstore.js, I saw the reference to filename with a lowercase 'n', but in my GridFS it's fileName with a capital 'N'.
OK, so just for kicks, I changed it to lowercase in GridFS, but I still get some corruption in the stream (node code above) when retrieving files uploaded with http://mongoexplorer.com/.
Clicking Save as... in http://mongoexplorer.com/, however, brings back my file just perfectly.
To get back to my question (since my tests didn't seem to prove anything), I am wondering which it is: filename with a lowercase 'n', or fileName with a capital 'N'?
Please use the latest mongodb native driver, as it contains a ton of fixes for GridFS; there are also plenty of examples of using GridFS as streams under the tests in the driver's GitHub repository.
Docs are at
http://mongodb.github.com/node-mongodb-native
In general I would say that if you only use core functionality, stick to the driver. The one you are using is way out of date, which explains your corruption issues.
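For what it's worth, with current drivers a streaming read looks roughly like this (a sketch using GridFSBucket from the modern driver; the database name and output path are illustrative):

// Sketch: stream a GridFS file to disk with the modern driver.
const { MongoClient, GridFSBucket, ObjectId } = require('mongodb');
const fs = require('fs');

async function download(id) {
    const client = await MongoClient.connect('mongodb://localhost:27017');
    const bucket = new GridFSBucket(client.db('test'));
    bucket.openDownloadStream(new ObjectId(id))
        .pipe(fs.createWriteStream('./output.bin'))
        .on('finish', () => client.close());
}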
Another Windows tool, namely MongoVue, also looks for filename instead of fileName. I'd say the answer is more likely filename than fileName.
While retrieving the small Windows file from GridStore, I found a bug, but I don't know how to fix it properly. I guess there must be some value like Chunk.CurrentSize or the like, but looking at the chunk.js file in the native node mongo driver https://github.com/mongodb/node-mongodb-native/blob/master/lib/mongodb/gridfs/chunk.js, I did the following...
I found this:
Chunk.prototype.readSlice = function(length) {
    if ((this.length() - this.internalPosition + 1) >= length) {
        var data = null;
        if (this.data.buffer != null) { //Pure BSON
            data = this.data.buffer.slice(this.internalPosition, this.internalPosition + length);
        } else { //Native BSON
            data = new Buffer(length);
            length = this.data.readInto(data, this.internalPosition);
        }
        this.internalPosition = this.internalPosition + length;
        return data;
    } else {
        return null;
    }
};
and moved the following
data = this.data.buffer.slice(this.internalPosition, this.internalPosition + length);
into this if statement (1024 * 256 being the value of Chunk.DEFAULT_CHUNK_SIZE):
if (this.data.buffer != null) { //Pure BSON
    if (this.data.buffer.length > 1024 * 256) {
        // move to here
    } else {
        data = this.data.buffer;
    }
}
like so:
Chunk.prototype.readSlice = function(length) {
    if ((this.length() - this.internalPosition + 1) >= length) {
        var data = null;
        if (this.data.buffer != null) { //Pure BSON
            if (this.data.buffer.length > 1024 * 256) {
                data = this.data.buffer.slice(this.internalPosition, this.internalPosition + length);
            } else {
                data = this.data.buffer;
            }
        } else { //Native BSON
            data = new Buffer(length);
            length = this.data.readInto(data, this.internalPosition);
        }
        this.internalPosition = this.internalPosition + length;
        return data;
    } else {
        return null;
    }
};
The issue with Windows files smaller than the chunk size is solved, but this isn't the most elegant solution. I'd like to propose this as the answer, but I realize that hard-coding the default chunk size isn't the dynamic value that would make this less of a workaround ;-)
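A slightly less hard-coded variant of the same guard could reference the constant the value comes from (still a workaround, but at least the magic number is named; Chunk.DEFAULT_CHUNK_SIZE is the 1024 * 256 mentioned above):

// Same guard, using the named constant instead of the literal.
if (this.data.buffer != null) { //Pure BSON
    if (this.data.buffer.length > Chunk.DEFAULT_CHUNK_SIZE) {
        data = this.data.buffer.slice(this.internalPosition, this.internalPosition + length);
    } else {
        data = this.data.buffer;
    }
}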