I have a Node.js script that reads a group of files one by one. For each file it reads the document line by line; after reading a line it makes an HTTP POST request to send that line to a remote server, then reads the next line. The problem is that the script misses some lines.
Thank you.
It seems that lr.pause(); just hides the 'line' event instead of actually pausing the file-read process.
var fs = require('fs');
var http = require('http');
var JSON = require('JSON');
var S = require('string');
var uuid = require('node-uuid');
var readline = require('readline');
var httpsync = require('httpsync');
var LineByLineReader = require('line-by-line');
var sleep = require('sleep');
function postES(_path,data,id,lr){
var post_data = JSON.stringify(data);
var post_options = {
host: _host,
port: _port,
path: _path,
method: 'POST',
headers: {
'Content-Type': 'application/x-www-form-urlencoded',
'Content-Length': post_data.length
}
};
var post_req = http.request(post_options, function(res) {
res.setEncoding('utf8');
res.on('data', function(data) {
console.log(data);
});
res.on('end', function() {
console.log("end");
// resume read line
lr.resume();
});
});
post_req.on('error', function(data) {
console.log("error,post."+data+post_data);
// resume read line
lr.resume();
});
post_req.write(post_data);
post_req.end();
}
function readlineFunSession(line,id,lr) {
var _data={};
// compose _data object
postES('/cs/session/'+_data["sessionid"],_data,id,lr);
}
function readfileFun(files,start,end,id,readlineFun) {
if(start<end && start<files.length){
var lr = new LineByLineReader(files[start],{encoding:'utf8',skipEmptyLines:true});
lr.on('error', function (e) {
console.log('error,LineByLineReader.'+e.toString());
});
lr.on('line', function (line) {
// pause read line
lr.pause();
try{
readlineFun(line,id,lr);
}catch(e){
console.log('error,line.'+e.toString());
}
});
lr.on('end', function () {
readfileFun(files,++start,end,id,readlineFun);
});
}
}
// var files is an array of files
// this function tries to go through file[0], file[1], file[2], ..., file[10]
readfileFun(files,0,10,"ID-1",readlineFunSession);
Doing a series of actions where the next action should run only after the current one finishes is a bit difficult in Node.js because of its asynchronous paradigm. One way is to use a flow-control npm module such as fibers or async's waterfall,
but another simple (and admittedly crude) way is to create a dummy worker manager: keep your Node.js process running indefinitely and, on every time interval, check whether the current work is done; if it is, start the next action.
By the way, while you can't make the request synchronous, you can read files synchronously, so in your case I think you should read all lines in all files into one big array of lines, as sketched below.
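For example, a minimal sketch of building that array synchronously (this assumes files is the same array of file paths as in the question, and would replace the placeholder arrayoflines in the snippet below):
var fs = require('fs');
// read every file synchronously and flatten the contents into one big array of lines
var arrayoflines = [];
files.forEach(function(file){
    var content = fs.readFileSync(file, 'utf8');
    content.split(/\r?\n/).forEach(function(line){
        if (line.trim().length > 0) { arrayoflines.push(line); } // skip empty lines
    });
});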
var jswget = require("jswget");
var arrayoflines = ["line1", "line2", "line3"];
var counter = 0;
var inProgress = false;
var request = function(){
if (arrayoflines.length == 0) {
// no more line, should exit
process.exit();
}
if (inProgress) {
// if previous work is not completed then skip.
return;
}
// get first line, and remove it from array index
var current_line = arrayoflines.shift();
inProgress = true;
jswget({
url: "http://someurl:3000/somepath?q1=" + current_line,
method: 'POST',
formdata: some_postdata,
headers: {
'Content-Type': 'application/x-www-form-urlencoded',
'Content-Length': some_postdata.length
},
onsuccess: function(responsetext, req, res){
// success requesting, should do next line
},
onerror: function(err, req){
// oops, error occurred, but we will do next line nevertheless
},
onend: function(){
// success or not, the request is end, so we should prepare for next request
counter+=1;
inProgress = false;
}
})
}
setInterval(function(){
request();
}, 100)
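A variation on the same idea, sketched with the same jswget options as above: instead of polling with setInterval, start the next request from the onend handler, so each POST begins only after the previous one has finished.
var requestNext = function(){
    if (arrayoflines.length == 0) {
        // no more lines, exit
        process.exit();
    }
    // take the next line and post it
    var current_line = arrayoflines.shift();
    jswget({
        url: "http://someurl:3000/somepath?q1=" + current_line,
        method: 'POST',
        onend: function(){
            // success or not, move on to the next line
            requestNext();
        }
    });
};
requestNext();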
This may help you...
With Node 0.12, it's possible to do this synchronously now:
var fs = require('fs');
var path = require('path');
// Buffer mydata
var BUFFER = bufferFile('../public/mydata.txt');
function bufferFile(relPath) {
return fs.readFileSync(path.join(__dirname, relPath)); // zzzz....
}
fs is the file system module. readFileSync() returns a Buffer, or a string if you ask for one.
fs resolves relative paths against the current working directory, which can be surprising; joining with __dirname via path works around that.
To load as a string, specify the encoding:
return fs.readFileSync(path.join(__dirname, relPath), { encoding: 'utf8' });
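As a rough sketch of how this ties back to reading lines (reusing the relPath idea from bufferFile above), you can split the loaded string into lines:
function readLines(relPath) {
    // load the whole file as UTF-8 and split it into lines
    var text = fs.readFileSync(path.join(__dirname, relPath), { encoding: 'utf8' });
    return text.split(/\r?\n/);
}
var lines = readLines('../public/mydata.txt');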
I'd like to return a file from Blob Storage when you hit a given Azure Function end-point. This file is binary data.
Per the Azure Storage Blob docs, the most relevant call appears to be the following, since it's the only one that doesn't require writing the blob to an interim file first:
getBlobToStream
However this call gets the Blob and writes it to a stream.
Is there a way with Azure Functions to use a Stream as the value of res.body so that I can get the Blob Contents from storage and immediately write it to the response?
To add some code, I'm trying to get something like this to work:
'use strict';
const azure = require('azure-storage'),
stream = require('stream');
const BLOB_CONTAINER = 'DeContainer';
module.exports = function(context){
var file = context.bindingData.file;
var blobService = azure.createBlobService();
var outputStream = new stream.Writable();
blobService.getBlobToStream(BLOB_CONTAINER, file, outputStream, function(error, serverBlob) {
if(error) {
FileNotFound(context);
} else {
context.res = {
status: 200,
headers: {
},
isRaw: true,
body : outputStream
};
context.done();
}
});
}
function FileNotFound(context){
context.res = {
status: 404,
headers: {
"Content-Type" : "application/json"
},
body : { "Message" : "No esta aqui!."}
};
context.done();
}
Unfortunately we don't have streaming support implemented in NodeJS just yet - it's on the backlog: https://github.com/Azure/azure-webjobs-sdk-script/issues/1361
If you're not tied to Node.js and are open to using a C# function instead, you can use the storage SDK object directly in your input bindings and stream the request output, instead of using the intermediate object approach.
While @Matt Manson's answer is definitely correct based on the way I asked my question, the following code snippet might be more useful for someone who stumbles across this question.
While I can't send the Stream to the response body directly, I can use a custom stream which captures the data into a Uint8Array, and then sends that to the response body.
NOTE: If the file is REALLY big, this will use a lot of memory.
'use strict';
const azure = require('azure-storage'),
stream = require('stream');
const BLOB_CONTAINER = 'deContainer';
module.exports = function(context){
var file = context.bindingData.file;
var blobService = azure.createBlobService();
var outputStream = new stream.Writable();
outputStream.contents = new Uint8Array(0);//Initialize contents.
//Override the write to store the value to our "contents"
outputStream._write = function (chunk, encoding, done) {
var curChunk = new Uint8Array(chunk);
var tmp = new Uint8Array(this.contents.byteLength + curChunk.byteLength);
tmp.set(this.contents, 0);
tmp.set(curChunk, this.contents.byteLength);
this.contents = tmp;
done();
};
blobService.getBlobToStream(BLOB_CONTAINER, file, outputStream, function(error, serverBlob) {
if(error) {
FileNotFound(context);
} else {
context.res = {
status: 200,
headers: {
},
isRaw: true,
body : outputStream.contents
};
context.done();
}
});
}
function FileNotFound(context){
context.res = {
status: 404,
headers: {
"Content-Type" : "application/json"
},
body : { "Message" : "No esta aqui!"}
};
context.done();
}
I tried @Doug's solution from the last comment above, with a few minor mods in my Azure Function, and so far, after trying 20 different ideas, this is the only one that actually delivered the file to the browser! Thank you, @Doug...
const fs = require("fs");
const stream = require("stream");
...
const AzureBlob = require('@[my_private_artifact]/azure-blob-storage');
const azureStorage = new AzureBlob(params.connectionString);
//Override the write to store the value to our "contents" <-- Doug's solution
var outputStream = new stream.Writable();
outputStream.contents = new Uint8Array(0);//Initialize contents.
outputStream._write = function (chunk, encoding, done) {
var curChunk = new Uint8Array(chunk);
var tmp = new Uint8Array(this.contents.byteLength + curChunk.byteLength);
tmp.set(this.contents, 0);
tmp.set(curChunk, this.contents.byteLength);
this.contents = tmp;
done();
};
let azureSpeedResult = await azureStorage.downloadBlobToStream(params.containerName, params.objectId, outputStream);
let headers = {
"Content-Length": azureSpeedResult.size,
"Content-Type": mimeType
};
if (params.action == "download") {
headers["Content-Disposition"] = "attachment; filename=" + params.fileName;
}
context.res = {
status: 200,
headers: headers,
isRaw: true,
body: outputStream.contents
};
context.done();
...
I am trying to use Node.js to programmatically build Jenkins jobs that take Git parameters.
I am sending the parameters as post data, as shown below. However, no matter what value I assign to ref, Jenkins runs the build with the default parameter value (specified in the job's configuration). I have tried passing in the parameters as query strings in the URL, but that also did not work.
I am using Jenkins v1.651.1 and Node v6.2.0.
var jobOptions = {
url: requestedJobObject.url + 'build',
method: 'POST',
port: 8080
};
// parameters = { "name": "ref", "value": "origin/master" }
if (!_.isEmpty(parameters)) {
var jsonParametersString = JSON.stringify({"parameter": parameters});
var parameterParam = encodeURIComponent(jsonParametersString);
parameters.json = parameterParam;
jobOptions.headers = {
'Content-Type': 'application/x-www-form-urlencoded',
'Content-Length': querystring.stringify(parameters).length
};
jobOptions.url += 'WithParameters';
postData = querystring.stringify(parameters);
}
// jobOptions contains auth field & separates url into hostname and path
// makes an http request to jobOptions and calls req.write(postData)
makeRequest(jobOptions, callback, responseCB, postData)
makeRequest makes an http request:
function makeRequest (object, callback, responseCB, postData) {
var accumulator = '';
var parsedUrl = u.parse('//' + object.url, true, true);
var options = {
hostname: parsedUrl.hostname,
port: object.port || 8080,
path: parsedUrl.path,
method: object.method || 'GET',
auth: getAuthByHost(parsedUrl.hostname)
};
if (object.headers) {
options.headers = object.headers;
}
var response = null;
var req = http.request(options, function(res) {
response = res;
res.on('data', function (data) {
accumulator = accumulator + data.toString();
res.resume();
});
res.on('close', function () {
// first assume accumulator is JSON object
var responseContent;
try {
responseContent = JSON.parse(accumulator);
}
// if not object, use accumulator as string
catch (err) {
responseContent = accumulator;
}
callback(responseContent, response.statusCode);
if (responseCB) {
responseCB(res);
}
});
});
req.on('close', function () {
// first assume accumulator is JSON object
var responseContent;
try {
responseContent = JSON.parse(accumulator);
}
catch (err) {
responseContent = accumulator;
}
callback(responseContent, response.statusCode);
if (responseCB) {
responseCB(response);
}
});
if (postData) {
req.write(postData);
}
req.end();
}
Try this; it works for me:
var auth = 'Basic yourUserToken';
var jobOptions = {
url:'jenkinsHostName:8080/jenkins/job/jobName/' +'build',
method: 'POST',
port: 8080
};
var parameter = {"parameter": [{"name":"ref", "value":"origin/master"}]};
var postData;
if (!_.isEmpty(parameter)) {
var jsonParametersString = JSON.stringify(parameter);
jobOptions.headers = {
'Authorization':auth,
'Content-Type': 'application/x-www-form-urlencoded',
};
jobOptions.url += '?token=jobRemoteTriggerToken';
postData = "json="+jsonParametersString;
console.log("postData = " + postData);
}
var callback;
var responseCB;
makeRequest(jobOptions, callback, responseCB, postData) ;
It is based on your code. I removed the querystring usage - it seemed to return an empty string when applied to the parameters object. I changed /buildWithParameters to /build - it didn't work the other way.
In addition, verify that when you pass 'Content-Length' in the header it doesn't truncate your JSON parameters object (I removed it).
Also note that I used the user API token, which you can get at http://yourJenkinsUrl/me/configure by clicking the "Show API Token" button.
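If it helps, this is one way to build that Authorization header value from a username and API token (both values here are placeholders):
// placeholder credentials - replace with your Jenkins user and API token
var user = 'yourUser';
var apiToken = 'yourApiToken';
var auth = 'Basic ' + new Buffer(user + ':' + apiToken).toString('base64');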
Not sure about this, as I don't know Node.js -- but maybe this fits: the Jenkins remote access API indicates that the parameter entity in the JSON request must point to an array, even if there's just one parameter to be defined.
Does the change below fix the problem (note the square brackets around parameters)?
[...]
var jsonParametersString = JSON.stringify({"parameter": [parameters]});
[...]
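For illustration, a sketch of the resulting payload when parameters is the single ref parameter from the question:
// note the array around the parameter object
var jsonParametersString = JSON.stringify({"parameter": [parameters]});
var postData = "json=" + jsonParametersString;
// postData => json={"parameter":[{"name":"ref","value":"origin/master"}]}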
I am using child_process (the fork method) to handle the task of saving some records to a remote server. For this I was using a jQuery AJAX call in the child process to save the records, but somehow that code never gets executed.
I have already included jquery.min.js in the HTML page that also includes the file which forks the child process.
The file that forks the child process:
var childProcess = require('child_process');
var syncProcess;
function launchMindwaveDataSync(){
//alert("launchMindwaveDataSync fired");
var iconSync = document.getElementById("iconSync");
iconSync.src = "images/findDevice.gif";
iconDevice.title = "Synchronizing...";
//Launch the device reader script in a new V8 process.
syncProcess = childProcess.fork('./js/mindwaveDataSync.js');
syncProcess.on('message', function(message){
console.log(message);
switch(message.msg)
{
case "connected":
global.HEADSET_CONNECTED = true;
iconDevice.src = "images/icon-power.png";
iconDevice.title = "Connected to Mindwave Mobile device";
startSynchronizing();
break;
case "disconnected":
case "error":
case "close":
case "timeout":
global.HEADSET_CONNECTED = false;
iconDevice.src = "images/error.png";
iconDevice.title = "Mindwave Mobile device is disconnected";
break;
}
});
syncProcess.on('error', function(e){
console.log(e);
});
setTimeout(function(){
console.log('sending command initialize');
syncProcess.send({cmd:'initialize'});
},1000);
};
function startSynchronizing(){
syncProcess.send({cmd: 'synchronize'});
}
The child process, which is supposed to make the AJAX call:
var recursive = require('recursive-readdir');
var SecurConf = require('../js/secureConf');
var sconf = new SecurConf();
var filesToSync = [];
var crypto = require('crypto');
var options = {
//prompt : 'File Password : ',
algo : 'aes-128-ecb',
file : {
encoding : 'utf8',
out_text : 'hex'
}
};
process.on('message', function (command){
console.log(command);
switch(command.cmd)
{
case "initialize": initializeConnection();
break;
case "synchronize": checkForFiles();
break;
case "record":
client.resume();
break;
case "pause":
client.pause();
break;
case "stop":
client.destroy();
break;
}
//process.send({msg:"Sync Process: " + command.cmd});
});
function checkForFiles(){
recursive('C:/MindWaveData/Data/', function (err, files) {
// Files is an array of filename
filesToSync = files;
decryptFiles();
//process.send({msg:files});
});
}
function decryptFiles(){
var ajaxSuccess = function(res){
process.send({msg:res});
}
for(var i = 0; i < filesToSync.length; i++){
var ef = ""+filesToSync[i];
sconf.decryptFile(ef, function(err, file, content){
if(err){
process.send({msg:"some error occurred while decrypting..."});
} else {
var parsedContent = JSON.parse(content);
var decryptedContent = JSON.stringify(parsedContent, null,'\t');
for(var j = 0; j<parsedContent.length; j++){
$.ajax({
//async: false,
type: "POST",
url: "http://192.168.14.27:8001/admin/webservice",
contentType: "application/json; charset=utf-8",
dataType: "json",
data: JSON.stringify({
'ncFunction': 'login',
'ncParams': {
'ncUserEmail': "clarity_admin#yopmail.com",
'ncUserPassword': "cl123!##"
}
}),
success: function (res) {
ajaxSuccess(res);
},
error: function (xhr, type, err) {
ajaxSuccess(res);
}
});
}
});
}
}
function initializeConnection(){
//console.log('initializeConnection::function');
//process.send({msg:"initializeConnection called"});
checkConnection()
//process.send({msg:"connected"});
//call function to send ajax request
}
function checkConnection(){
//console.log('checkConnection::function');
//call ajax request 3 times to check the connection.. once in third try we get the response OK, we can send the process message as connected
var ajaxCallCount = 0;
var makeAjaxCall = function(){
//console.log('function makeAjaxCall');
//process.send({msg:"connected"});
if(ajaxCallCount < 2){
ajaxCallCount++;
//console.log('ajaxCallCount:'+ajaxCallCount);
//process.send({msg:'value of ajaxCallCount:'+ajaxCallCount});
connectionSuccess();
}
else{
process.send({msg:"connected"});
}
};
var connectionSuccess = function(){
//console.log('function connectionSuccess');
makeAjaxCall();
};
makeAjaxCall();
}
Can we use a jQuery AJAX call in the child process like this? Also, I have included the file that forks the child process in an HTML file, and on load of its body I call launchMindwaveDataSync from the first file shown above.
Thanks in advance
I'm trying to upload an image from an AngularJS interface to a Node.js server (Express).
(I'm using mean.io)
Every time I upload something, req.body logs "{}" and req.files logs "undefined".
I'm using the angular-file-upload directive in AngularJS.
Client-side code:
$scope.onFileSelect = function() {
console.log($files);
for (var i = 0; i < $files.length; i++) {
var file = $files[i];
$scope.upload = $upload.upload({
url: 'map/set',
method: 'POST',
headers: {'enctype': 'multipart/form-data'},
data: {myObj: $scope.myModelObj},
file: file,
}).progress(function(evt) {
console.log('percent: ' + parseInt(100.0 * evt.loaded / evt.total));
}).success(function(data, status, headers, config) {
// file is uploaded successfully
console.log(data);
});
}
};
Server-side code
var app = express();
require(appPath + '/server/config/express')(app, passport, db);
app.use(bodyParser({uploadDir:'./uploads'}));
app.post('/map/set', function(req, res) {
console.log(req.body);
console.log(req.files);
res.end('Success');
});
Edit:
HTML code:
<div class="row">
<input id="file" type="file" ng-file-select="onFileSelect()" >
</div>
Hand-built request:
$scope.onFileSelect = function() {
//$files: an array of files selected, each file has name, size, and type.
//console.log($files);
var xhr = new XMLHttpRequest();
// not yet supported in most browsers, some examples use
// this but it's not safe.
// var fd = document.getElementById('upload').getFormData();
var fd = new FormData();
var files = document.getElementById('myfileinput').files;
console.log(files);
for(var i = 0;i<files.length; i++) {
fd.append("file", files[i]);
}
/* event listeners */
xhr.upload.addEventListener("progress", uploadProgress, false);
xhr.addEventListener("error", uploadFailed, false);
xhr.addEventListener("load", uploadComplete, false);
xhr.addEventListener("abort", uploadCanceled, false);
function uploadComplete(){
console.log("complete");
}
function uploadProgress(){
console.log("progress");
}
function uploadFailed(){
console.log("failed");
}
function uploadCanceled(){
console.log("canceled");
}
xhr.open("POST", "map/set");
xhr.send(fd);
};
The latest version of mean.io includes Express 4.x as a dependency. The documentation for migrating from Express 3 to 4 explains that Express no longer bundles the Connect middlewares. Read more about it here: https://github.com/visionmedia/express/wiki/Migrating-from-3.x-to-4.x
The new body-parser module only handles urlencoded and JSON bodies. That means for multipart bodies (file uploads) you need an additional module like busboy or formidable.
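For what it's worth, another option along the same lines is multer; here is a minimal sketch (multer 1.x API, with the route path taken from the question and a 'file' field name assumed):
var express = require('express');
var multer = require('multer');
var app = express();
// store uploads on disk under ./uploads; multer fills req.file and req.body
var upload = multer({ dest: './uploads' });
app.post('/map/set', upload.single('file'), function(req, res) {
    console.log(req.body);  // the other form fields
    console.log(req.file);  // metadata for the uploaded file
    res.end('Success');
});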
Here is an example how I use angular-file-upload with busboy:
The AngularJS Stuff:
$upload.upload({
url: '/api/office/imageUpload',
data: {},
file: $scope.$files
}) …
I wrote a little helper module to make handling uploads with busboy easier. It's not the cleanest code, but it does the job:
var env = process.env.NODE_ENV || 'development';
var Busboy = require('busboy'),
os = require('os'),
path = require('path'),
config = require('../config/config')[env],
fs = require('fs');
// TODO: implement file size limit
exports.processFileUpload = function(req, allowedExtensions, callback){
var busboy = new Busboy({ headers: req.headers });
var tempFile = '';
var fileExtenstion = '';
var formPayload = {};
busboy.on('file', function(fieldname, file, filename, encoding, mimetype) {
fileExtenstion = path.extname(filename).toLowerCase();
tempFile = path.join(os.tmpdir(), path.basename(fieldname)+fileExtenstion);
file.pipe(fs.createWriteStream(tempFile));
});
busboy.on('field', function(fieldname, val, fieldnameTruncated, valTruncated) {
var jsonValue = '';
try {
jsonValue = JSON.parse(val);
} catch (e) {
jsonValue = val;
}
formPayload[fieldname] = jsonValue;
});
busboy.on('finish', function() {
if(allowedExtensions.length > 0){
if(allowedExtensions.indexOf(fileExtenstion) == -1) {
callback({message: 'extension_not_allowed'}, tempFile, formPayload);
} else {
callback(null, tempFile, formPayload)
}
} else {
callback(null, tempFile, formPayload)
}
});
return req.pipe(busboy);
}
In my controller, I can use the module this way:
var uploader = require('../helper/uploader'),
fs = require('fs'),
path = require('path');
exports.uploadEmployeeImage = function(req,res){
uploader.processFileUpload(req, ['.jpg', '.jpeg', '.png'], function(uploadError, tempPath, formPayload){
var fileExtenstion = path.extname(tempPath).toLowerCase();
var targetPath = "/exampleUploadDir/testFile" + fileExtenstion;
fs.rename(tempPath, targetPath, function(error) {
if(error){
return res.status(500).send("cant upload employee image");
}
res.send(targetPath);
});
});
}
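For context, a hypothetical wiring of that controller into an Express route (the path matches the AngularJS upload URL above; the require path is an assumption):
// hypothetical route registration for the controller above
var office = require('./controllers/office');
app.post('/api/office/imageUpload', office.uploadEmployeeImage);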
I'm going to take a guess here that the header settings are incorrect.
headers: {'enctype': 'multipart/form-data'},
Should be changed to:
headers: {'Content-Type': 'multipart/form-data'},
Ensure you have an 'id' AND 'name' attribute on the file input - not having an id attribute can cause problems on some browsers. Also, try building the request like this:
var xhr = new XMLHttpRequest();
// not yet supported in most browsers, some examples use
// this but it's not safe.
// var fd = document.getElementById('upload').getFormData();
var fd = new FormData();
var files = document.getElementById('myfileinput').files;
for(var i = 0;i<files.length; i++) {
fd.append("file", files[i]);
}
/* event listeners */
xhr.upload.addEventListener("progress", uploadProgress, false);
xhr.addEventListener("error", uploadFailed, false);
xhr.addEventListener("load", uploadComplete, false);
xhr.addEventListener("abort", uploadCanceled, false);
xhr.open("POST", "your/url");
xhr.send(fd);
Angular isn't great with file uploads, so doing it by hand might help.
Given Node's async nature, it is difficult to time a series of web requests. How would I fire off 100 web requests and figure out how long each individual request takes? Knowing the OS will only allow a few concurrent web requests, how do I get the timing for each individual request, removing the time spent waiting for the other connections to complete? I was hoping the socket event was fired when the request launched, but it seems that the socket event is fired only after the connection has been established.
var http = require('http');
var urls = [
'/cameron',
'/sara',
'...'
];
// Time a url collection.
function timeUrl(url, calback) {
var options = {
host: 'www.examplesite.com',
port: 80,
path: ''
};
var times = [];
times.push({'text': 'start', 'time':Date.now()});
http.get(options, function(res) {
times.push({'text': 'response', 'time':Date.now()});
var result = '';
res.on('data', function(chunk) {
result += chunk.length ;
// result += chunk;
});
res.on('end', function() {
times.push({'text': 'end', 'time': Date.now(), 'body': result, 'statusCode': res.statusCode}); // ,
calback(times);
});
}).on('error', function(e) {
calback();
console.log("Got error: " + e.message);
times.push({'error':Date.now()});
}).on('socket', function (response) {
times.push({'text': 'socket', 'time':Date.now()});
});
}
for (var i = 0; i < urls.length; i++) {
var url = urls[i];
timeUrl(url, function(times) {
console.log(url);
for (var i = 0; i < times.length; i++) {
console.log(times[i].text, times[i].time - times[1].time , 'ms');
}
console.log('statusCode:', times[times.length -1].statusCode, 'Response Size:', times[times.length -1].body);
console.log('-');
});
}
If you're worried about OS concurrency, just introduce maximum concurrency (throttling) into your requests instead of trying to guess when exactly the OS has started each one. I'm skipping over some minor details like error handling, and I'm using the excellent async.js library:
var http = require('http')
, async = require('async')
, CONCURRENCY = 5 // edit to fit your OS concurrency limit
, results = {}
, urls = [
'/cameron',
'/sara',
'/...'
];
// Time a url collection.
function timeUrl(url, callback) {
var options = { host: 'www.examplesite.com', port: 80 }
, start = Date.now()
, socket = null;
options.path = url;
http.get(options, function(res) {
var response = Date.now()
, size = 0;
res.on('data', function(chunk) { size += chunk.length; });
res.on('end', function() {
var end = Date.now();
results[url] = { start: start, socket: socket, response: response, end: end, size: size };
callback();
});
}).on('error', function(e) {
results[url] = { start: start, socket: socket, error: Date.now(), stack: e };
callback();
}).on('socket', function () {
socket = Date.now();
});
}
async.forEachLimit(urls, CONCURRENCY, timeUrl, function() {
console.log(JSON.stringify(results));
});
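To turn those timestamps into per-request durations, here is a small sketch that could be called from the forEachLimit completion callback above (in place of, or after, the JSON.stringify logging):
// call this once results is populated, i.e. inside the async.forEachLimit completion callback
function printTimings() {
    Object.keys(results).forEach(function(url) {
        var r = results[url];
        if (r.error) {
            console.log(url, 'failed after', r.error - r.start, 'ms');
        } else {
            console.log(url, 'total:', r.end - r.start, 'ms,',
                'after socket:', r.end - r.socket, 'ms,',
                'size:', r.size, 'bytes');
        }
    });
}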
For ease of use, for what you seem to want to do, I've not seen anything beat Nodetime.