Making jquery ajax call in child process - node.js - node.js

I am using 'child_process'(fork method) to handle task of saving some records across server. For this I was using jquery ajax call in the child process to save the records. But somehow that code doesn't get executed.
I have already included the jquery.min.js file in the html in which I am including the file forking child process as well.
The file forking child process:
var childProcess = require('child_process');
var syncProcess;
/**
 * Forks the mindwaveDataSync child process, wires its status messages to
 * the UI icons, and sends the 'initialize' command after a short delay.
 * Side effects: assigns the module-level `syncProcess` handle and mutates
 * the src/title of the sync/device icons in the DOM.
 */
function launchMindwaveDataSync(){
    var iconSync = document.getElementById("iconSync");
    // BUG FIX: `iconDevice` was used below without ever being declared,
    // which throws a ReferenceError before the child is even forked.
    // TODO(review): confirm "iconDevice" is the correct element id.
    var iconDevice = document.getElementById("iconDevice");
    iconSync.src = "images/findDevice.gif";
    iconDevice.title = "Synchronizing...";
    // Launch the device reader script in a new V8 process.
    syncProcess = childProcess.fork('./js/mindwaveDataSync.js');
    syncProcess.on('message', function(message){
        console.log(message);
        switch(message.msg)
        {
            case "connected":
                global.HEADSET_CONNECTED = true;
                iconDevice.src = "images/icon-power.png";
                iconDevice.title = "Connected to Mindwave Mobile device";
                startSynchronizing();
                break;
            case "disconnected":
            case "error":
            case "close":
            case "timeout":
                global.HEADSET_CONNECTED = false;
                iconDevice.src = "images/error.png";
                iconDevice.title = "Mindwave Mobile device is disconnected";
                break;
        }
    });
    syncProcess.on('error', function(e){
        console.log(e);
    });
    // Give the child a moment to boot before sending the first command.
    setTimeout(function(){
        console.log('sending command initialize');
        syncProcess.send({cmd:'initialize'});
    },1000);
};
// Ask the forked data-sync child process to begin synchronizing records.
function startSynchronizing() {
    var command = { cmd: 'synchronize' };
    syncProcess.send(command);
}
The child process which is supposed to make ajax call
var recursive = require('recursive-readdir');
var SecurConf = require('../js/secureConf');
var sconf = new SecurConf();
var filesToSync = [];
var crypto = require('crypto');
// Apparent decryption settings for SecurConf.
// NOTE(review): `options` is never visibly passed to `sconf` in this
// snippet — confirm how SecurConf picks these up.
var options = {
//prompt : 'File Password : ',
// NOTE(review): ECB mode leaks plaintext patterns; presumably chosen to
// match whatever encrypted the files on disk — confirm.
algo : 'aes-128-ecb',
file : {
// encoding of the decrypted plaintext / of the encrypted input text
encoding : 'utf8',
out_text : 'hex'
}
};
// IPC command dispatcher: the parent process drives this child by
// sending {cmd: ...} messages (initialize, synchronize, record, ...).
process.on('message', function (command){
console.log(command);
switch(command.cmd)
{
case "initialize": initializeConnection();
break;
case "synchronize": checkForFiles();
break;
// NOTE(review): `client` is not defined anywhere in this snippet, so
// the record/pause/stop commands would throw a ReferenceError — confirm
// where the client connection is supposed to be created.
case "record":
client.resume();
break;
case "pause":
client.pause();
break;
case "stop":
client.destroy();
break;
}
//process.send({msg:"Sync Process: " + command.cmd});
});
// Recursively list every file under the MindWave data directory, remember
// the result in the module-level `filesToSync`, then start decrypting.
// NOTE: the `err` argument from recursive-readdir is not inspected here,
// matching the original behavior.
function checkForFiles() {
    var dataDir = 'C:/MindWaveData/Data/';
    recursive(dataDir, function (err, files) {
        filesToSync = files;
        decryptFiles();
    });
}
/**
 * Decrypts every file queued in `filesToSync` and POSTs each parsed record
 * to the web service, reporting results back to the parent via
 * process.send().
 *
 * NOTE(review): $.ajax is a browser/jQuery API; it is normally not defined
 * inside a forked Node child process, which is the likely reason this code
 * "doesn't get executed" — confirm how jQuery is meant to be loaded here
 * (a Node HTTP client such as `http.request` would be the usual choice).
 */
function decryptFiles(){
    var ajaxSuccess = function(res){
        process.send({msg:res});
    };
    for(var i = 0; i < filesToSync.length; i++){
        var ef = ""+filesToSync[i];
        sconf.decryptFile(ef, function(err, file, content){
            if(err){
                process.send({msg:"some error occurred while decrypting..."});
            } else {
                var parsedContent = JSON.parse(content);
                // One request per parsed record.
                for(var j = 0; j<parsedContent.length; j++){
                    $.ajax({
                        type: "POST",
                        url: "http://192.168.14.27:8001/admin/webservice",
                        contentType: "application/json; charset=utf-8",
                        dataType: "json",
                        data: JSON.stringify({
                            'ncFunction': 'login',
                            'ncParams': {
                                'ncUserEmail': "clarity_admin#yopmail.com",
                                'ncUserPassword': "cl123!##"
                            }
                        }),
                        success: function (res) {
                            ajaxSuccess(res);
                        },
                        error: function (xhr, type, err) {
                            // BUG FIX: the original referenced an undefined
                            // variable `res` here, throwing a ReferenceError
                            // on any failed request. Report the error instead.
                            process.send({msg:"ajax error: " + type + " " + err});
                        }
                    });
                }
            }
        });
    }
}
// Entry point for the "initialize" command: delegates straight to
// checkConnection(), which notifies the parent once "connected".
function initializeConnection() {
    checkConnection();
}
// Placeholder connection check: simulates three connection attempts (the
// real ajax probing is not implemented yet) and then tells the parent
// process that the service is reachable.
function checkConnection() {
    var attempts = 0;
    var maxRetries = 2;
    var attempt = function () {
        if (attempts >= maxRetries) {
            process.send({msg:"connected"});
            return;
        }
        attempts++;
        attempt();
    };
    attempt();
}
Can we use jquery ajax call in the child process like this? Plus I have included the file forking child process in one html file and on load of its body I am calling /launchMindwaveDataSync/ from the first file shown below.
Thanks in advance

Related

Titanium http request leak

I have to make a load of subsequent http requests to load product images into the app as it has to function in an offline mode.
Around 2000 calls.
The HTTP client seems to have a memory leak which causes the persistent MBytes in "Instruments" to rise to around 200 without being garbage-collected.
After use of the http client it is being set to null.
I have tried setting the file property of the httpclient without any success
I have set the unload function to only call the callback function which in turn calls the http send function again (thus looping through the 2000 products to get the respective pictures)
I changed from SDK 7.5.0.v20180824022007 to SDK 8.1.0.v20190423134840 and even SDK 9.0.0.v20181031080737 but the problem remains
the code of my http common module:
/**
 * Lightweight wrapper around Ti.Network.createHTTPClient used by the app's
 * models. Options: root, endpoint, needsChecksum, method, timeout.
 * NOTE(review): the default method is "Post" (mixed case) while send()
 * compares this.method === "POST" — with the default, the appversion field
 * is never attached. Confirm which casing is intended.
 */
function HttpClient(options = {}) {
this.root = options.root || "ROOT_OF_API";
this.endpoint = options.endpoint || false;
this.needsChecksum = options.needsChecksum || false;
this.data = {};
this.method = options.method || "Post";
this.timeout = options.timeout || 5000;
// Computes a time-based request checksum from the authenticated user's
// token and stores it on this.data.checksum; returns false (skipping the
// checksum) when no user is authenticated.
this.calculateChecksum = function () {
var moment = require('alloy/moment');
if (!Alloy.Models.user.authenticated()) {
return false;
}
// Server/client clock difference is corrected via the stored timeDiff.
var sp = (moment().unix() - Alloy.Models.meta.get("timeDiff"))
var hash = Ti.Utils.md5HexDigest("nX" + sp + "FossilSFAapp" + Alloy.Models.user.get('token').substring(10, 14) + "CS")
var checksum = sp + "-" + hash.substring(4, 8)
this.data.checksum = checksum
}
};
// Replace the request payload wholesale with the supplied object.
HttpClient.prototype.setData = function (data) {
    this.data = data;
};
/**
 * Sends this.data to root+endpoint via a Titanium HTTP client and invokes
 * callback({success, message?}) from the onload/onerror handlers.
 */
HttpClient.prototype.send = function (callback) {
    // set new checksum for request if is needed
    if (this.needsChecksum) {
        this.calculateChecksum();
    }
    // Tag POST payloads with the app version.
    // BUG FIX: the constructor defaults `method` to "Post", so the original
    // strict comparison against "POST" silently skipped this branch for the
    // default method. Compare case-insensitively instead.
    if (String(this.method).toUpperCase() === "POST") {
        this.data.appversion = Ti.App.version;
    }
    // send
    var client = Ti.Network.createHTTPClient({
        onload: function () {
            callback({
                success: true
            });
        },
        onerror: function (e) {
            callback({
                // BUG FIX: was `e.messgae` (typo), so the reported message
                // was always undefined.
                message: e.message,
                success: false
            });
        },
        timeout: this.timeout
    });
    client.open(this.method, this.root + this.endpoint);
    if (this.setFile) {
        client.file = Ti.Filesystem.getFile(Ti.Filesystem.applicationDataDirectory, this.fileName);
    }
    client.setRequestHeader('Content-Type', 'application/json');
    client.send(JSON.stringify(this.data));
    // NOTE(review): nulling the local reference does not release the native
    // HTTP client while its callbacks are still pending; the reported memory
    // growth likely needs the client to be released in onload/onerror (or a
    // single reused client) — verify with the Titanium profiler.
    client = null;
};
module.exports = HttpClient;
and then the module is used in the product model like so:
var HttpClient = require('./HttpClient');
var httpClient = new HttpClient();
// Walks the product collection one model at a time, downloading the product
// image for each entry that needs an update, then recursing on the next
// index; finished() fires after the last entry.
// NOTE(review): `collection` and `finished` are free variables supplied by
// the surrounding model code — not shown in this snippet.
function getImage (i) {
if (collection.at(i) && collection.at(i).get('iimage0') && collection.at(i).needsImageUpdate()) {
// The single module-level httpClient is reused for every request.
httpClient.endpoint = collection.at(i).get('acarticlenumber') +".jpg";
httpClient.fileName = 'productImages/' + collection.at(i).get('acarticlenumber') + '.jpg'
httpClient.send(function(e){
// Advance to the next model only after this download settles.
if (i < collection.length) {
i++
getImage(i)
} else {
finished()
}
});
} else {
// Nothing to download for this entry; move straight on.
if (i < collection.length) {
i++
getImage(i)
} else {
finished()
}
}
}
// start getting images at index 0
getImage(0)
anyone have an idea why these memory leaks appear ?
It only ever occurs when actually sending the http request.

How to get binary data from ng-file-upload file object?

I'm attempting to use the ng-file-upload directive to provide file upload functionality in my angular app.
I've got it working for the most part - I can select multiple files and loop through to grab the file name and file types. I just can't seem to figure out where the actual binary data of each file is stored in the file object.
I tried using the approach outlined in this post - AngularJS Upload a file and send it to a DB, but that results in a an error that "$q is not defined".
// Reads a File into a base64 data URL, resolving a deferred with the result.
// NOTE(review): $q is an injectable Angular service; the reported
// "$q is not defined" error means it was never injected into this
// controller/service — add it to the dependency-injection list.
function create_blob(file) {
var deferred = $q.defer();
var reader = new FileReader();
reader.onload = function () {
deferred.resolve(reader.result);
};
reader.readAsDataURL(file);
return deferred.promise;
}
So then I tried the approach outlined in this post - Send an uploaded image to the server and save it in the server, but again I'm running into an error reading "dataURI.split is not a function".
// Convert a base64 data URI (e.g. "data:image/png;base64,....") into a
// Blob whose MIME type is taken from the URI header.
function dataURItoBlob(dataURI) {
    var parts = dataURI.split(',');
    var binary = atob(parts[1]);
    var mimeString = parts[0].split(':')[1].split(';')[0];
    var bytes = new Uint8Array(binary.length);
    for (var i = 0; i < binary.length; i++) {
        bytes[i] = binary.charCodeAt(i);
    }
    return new Blob([bytes], {
        type: mimeString
    });
}
The code I'm using is as follows:
// Wraps FileReader.readAsDataURL in a $q deferred and resolves with the
// resulting base64 data URL for the given File.
// NOTE(review): requires the Angular $q service to be injected, otherwise
// "$q is not defined" is thrown — as reported in the question.
function create_blob(file) {
var deferred = $q.defer();
var reader = new FileReader();
reader.onload = function () {
deferred.resolve(reader.result);
};
reader.readAsDataURL(file);
return deferred.promise;
}
// Decode a base64 data URI into a Blob, preserving the declared MIME type.
// Expects a string shaped like "data:<mime>;base64,<payload>".
function dataURItoBlob(dataURI) {
    var header = dataURI.split(',')[0];
    var payload = dataURI.split(',')[1];
    var mimeString = header.split(':')[1].split(';')[0];
    var binary = atob(payload);
    var byteValues = [];
    for (var idx = 0; idx < binary.length; idx++) {
        byteValues.push(binary.charCodeAt(idx));
    }
    var typedView = new Uint8Array(byteValues);
    return new Blob([typedView], { type: mimeString });
}
// Uploads each selected (non-errored) file to the NetSuite proxy endpoint.
// NOTE(review): `dataURItoBlob` expects a data-URI *string*, but `file`
// here is a File object — that is exactly why ".split is not a function"
// is thrown. The file must first be read (e.g. via create_blob /
// FileReader.readAsDataURL) and the resulting data URL passed instead.
// NOTE(review): `root` and `id` are free variables defined elsewhere in
// the controller — confirm.
$scope.uploadFiles = function (files) {
$scope.files = files;
angular.forEach(files, function (file) {
if (file && !file.$error) {
//var reader = new FileReader();
//console.log(reader.readAsDataURL(file));
//var binary = create_blob(file);
var fileBinary = dataURItoBlob(file);
$http({
url: root + '/DesktopModules/ServiceProxy/API/NetSuite/InsertCaseFile',
method: "POST",
//headers: { 'caseId': id, 'fileName': file.name, fileContent: $.base64.encode(file) }
// NOTE(review): sending file content through request headers is unusual
// and size-limited; a multipart body is the conventional approach.
headers: { 'caseId': id, 'fileName': file.name, fileContent: fileBinary }
}).
success(function (data, status, headers, config) {
//if (data == true) {
// getCase();
// $scope.newMessage = "";
// //toaster.pop('success', "", "Message succesfully submitted.",0);
//}
}).
error(function (data, status, headers, config) {
});
// Track per-file upload progress (0-100).
file.upload.progress(function (evt) {
file.progress = Math.min(100, parseInt(100.0 * evt.loaded / evt.total));
});
}
});
}
What am I overlooking?
It depends on what format your DB is accepting for file upload. If it support multipart form data, then you can just use
Upload.upload({file: file, url: my/db/url}).then(...);
if it accepts post requests with file's binary as content of the request (like CouchDB, imgur, ...) then you can do
Upload.http({data: file, url: my/db/url, headers: {'Content-Type': file.type}})...;
if you db just accept json objects and you want to store the file as base64 data url in the database like this question then you can do
Upload.dataUrl(file, true).then(function(dataUrl) {
$http.post(url, {
fileBase64DataUrl: dataUrl,
fileName: file.name,
id: uniqueId
});
})

How can i write a mocha test for the following function?

I want to write a test for this node.js funcion,This has two arguments request and response. I set the request variable . But dont know how to set response variable.
/**
 * Express-style handler: creates a customer from the request parameters
 * unless a customer with the same mobile number already exists.
 * Responds via response.send({success, message}) in both branches — there
 * is no callback parameter, which matters for how it can be unit-tested.
 * NOTE(review): `common` and `addCustomer` are defined elsewhere in this
 * module — not visible in this snippet.
 */
function addCustomerData(request, response) {
common.getCustomerByMobile(request.param('mobile_phone'), function (customerdata) {
if (!customerdata) {
var areaInterest = request.param('area_interest');
var customerInfo = {userType: request.param('userType'),
firstName : request.param('first_name'),
middleName : request.param('middle_name'),
lastName : request.param('last_name'),
email : request.param('email'),
mobilePhone : request.param('mobile_phone'),
uniqueName : request.param('user_name'),
company : request.param('company')
};
// Normalize case-sensitive identifiers before persisting.
if(customerInfo.email){
customerInfo.email = customerInfo.email.toLowerCase();
}
if(customerInfo.uniqueName){
customerInfo.uniqueName = customerInfo.uniqueName.toLowerCase();
}
// Wrap the interest list in braces — presumably a Postgres array
// literal; confirm against the storage layer.
if(areaInterest) {
customerInfo.areaInterest = '{' + areaInterest + '}';
}else
areaInterest = null;
addCustomer(request, response, customerInfo, function (data) {
request.session.userId = data;
return response.send({success: true, message: 'Inserted successfully'});
}
);
} else {
return response.send({success: false, message: 'User with this mobile number already exists'});
}
});
}
I wrote the test as follows
describe('signup', function(){
describe('#addCustomer()', function(){
before(function (done) {
request = {};
request.data = {};
request.session = {};
request.data['userType'] = '3';
request.data['first_name'] = 'Shiji';
request.data['middle_name'] = '';
request.data['last_name'] = 'George';
request.data['email'] = 'shiji#lastplot.com';
request.data['mobile_phone'] = '5544332333';
request.data['user_name'] = 'shiji';
request.session['imageArray'] = [];
request.param=function(key){
// Look up key in data
return this.data[key];
};
request1 = {};
request1.data = {};
request1.session = {};
request1.data['area_interest']=["aluva","ernakulam"];
request1.data['userType'] = '1';
request1.data['first_name'] = 'Hari';
request1.data['middle_name'] = 'G';
request1.data['last_name'] = 'Ganesh';
request1.data['email'] = 'hari#lastplot.com';
request1.data['mobile_phone'] = '5544332321';
request1.data['user_name'] = 'hariganesh';
request1.data['company'] = 'HG Realestate';
request1.session['imageArray'] = [];
request1.param=function(key){
// Look up key in data
return this.data[key];
};
done();
});
it('It should list the matching properties', function(done){
async.parallel([
function(callback) {
signup.addCustomerData(request, response, function (result, err) {
should.not.exist(err);
should.exists(result);
callback();
});
},
function(callback) {
signup.addCustomerData(request1, response, function (result, err) {
should.not.exist(err);
should.exists(result);
callback();
});
}],function(){
done();
});
});
But I got the error that `response` has no method `send()`.
Thanks in Advance.
Your addCustomerData function does not have a callback, it just calls respond.send(). You need to mock the response object, as well as the send method, and put your tests inside of it, but you won't be able to use async.parallel() as, like I already mentioned, your function does not have a callback parameter. If you're testing request/response functions, I suggest you look into Supertest https://github.com/visionmedia/supertest which is widely used for cases like this.

nodejs read file and make http request

There is a Nodejs script to read a group of files one by one. And for each file, read the documents line by line, after read a line, it will make an http post require to send the line to a remote server. Then read the next line. The question is the script will miss some lines.
Thank you.
It seems that lr.pause(); just hides the 'line' events instead of pausing the file-reading process.
var fs = require('fs');
var http = require('http');
var JSON = require('JSON');
var S = require('string');
var uuid = require('node-uuid');
var readline = require('readline');
var httpsync = require('httpsync');
var LineByLineReader = require('line-by-line');
var sleep = require('sleep');
/**
 * POSTs one JSON document to the remote server at _path, then resumes the
 * paused line reader so the next line is processed only after this request
 * finishes (on response 'end' or on request error).
 * NOTE(review): `_host` and `_port` are free variables not defined in this
 * snippet — they must be set elsewhere; confirm.
 */
function postES(_path,data,id,lr){
    var post_data = JSON.stringify(data);
    var post_options = {
        host: _host,
        port: _port,
        path: _path,
        method: 'POST',
        headers: {
            'Content-Type': 'application/x-www-form-urlencoded',
            // BUG FIX: Content-Length must be the BYTE length of the body.
            // post_data.length counts UTF-16 code units and understates the
            // size for multi-byte characters, truncating the request body —
            // a plausible cause of "missing" lines on the server side.
            'Content-Length': Buffer.byteLength(post_data)
        }
    };
    var post_req = http.request(post_options, function(res) {
        res.setEncoding('utf8');
        res.on('data', function(data) {
            console.log(data);
        });
        res.on('end', function() {
            console.log("end");
            // resume read line
            lr.resume();
        });
    });
    post_req.on('error', function(data) {
        console.log("error,post."+data+post_data);
        // resume read line
        lr.resume();
    });
    post_req.write(post_data);
    post_req.end();
}
// Builds the session document for one input line and POSTs it to the
// /cs/session/<sessionid> endpoint.
// NOTE(review): `_data` is left empty here ("compose _data object" is a
// stub), so _data["sessionid"] is undefined and the URL becomes
// '/cs/session/undefined' — the real composition logic is elided.
function readlineFunSession(line,id,lr) {
var _data={};
// compose _data object
postES('/cs/session/'+_data["sessionid"],_data,id,lr);
}
// Processes files[start..end-1] sequentially: opens a line-by-line reader
// for the current file, pauses after each line until the HTTP POST for
// that line completes (readlineFun's handlers call lr.resume()), and
// recurses to the next file on 'end'.
function readfileFun(files,start,end,id,readlineFun) {
if(start<end && start<files.length){
var lr = new LineByLineReader(files[start],{encoding:'utf8',skipEmptyLines:true});
lr.on('error', function (e) {
console.log('error,LineByLineReader.'+e.toString());
});
lr.on('line', function (line) {
// pause read line
lr.pause();
try{
readlineFun(line,id,lr);
}catch(e){
console.log('error,line.'+e.toString());
// NOTE(review): if readlineFun throws before issuing its request,
// lr is never resumed and this file stalls here — a plausible cause
// of the reported missing lines; confirm by resuming in this catch.
}
});
lr.on('end', function () {
readfileFun(files,++start,end,id,readlineFun);
});
}
}
// var files is an arry of files
// this function try to go throgh file[0],file[1],file[2],......,file[10],
readfileFun(files,0,10,"ID-1",readlineFunSession);
Running a series of actions in Node.js where each action starts only after the previous one finishes is a bit difficult due to its asynchronous paradigm; one way to do it is to use a synchronization npm package like fiber or waterfall,
but other simple (and stupid) way you can do is create dummy worker manager, make your nodejs run infinitely, while every (time interval), check if the current progress is done, run next action if it did done.
btw while you can't make request to become sync, you can read file synchronously, so in your case, i think you should read all lines in all files to become one big array of line.
var jswget = require("jswget");
// Work queue: one pending HTTP request at a time; setInterval polls every
// 100ms and starts the next request only when the previous one has ended.
var arrayoflines = ["line1", "line2", "line3"];
var counter = 0;
var inProgress = false;
var request = function(){
if (arrayoflines.length == 0) {
// no more line, should exit
process.exit();
}
if (inProgress) {
// if previous work is not completed then skip.
return;
}
// get first line, and remove it from array index
var current_line = arrayoflines.shift();
inProgress = true;
// NOTE(review): `some_postdata` and `post_data` are free variables not
// defined in this snippet — as written this throws a ReferenceError;
// they are placeholders to be filled in by the reader.
jswget({
url: "http://someurl:3000/somepath?q1=" + current_line,
method: 'POST',
formdata: some_postdata,
headers: {
'Content-Type': 'application/x-www-form-urlencoded',
'Content-Length': post_data.length
},
onsuccess: function(responsetext, req, res){
// success requesting, should do next line
},
onerror: function(err, req){
// oops, error occurred, but we will do next line nevertheless
},
onend: function(){
// success or not, the request is end, so we should prepare for next request
counter+=1;
inProgress = false;
}
})
}
setInterval(function(){
request();
}, 100)
This may help you...
With Node 0.12, it's possible to do this synchronously now:
var fs = require('fs');
var path = require('path');
// Buffer mydata
var BUFFER = bufferFile('../public/mydata.txt');
// Synchronously read a file located relative to this script's directory
// and return its contents as a Buffer.
function bufferFile(relPath) {
    var absolutePath = path.join(__dirname, relPath);
    return fs.readFileSync(absolutePath);
}
fs is the file system. readFileSync() returns a Buffer, or string if you ask.
fs correctly assumes relative paths are a security issue. path is a work-around.
To load as a string, specify the encoding:
return fs.readFileSync(path,{ encoding: 'utf8' });

Can't upload file from Angularjs to Expressjs

I'm trying to upload a image from a AngularJS interface to a nodejs server (expressjs).
(I'm using mean.io)
Every time I upload someting, req.body logs "{}" and req.files logs "undefined"
I'm using angular-file-upload directive in AngularJS
Client-side code:
// Handler for the angular-file-upload ng-file-select directive: uploads
// each selected file to map/set and logs progress/success.
// BUG FIX: the original declared no parameter but read `$files`, which is
// undefined at call time (ReferenceError), so the upload never started.
// The directive supplies the selected files as an argument; accept it here.
// NOTE(review): the markup must pass it too, i.e.
// ng-file-select="onFileSelect($files)".
$scope.onFileSelect = function($files) {
    console.log($files);
    for (var i = 0; i < $files.length; i++) {
        var file = $files[i];
        $scope.upload = $upload.upload({
            url: 'map/set',
            method: 'POST',
            headers: {'enctype': 'multipart/form-data'},
            data: {myObj: $scope.myModelObj},
            file: file,
        }).progress(function(evt) {
            console.log('percent: ' + parseInt(100.0 * evt.loaded / evt.total));
        }).success(function(data, status, headers, config) {
            // file is uploaded successfully
            console.log(data);
        });
    }
};
Server-side code
var app = express();
require(appPath + '/server/config/express')(app, passport, db);
// NOTE(review): in Express 4, body-parser only parses JSON and urlencoded
// bodies — it does NOT handle multipart/form-data. That is why req.body
// logs {} and req.files is undefined for uploads; a multipart parser
// (busboy, formidable, multer, ...) is required for this route.
app.use(bodyParser({uploadDir:'./uploads'}));
app.post('/map/set', function(req, res) {
console.log(req.body);
console.log(req.files);
res.end('Success');
});
*****Edit*****
HTML Code
<div class="row">
<!-- NOTE(review): give the input both id and name attributes, and pass the
     selection to the handler: ng-file-select="onFileSelect($files)" -->
<input id="file" type="file" ng-file-select="onFileSelect()" >
</div>
Hand built request
// Hand-built multipart upload: reads the files straight from the DOM input
// (bypassing the Angular directive) and POSTs them as FormData via XHR.
// NOTE(review): this expects an <input type="file"> with id "myfileinput";
// the markup in the question uses id "file" — align the two.
$scope.onFileSelect = function() {
//$files: an array of files selected, each file has name, size, and type.
//console.log($files);
var xhr = new XMLHttpRequest();
// not yet supported in most browsers, some examples use
// this but it's not safe.
// var fd = document.getElementById('upload').getFormData();
var fd = new FormData();
var files = document.getElementById('myfileinput').files;
console.log(files);
for(var i = 0;i<files.length; i++) {
fd.append("file", files[i]);
}
/* event listeners */
// The handler declarations below are hoisted, so referencing them here
// before their textual definition is fine.
xhr.upload.addEventListener("progress", uploadProgress, false);
xhr.addEventListener("error", uploadFailed, false);
xhr.addEventListener("load", uploadComplete, false);
xhr.addEventListener("abort", uploadCanceled, false);
function uploadComplete(){
console.log("complete");
}
function uploadProgress(){
console.log("progress");
}
function uploadFailed(){
console.log("failed");
}
function uploadCanceled(){
console.log("canceled");
}
xhr.open("POST", "map/set");
xhr.send(fd);
};
The latest version of mean.io includes Express 4.x as a dependency. In the documentation for migrating from Express 3 to 4 you can read that Express no longer bundles the Connect middlewares. Read more about it here: https://github.com/visionmedia/express/wiki/Migrating-from-3.x-to-4.x
The new body-parser module only handles urlencoded and json bodies. That means for multipart bodies (file uploads) you need an additional module like busboy or formadible.
Here is an example how I use angular-file-upload with busboy:
The AngularJS Stuff:
$upload.upload({
url: '/api/office/imageUpload',
data: {},
file: $scope.$files
}) …
I wrote a little helper module to make handling uploads with busboy easier. It's not very cleanly coded, but it does the work:
var env = process.env.NODE_ENV || 'development';
var Busboy = require('busboy'),
os = require('os'),
path = require('path'),
config = require('../config/config')[env],
fs = require('fs');
// TODO: implement file size limit
exports.processFileUpload = function(req, allowedExtensions, callback){
var busboy = new Busboy({ headers: req.headers });
var tempFile = '';
var fileExtenstion = '';
var formPayload = {};
busboy.on('file', function(fieldname, file, filename, encoding, mimetype) {
fileExtenstion = path.extname(filename).toLowerCase();
tempFile = path.join(os.tmpDir(), path.basename(fieldname)+fileExtenstion);
file.pipe(fs.createWriteStream(tempFile));
});
busboy.on('field', function(fieldname, val, fieldnameTruncated, valTruncated) {
var jsonValue = '';
try {
jsonValue = JSON.parse(val);
} catch (e) {
jsonValue = val;
}
formPayload[fieldname] = jsonValue;
});
busboy.on('finish', function() {
if(allowedExtensions.length > 0){
if(allowedExtensions.indexOf(fileExtenstion) == -1) {
callback({message: 'extension_not_allowed'}, tempFile, formPayload);
} else {
callback(null, tempFile, formPayload)
}
} else {
callback(null, tempFile, formPayload)
}
});
return req.pipe(busboy);
}
In my controller i can use the module that way:
var uploader = require('../helper/uploader'),
path = require('path');
exports.uploadEmployeeImage = function(req,res){
uploader.processFileUpload(req, ['.jpg', '.jpeg', '.png'], function(uploadError, tempPath, formPayload){
var fileExtenstion = path.extname(tempPath).toLowerCase();
var targetPath = "/exampleUploadDir/testFile" + fileExtenstion;
fs.rename(tempPath, targetPath, function(error) {
if(error){
return callback("cant upload employee image");
}
callback(null, newFileName);
});
});
}
I'm going to take a guess here that the header settings are incorrect.
headers: {'enctype': 'multipart/form-data'},
Should be changed to:
headers: {'Content-Type': 'multipart/form-data'},
Ensure you have an 'id' AND 'name' attribute on the file input - not having an id attribute can cause problems on some browsers. Also, try building the request like this:
// Builds a FormData payload from the #myfileinput element and POSTs it.
var xhr = new XMLHttpRequest();
// not yet supported in most browsers, some examples use
// this but it's not safe.
// var fd = document.getElementById('upload').getFormData();
var fd = new FormData();
// NOTE(review): expects an <input type="file"> with id "myfileinput";
// the markup in the question uses id "file" — align the two.
var files = document.getElementById('myfileinput').files;
for(var i = 0;i<files.length; i++) {
fd.append("file", files[i]);
}
/* event listeners */
// NOTE(review): uploadProgress/uploadFailed/uploadComplete/uploadCanceled
// are not defined in this snippet; they must exist in scope or these
// addEventListener calls throw a ReferenceError.
xhr.upload.addEventListener("progress", uploadProgress, false);
xhr.addEventListener("error", uploadFailed, false);
xhr.addEventListener("load", uploadComplete, false);
xhr.addEventListener("abort", uploadCanceled, false);
xhr.open("POST", "your/url");
xhr.send(fd);
angular isn't great with file uploads so doing it by hand might help.

Resources