Azure blob storage Node.js backend

I'm following this article on how to upload a blob into a container. This is what I have:
var azure = require('azure-storage');
var blobSvc = azure.createBlobServiceAnonymous('https://<storage-name>.blob.core.windows.net/');

exports.post = function(request, response) {
  console.log(request.files); // [1]
  blobSvc.createBlockBlobFromLocalFile('dubfiles', 'myblob', request.files.file.name, function(error, result, response) {
    if (!error) {
      // file uploaded
      console.log(response); // [2]
      console.log(result);   // [3]
    }
  });
};
[1] Logs this
{ file:
{ domain: null,
_events: null,
_maxListeners: 10,
size: 859552,
path: 'D:\\local\\Temp\\155976e3a6b8e0aa871c5deee05af9f2',
name: 'heuristic-serch.pdf',
type: 'application/pdf',
hash: false,
lastModifiedDate: Tue Jun 23 2015 08:43:55 GMT+0000 (Coordinated Universal Time),
_writeStream: {
domain: null,
_events: null,
_maxListeners: 10,
path: 'D:\\local\\Temp\\155976e3a6b8e0aa871c5deee05af9f2',
fd: 3,
writable: false,
flags: 'w',
encoding: 'binary',
mode: 438,
bytesWritten: 859552,
busy: false,
_queue: [],
_open: [Function],
drainable: true,
flush: [Function],
write: [Function],
end: [Function],
destroy: [Function],
destroySoon: [Function],
pipe: [Function],
setMaxListeners: [Function],
emit: [Function],
addListener: [Function],
on: [Function],
once: [Function],
removeListener: [Function],
removeAllListeners: [Function],
listeners: [Function] },
open: [Function],
toJSON: [Function],
write: [Function],
end: [Function],
setMaxListeners: [Function],
emit: [Function],
addListener: [Function],
on: [Function], once: [Function],
removeListener: [Function],
removeAllListeners: [Function],
listeners: [Function]
}
}
[2] Logs nothing for response
[3] Logs nothing for result
The container is empty when I check it in the Azure management portal.
How can this be done correctly?

I'm now able to upload a blob to a container. This is what I have now:
var azure = require('azure-storage');

exports.post = function(request, response) {
  var accountName = request.service.config.appSettings.STORAGE_ACCOUNT_NAME; // storage account name at appSettings
  var accountKey = request.service.config.appSettings.STORAGE_ACCOUNT_ACCESS_KEY; // storage account key at appSettings
  var host = accountName + '.blob.core.windows.net';
  var container = 'container_name';
  var blobSvc = azure.createBlobService(accountName, accountKey, host);

  blobSvc.createContainerIfNotExists(container, {publicAccessLevel: 'blob'}, function(error, result, response) {
    if (!error) {
      console.log('no error occurred!'); // logs if no error occurred
      console.log(result);   // logs false if container already exists
      console.log(response); // logs response including the etag
      // Container exists and allows anonymous read access to blob
      // content and metadata within this container
      blobSvc.createBlockBlobFromLocalFile(container, request.files.file.name, request.files.file.path, function(error, result, response) {
        if (!error) {
          // file uploaded
          console.log(response); // logs response
          console.log(result);   // logs result
        }
      });
    }
  });
};
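For reference, the first attempt failed for two reasons: a service created with createBlobServiceAnonymous can only read public data (anonymous access to Azure Storage is read-only, so it cannot upload), and the third argument of createBlockBlobFromLocalFile must be the path of the uploaded file on disk, not its original file name. A minimal sketch of just that call, using the same names as the code above:
// createBlockBlobFromLocalFile(container, blobName, localFilePath, callback)
blobSvc.createBlockBlobFromLocalFile(
  container,
  request.files.file.name,  // the name the blob gets inside the container
  request.files.file.path,  // the temp path the upload was written to
  function(error, result, response) {
    if (error) { return console.error(error); }
    console.log('blob uploaded', result);
  }
);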

How to test getDerivedStateFromProps with jest

I have looked at How to test getDerivedStateFromProps with Jest and Enzyme, but it is not working for me. Here is my test:
it('should be red processing only, routing, security grey while bg tasks are running', () => {
  component = mount(
    <ProcessingStatus store={store}/>
  );
  const instance = component.instance();
  //console.log(instance)
  component.setProps({
    processing_status: {
      header: {
        error: true,
        message: 'This comms matrix is currently processing flows',
        statusCode: 200
      },
      body: {}
    }
  });
  console.log(component.state());
  console.log(component.props());
  expect(component.find(TrafficLight).length).toEqual(3);
  expect(component.find(TrafficLight).at(0).props().RedOn).toEqual(true);
  expect(component.find(TrafficLight).at(0).props().AmberOn).toEqual(false);
  expect(component.find(TrafficLight).at(0).props().GreenOn).toEqual(false);
});
component.state() or instance.state is always an empty object {}.
This is the contents of component.props():
{ store:
{ getState: [Function: getState],
getActions: [Function: getActions],
dispatch:
{ [Function: mockConstructor]
_isMockFunction: true,
getMockImplementation: [Function],
mock: [Getter/Setter],
mockClear: [Function],
mockReset: [Function],
mockRestore: [Function],
mockReturnValueOnce: [Function],
mockResolvedValueOnce: [Function],
mockRejectedValueOnce: [Function],
mockReturnValue: [Function],
mockResolvedValue: [Function],
mockRejectedValue: [Function],
mockImplementationOnce: [Function],
mockImplementation: [Function],
mockReturnThis: [Function],
mockName: [Function],
getMockName: [Function] },
clearActions: [Function: clearActions],
subscribe: [Function: subscribe],
replaceReducer: [Function: replaceReducer] },
processing_status:
{ header:
{ error: true,
message: 'This comms matrix is currently processing flows',
statusCode: 200 },
body: {} } }
I need getDerivedStateFromProps to be triggered, because the state changes depending on the prop values and other conditions are rendered.
If it is a connected component, the instance needs to be retrieved differently.
component = mount(
  <ProcessingStatus store={store}/>
);
const instance = component.find('ProcessingStatus').instance();
component.setProps({
  processing_status: {
    header: {
      error: true,
      message: 'This comms matrix is currently processing flows',
      statusCode: 200
    },
    body: {}
  }
});
console.log(instance.state);
This provided me with:
console.log tests/jest/components/common/ProcessingStatus/index.test.js:122
{ nextStep: 'This comms matrix is currently processing flows' }
What is not clear in the previous answers is whether the component is connected or not, and whether shallow or mount is being used.
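For the shallow case, a minimal sketch, assuming the same connected ProcessingStatus and mock store as above: shallow only renders the connect() wrapper, so you have to dive() into it before reading the instance or its state.
// shallow renders the connect() wrapper; dive() renders the wrapped component
const wrapper = shallow(<ProcessingStatus store={store} />);
const inner = wrapper.dive();
console.log(inner.instance().state); // the state produced by getDerivedStateFromProps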

Duplex stream getting changed in callback: Node.js

I have some code where I receive a duplex stream, and inside that function I call a callback which returns values from Redis.
function (index, arr, callback) {
  const streamObject = stream;
  const Id = arr[index].split(':')[0];
  const Version = arr[index].split(':')[1];
  console.log("STREAM RECEIVED IN SECOND 1");
  console.log(streamObject); // printing for the first time
  var response = this.ts.service(Id, Version, streamObject, (fn, type) => {
    console.log("STREAM RECEIVED IN SECOND 2");
    console.log(streamObject);
  });
}
Here, when I print the stream object for the first time, I get the following:
STREAM RECEIVED IN SECOND 1
Stream {
domain:
Domain {
domain: null,
_events: { error: [Function] },
_eventsCount: 1,
_maxListeners: undefined,
members: [] },
_events:
{ end: [Function],
data: [Function],
drain: [Function: ondrain],
error: [Function: onerror],
close: [Function: cleanup] },
_eventsCount: 5,
_maxListeners: undefined,
writable: true,
readable: true,
paused: false,
autoDestroy: true,
write: [Function],
push: [Function],
queue: [Function],
end: [Function],
destroy: [Function],
pause: [Function],
resume: [Function] }
and the second time I get:
STREAM RECEIVED IN SECOND 2
Stream {
domain:
Domain {
domain: null,
_events: { error: [Function] },
_eventsCount: 1,
_maxListeners: undefined,
members: [] },
_events: { end: [Function], data: [Function] },
_eventsCount: 2,
_maxListeners: undefined,
writable: false,
readable: false,
paused: false,
autoDestroy: true,
write: [Function],
push: [Function],
queue: [Function],
end: [Function],
destroy: [Function],
pause: [Function],
resume: [Function],
root: null }
So the stream is getting modified and I am not sure why this is happening; I am not doing anything with the stream in the called function.
This is how the service method I am calling looks:
service(id, vn, requestObject, callback) {
  log.info("entering transformer service");
  var transformerCombinedKey = id + vn;
  if (!_.isString(transformerId)) {
    throw new TypeError("Expected a string for transformerId");
  }
  if (!(transformerCombinedKey in this.functionRepositoryStore)) {
    //FIXME: after using cache as a implementation should ret
    var persistenceClientPromise = this.persistenceClient.getTransformerByIdAndVersion(transformerId, versionNumber);
    persistenceClientPromise.then(
      (aJSONStringifiedTransformer) => {
        if (!aJSONStringifiedTransformer) {
          callback(new Error("The given transformerId, " + transformerId + ", was not found."));
          return;
        }
        this.functionRepositoryStore[transformerCombinedKey] = JSON.parse(aJSONStringifiedTransformer).transformerFunction;
        this.transformerTypeRepository[transformerCombinedKey] = JSON.parse(aJSONStringifiedTransformer).transformerType;
        const code = "var _ = require('lodash'); console.log('running VM1'); module.exports = " + this.functionRepositoryStore[transformerCombinedKey] + ';';
        var transformerFunction = vm.runInNewContext(code, sandbox);
        console.log("Calling callback inside the transformer service===1 ");
        console.log(JSON.stringify(transformerFunction));
        callback(transformerFunction, this.transformerTypeRepository[transformerCombinedKey]);
        return;
      },
      (error) => {
        log.error("Error while getting transformer for Id " + transformerId + " and versionNumber " + versionNumber);
        callback(error, null);
      });
  } else {
    const code = "var _ = require('lodash'); console.log('running VM2'); module.exports = " + this.functionRepositoryStore[transformerCombinedKey] + '; ';
    var transformerFunction = vm.runInNewContext(code, sandbox);
    console.log("Calling callback inside the transformer service=== ");
    console.log(JSON.stringify(transformerFunction));
    callback(transformerFunction, this.transformerTypeRepository[transformerCombinedKey]);
  }
}
}
I see this issue when I hit my app for the first time; when I hit it again without restarting the app, it works fine and the stream remains duplex.
Also, if I remove the call to Redis in this method, the stream doesn't change and everything works fine.
I am using async.waterfall, and this is the second function of my waterfall, like so:
async.waterfall([func1, func2], function (err, result) {
  if (err) {
    console.log(err);
    reject(err);
  } else {
    fulfill(result);
  }
});
The first function also does the same thing and works fine; the output of the first is passed to the second.
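As a side note, a minimal self-contained sketch (with hypothetical values) of how async.waterfall forwards results: whatever the first function passes to its callback after the error argument arrives as the leading arguments of the second function.
var async = require('async');

async.waterfall([
  function func1(callback) {
    var index = 0;
    var arr = ['id123:v1'];
    callback(null, index, arr);                 // forwarded to func2
  },
  function func2(index, arr, callback) {
    callback(null, arr[index].split(':')[0]);   // final result: 'id123'
  }
], function (err, result) {
  if (err) { return console.error(err); }
  console.log(result); // 'id123'
});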
this is how I create my stream
let streamParser = JSONStream.parse('*');
streamParser.on('data', fn);
let toPassStreamObject = streamObject.pipe(JSONStream.stringify())
.pipe(streamParser)
streamObject is the stream which I get from my DB.
function fn(data) {
  data['Date'] = data["month"];
  delete data['month'];
}
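For context, a standalone illustration (not the code above) of the general Node behaviour that a stream which has been piped or read to the end is marked as spent:
var PassThrough = require('stream').PassThrough;

var src = new PassThrough();
src.end('{"month":"Jan"}');

// Attaching a 'data' listener puts the stream in flowing mode and drains it.
src.on('data', function () {});
src.on('end', function () {
  console.log(src.readable); // false - the stream has been fully consumed
});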
I have been stuck on this for some time. How do I prevent the stream from changing?
This is a reference to my previous post.

MongoDB error while retrieving collection

I'm having a problem getting a collection from the database in my Node.js application. I'm using MongoDB 3.6.
This is how I set it up:
var moment = require('moment');
var MongoClient = require('mongodb').MongoClient;

/*
===========================================================================
DB setup
===========================================================================
*/
var state = {
  db: null,
};

function get() {
  return state.db;
}

exports.connect_database = function(done) {
  if (state.db) return done();
  MongoClient.connect(process.env.DATABASE_URL, function(err, db) {
    if (err) return done(err);
    state.db = db;
    done();
  });
};
/* some other functions ... */
exports.return_collection = function(collection_name, callback) {
  var result_array = [];
  var collection = get().getCollection(collection_name);
  var result = collection.find();
  result.forEach(function(res) {
    result_array.push(res);
  }, function(error) {
    console.log("error: ");
    console.log(error);
    if (!error)
      callback(result_array);
  });
};
In the main file I do:
'use strict';

// LIB IMPORTS
var env = require('node-env-file');
env(__dirname + '/.env');

// LOCAL IMPORTS
var aux = require('./modules/aux.js');
var scheduler = require('./modules/scheduler.js');
var Bot = require('./modules/bot.js');

/*
===========================================================================
DB setup
===========================================================================
*/
aux.connect_database((err) => {
  if (err) {
    console.log('Unable to connect to Mongo.');
    process.exit(1);
  } else {
    console.log('Connected to db.');
  }
});
I can see the Connected to db. message in the log, so the connection works fine. After that, when I try to call a function to add/retrieve data from the db, I get the error:
TypeError: get(...).getCollection is not a function
at Object.exports.return_collection
If I try to print the state.db variable I get the following result:
MongoClient {
domain: null,
_events: {},
_eventsCount: 0,
_maxListeners: undefined,
s:
{ url: 'mongodb://localhost:27017/BotDb',
options:
{ socketOptions: {},
read_preference_tags: null,
readPreference: [Object],
dbName: 'slackBotDb',
servers: [Object],
server_options: [Object],
db_options: [Object],
rs_options: [Object],
mongos_options: [Object],
socketTimeoutMS: 360000,
connectTimeoutMS: 30000,
promiseLibrary: [Function: Promise] },
promiseLibrary: [Function: Promise],
dbCache: {},
sessions: [] },
topology:
Server {
domain: null,
_events:
{ serverOpening: [Function],
serverDescriptionChanged: [Function],
serverHeartbeatStarted: [Function],
serverHeartbeatSucceeded: [Function],
serverHeartbeatFailed: [Function],
serverClosed: [Function],
topologyOpening: [Function],
topologyClosed: [Function],
topologyDescriptionChanged: [Function],
joined: [Function],
left: [Function],
ping: [Function],
ha: [Function],
authenticated: [Function],
error: [Function],
timeout: [Function],
close: [Function],
parseError: [Function],
open: [Object],
fullsetup: [Object],
all: [Object],
reconnect: [Function] },
_eventsCount: 22,
_maxListeners: undefined,
clientInfo:
{ driver: [Object],
os: [Object],
platform: 'Node.js v7.10.0, LE' },
s:
{ coreTopology: [Object],
sCapabilities: null,
clonedOptions: [Object],
reconnect: true,
emitError: true,
poolSize: 5,
storeOptions: [Object],
store: [Object],
host: 'localhost',
port: 27017,
options: [Object],
sessionPool: [Object],
promiseLibrary: [Function: Promise] } } }
What am I missing? In the mongo console everything looks fine:
> db.getCollection("users");
BotDb.users
I can't find any function named getCollection in the API documentation for the Node.js MongoDB native driver. Collections are usually fetched with collection('mycoll'). So you can rewrite this line:
var collection = get().getCollection(collection_name);
to
var collection = get().collection(collection_name);
Note that if you use v3.0 or later of the driver, you have to modify the connect function as well; the connection functions changed in that release, and the callback now returns a client object rather than the db object. So you'll have to change your function to:
exports.connect_database = function(done) {
  if (state.db) return done();
  MongoClient.connect(process.env.DATABASE_URL, function(err, client) {
    if (err) return done(err);
    state.db = client.db('database_name');
    done();
  });
};
Note the 'database_name' string. It should be the name of your database.
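With that change in place, a minimal sketch of how return_collection could look, using the cursor's toArray instead of the forEach accumulation (same state/get helpers as above):
exports.return_collection = function(collection_name, callback) {
  // state.db is now the Db object returned by client.db('database_name')
  get().collection(collection_name).find().toArray(function(err, docs) {
    if (err) {
      console.log("error: ");
      console.log(err);
      return;
    }
    callback(docs);
  });
};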

Adding a cookie as a string to a phantom page using the Node.js phantom module

I have a phantom server running which receives a request with a cookie attached in the header. I have to use this cookie when opening a page from phantom. To be more precise, I have a string that I need to add as a cookie while opening a page. I have installed phantom as a node module (npm install phantom). Below is the code I'm trying, but I cannot see any cookies:
phantom.create(function(ph) {
  ph.createPage(function(page) {
    console.log(page);
    var cookieAdded = ph.addCookie({
      'name': 'OSF Cookie',
      'value': req.headers.cookie,
      'domain': req.headers.host
    });
    console.log(cookieAdded);
    page.open(url, function(status) {
      if (status == 'success') {
        console.log("Success");
        page.getCookies(function(cookie) {
          console.log(cookie);
        });
        page.evaluate(
          function() {
            console.log(document.headers);
            return document.documentElement.outerHTML;
          },
          function(content) {
            // console.log(content);
            res.send(content);
            console.log('RESPONSE SEND');
            ph.exit();
          });
      } else {
        console.log("Status Failed");
        ph.exit();
      }
    });
  });
});
[EDIT] Below is the output:
http://localhost:5000/dashboard/
{ url: 'http://localhost:5000/dashboard/' }
osf=5540e22b8f6ac302b117a4cd.DWKdATsCvxskYqL-QfQYSjmYMvI
{ set: [Function],
get: [Function],
open: [Function],
close: [Function],
includeJs: [Function],
sendEvent: [Function],
release: [Function],
uploadFile: [Function],
goBack: [Function],
goForward: [Function],
reload: [Function],
switchToFrame: [Function],
switchToMainFrame: [Function],
switchToParentFrame: [Function],
switchToFocusedFrame: [Function],
onConsoleMessage: [Function],
onError: [Function],
onResourceRequested: [Function],
injectJs: [Function],
evaluate: [Function],
render: [Function],
getContent: [Function],
getCookies: [Function],
renderBase64: [Function],
setHeaders: [Function],
setContent: [Function],
setViewportSize: [Function],
setPaperSize: [Function],
setZoomFactor: [Function],
setFileOnPicker: [Function],
_evaluate: [Function],
_onResourceRequested: [Function] }
undefined
Success
[]
RESPONSE SEND
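For reference, a sketch of an alternative way to hand the incoming cookie to the page: the logged page object above exposes set, and PhantomJS pages have a customHeaders property that is sent with every request. The exact callback signature of page.set is an assumption here and is not verified against this phantom version; req and url are the same as in the code above.
ph.createPage(function (page) {
  // Assumption: page.set(property, value, callback) maps onto the PhantomJS
  // page property of the same name; customHeaders rides along on every request.
  page.set('customHeaders', { Cookie: req.headers.cookie }, function () {
    page.open(url, function (status) {
      console.log(status);
      ph.exit();
    });
  });
});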

Receiving Files with NodeJS using connect

I'd like to receive pictures, audio and video files with Node.js.
I send them from PhoneGap as an HTTP request.
On the Node.js side I use the connect middleware. I don't really understand what it does or how to change the location where the files get stored.
var http = require('http');
var connect = require('connect');

var app = connect();
var server = http.createServer(app);

app.use(connect.bodyParser());
app.use(function(req, res) {
  console.log(req.files); // Here is object with uploaded files
});

server.listen(8070);
How can I tell connect to store the files somewhere other than the temp directory?
And how can I read the request's options to decide where I want to store each file?
Here's what a file object looks like:
{ file:
{ fieldName: 'file',
originalFilename: 'VID_20131211_124140.mp4',
path: 'C:\\Users\\krause\\AppData\\Local\\Temp\\4120-1fx90bk.mp4',
headers:
{ 'content-disposition': 'form-data; name="file"; filename="VID_20131211_124140.mp4"',
'content-type': 'video/mp4' },
ws:
{ _writableState: [Object],
writable: true,
domain: null,
_events: [Object],
_maxListeners: 10,
path: 'C:\\Users\\krause\\AppData\\Local\\Temp\\4120-1fx90bk.mp4',
fd: null,
flags: 'w',
mode: 438,
start: undefined,
pos: undefined,
bytesWritten: 7046598,
closed: true,
open: [Function],
_write: [Function],
destroy: [Function],
close: [Function],
destroySoon: [Function],
pipe: [Function],
write: [Function],
end: [Function],
setMaxListeners: [Function],
emit: [Function],
addListener: [Function],
on: [Function],
once: [Function],
removeListener: [Function],
removeAllListeners: [Function],
listeners: [Function] },
size: 7046598,
name: 'VID_20131211_124140.mp4',
type: 'video/mp4' } }
I am assuming you just want to use this app to store the POST'ed file in a path other than tmp.
You can set the default upload directory through the bodyParser options.
In Express we do it like this:
app.use(express.bodyParser({ keepExtensions: true, uploadDir: '/my/files' }));
You can try this in connect:
app.use(connect.multipart({ uploadDir: path }));
Check the connect documentation for details.
Personally, what I do is copy the file from the temp directory and put it in the relevant place (if you have different directories for different uploads) using the Node fs module.
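As a sketch of that last step, building on the app shown in the question (the destination directory here is hypothetical), the parsed temp file can be moved with the fs module:
var fs = require('fs');
var path = require('path');

app.use(function(req, res) {
  var file = req.files.file;
  // Decide the destination from the request, e.g. a form field or header.
  var targetDir = '/my/files'; // hypothetical destination
  var target = path.join(targetDir, file.originalFilename || file.name);
  // fs.rename only works within one volume; fall back to copy + unlink otherwise.
  fs.rename(file.path, target, function(err) {
    if (err) { res.statusCode = 500; return res.end('move failed'); }
    res.end('stored at ' + target);
  });
});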
