(Node.js) Sheets v4 API values#batchGet function ignores ranges

I've run into strange behaviour in the Google Sheets API (is it a bug?):
The Firebase Functions log doesn't show any error, but values#batchGet seems to ignore the ranges parameter. It returns the entire data range of the sheet as the first range.
batchGet "Try This API"
I adapted my code from the GitHub issue "Sheets API batchGet ranges parameter issue":
sheets.spreadsheets.values.batchGet({
// your options
}, function (err, data, response) {
console.log(response.req.res.request.url);
});
Node.js code portion:
const sheets = google.sheets({ version: 'v4', access_token });
// set auth as a global default:
google.options({ auth: jwt });
const request = {
auth: jwt,
spreadsheetId: 'xxxxx', //<---------- "Project Checklist" and "Control" sheets
ranges: [
"'Project Checklist'!B:K",
"Control!A:F"
],
majorDimension: "ROWS",
dateTimeRenderOption: "FORMATTED_STRING",
valueRenderOption: "FORMATTED_VALUE"
}
This is wrapped inside a Promise...
sheets.spreadsheets.values.batchGet(request, (err, data, response) => {
console.log("inside: sheets.spreadsheets.values.batchGet() --------");
if (err) {
console.log('The Sheets API returned an error: ' + err);
//The API returned an error: Error: API key not valid. Please pass a valid API key.
reject(err);
};
try {
console.log("response:-------------------------------");
console.log(JSON.stringify(response));
console.log("data.valueRanges:-------------------------------");
console.log(data.valueRanges);
resolve(JSON.stringify(data.valueRanges));
} catch (err) {
console.log("Error processing Sheets API response: " + err);
reject(err);
}
})
Firebase log:
Function execution took 3822 ms, finished with status code: 200
undefined
data.valueRanges:-------------------------------
undefined
response:-------------------------------
getJwt() --------------- OK
index.js
'use strict'
const functions = require('firebase-functions');
const { google } = require('googleapis');
var serviceAccount = require("./credentials/admin-service-account-key.json");
function getJwt() {
// Define the required scopes.
var scopes = [
'https://www.googleapis.com/auth/spreadsheets'
];
return new google.auth.JWT(
serviceAccount.client_email,
null,
serviceAccount.private_key,
scopes
);
}
function getSpreadsheetData(jwt) {
return new Promise((resolve, reject) => {
jwt.authorize((error, access_token) => {
if (error) {
console.log('Error in jwt.authorize: ' + error);
reject(error);
} else {
// access_token ready to use to fetch data and return to client
const sheets = google.sheets({ version: 'v4', access_token });
// set auth as a global default:
google.options({ auth: jwt }); //<----------------------
const request = {
auth: jwt,
spreadsheetId: 'xxxxxxxxxxxxxxxxxxxx', //<---------- Project Checklist / Control
range: 'Control!A:F', //
}
sheets.spreadsheets.values.get(request, (err, response) => {
console.log("inside: sheets.spreadsheets.values.get() -------------------------------");
if (err) {
console.log('The Sheets API returned an error: ' + err);
//The API returned an error: Error: API key not valid. Please pass a valid API key.
reject(err);
};
try {
var numRows = response.data.values ? response.data.values.length : 0;
console.log('%d rows retrieved.', numRows);
console.log("response.data:-------------------------------");
console.log(response.data.values);
resolve(response.data.values);
} catch (err) {
console.log("Error processing Sheets API response: " + err);
reject(err);
}
})
}
})
})
}
function getSheetBatchData(jwt) {
return new Promise((resolve, reject) => {
jwt.authorize((error, access_token) => {
if (error) {
console.log('Error in jwt.authorize: ' + error);
reject(error);
} else {
// access_token ready to use to fetch data and return to client
const sheets = google.sheets({ version: 'v4', access_token });
// set auth as a global default:
google.options({ auth: jwt }); //<----------------------
const request = {
auth: jwt,
spreadsheetId: 'xxxxxxxxxxxxxxxxxxxxxxxxxx', //<---------- Project Checklist / Control
ranges: [
"Project Checklist!B:K",
"Control!A:F"
],
majorDimension: "ROWS",
dateTimeRenderOption: "FORMATTED_STRING",
valueRenderOption: "FORMATTED_VALUE"
}
/*
sheets.spreadsheets.values.batchGet({
// your options
}, function (err, data, response) {
console.log(response.req.res.request.url);
});
*/
//https://developers.google.com/sheets/api/reference/rest/v4/spreadsheets.values/batchGet
sheets.spreadsheets.values.batchGet(request, (err, data, response) => {
if (err) {
console.log('The Sheets API returned an error: ' + err);
//The API returned an error: Error: API key not valid. Please pass a valid API key.
reject(err);
};
try {
/*
* Returns: Array: data.valueRanges =
* [
* "range": "....",
* "values": []
* ]
*/
console.log("response:-------------------------------");
console.log(JSON.stringify(response));
console.log("data.valueRanges:-------------------------------");
console.log(data.valueRanges);
resolve(JSON.stringify(data.valueRanges));
} catch (err) {
console.log("Error processing Sheets API response: " + err);
reject(err);
}
});
}
})
})
}
/* Working */
exports.getControlSheetData = functions.https.onCall((data, context) => {
console.log("getData()---------------------------");
if (!context.auth) {
throw new functions.https.HttpsError('failed-precondition', 'The function must be called ' + 'while authenticated.');
} else {
console.log("context.auth ------------ OK");
const uid = context.auth.uid;
console.log(uid);
var jwt = getJwt();
console.log("getJwt() --------------- OK");
return getSpreadsheetData(jwt); //<------------ Requested Spreadsheet's Data
}
})
/* Error */
exports.getBatchData = functions.https.onCall((data, context) => {
console.log("getBatchData()---------------------------");
if (!context.auth) {
throw new functions.https.HttpsError('failed-precondition', 'The function must be called ' + 'while authenticated.');
} else {
console.log("context.auth ------------ OK");
const uid = context.auth.uid;
console.log(uid);
var jwt = getJwt();
console.log("getJwt() --------------- OK");
return getSheetBatchData(jwt); //<------------ Requested Spreadsheet's Data
}
})
New test: index.js
'use strict'
const functions = require('firebase-functions');
const { google } = require('googleapis');
var serviceAccount = require("./credentials/admin-service-account-key.json");
function getJwt() {
// Define the required scopes.
var scopes = [
'https://www.googleapis.com/auth/spreadsheets'
];
return new google.auth.JWT(
serviceAccount.client_email,
null,
serviceAccount.private_key,
scopes
);
}
function getDataTest2(jwt) {
jwt.authorize().then(access_token => {
// access_token ready to use to fetch data and return to client
const sheets = google.sheets({ version: 'v4', access_token });
// set auth as a global default:
google.options({ auth: jwt }); //<----------------------
const request = {
auth: jwt,
spreadsheetId: 'xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx',
ranges: [
"Project Checklist!B:K",
"Control!A:F"
],
majorDimension: "ROWS",
dateTimeRenderOption: "FORMATTED_STRING",
valueRenderOption: "FORMATTED_VALUE"
}
function callback(data,resp) {
try {
console.log("inside callback-----------");
console.log("returned data:--------------")
console.log(data);
console.log("returned resp:--------------")
console.log(resp);
console.log("expected data.valueRanges------------------------")
console.log(data.valueRanges); //<--------- expected data.valueRanges
return JSON.stringify(data.valueRanges);
} catch(err) {
console.log('The Sheets API returned an error: ' + err);
//The API returned an error: Error: API key not valid. Please pass a valid API key.
return(err);
}
}
sheets.spreadsheets.values.batchGet(request, callback);
}).catch(error => {
console.log('Error in jwt.authorize: ' + error);
reject(error);
})
}
exports.getBatchData = functions.https.onCall((data, context) => {
console.log("getBatchData()---------------------------");
if (!context.auth) {
throw new functions.https.HttpsError('failed-precondition', 'The function must be called ' + 'while authenticated.');
} else {
console.log("context.auth ------------ OK");
const uid = context.auth.uid;
console.log(uid);
var jwt = getJwt();
console.log("getJwt() --------------- OK");
//return getSheetBatchData(jwt); //<------------ Requested Spreadsheet's Data
return getDataTest2(jwt)
}
})
Firebase log:
getJwt()--------------- OK
Function execution took 965 ms, finished with status code: 200
inside callback-----------
returned data: --------------
null
returned resp: --------------
{
status: 200, statusText: 'OK', headers: { 'content-type': 'application/json; charset=UTF-8', vary: 'Origin, X-Origin, Referer', date: 'Tue, 18 Sep 2018 02:28:15 GMT', server: 'ESF', 'cache-control': 'private', 'x-xss-protection': '1; mode=block', 'x-frame-options': 'SAMEORIGIN', 'alt-svc': 'quic=":443"; ma=2592000; v="44,43,39,35"', connection: 'close', 'transfer-encoding': 'chunked' }, config: { adapter: [Function: httpAdapter], transformRequest: { '0': [Function: transformRequest] }, transformResponse: { '0': [Function: transformResponse] }, timeout: 0, xsrfCookieName: 'XSRF-TOKEN', xsrfHeaderName: 'X-XSRF-TOKEN', maxContentLength: 2147483648, validateStatus: [Function], headers: { Accept: 'application/json, text/plain, */*', 'Accept-Encoding': 'gzip', 'User-Agent': 'google-api-nodejs-client/0.2.1 (gzip)', Authorization: 'Bearer ya29.c.ElocBg_A_xxxxxxxxxxxxxxxxxxxx' }, method: 'get', access_token: { access_token: 'ya29.c.ElocBg_A_xxxxxxxxxxxxxxxxxx', token_type: 'Bearer', expiry_date: 1537241286000, id_token: undefined, refresh_token: 'jwt-placeholder' }, url: 'https://sheets.googleapis.com/v4/spreadsheets/xxxxxxxxxxxxxxxxxxxxxxxxxxx/values:batchGet', paramsSerializer: [Function], data: undefined, params: { ranges: [Object], majorDimension: 'ROWS', dateTimeRenderOption: 'FORMATTED_STRING', valueRenderOption: 'FORMATTED_VALUE' } }, request: ClientRequest {
domain: null, _events: { socket: [Function], abort: [Function], aborted: [Function], error: [Function], timeout: [Function], prefinish: [Function: requestOnPrefinish] }, _eventsCount: 6, _maxListeners: undefined, output: [], outputEncodings: [], outputCallbacks: [], outputSize: 0, writable: true, _last: true, upgrading: false, chunkedEncoding: false, shouldKeepAlive: false, useChunkedEncodingByDefault: false, sendDate: false, _removedHeader: { }, _contentLength: 0, _hasBody: true, _trailer: '', finished: true, _headerSent: true, socket: TLSSocket { _tlsOptions: [Object], _secureEstablished: true, _securePending: false, _newSessionPending: false, _controlReleased: true, _SNICallback: null, servername: null, npnProtocol: false, alpnProtocol: false, authorized: true, authorizationError: null, encrypted: true, _events: [Object], _eventsCount: 8, connecting: false, _hadError: false, _handle: null, _parent: null, _host: 'sheets.googleapis.com', _readableState: [Object], readable: false, domain: null, _maxListeners: undefined, _writableState: [Object], writable: false, allowHalfOpen: false, destroyed: true, _bytesDispatched: 563, _sockname: null, _pendingData: null, _pendingEncoding: '', server: undefined, _server: null, ssl: null, _requestCert: true, _rejectUnauthorized: true, parser: null, _httpMessage: [Circular], read: [Function], _consuming: true, write: [Function: writeAfterFIN], _idleNext: null, _idlePrev: null, _idleTimeout: -1 }, connection: TLSSocket {
_tlsOptions: [Object], _secureEstablished: true, _securePending: false, _newSessionPending: false, _controlReleased: true, _SNICallback: null, servername: null, npnProtocol: false, alpnProtocol: false, authorized: true, authorizationError: null, encrypted: true, _events: [Object], _eventsCount: 8, connecting: false, _hadError: false, _handle: null, _parent: null, _host: 'sheets.googleapis.com', _readableState: [Object], readable: false, domain: null, _maxListeners: undefined, _writableState: [Object], writable: false, allowHalfOpen: false, destroyed: true, _bytesDispatched: 563, _sockname: null, _pendingData: null, _pendingEncoding: '', server: undefined, _server: null, ssl: null, _requestCert: true, _rejectUnauthorized: true, parser: null, _httpMessage: [Circular], read: [Function], _consuming: true, write: [Function: writeAfte
...
expected data.valueRanges------------------------
The Sheets API returned an error: TypeError: Cannot read property 'valueRanges' of null
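For reference, here is a minimal sketch of the same batchGet call written against the newer googleapis client, where the callback signature is (err, res) with the payload on res.data, or where the call can simply be awaited. The spreadsheet ID is a placeholder and this illustrates the client API shape, not the poster's confirmed fix:
const { google } = require('googleapis');

async function getBatchValues(jwt) {
  const sheets = google.sheets({ version: 'v4', auth: jwt });
  const res = await sheets.spreadsheets.values.batchGet({
    spreadsheetId: 'xxxxx', // placeholder
    ranges: ["'Project Checklist'!B:K", "Control!A:F"],
    majorDimension: 'ROWS',
    dateTimeRenderOption: 'FORMATTED_STRING',
    valueRenderOption: 'FORMATTED_VALUE'
  });
  // res.data is the BatchGetValuesResponse; valueRanges has one entry per requested range.
  return res.data.valueRanges;
}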

Related

401 Unauthorized when consuming Business Central OData Web Service in NodeJs

I have been trying to connect and retrieve a list of data from Business Central.
The Web Service has been exposed from BC and the OData link works and displays the JSON for the data in "Articles".
I'm trying to consume that service with Node.js using NTLM (I also tried basic authentication and the same issue persists).
Please find below the file responsible for consuming the BC web service, followed by the 401 error.
I have already stored all the information needed for the NTLM config in my .env file; I also tried hardcoding the values and got the same issue.
const express = require("express");
const axios = require("axios");
const router = express.Router();
const ntlm = require("express-ntlm");
//const Article = require("../models/Articles.js");
const domain = process.env.DOMAIN;
const username = process.env.USERNAME;
const password = process.env.PASSWORD;
// Define auth function
const auth = ntlm({
debug: console.log,
domain,
username,
password,
ntlm_version: 2,
reconnect: true,
send_401: function (res) {
res.sendStatus(401);
},
badrequest: function (res) {
res.sendStatus(400);
},
});
// Get customer data from Business Central
async function getArticlesFromBC() {
try {
const options = {
auth: {
username,
password,
workstation: process.env.WORKSTATION,
domain,
},
};
const companyId = "CRONUS France S.A.";
const encodedCompanyId = encodeURIComponent(companyId);
const url = `http://${process.env.SERVER}:7048/BC210/ODataV4/Company('${encodedCompanyId}')/ItemListec`;
const response = await axios.get(url, options);
return response.data;
} catch (error) {
console.error(error);
throw new Error("Error retrieving articles data from Business Central");
}
}
// Route to get article data
router.get("/", auth, async (req, res) => {
try {
const user = req.ntlm;
console.log(user);
let articles = await getArticlesFromBC();
res.json(articles);
} catch (error) {
console.error(error);
res
.status(500)
.send("Error retrieving articles data from Business Central");
}
});
module.exports = router;
Explanation
This is the auth function that will use the variables set in .env to authenticate the user in order to access the BC endpoint
// Define auth function
const auth = ntlm({
debug: console.log,
domain,
username,
password,
ntlm_version: 2,
reconnect: true,
send_401: function (res) {
res.sendStatus(401);
},
badrequest: function (res) {
res.sendStatus(400);
},
});
This is the function that connects to the endpoint URL and returns the JSON file, this is where the error triggers
// Get article data from Business Central
async function getArticlesFromBC() {
try {
const options = {
auth: {
username,
password,
workstation: process.env.WORKSTATION,
domain,
},
};
const companyId = "CRONUS France S.A.";
const encodedCompanyId = encodeURIComponent(companyId);
const url = `http://${process.env.SERVER}:7048/BC210/ODataV4/Company('${encodedCompanyId}')/ItemListec`;
const response = await axios.get(url, options);
return response.data;
} catch (error) {
console.error(error);
throw new Error("Error retrieving articles data from Business Central");
}
}
This is the route to access the nodejs api
// Route to get article data
router.get("/", auth, async (req, res) => {
try {
const user = req.ntlm;
console.log(user);
let articles = await getArticlesFromBC();
res.json(articles);
} catch (error) {
console.error(error);
res
.status(500)
.send("Error retrieving articles data from Business Central");
}
});
This is the 401 error JSON. You can see at the beginning that NTLM authenticates successfully, but then the catch block in the get function is triggered:
[express-ntlm] No Authorization header present
[express-ntlm] No domaincontroller was specified, all Authentication messages are valid.
{
DomainName: 'DESKTOP-1EF91E4',
UserName: 'ahmed',
Workstation: 'DESKTOP-1EF91E4',
Authenticated: true
}
AxiosError: Request failed with status code 401
at settle (D:\ESPRIT\5eme\PFE\B2B-ERP-MERN-App\node_modules\axios\dist\node\axios.cjs:1900:12)
at IncomingMessage.handleStreamEnd (D:\ESPRIT\5eme\PFE\B2B-ERP-MERN-App\node_modules\axios\dist\node\axios.cjs:2944:11)
at IncomingMessage.emit (events.js:412:35)
at endReadableNT (internal/streams/readable.js:1317:12)
at processTicksAndRejections (internal/process/task_queues.js:82:21) {
code: 'ERR_BAD_REQUEST',
config: {
transitional: {
silentJSONParsing: true,
forcedJSONParsing: true,
clarifyTimeoutError: false
},
adapter: [ 'xhr', 'http' ],
transformRequest: [ [Function: transformRequest] ],
transformResponse: [ [Function: transformResponse] ],
timeout: 0,
xsrfCookieName: 'XSRF-TOKEN',
xsrfHeaderName: 'X-XSRF-TOKEN',
maxContentLength: -1,
maxBodyLength: -1,
env: { FormData: [Function], Blob: null },
validateStatus: [Function: validateStatus],
headers: AxiosHeaders {
Accept: 'application/json, text/plain, */*',
'User-Agent': 'axios/1.3.3',
'Accept-Encoding': 'gzip, compress, deflate, br'
},
auth: {
username: 'ahmed',
password: undefined,
workstation: 'DESKTOP-1EF91E4',
domain: undefined
},
method: 'get',
url: "http://desktop-1ef91e4:7048/BC210/ODataV4/Company('CRONUS%20France%20S.A.')/ItemListec",
data: undefined
},
request: <ref *1> ClientRequest {
_events: [Object: null prototype] {
abort: [Function (anonymous)],
aborted: [Function (anonymous)],
connect: [Function (anonymous)],
error: [Function (anonymous)],
socket: [Function (anonymous)],
timeout: [Function (anonymous)],
prefinish: [Function: requestOnPrefinish]
},
_eventsCount: 7,
_maxListeners: undefined,
outputData: [],
outputSize: 0,
writable: true,
destroyed: false,
_last: true,
chunkedEncoding: false,
shouldKeepAlive: false,
_defaultKeepAlive: true,
useChunkedEncodingByDefault: false,
sendDate: false,
_removedConnection: false,
_removedContLen: false,
_removedTE: false,
_contentLength: 0,
_hasBody: true,
_trailer: '',
finished: true,
_headerSent: true,
socket: Socket {
connecting: false,
_hadError: false,
_parent: null,
_host: 'desktop-1ef91e4',
_readableState: [ReadableState],
_events: [Object: null prototype],
_eventsCount: 7,
_maxListeners: undefined,
_writableState: [WritableState],
allowHalfOpen: false,
_sockname: null,
_pendingData: null,
_pendingEncoding: '',
server: null,
_server: null,
parser: null,
_httpMessage: [Circular *1],
[Symbol(async_id_symbol)]: 465,
[Symbol(kHandle)]: [TCP],
[Symbol(kSetNoDelay)]: false,
[Symbol(lastWriteQueueSize)]: 0,
[Symbol(timeout)]: null,
[Symbol(kBuffer)]: null,
[Symbol(kBufferCb)]: null,
[Symbol(kBufferGen)]: null,
[Symbol(kCapture)]: false,
[Symbol(kBytesRead)]: 0,
[Symbol(kBytesWritten)]: 0,
[Symbol(RequestTimeout)]: undefined
},
_header: "GET /BC210/ODataV4/Company('CRONUS%20France%20S.A.')/ItemListec HTTP/1.1\r\n" +
'Accept: application/json, text/plain, */*\r\n' +
'User-Agent: axios/1.3.3\r\n' +
'Accept-Encoding: gzip, compress, deflate, br\r\n' +
'Host: desktop-1ef91e4:7048\r\n' +
'Authorization: Basic YWhtZWQ6\r\n' +
'Connection: close\r\n' +
'\r\n',
_keepAliveTimeout: 0,
_onPendingData: [Function: noopPendingOutput],
agent: Agent {
_events: [Object: null prototype],
_eventsCount: 2,
_maxListeners: undefined,
defaultPort: 80,
protocol: 'http:',
options: [Object],
requests: {},
sockets: [Object],
freeSockets: {},
keepAliveMsecs: 1000,
keepAlive: false,
maxSockets: Infinity,
maxFreeSockets: 256,
scheduling: 'lifo',
maxTotalSockets: Infinity,
totalSocketCount: 1,
[Symbol(kCapture)]: false
},
socketPath: undefined,
method: 'GET',
maxHeaderSize: undefined,
insecureHTTPParser: undefined,
path: "/BC210/ODataV4/Company('CRONUS%20France%20S.A.')/ItemListec",
_ended: true,
res: IncomingMessage {
_readableState: [ReadableState],
_events: [Object: null prototype],
_eventsCount: 4,
_maxListeners: undefined,
socket: [Socket],
httpVersionMajor: 1,
httpVersionMinor: 1,
httpVersion: '1.1',
complete: true,
headers: [Object],
rawHeaders: [Array],
trailers: {},
rawTrailers: [],
aborted: false,
upgrade: false,
url: '',
method: null,
statusCode: 401,
statusMessage: 'Unauthorized',
client: [Socket],
_consuming: false,
_dumped: false,
req: [Circular *1],
responseUrl: "http://ahmed:#desktop-1ef91e4:7048/BC210/ODataV4/Company('CRONUS%20France%20S.A.')/ItemListec",
redirects: [],
[Symbol(kCapture)]: false,
[Symbol(RequestTimeout)]: undefined
},
aborted: false,
timeoutCb: null,
upgradeOrConnect: false,
parser: null,
maxHeadersCount: null,
reusedSocket: false,
host: 'desktop-1ef91e4',
protocol: 'http:',
_redirectable: Writable {
_writableState: [WritableState],
_events: [Object: null prototype],
_eventsCount: 3,
_maxListeners: undefined,
_options: [Object],
_ended: true,
_ending: true,
_redirectCount: 0,
_redirects: [],
_requestBodyLength: 0,
_requestBodyBuffers: [],
_onNativeResponse: [Function (anonymous)],
_currentRequest: [Circular *1],
_currentUrl: "http://ahmed:#desktop-1ef91e4:7048/BC210/ODataV4/Company('CRONUS%20France%20S.A.')/ItemListec",
[Symbol(kCapture)]: false
},
[Symbol(kCapture)]: false,
[Symbol(kNeedDrain)]: false,
[Symbol(corked)]: 0,
[Symbol(kOutHeaders)]: [Object: null prototype] {
accept: [Array],
'user-agent': [Array],
'accept-encoding': [Array],
host: [Array],
authorization: [Array]
}
},
response: {
status: 401,
statusText: 'Unauthorized',
headers: AxiosHeaders {
'content-length': '0',
server: 'Microsoft-HTTPAPI/2.0',
'www-authenticate': 'Negotiate',
date: 'Thu, 16 Feb 2023 11:49:51 GMT',
connection: 'close'
},
config: {
transitional: [Object],
adapter: [Array],
transformRequest: [Array],
transformResponse: [Array],
timeout: 0,
xsrfCookieName: 'XSRF-TOKEN',
xsrfHeaderName: 'X-XSRF-TOKEN',
maxContentLength: -1,
maxBodyLength: -1,
env: [Object],
validateStatus: [Function: validateStatus],
headers: [AxiosHeaders],
auth: [Object],
method: 'get',
url: "http://desktop-1ef91e4:7048/BC210/ODataV4/Company('CRONUS%20France%20S.A.')/ItemListec",
data: undefined
},
request: <ref *1> ClientRequest {
_events: [Object: null prototype],
_eventsCount: 7,
_maxListeners: undefined,
outputData: [],
outputSize: 0,
writable: true,
destroyed: false,
_last: true,
chunkedEncoding: false,
shouldKeepAlive: false,
_defaultKeepAlive: true,
useChunkedEncodingByDefault: false,
sendDate: false,
_removedConnection: false,
_removedContLen: false,
_removedTE: false,
_contentLength: 0,
_hasBody: true,
_trailer: '',
finished: true,
_headerSent: true,
socket: [Socket],
_header: "GET /BC210/ODataV4/Company('CRONUS%20France%20S.A.')/ItemListec HTTP/1.1\r\n" +
'Accept: application/json, text/plain, */*\r\n' +
'User-Agent: axios/1.3.3\r\n' +
'Accept-Encoding: gzip, compress, deflate, br\r\n' +
'Host: desktop-1ef91e4:7048\r\n' +
'Authorization: Basic YWhtZWQ6\r\n' +
'Connection: close\r\n' +
'\r\n',
_keepAliveTimeout: 0,
_onPendingData: [Function: noopPendingOutput],
agent: [Agent],
socketPath: undefined,
method: 'GET',
maxHeaderSize: undefined,
insecureHTTPParser: undefined,
path: "/BC210/ODataV4/Company('CRONUS%20France%20S.A.')/ItemListec",
_ended: true,
res: [IncomingMessage],
aborted: false,
timeoutCb: null,
upgradeOrConnect: false,
parser: null,
maxHeadersCount: null,
reusedSocket: false,
host: 'desktop-1ef91e4',
protocol: 'http:',
_redirectable: [Writable],
[Symbol(kCapture)]: false,
[Symbol(kNeedDrain)]: false,
[Symbol(corked)]: 0,
[Symbol(kOutHeaders)]: [Object: null prototype]
},
data: ''
}
}
Error: Error retrieving articles data from Business Central
at getArticlesFromBC (D:\ESPRIT\5eme\PFE\B2B-ERP-MERN-App\routes\ArticleRoutes.js:44:11)
at processTicksAndRejections (internal/process/task_queues.js:95:5)
at async D:\ESPRIT\5eme\PFE\B2B-ERP-MERN-App\routes\ArticleRoutes.js:53:20
Even though it works perfectly in Postman (NTLM authentication is enabled on the server instance for that to happen),
express-ntlm seemed to authenticate the user but doesn't follow up for some reason.
I switched to httpntlm and it worked perfectly. Here is the new code in the Node API file:
const express = require("express");
const router = express.Router();
const httpntlm = require("httpntlm");
// Get article data from Business Central
async function getArticlesFromBC() {
const username = process.env.USERNAME;
const password = process.env.PASSWORD;
const domain = process.env.DOMAIN;
const workstation = process.env.WORKSTATION;
const encodedCompanyId = encodeURIComponent("CRONUS France S.A.");
const url = `http://${process.env.SERVER}:7048/BC210/ODataV4/Company('${encodedCompanyId}')/ItemListec`;
const options = {
url: url,
username: username,
password: password,
workstation: workstation,
domain: domain
};
return new Promise((resolve, reject) => {
httpntlm.get(options, (err, res) => {
if (err) {
console.error(err);
reject(err);
} else {
resolve(res.body);
}
});
});
}
// Route to get article data
router.get("/", async (req, res) => {
try {
const articles = await getArticlesFromBC();
res.json(articles);
} catch (error) {
console.error("Error retrieving articles data from Business Central:", error.message);
res.status(500).send("Error retrieving articles data from Business Central");
}
});
module.exports = router;
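One small follow-up worth noting (a sketch under the assumption that the OData payload is JSON wrapped in the standard "value" property): httpntlm hands back the raw body as a string in res.body, so if the route should return objects rather than a JSON string, parse it before resolving.
// Assumes the same `httpntlm` require and `options` object as above.
function getArticlesParsed(options) {
  return new Promise((resolve, reject) => {
    httpntlm.get(options, (err, res) => {
      if (err) return reject(err);
      try {
        const parsed = JSON.parse(res.body); // OData responses usually wrap rows in "value"
        resolve(parsed.value || parsed);
      } catch (parseErr) {
        reject(parseErr);
      }
    });
  });
}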

unable to send file to express/multer backend

I have the following code listing the files in a folder and then trying to send them to the POST /upload route in the Express backend. But the file is always undefined on the server, even though the size and other info seem to get through.
Client send
const axios = require('axios');
var FormData = require('form-data');
//requiring path and fs modules
const path = require('path');
const fs = require('fs');
//joining path of directory
const directoryPath = path.join(__dirname, '');
//passing directoryPath and callback function
fs.readdir(directoryPath, function (err, files) {
//handling error
if (err) {
return console.log('Unable to scan directory: ' + err);
}
//listing all files using forEach
var postInfo = files.forEach(async function (file) {
const form_data = new FormData();
form_data.append('file', fs.createReadStream(file));
const request_config = {
method: "post",
url: 'http://localhost:8080/upload',
port: 8080,
headers: {
"Content-Type": "multipart/form-data; boundary=form_data._boundary"
},
data: form_data
};
try {
var req = await axios(request_config);
// Do whatever you want to do with the file
console.log("Request: ", req);
} catch (e) {
console.error(e);
}
});
console.log(postInfo);
});
receiving code in the backend
const http = require('http')
const port = 8080
const express = require('express');
const app = express();
const multer = require('multer');
const server = http.createServer(app)
let storage = multer.diskStorage({
destination: function (req, file, cb) {
cb(null, './uploads')
},
filename: function (req, file, cb) {
cb(null, file.fieldname + '-' + Date.now())
}
});
const upload = multer({ storage }).single('file');
app.post('/upload', upload, (req, res) => {
console.log(req.files) // this does log the uploaded image data.
})
// bind the server on port
server.listen(port, (err) => {
if (err) {
return console.log('something bad happened', err)
}
console.log(`server is listening on ${port}`)
})
Log:
node file-receive.js
server is listening on 8080
undefined
undefined
undefined
response: undefined,
isAxiosError: true,
toJSON: [Function] }
{ Error: socket hang up
at createHangUpError (_http_client.js:323:15)
at Socket.socketOnEnd (_http_client.js:426:23)
at Socket.emit (events.js:194:15)
at endReadableNT (_stream_readable.js:1103:12)
at process._tickCallback (internal/process/next_tick.js:63:19)
code: 'ECONNRESET',
config:
{ url: 'http://localhost:8080/upload',
method: 'post',
data:
FormData {
_overheadLength: 169,
_valueLength: 0,
_valuesToMeasure: [Array],
writable: false,
readable: true,
dataSize: 0,
maxDataSize: 2097152,
pauseStreams: true,
_released: true,
_streams: [],
_currentStream: null,
_insideLoop: false,
_pendingNext: false,
_boundary: '--------------------------046964089061111513973474',
_events: [Object],
_eventsCount: 1 },
headers:
{ Accept: 'application/json, text/plain, */*',
'Content-Type': 'application/x-www-form-urlencoded',
'User-Agent': 'axios/0.19.2' },
transformRequest: [ [Function: transformRequest] ],
transformResponse: [ [Function: transformResponse] ],
timeout: 0,
adapter: [Function: httpAdapter],
xsrfCookieName: 'XSRF-TOKEN',
xsrfHeaderName: 'X-XSRF-TOKEN',
maxContentLength: -1,
validateStatus: [Function: validateStatus],
port: 8080 },
request:
Writable {
_writableState:
WritableState {
objectMode: false,
highWaterMark: 16384,
finalCalled: false,
needDrain: false,
ending: false,
ended: false,
finished: false,
destroyed: false,
decodeStrings: true,
defaultEncoding: 'utf8',
length: 0,
writing: false,
corked: 0,
sync: true,
bufferProcessing: false,
onwrite: [Function: bound onwrite],
writecb: null,
writelen: 0,
bufferedRequest: null,
lastBufferedRequest: null,
pendingcb: 0,
prefinished: false,
errorEmitted: false,
emitClose: true,
bufferedRequestCount: 0,
corkedRequestsFree: [Object] },
writable: true,
_events:
[Object: null prototype] {
response: [Function: handleResponse],
error: [Function: handleRequestError] },
_eventsCount: 2,
_maxListeners: undefined,
_options:
{ protocol: 'http:',
maxRedirects: 21,
maxBodyLength: 10485760,
path: '/upload',
method: 'POST',
headers: [Object],
agent: undefined,
agents: [Object],
auth: undefined,
hostname: 'localhost',
port: '8080',
nativeProtocols: [Object],
pathname: '/upload' },
_redirectCount: 0,
_redirects: [],
_requestBodyLength: 983,
_requestBodyBuffers: [ [Object], [Object], [Object] ],
_onNativeResponse: [Function],
_currentRequest:
ClientRequest {
_events: [Object],
_eventsCount: 6,
_maxListeners: undefined,
output: [],
outputEncodings: [],
outputCallbacks: [],
outputSize: 0,
writable: true,
_last: true,
chunkedEncoding: true,
shouldKeepAlive: false,
useChunkedEncodingByDefault: true,
sendDate: false,
_removedConnection: false,
_removedContLen: false,
_removedTE: false,
_contentLength: null,
_hasBody: true,
_trailer: '',
finished: true,
_headerSent: true,
socket: [Socket],
connection: [Socket],
_header:
'POST /upload HTTP/1.1\r\nAccept: application/json, text/plain, */*\r\nContent-Type: application/x-www-form-urlencoded\r\nUser-Agent: axios/0.19.2\r\nHost: localhost:8080\r\nConnection: close\r\nTransfer-Encoding: chunked\r\n\r\n',
_onPendingData: [Function: noopPendingOutput],
agent: [Agent],
socketPath: undefined,
timeout: undefined,
method: 'POST',
path: '/upload',
_ended: false,
res: null,
aborted: undefined,
timeoutCb: null,
upgradeOrConnect: false,
parser: null,
maxHeadersCount: null,
_redirectable: [Circular],
[Symbol(isCorked)]: false,
[Symbol(outHeadersKey)]: [Object] },
_currentUrl: 'http://localhost:8080/upload' },
response: undefined,
isAxiosError: true,
toJSON: [Function] }
I was able to reproduce the issue and fix the code with the following adjustments (note that I'm uploading a single file for the sake of simplicity).
On the client side you basically just provide the form-data to axios and call getHeaders() to get the already set-up headers (including the multipart boundary); you also need to adjust the file name in the form.
// client.js
...
const form_data = new FormData();
form_data.append('file', fs.createReadStream(__dirname + '/file-you-want-to-upload'));
const request_config = {
method: "post",
url: 'http://localhost:8080/upload',
port: 8080,
data: form_data,
headers: form_data.getHeaders()
};
...
On the backend side you need to make sure to access req.file, not req.files, since you're using upload.single, and to actually send a response in your handler, as the request would otherwise hang:
//server.js
...
const upload = multer({storage}).single('file');
app.post('/upload', upload, (req, res) => {
console.log(req.file) // this does log the uploaded image data.
res.send("File uploaded");
});
...
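If you later want to send several files in one request (as the original loop over the directory suggests), a small variant is sketched below. It assumes a hypothetical field name 'files' and an arbitrary limit of 10; with multer's .array() the uploads arrive on req.files instead of req.file.
// server.js (variant sketch, not part of the accepted fix)
const uploadMany = multer({ storage }).array('files', 10); // field name and limit are assumptions
app.post('/upload-many', uploadMany, (req, res) => {
  console.log(req.files);                          // array of uploaded file metadata
  res.send(`Received ${req.files.length} file(s)`);
});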

Firebase scheduled function pubsub axios POST request

I'm using Firebase Functions to schedule a Puppeteer task from a Firebase pubsub namespace.
I also want to post the response back to an API endpoint outside Google using axios.
I tried axios directly from the worker, but I get a 500 error.
My goal:
Use Firebase schedule and the pubsub namespace to generate Puppeteer jobs - complete
Post results back to an external endpoint using axios - need your help :)
Code:
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
const functions = require("firebase-functions");
const admin = require("firebase-admin");
const puppeteer = require('puppeteer');
const url = require('url');
// Request Data From A URL
var axios = require('axios');
var https = require('https');
// var cors = require("cors");
// Initalise App
admin.initializeApp();
const db = admin.firestore();
const workers = {
extract: async ({ uri, post_id, store_id, domain }) => {
let theTitle = null;
let thePrice = null;
let host = url.parse(uri).host;
let postId = post_id;
let storeId = store_id;
let theDomain = domain;
const SELECTORS = {
amazonPrice: '#priceblock_ourprice',
ebayPrice: '#prcIsum'
};
const browser = await puppeteer.launch({ args: ['--no-sandbox', '--disable-setuid-sandbox'] });
console.log('spawning chrome headless');
console.log(postId);
console.log(storeId);
console.log(domain);
const page = await browser.newPage();
// Goto page and then do stuff
console.log('going to ', uri);
await page.goto(uri, {
waitUntil: ["domcontentloaded", "networkidle0"]
});
console.log('waiting for page to load ');
console.log(host);
// theTitle = await page.title();
try {
theTitle = await page.title();
// find amazon price
if (host === 'www.amazon.co.uk' || 'amazon.co.uk') {
const priceInput = await page.$(SELECTORS.amazonPrice)
thePrice = await page.evaluate(element => element.textContent, priceInput)
}
// find ebay price
if (host === 'www.ebay.co.uk' || 'ebay.co.uk') {
const priceInput = await page.$(SELECTORS.ebayPrice)
thePrice = await page.evaluate(element => element.value, priceInput)
}
else {
console.log('failed scrape at', host);
}
}
catch (error) {
console.error('There was an error processing the page:', error);
}
finally {
// close browser
if (browser !== null) {
await browser.close();
}
}
console.log(theTitle);
console.log(thePrice);
const response = {
title: theTitle,
price: thePrice,
};
console.log('post' + postId + storeId + thePrice + theDomain);
axios.post('endpoint', {
price: thePrice,
store_id: storeId,
post_id: postId,
domain: theDomain,
},
{
headers: {
'Content-Type': 'multipart/form-data',
}
})
.then(function (response) {
console.log(response);
})
.catch(function (error) {
console.log(error);
});
// axios post end
return response;
}
};
exports.taskRunner = functions.runWith({ memory: '2GB' }).pubsub
// export const taskRunner = functions.region('europe-west2').runWith( { memory: '2GB' }).pubsub
.schedule('*/15 * * * *').onRun(async (context) => {
// Consistent timestamp
const now = admin.firestore.Timestamp.now();
// Query all documents ready to perform
const query = db.collection('tasks').where('performAt', '<=', now).where('status', '==', 'scheduled');
const tasks = await query.get();
// Jobs to execute concurrently.
const jobs = [];
// Loop over documents and push job.
tasks.forEach(snapshot => {
const { worker, options } = snapshot.data();
const job = workers[worker](options)
// Update doc with status on success or error
// .then(() => snapshot.ref.update({ status: 'complete' }))
.catch((err) => snapshot.ref.update({ status: 'error' }));
jobs.push(job);
});
// Execute all jobs concurrently
return await Promise.all(jobs);
});
Error
Error: Request failed with status code 500
at createError (/srv/node_modules/axios/lib/core/createError.js:16:15)
at settle (/srv/node_modules/axios/lib/core/settle.js:17:12)
at IncomingMessage.handleStreamEnd (/srv/node_modules/axios/lib/adapters/http.js:236:11)
at emitNone (events.js:111:20)
at IncomingMessage.emit (events.js:208:7)
at endReadableNT (_stream_readable.js:1064:12)
at _combinedTickCallback (internal/process/next_tick.js:139:11)
at process._tickDomainCallback (internal/process/next_tick.js:219:9)
config:
{ url: 'endpoint',
method: 'post',
data: '{"£59.99":null,"store_id":32,"post_id":25,"domain":"amazon.co.uk"}',
headers:
{ Accept: 'application/json, text/plain, */*',
'Content-Type': 'multipart/form-data',
'User-Agent': 'axios/0.19.2',
'Content-Length': 65 },
transformRequest: [ [Function: transformRequest] ],
transformResponse: [ [Function: transformResponse] ],
timeout: 0,
adapter: [Function: httpAdapter],
xsrfCookieName: 'XSRF-TOKEN',
xsrfHeaderName: 'X-XSRF-TOKEN',
maxContentLength: -1,
validateStatus: [Function: validateStatus] },
request:
ClientRequest {
domain:
Domain {
domain: null,
_events: [Object],
_eventsCount: 1,
_maxListeners: undefined,
members: [Array] },
_events:
{ socket: [Function],
abort: [Function],
aborted: [Function],
error: [Function],
timeout: [Function],
prefinish: [Function: requestOnPrefinish] },
_eventsCount: 6,
_maxListeners: undefined,
output: [],
outputEncodings: [],
outputCallbacks: [],
outputSize: 0,
writable: true,
_last: true,
upgrading: false,
chunkedEncoding: false,
shouldKeepAlive: false,
useChunkedEncodingByDefault: true,
sendDate: false,
_removedConnection: false,
_removedContLen: false,
_removedTE: false,
_contentLength: null,
_hasBody: true,
_trailer: '',
finished: true,
_headerSent: true,
socket:
TLSSocket {
_tlsOptions: [Object],
_secureEstablished: true,
_securePending: false,
_newSessionPending: false,
_controlReleased: true,
_SNICallback: null,
servername: 'SERVERNAME',
npnProtocol: false,
alpnProtocol: false,
authorized: true,
authorizationError: null,
encrypted: true,
_events: [Object],
_eventsCount: 9,
connecting: false,
_hadError: false,
_handle: [Object],
_parent: null,
_host: 'HOST',
_readableState: [Object],
readable: true,
domain: [Object],
_maxListeners: undefined,
_writableState: [Object],
writable: false,
allowHalfOpen: false,
_bytesDispatched: 259,
_sockname: null,
_pendingData: null,
_pendingEncoding: '',
server: undefined,
_server: null,
ssl: [Object],
_requestCert: true,
_rejectUnauthorized: true,
parser: null,
_httpMessage: [Circular],
[Symbol(asyncId)]: 538,
[Symbol(bytesRead)]: 0 },
connection:
TLSSocket {
_tlsOptions: [Object],
_secureEstablished: true,
_securePending: false,
_newSessionPending: false,
_controlReleased: true,
_SNICallback: null,
servername: 'HOST',
npnProtocol: false,
alpnProtocol: false,
authorized: true,
authorizationError: null,
encrypted: true,
_events: [Object],
_eventsCount: 9,
connecting: false,
_hadError: false,
_handle: [Object],
_parent: null,
_host: 'HOST',
_readableState: [Object],
readable: true,
domain: [Object],
_maxListeners: undefined,
_writableState: [Object],
writable: false,
allowHalfOpen: false,
_bytesDispatched: 259,
_sockname: null,
_pendingData: null,
_pendingEncoding: '',
server: undefined,
_server: null,
ssl: [Object],
_requestCert: true,
_rejectUnauthorized: true,
parser: null,
_httpMessage: [Circular],
[Symbol(asyncId)]: 538,
[Symbol(bytesRead)]: 0 },
_header: 'POST ENDPOINT HTTP/1.1\r\nAccept: application/json, text/plain, */*\r\nContent-Type: multipart/form-data\r\nUser-Agent: axios/0.19.2\r\nContent-Length: 65\r\nHost: HOST\r\nConnection: close\r\n\r\n',
_onPendingData: [Function: noopPendingOutput],
agent:
Agent {
domain: null,
_events: [Object],
_eventsCount: 1,
_maxListeners: undefined,
defaultPort: 443,
protocol: 'https:',
options: [Object],
requests: {},
sockets:
After some work and trial & error, I settled on using the Request package to solve the issue - https://www.npmjs.com/package/request
I haven't had time to investigate why axios was not working, but I will review it and, if enlightened, post here.
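For what it's worth, the axios config in the error above shows a JSON string body being sent with a Content-Type of multipart/form-data, which many endpoints will reject. Below is a hedged sketch of the plain-JSON variant; the endpoint URL is a placeholder and this is a guess at the cause, not a confirmed diagnosis:
axios.post('https://example.com/endpoint', {   // placeholder URL
  price: thePrice,
  store_id: storeId,
  post_id: postId,
  domain: theDomain
})
  .then(resp => console.log('posted results, status:', resp.status))
  .catch(err => console.log('post failed:', err.response && err.response.status));
// axios sets Content-Type: application/json automatically for object bodies,
// so no multipart header is needed here.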

How to access final GraphQL-Reponse in nest.js with interceptor

I have implemented a LoggingInterceptor which should be able to access the final GraphQL response with its data and error properties, plus the original request body and the authenticated user, which has been added to the request by an AuthGuard beforehand. (EDIT: Partially solved by @jay-mcdoniel: user and body are accessible through GqlExecutionContext.create(context).getContext().)
However, the interceptor just provides one fully resolved GraphQL object.
@Injectable()
export class LoggingInterceptor implements NestInterceptor {
intercept(context: ExecutionContext, next: CallHandler): Observable<any> {
return next.handle().pipe(tap(
(allData) => console.log(allData),
(error)=> console.log(error)));
}
}
This is my interceptor class. It just calls the RxJS operator tap to log the current values of the observable.
If I run the following GraphQL-Request...
mutation {
login(data: { username: "admin", password: "123456" }) {
id
username
token
}
}
... my server answers correctly with the following response-body:
{
"data": {
"login": {
"id": "6f40be3b-cda9-4e6d-97ce-ced3787e9974",
"username": "admin",
"token": "someToken"
}
}
}
But the content of allData that gets logged to the console by my interceptor is the following:
{
id: '6f40be3b-cda9-4e6d-97ce-ced3787e9974',
isAdmin: true,
username: 'admin',
firstname: null,
lastname: null,
email: null,
created: 2019-07-05T15:11:31.606Z,
token: 'someToken'
}
Instead, I would like to see the information of the real response body.
I have additionally tried to access the HTTP response via context.switchToHttp().getResponse(), but this only contains the parameters of the login mutation:
{
data: [Object: null prototype] { username: 'admin', password: '123456' }
}
EDIT:
console.log(GqlExecutionContext.create(context).getContext());
prints (still no GraphQL-ResponseBody):
{
headers: {
/*...*/
},
user: /*...*/,
body: {
operationName: null,
variables: {},
query: 'mutation {\n login(data: {username: "admin", password: ' +
'"123456"}) {\n token\n id\n username\n isAdmin\n }\n' +
'}\n'
},
res: ServerResponse {
_events: [Object: null prototype] { finish: [Function: bound resOnFinish] },
_eventsCount: 1,
_maxListeners: undefined,
outputData: [],
outputSize: 0,
writable: true,
_last: false,
chunkedEncoding: false,
shouldKeepAlive: true,
useChunkedEncodingByDefault: true,
sendDate: true,
_removedConnection: false,
_removedContLen: false,
_removedTE: false,
_contentLength: null,
_hasBody: true,
_trailer: '',
finished: false,
_headerSent: false,
socket: Socket {
connecting: false,
_hadError: false,
_parent: null,
_host: null,
_readableState: [ReadableState],
readable: true,
_events: [Object],
_eventsCount: 8,
_maxListeners: undefined,
_writableState: [WritableState],
writable: true,
allowHalfOpen: true,
_sockname: null,
_pendingData: null,
_pendingEncoding: '',
server: [Server],
_server: [Server],
timeout: 120000,
parser: [HTTPParser],
on: [Function: socketOnWrap],
_paused: false,
_httpMessage: [Circular],
[Symbol(asyncId)]: 566,
[Symbol(kHandle)]: [TCP],
[Symbol(lastWriteQueueSize)]: 0,
[Symbol(timeout)]: Timeout {
/*...*/
},
[Symbol(kBytesRead)]: 0,
[Symbol(kBytesWritten)]: 0
},
connection: Socket {
connecting: false,
_hadError: false,
_parent: null,
_host: null,
_readableState: [ReadableState],
readable: true,
_events: [Object],
_eventsCount: 8,
_maxListeners: undefined,
_writableState: [WritableState],
writable: true,
allowHalfOpen: true,
_sockname: null,
_pendingData: null,
_pendingEncoding: '',
server: [Server],
_server: [Server],
timeout: 120000,
parser: [HTTPParser],
on: [Function: socketOnWrap],
_paused: false,
_httpMessage: [Circular],
[Symbol(asyncId)]: 566,
[Symbol(kHandle)]: [TCP],
[Symbol(lastWriteQueueSize)]: 0,
[Symbol(timeout)]: Timeout {
_idleTimeout: 120000,
_idlePrev: [TimersList],
_idleNext: [TimersList],
_idleStart: 3273,
_onTimeout: [Function: bound ],
_timerArgs: undefined,
_repeat: null,
_destroyed: false,
[Symbol(refed)]: false,
[Symbol(asyncId)]: 567,
[Symbol(triggerId)]: 566
},
[Symbol(kBytesRead)]: 0,
[Symbol(kBytesWritten)]: 0
},
_header: null,
_onPendingData: [Function: bound updateOutgoingData],
_sent100: false,
_expect_continue: false,
req: IncomingMessage {
/*...*/
},
locals: [Object: null prototype] {},
[Symbol(isCorked)]: false,
[Symbol(outHeadersKey)]: [Object: null prototype] {
'x-powered-by': [Array],
'access-control-allow-origin': [Array]
}
},
_extensionStack: GraphQLExtensionStack { extensions: [ [CacheControlExtension] ] }
}
This is how I did it in my logging interceptor:
@Injectable()
export class LoggingInterceptor implements NestInterceptor {
constructor(private readonly logger: Logger) {}
intercept(context: ExecutionContext, next: CallHandler): Observable<any> {
// default REST Api
if (context.getType() === 'http') {
...
...
}
// Graphql
if (context.getType<GqlContextType>() === 'graphql') {
const gqlContext = GqlExecutionContext.create(context);
const info = gqlContext.getInfo();
const res: Response = gqlContext.getContext().res;
// Get user that sent request
const userId = context.getArgByIndex(2).req.user.userId;
const parentType = info.parentType.name;
const fieldName = info.fieldName;
const body = info.fieldNodes[0]?.loc?.source?.body;
const message = `GraphQL - ${parentType} - ${fieldName}`;
// Add request ID,so it can be tracked with response
const requestId = uuidv4();
// Put to header, so can attach it to response as well
res.set('requestId', requestId);
const trace = {
userId,
body
};
this.logger.info(`requestId: ${requestId}`, {
context: message,
trace
});
return next.handle().pipe(
tap({
next: (val: unknown): void => {
this.logNext(val, context);
}
})
);
}
return next.handle();
}
/**
* Method to log response message
*/
private logNext(body: unknown, context: ExecutionContext): void {
// default REST Api
if (context.getType() === 'http') {
...
...
}
if (context.getType<GqlContextType>() === 'graphql') {
const gqlContext = GqlExecutionContext.create(context);
const info = gqlContext.getInfo();
const parentType = info.parentType.name;
const fieldName = info.fieldName;
const res: Response = gqlContext.getContext().res;
const message = `GraphQL - ${parentType} - ${fieldName}`;
// Remove secure fields from request body and headers
const secureBody = secureReqBody(body);
const requestId = res.getHeader('requestId');
// Log trace message
const trace = {
body: { ...secureBody }
};
this.logger.info(`requestId: ${requestId}`, {
context: message,
trace
});
}
}
}
The interceptor is actually called before and after the response, or at least it should be, so that you can have pre-request logic (request in) and post-request logic (response out). You should be able to do all pre-request processing before you call next.handle(), and then you can use RxJS observable operators such as tap or map after a pipe() call. Your allData variable should have all the information from the request/response, and you can even use the context variable to get even more information.
What does allData currently print for you? Have you tried GqlExecutionContext.create(context).getContext().req or GqlExecutionContext.create(context).getContext().res? These are shown being used in the Guards documentation to get the request and response objects like you would with a normal HTTP call.
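To make that concrete, here is a minimal sketch of the pattern described above (assuming @nestjs/graphql is installed); it logs the resolver being called before next.handle() and the resolver's return value, which is what allData holds, afterwards:
import { CallHandler, ExecutionContext, Injectable, NestInterceptor } from '@nestjs/common';
import { GqlExecutionContext } from '@nestjs/graphql';
import { Observable } from 'rxjs';
import { tap } from 'rxjs/operators';

@Injectable()
export class LoggingInterceptor implements NestInterceptor {
  intercept(context: ExecutionContext, next: CallHandler): Observable<any> {
    const info = GqlExecutionContext.create(context).getInfo();
    console.log(`GraphQL ${info.parentType.name}.${info.fieldName} called`); // pre-request logic
    return next.handle().pipe(
      tap(resolverResult => console.log('resolver returned:', resolverResult)) // post-request logic
    );
  }
}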

Firebase image upload, resize via cloud functions (https), and save to firestore

I am trying to achieve the following, however as a bit of a newbie, I can't see what I'm doing wrong:
File upload triggers a cloud function (https.onRequest())
Resize the image 3 times (100x100 thumb, 500x500 thumb, resize original to limit its size)
Save all 3 images to the firebase storage bucket (Google Storage)
Get the respective signed URLs of the 3 images
Save signed URLs to Firestore
The following code works, but not consistently:
In my index.js file I create the HTTP trigger by:
export const image = functions.https.onRequest(uploadImage);
That calls:
import * as admin from "firebase-admin";
import * as Busboy from "busboy";
import * as fs from "fs";
import * as os from "os";
import * as path from "path";
const cors = require("cors")({ origin: true });
const spawn = require("child-process-promise").spawn;
const gcconfig = {
projectId: "PROJECT_ID",
keyFilename: "key.json"
};
const gcs = require("#google-cloud/storage")(gcconfig);
export const uploadImage = (req, res) => {
cors(req, res, () => {
if (req.method === "POST") {
const busboy = new Busboy({ headers: req.headers });
const docRef = req.query.docRef;
const fieldRef = req.query.fieldRef;
const storageRef = req.query.storageRef;
let fileData = null;
// Max height and width of the thumbnail in pixels.
const THUMB_MAX_HEIGHT = 100;
const THUMB_MAX_WIDTH = 100;
const THUMB_PREFIX = "thumb_";
const RESIZED_MAX_HEIGHT = 500;
const RESIZED_MAX_WIDTH = 500;
const RESIZED_PREFIX = "resized_";
const ORIGINAL_MAX_HEIGHT = 1000;
const ORIGINAL_MAX_WIDTH = 1400;
// Listen for event when Busboy finds a file to stream.
busboy.on("file", (fieldname, file, filename, encoding, mimetype) => {
const tempLocalFile = path.join(os.tmpdir(), filename);
fileData = { file: filename, type: mimetype };
// Download file local tmp.
file.pipe(fs.createWriteStream(tempLocalFile));
});
busboy.on("finish", () => {
// File and directory paths.
const tempLocalFile = path.join(os.tmpdir(), fileData.file);
const tempLocalThumbFile = path.join(os.tmpdir(),`${THUMB_PREFIX}${fileData.file}`);
const tempLocalResizedFile = path.join(os.tmpdir(),`${RESIZED_PREFIX}${fileData.file}`);
const origFilePath = path.normalize(path.join(storageRef, fileData.file));
const thumbFilePath = path.normalize(path.join(storageRef, `${THUMB_PREFIX}${fileData.file}`));
const resizedFilePath = path.normalize(path.join(storageRef, `${RESIZED_PREFIX}${fileData.file}`));
// Cloud Storage files.
const bucket = gcs.bucket("PROJECT_ID.appspot.com");
const metadata = {
contentType: fileData.type,
"Cache-Control": "public,max-age=3600"
};
// Generate a thumbnail called tempLocalThumbFile
const promise = spawn(
"convert",
[
tempLocalFile,
"-thumbnail",
`${THUMB_MAX_WIDTH}x${THUMB_MAX_HEIGHT}>`,
tempLocalThumbFile
],
{ capture: ["stdout", "stderr"] }
);
return promise
.then(() => {
// Generate a resized_ called tempLocalResizedFile
return spawn(
"convert",
[
tempLocalFile,
"-thumbnail",
`${RESIZED_MAX_WIDTH}x${RESIZED_MAX_HEIGHT}>`,
tempLocalResizedFile
],
{ capture: ["stdout", "stderr"] }
);
})
.then(() => {
// Resize original to ensure it's not massive
return spawn(
"convert",
[
tempLocalFile,
"-thumbnail",
`${ORIGINAL_MAX_WIDTH}x${ORIGINAL_MAX_HEIGHT}>`,
tempLocalFile
],
{ capture: ["stdout", "stderr"] }
);
})
.then(() => {
// upload images to storage
const thumbUp = bucket.upload(tempLocalThumbFile, {
destination: thumbFilePath,
metadata: metadata
});
const resizeUp = bucket.upload(tempLocalResizedFile, {
destination: resizedFilePath,
metadata: metadata
});
const origUp = bucket.upload(tempLocalFile, {
destination: origFilePath,
metadata: metadata
});
return Promise.all([thumbUp, resizeUp, origUp]);
})
.then(() => {
// Once the image has been uploaded delete the local files to free up disk space.
fs.unlinkSync(tempLocalFile);
fs.unlinkSync(tempLocalThumbFile);
fs.unlinkSync(tempLocalResizedFile);
const config = {
action: "read",
expires: "03-01-2500"
};
// Get the Signed URLs for the thumbnail and original image.
return Promise.all([
bucket.file(thumbFilePath).getSignedUrl(config),
bucket.file(resizedFilePath).getSignedUrl(config),
bucket.file(origFilePath).getSignedUrl(config)
]);
})
.then(results => {
console.log("Got Signed URLs.", results);
const thumbFileUrl = results[0][0];
const resizeFileUrl = results[1][0];
const origFileUrl = results[2][0];
// Add the URLs to the Database
return admin
.firestore()
.doc(docRef)
.update({
[fieldRef + ".original"]: origFileUrl,
[fieldRef + ".resized"]: resizeFileUrl,
[fieldRef + ".thumb"]: thumbFileUrl
});
})
.then(() => {
console.log("Image URLs saved to Firestore.");
res.status(200).send({
message: "File Saved."
});
})
.catch(err => {
console.log("Error:", err);
res.status(500).send({
error: err
});
});
});
busboy.end(req.rawBody);
} else {
return res.status(500).json({
message: "Not Allowed"
});
}
});
};
The error message I receive when it fails is:
Error: { ChildProcessError: convert /tmp/ff658860-cc0f-11e8-bd7d-178b6a853dfe.png -thumbnail 100x100> /tmp/thumb_ff658860-cc0f-11e8-bd7d-178b6a853dfe.png failed with code 1 at ChildProcess. (/user_code/node_modules/child-process-promise/lib/index.js:132:23) at emitTwo (events.js:106:13) at ChildProcess.emit (events.js:191:7) at maybeClose (internal/child_process.js:920:16) at Socket. (internal/child_process.js:351:11) at emitOne (events.js:96:13) at Socket.emit (events.js:188:7) at Pipe._handle.close [as _onclose] (net.js:509:12) name: 'ChildProcessError', code: 1, childProcess: ChildProcess { domain: Domain { domain: null, _events: [Object], _eventsCount: 1, _maxListeners: undefined, members: [] }, _events: { error: [Function: t], close: [Function] }, _eventsCount: 2, _maxListeners: undefined, _closesNeeded: 3, _closesGot: 3, connected: false, signalCode: null, exitCode: 1, killed: false, spawnfile: 'convert', _handle: null, spawnargs: [ 'convert', '/tmp/ff658860-cc0f-11e8-bd7d-178b6a853dfe.png', '-thumbnail', '100x100>', '/tmp/thumb_ff658860-cc0f-11e8-bd7d-178b6a853dfe.png' ], pid: 12, stdin: Socket { connecting: false, _hadError: false, _handle: null, _parent: null, _host: null, _readableState: [Object], readable: false, domain: [Object], _events: [Object], _eventsCount: 2, _maxListeners: undefined, _writableState: [Object], writable: false, allowHalfOpen: false, destroyed: true, _bytesDispatched: 0, _sockname: null, _pendingData: null, _pendingEncoding: '', server: null, _server: null, write: [Function: writeAfterFIN], _idleNext: null, _idlePrev: null, _idleTimeout: -1 }, stdout: Socket { connecting: false, _hadError: false, _handle: null, _parent: null, _host: null, _readableState: [Object], readable: false, domain: [Object], _events: [Object], _eventsCount: 4, _maxListeners: undefined, _writableState: [Object], writable: false, allowHalfOpen: false, destroyed: true, _bytesDispatched: 0, _sockname: null, _pendingData: null, _pendingEncoding: '', server: null, _server: null, read: [Function], _consuming: true, _idleNext: null, _idlePrev: null, _idleTimeout: -1, write: [Function: writeAfterFIN] }, stderr: Socket { connecting: false, _hadError: false, _handle: null, _parent: null, _host: null, _readableState: [Object], readable: false, domain: [Object], _events: [Object], _eventsCount: 4, _maxListeners: undefined, _writableState: [Object], writable: false, allowHalfOpen: false, destroyed: true, _bytesDispatched: 0, _sockname: null, _pendingData: null, _pendingEncoding: '', server: null, _server: null, read: [Function], _consuming: true, _idleNext: null, _idlePrev: null, _idleTimeout: -1, write: [Function: writeAfterFIN] }, stdio: [ [Object], [Object], [Object] ] }, stdout: '', stderr: 'convert: improper image header /tmp/ff658860-cc0f-11e8-bd7d-178b6a853dfe.png\' # error/png.c/ReadPNGImage/3927.\nconvert: no images defined/tmp/thumb_ff658860-cc0f-11e8-bd7d-178b6a853dfe.png\' # error/convert.c/ConvertImageCommand/3210.\n' }
I'm not familiar with busboy, but I believe I'm ending the process correctly. However, since this typically only works for the first upload after deploying the cloud function, I suspect the code may not be ending cleanly?
Any help is greatly appreciated. Thank you.
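One common cause of the "improper image header" error in this pattern (an assumption, not a confirmed diagnosis) is that busboy's finish event fires when the request has been parsed, which can be before the fs write stream has flushed the temp file to disk, so ImageMagick sees a truncated PNG. Here is a sketch of waiting for the write streams before starting the convert chain:
// Track each temp-file write and wait for it before processing.
const fileWrites = [];

busboy.on("file", (fieldname, file, filename, encoding, mimetype) => {
  const tempLocalFile = path.join(os.tmpdir(), filename);
  fileData = { file: filename, type: mimetype };
  const writeStream = fs.createWriteStream(tempLocalFile);
  file.pipe(writeStream);
  // Resolve only once the bytes are actually on disk.
  fileWrites.push(new Promise((resolve, reject) => {
    file.on("end", () => writeStream.end());
    writeStream.on("finish", resolve);
    writeStream.on("error", reject);
  }));
});

busboy.on("finish", () => {
  Promise.all(fileWrites).then(() => {
    // ...run the existing convert / upload / getSignedUrl chain from the question here
  });
});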
