I am using an Express.js server to call the Amazon MWS API. According to the MWS documentation, the _GET_REMOTE_FULFILLMENT_ELIGIBILITY_ report is returned as an Excel file.
I created the API in Node.js, but I am not able to get a proper Excel file; the downloaded file contains garbled characters.
You can see the downloaded Excel file in the attachment.
// Assuming the amazon-mws npm client and the xlsx (SheetJS) package.
const amazonMws = require('amazon-mws')('MWS_ACCESS_KEY', 'MWS_SECRET_KEY');
const XLSX = require('xlsx');

const getRemoveFulfillmentEligibilityDataCon = async (req, res) => {
    const mwsRequestData = {
        Version: '2009-01-01',
        Action: 'GetReport',
        SellerId: 'MWS_SELLER_ID',
        MWSAuthToken: 'MWS_AUTH_TOKEN',
        ReportId: 'XXXXXXXXXXXXXX',
    };
    try {
        const response = await amazonMws.reports.search(mwsRequestData);
        /* make the worksheet from the report data */
        const ws = XLSX.utils.json_to_sheet(response.data);
        const wb = XLSX.utils.book_new();
        XLSX.utils.book_append_sheet(wb, ws);
        XLSX.writeFile(wb, 'sheetjs.xlsx');
        return response;
    } catch (error) {
        console.log('error ', error);
        return error;
    }
};
Finally, I found a solution. I changed the way I call the API, as shown below, and now I get a proper Excel file.
I used node-fetch to send the request to Amazon MWS directly.
node-fetch decodes the content encoding (gzip/deflate) properly and converts the output to UTF-8 automatically.
const fetch = require('node-fetch');
const fs = require('fs');
const crypto = require('crypto');

const downloadReport = async () => {
    const param = {};
    param['AWSAccessKeyId'] = 'xxxxxxxxxxxxx';
    param['Action'] = 'GetReport';
    param['MarketplaceId'] = 'xxxxxxxxxxxxx';
    param['SellerId'] = 'xxxxxxxxxxxxx';
    param['ReportId'] = 'xxxxxxxxxxxxx';
    param['ItemCondition'] = 'New';
    param['SignatureMethod'] = 'HmacSHA256';
    param['SignatureVersion'] = '2';
    param['Timestamp'] = encodeURIComponent(new Date().toISOString());
    param['Version'] = '2009-01-01';
    const secret = 'xxxxxxxxxxxxx';

    // Build the sorted key=value list required by MWS Signature Version 2.
    const pairs = [];
    for (const key in param) {
        pairs.push(key + '=' + param[key]);
    }
    pairs.sort();
    const arr = pairs.join('&');

    // String to sign: HTTP verb, host, path, and the sorted query string.
    const sign = 'POST\n' + 'mws.amazonservices.com\n' + '/Reports/2009-01-01\n' + arr;
    const s64 = crypto.createHmac('sha256', secret).update(sign).digest('base64');
    const signature = encodeURIComponent(s64);
    const bodyData = arr + '&Signature=' + signature;

    const fileName = 'sheetjs.xlsx';
    const apiResponse = await fetch('https://mws.amazonservices.com/Reports/2009-01-01', {
        method: 'POST',
        body: bodyData,
        headers: {
            'content-type': 'application/x-www-form-urlencoded',
            'Accept': '',
        },
    })
        .then(res => new Promise((resolve, reject) => {
            // Stream the report body to disk; resolve only once the write
            // stream has finished, so the status actually propagates.
            const dest = fs.createWriteStream('./' + fileName);
            res.body.pipe(dest)
                .on('finish', () => resolve({ Status: 200, Message: 'Downloaded' }))
                .on('error', reject);
        }))
        .catch(error => {
            console.log('Request failed', error);
        });
    return apiResponse;
};
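For completeness, a hypothetical caller for the wrapper above (the downloadReport name is mine, introduced for this rewrite; wire it to whatever route you actually use):
// Hypothetical Express route using the downloadReport() wrapper above.
app.get('/download-report', async (req, res) => {
    const result = await downloadReport();
    res.json(result || { Status: 500, Message: 'Download failed' });
});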
Related
I have an API POST endpoint that updates a customer's information in DynamoDB. It is set to authenticate using AWS_IAM. I am getting a 403 from my Lambda when calling this API. I have granted the execute-api:Invoke permission for the API to the role the Lambda uses. I see in this post that I need to create a canonical request. I was able to come up with the code below, but I still get a 403. I can't figure out what is missing and hope a fresh pair of eyes can spot the problem. Please help!
"use strict";
const https = require("https");
const crypto = require("crypto");
exports.handler = async (event, context, callback) => {
try {
var attributes = {
customerId: 1,
body: { firstName: "abc", lastName: "xyz" }
};
await updateUsingApi(attributes.customerId, attributes.body)
.then((result) => {
var jsonResult = JSON.parse(result);
if (jsonResult.statusCode === 200) {
callback(null, {
statusCode: jsonResult.statusCode,
statusMessage: "Attributes saved successfully!"
});
} else {
callback(null, jsonResult);
}
})
.catch((err) => {
console.log("error: ", err);
callback(null, err);
});
} catch (error) {
console.error("error: ", error);
callback(null, error);
}
};
function sign(key, message) {
return crypto.createHmac("sha256", key).update(message).digest();
}
function getSignatureKey(key, dateStamp, regionName, serviceName) {
var kDate = sign("AWS4" + key, dateStamp);
var kRegion = sign(kDate, regionName);
var kService = sign(kRegion, serviceName);
var kSigning = sign(kService, "aws4_request");
return kSigning;
}
function updateUsingApi(customerId, newAttributes) {
var request = {
partitionKey: `MY_CUSTOM_PREFIX_${customerId}`,
sortKey: customerId,
payLoad: newAttributes
};
var data = JSON.stringify(request);
var apiHost = new URL(process.env.REST_API_INVOKE_URL).hostname;
var apiMethod = "POST";
var path = `/stage/postEndPoint`;
var { amzdate, authorization, contentType } = getHeaders(apiHost, apiMethod, path, data);
const options = {
    host: apiHost,
    path: path,
    method: apiMethod,
headers: {
"X-Amz-Date": amzdate,
Authorization: authorization,
"Content-Type": contentType,
"Content-Length": data.length
}
};
return new Promise((resolve, reject) => {
const req = https.request(options, (res) => {
if (res && res.statusCode !== 200) {
console.log("response from api", res);
}
var response = {
statusCode: res.statusCode,
statusMessage: res.statusMessage
};
resolve(JSON.stringify(response));
});
req.on("error", (e) => {
console.log("error", e);
reject(e.message);
});
req.write(data);
req.end();
});
}
function getHeaders(host, method, path, payload) {
var algorithm = "AWS4-HMAC-SHA256";
var region = "us-east-1";
var serviceName = "execute-api";
var secretKey = process.env.AWS_SECRET_ACCESS_KEY;
var accessKey = process.env.AWS_ACCESS_KEY_ID;
var contentType = "application/x-amz-json-1.0";
var now = new Date();
var amzdate = now
.toJSON()
.replace(/[-:]/g, "")
.replace(/\.[0-9]*/, "");
var datestamp = now.toJSON().replace(/-/g, "").replace(/T.*/, "");
var canonicalHeaders = `content-type:${contentType}\nhost:${host}\nx-amz-date:${amzdate}\n`;
var signedHeaders = "content-type;host;x-amz-date";
var payloadHash = crypto.createHash("sha256").update(payload).digest("hex"); // hash the actual request body; it must match what is sent
var canonicalRequest = [
    method,
    path,
    "", // canonical query string (empty: this request has no query parameters)
    canonicalHeaders,
    signedHeaders,
    payloadHash
].join("\n"); // note: "\n", not "/n" -- the wrong separator invalidates the signature
var credentialScope = [datestamp, region, serviceName, "aws4_request"].join(
"/"
);
const sha56 = crypto
.createHash("sha256")
.update(canonicalRequest)
.digest("hex");
var stringToSign = [algorithm, amzdate, credentialScope, sha56].join("\n");
var signingKey = getSignatureKey(secretKey, datestamp, region, serviceName);
var signature = crypto
.createHmac("sha256", signingKey)
.update(stringToSign)
.digest("hex");
var authorization = `${algorithm} Credential=${accessKey}/${credentialScope}, SignedHeaders=${signedHeaders}, Signature=${signature}`;
return { amzdate, authorization, contentType };
}
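For reference, here is a minimal standalone sketch of the canonical-request layout that SigV4 expects; the host, date, and body values below are made up, but the structure (including the empty query-string line and the blank line after the headers) is what API Gateway verifies against:
const crypto = require("crypto");

// Illustrative values only; a real request uses its actual host, date, and body.
const body = JSON.stringify({ partitionKey: "MY_CUSTOM_PREFIX_1", sortKey: 1 });
const canonicalRequest = [
    "POST",                                   // HTTP method
    "/stage/postEndPoint",                    // canonical URI
    "",                                       // canonical query string (none)
    "content-type:application/x-amz-json-1.0\n" +
    "host:example.execute-api.us-east-1.amazonaws.com\n" +
    "x-amz-date:20240101T000000Z\n",          // canonical headers, each ending in \n
    "content-type;host;x-amz-date",           // signed headers
    crypto.createHash("sha256").update(body).digest("hex") // payload hash
].join("\n");

console.log(canonicalRequest); // compare against the examples in the AWS docs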
I am creating an Excel file on the Node.js side and returning base64 data to React to download the file. On the Node.js side I use Promise.all, fetch data from a server in chunks, and append the rows to the worksheet with
worksheet.addRows(data);
For around 20-30k rows it works fine, but for around 100k rows Node.js crashes with a heap out of memory error.
I have also increased the memory allocated to Node.js, but I get the same error:
node --max_old_space_size=5000 app.js
What am I doing wrong? Any suggestions?
Node.js
const axios = require('axios');
var excel = require("exceljs");
const workbook = new excel.Workbook();
const worksheet = workbook.addWorksheet("My Sheet");
worksheet.columns = [
{ header: "TicketId", key: "ticketId" },
{ header: "Email", key: 'user_email' },
{ header: "User", key : 'user_name' },
{ header: "Subject", key: "subject" },
...//many more headers
];
exports.getTicketData = async (req, res, next) => {
res.connection.setTimeout(0);
const { body } = req;
const token = body.token;
const organization_id = body.organization_id;
const server = body.server;
const sideFilter = body.sideFilter;
let baseurl = 'url for server end to fetch data';
if (baseurl) {
let data = new Array();
let limit = 3000;
const promises = [];
try {
let count = await getCount(token,limit, organization_id, baseurl, sideFilter);
for(var i = 1;i<=count;i++) {
promises.push(getData(i,limit,organization_id,token, baseurl, sideFilter));
}
await Promise.all(promises);
var base64File = await writeExcelAndUpload(workbook);
return res.status(200).json({ file:base64File });
} catch (err) {
return res.status(400).json({ type:'error', msg:'File not generated please contact support staff' });
}
} else {
return res.status(400).json({ type:'error', msg:'please define server name' });
}
};
let getData = (page,limit, organization_id,token, baseurl, sideFilter) =>{
return new Promise((resolve, reject) => {
axios.post(baseurl+`/v2/get-export`, {
page:page,
organization_id:organization_id,
per_page:limit,
filter: "",
sorted:"",
...sideFilter
},{ headers: {"Authorization" : `Bearer ${token}`} }).then(function (response) {
let dataTemp = response.data.data.data.map((t,i)=>{
return {
...t,
name:t.name,
...//many more columns like 70
}
});
worksheet.addRows(dataTemp);
resolve(true);
}).catch(function (error) {
reject(error);
});
});
}
let getCount = (token,limit, organization_id, baseurl, sideFilter) => {
// run an api and return count against limit
}
let writeExcelAndUpload = async (workbook) => {
const fileBuffer = await workbook.xlsx.writeBuffer();
let base64File = Buffer.from(fileBuffer).toString('base64');
base64File = 'data:application/vnd.openxmlformats-officedocument.spreadsheetml.sheet;base64,'+base64File;
return base64File;
}
Client side (React)
exportLink = () => {
const postData = {
...
};
return axios.post(`${baseurl}/api/ticketing/get-ticket`, postData).then(function (response) {
const downloadLink = document.createElement("a");
const fileName = "export.xlsx";
downloadLink.href = response.data.file;
downloadLink.download = fileName;
downloadLink.click();
}).catch(function(error){
throw error;
});
}
Well, it is somewhat expected that you may run out of heap memory when working with that many entries (around 100k).
I would suggest using pagination: instead of fetching e.g. 100k entries at once, fetch 1k entries, do what you need with them, then fetch the next 1k, and repeat until you have processed all entries.
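A minimal sketch of that idea, combined with exceljs's streaming workbook writer so finished rows are flushed to disk instead of accumulating in memory. The endpoint, field names, and response shape below are assumptions mirroring the question, not a drop-in implementation:
const axios = require('axios');
const excel = require('exceljs');

async function exportTickets({ baseurl, token, organization_id, sideFilter }) {
    // Streaming writer: committed rows are written out, not kept in RAM.
    const workbook = new excel.stream.xlsx.WorkbookWriter({ filename: 'export.xlsx' });
    const worksheet = workbook.addWorksheet('My Sheet');
    worksheet.columns = [
        { header: 'TicketId', key: 'ticketId' },
        { header: 'Email', key: 'user_email' },
        // ...remaining headers
    ];

    const limit = 1000;
    for (let page = 1; ; page++) {
        // Sequential page fetch, mirroring the question's getData().
        const { data } = await axios.post(`${baseurl}/v2/get-export`, {
            page, per_page: limit, organization_id, filter: '', sorted: '', ...sideFilter,
        }, { headers: { Authorization: `Bearer ${token}` } });

        const rows = data.data.data;
        rows.forEach((row) => worksheet.addRow(row).commit()); // flush each row
        if (rows.length < limit) break; // last page reached
    }

    worksheet.commit();
    await workbook.commit(); // finalize the file on disk
}
Note that returning the finished file as a single base64 string will itself hold the whole file in memory; letting the client download the file directly (e.g. res.download) scales better.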
I have a Node.js function that writes several small SVG files locally and then attempts to upload those files to a cloud bucket.
In the function log, I only see the message that the file was written to local disk and will now be uploaded. But there is no file in the bucket and no error logged anywhere. I have made sure the timeout is set to 9 minutes (the max), so I am sure it is not timing out. What else should I check?
Any pointers will be appreciated.
exports.createQRCode = functions.storage.object().onFinalize(async (object) =>{
const qrcodeMonkeyKey = functions.config().qrcodemonkey.key;
//console.log(`key for qrcode monkey is ${qrcodeMonkeyKey}`);
const fileBucket = object.bucket; // The Storage bucket that contains the file.
const filePath = object.name; // File path in the bucket.
const contentType = object.contentType; // File content type.
const metageneration = object.metageneration; // Number of times metadata has been generated. New objects have a value of 1.
console.log(fileBucket);
console.log(filePath);
if(!filePath.toLowerCase().endsWith('.csv'))
return console.log('not a csv so no need to anything fancy');
const bucket = admin.storage().bucket(fileBucket);
const filePathComps = filePath.split('/');
const folderName = filePathComps[filePathComps.length-3];
if(folderName !== "qrcode")
return console.log('not a qr code csv so no need to anything fancy');
const fileName = filePathComps[filePathComps.length-1];
console.log(fileName);
const path = require('path');
const os = require('os');
const fs = require('fs');
const tempFilePath = path.join(os.tmpdir(), fileName);
const metadata = {
contentType: contentType,
};
await bucket.file(filePath).download({destination: tempFilePath});
const csv = require('csv-parser')
const results = [];
// Wrap the stream in a Promise and return it, so the function instance is
// not terminated before the 'end' handler (and the uploads) have finished.
return new Promise((resolve, reject) => {
    fs.createReadStream(tempFilePath)
        .pipe(csv({
            headers: ['uri', 'filename', 'foldername'],
            skipLines: 1
        }))
        .on('data', (data) => {
            results.push(data);
        })
        .on('end', async () => {
            try {
                const pArray = results.map(x =>
                    createQRCodeAndUpload(qrcodeMonkeyKey, x.filename, x.uri, x.foldername));
                const finaloutput = await Promise.all(pArray);
                console.log(JSON.stringify(finaloutput));
                resolve(null);
            } catch (e) {
                reject(e);
            }
        })
        .on('error', reject);
});
});
const createQRCodeAndUpload = async (qrcodeMonkeyKey,fileName, url,foldername) =>{
const bucket = admin.storage().bucket('vmallapp.appspot.com');
const path = require('path');
const os = require('os');
const fs = require('fs');
var axios = require("axios").default;
console.log('processing ' + url);
if(url !==""){
const dataToSend = {
data : url,
config :{
body:'circle',
eye:'frame14',
eyeBall:'ball16',
bodyColor:"#032b5c",
bgColor:"#84d4e2",
"logo":"ae600e1267b9e477f0b635b60ffaec1d1c18d93b.png"
},
size:1200,
download:false,
file:'svg',
gradientOnEyes:true
}
var options = {
method: 'POST',
url: 'https://qrcode-monkey.p.rapidapi.com/qr/custom',
headers: {
'content-type': 'application/json',
'x-rapidapi-host': 'qrcode-monkey.p.rapidapi.com',
'x-rapidapi-key': qrcodeMonkeyKey
},
data: dataToSend
};
var response = await axios.request(options);
console.log('qrcode monkey returned status' + response.status);
const outputFilePath = path.join(os.tmpdir(), `${fileName}.svg`);
fs.writeFileSync(outputFilePath, response.data);
console.log(`${fileName}.svg written to local disk. now will upload`);
try{
await bucket.upload(outputFilePath, {
destination: `qrcode/output/${fileName}.svg`
});
}catch(error){
console.log('error in uploading ' + error);
}
console.log('lets delete the file now and clean up local storage');
fs.unlinkSync(outputFilePath);
return 'all done';
}
}
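As an aside, the fix above hinges on one general rule: event-based stream work inside a Cloud Function has to be wrapped in a Promise that the function returns or awaits; otherwise the runtime may tear the instance down before the callbacks fire. A generic sketch of that pattern (the helper name is mine):
// Hypothetical helper: resolve when a stream finishes, reject on error.
function streamToPromise(stream) {
    return new Promise((resolve, reject) => {
        stream.on('end', resolve).on('finish', resolve).on('error', reject);
    });
}

// Inside the function body, keep the instance alive until parsing is done:
// await streamToPromise(fs.createReadStream(tempFilePath).pipe(csvParser));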
I've been trying for several days, but I can't parse the JSON returned by this URL: http://sdmx.istat.it/SDMXWS/rest/dataflow/IT1//?detail=full&references=none&format=jsonstructure
I can write it to a file, and if I upload that file to one of the many online tools, it tells me it is valid.
Testing the URL in the same online tools also gives me valid JSON.
This is my code; what am I doing wrong?
var express = require('express');
var router = express.Router();
const fetch = require('node-fetch');
const JSONstat = require("jsonstat-toolkit");
const JSONstatUtils = require("jsonstat-suite");
const fs = require('fs');
router.get('/', async(req, res) => {
var ws_entry_point ='http://sdmx.istat.it/SDMXWS/rest';
var resource = 'dataflow';
var agencyID ='IT1';
var detail = 'full';
var references = 'none';
var format = 'jsonstructure';
var queryUrl = ws_entry_point + '/' + resource + '/' + agencyID + '//?detail=' + detail + '&references=' + references + '&format=' + format;
//http://sdmx.istat.it/SDMXWS/rest/dataflow/IT1//?detail=full&references=none&format=jsonstructure
console.log( queryUrl );
fetch(queryUrl, {
method: 'GET',
headers: {
'Content-Type': 'application/json, charset=UTF-8'
}}).then(checkResponseStatus)
.then(async response => {
try {
const data = await response.json()
console.log('response data?', data)
} catch(error) {
console.log('Error happened here!')
console.error(error)
}
}).catch(err => console.log(err));
// fs.writeFile('dataflow.json', res.data, function (err) {
// if (err) return console.log(err);
//
// console.log('write response.text > dataflow.json');
// fs.readFile('dataflow.json', 'utf8', function (err, dataF) {
// if (err) throw err; // we'll not consider error handling for now
// console.log('read < dataflow.json');
// //var obj = JSON.parse(dataF);
// console.log(dataF);
// });
// });
});
function checkResponseStatus(res) {
if(res.ok){
return res
} else {
throw new Error(`The HTTP status of the response: ${res.status} (${res.statusText})`);
}
}
Any help is appreciated. Regards,
Maurizio
The server does not respond with the requested application/json content type; instead it sends a response with content type application/vnd.sdmx.structure+json and a body that contains a JSON-like string with some stray whitespace.
You can fix this by calling .text() on the response and parsing the content manually after trimming it:
const rawData = await response.text();
const data = JSON.parse(rawData.trim());
console.log('response data?', data)
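Put together, the fetch handler might look like this; a sketch of the same fix in context, reusing queryUrl and the checkResponseStatus helper from the question:
fetch(queryUrl, { method: 'GET' })
    .then(checkResponseStatus)
    .then(async response => {
        // The body is not served as application/json, so parse it by hand.
        const rawData = await response.text();
        const data = JSON.parse(rawData.trim());
        console.log('response data?', data);
    })
    .catch(err => console.log(err));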
Hi, I'm trying to update a Google Doc with Node.js using the Google Drive API, and I'm getting this error:
{
"error": {
"code": 500,
"message": null
}
}
Here's the relevant code:
var j = new google.auth.JWT(
creds.client_email,
null,
creds.private_key,
[
"https://www.googleapis.com/auth/drive"
]
);
async function refreshTokens() {
startedWaiting = true;
return j.authorize((r,t) => {
startedWaiting = false;
timeTillNeedRefresh = t["expiry_date"] - Date.now();
setTimeout(function() {
refreshTokens();
// console.log("doing Q", Q);
}, timeTillNeedRefresh);
tokens = t;
console.log("GOT A TOKEN", tokens);
Q.forEach(x=>x());
Q = [];
});
}
async function start() {
await refreshTokens();
}
start();
function myFetch(opts) {
if(!opts) opts = {};
var cb = opts.cb || empty;
var headers = {
'Accept-Encoding': 'gzip',
'User-Agent': 'google-api-nodejs-client/0.7.2 (gzip)',
Authorization:tokens.token_type +" "+ tokens.access_token,
Accept:"application/json"
};
if(opts.headers) {
for(k in opts.headers) {
headers[k] = opts.headers[k];
}
}
fetch(
    opts.url || "",
    {
        method: opts.method || "GET",
        headers: headers,
        body: opts.body // forward the request body when one is provided
    })
.then(res => res.text())
.then(body => {
cb(body);
});
}
updateFile({
id: "1vebqOamZ9QB4HqfUaQN-U9Zt4y33eij-I21glMvaPVQ",
content: "hi there"
});
async function allFuncs(tmp) {
if(tokens === null) {
console.log("DOING it later")
Q.push(tmp);
await refreshTokens();
} else if(startedWaiting || timeTillNeedRefresh <= 0) {
console.log("NOT for a while");
Q.push(tmp);
} else {
console.log("THIS is what should happen");
start = Date.now();
tmp();
}
}
async function updateFile(opts) {
var id = opts.id,
content = opts.content || "";
if(id) {
allFuncs(() => {
myFetch({
    url: `https://www.googleapis.com/upload/drive/v3/files/${id}?uploadType=media`,
    method: "PATCH",
    body: content, // the upload body belongs on the request itself, not inside headers
    headers: {
        "Content-Type": "application/vnd.google-apps.document"
    },
    cb(b) {
        console.log(b, "DID I DO IT?!");
    }
});
});
}
}
I tried looking up what this error means, but I couldn't find anything related to Node.js...
Does anyone know if it's a header issue, or is there any way to fix it?
I don't know if it's possible to do with a service key, or if you HAVE to verify the user in order to do so.
Seemingly, if the service account's email has edit permissions for the file, it should just be able to edit it at will.
You want to overwrite the existing Google Document with the text hi there using the Drive API with a Service Account.
The Google Document is shared with the Service Account.
You can use googleapis.
That is how I understood your question. If my understanding is correct, how about this sample script? In this sample script, I used the files.update method of the Drive API.
Sample script:
Before you run the script, please set the path of the JSON file downloaded when the Service Account was created. And please confirm the file ID of the Google Document again.
const stream = require('stream');
const {google} = require('googleapis');
const creds = require('###'); // Please set the json file path downloaded when the Service Account is created.
const jwtClient = new google.auth.JWT(
creds.client_email,
null,
creds.private_key,
['https://www.googleapis.com/auth/drive'],
null
);
const id = "1vebqOamZ9QB4HqfUaQN-U9Zt4y33eij-I21glMvaPVQ"; // Please set the file ID of Google Document
const content = "hi there"; // Please set the text.
const drive = google.drive({version: 'v3', auth: jwtClient});
const buf = Buffer.from(content, 'binary');
const buffer = Uint8Array.from(buf);
var bufferStream = new stream.PassThrough();
bufferStream.end(buffer);
const media = {
mimeType: 'application/vnd.google-apps.document',
body: bufferStream,
};
drive.files.update({
fileId: id,
media: media,
}, (err, res) => {
if (err) {
console.log(err);
return;
}
console.log(res.data);
});
Note:
When you run the script, the existing Google Document is overwritten, so please be careful. I recommend using a sample Document for testing.
If this script doesn't work, please confirm the following points:
Drive API is enabled at API console.
The Service Account can be used.
The Google Document is shared with the Service Account.
The version of googleapis is the latest one.
Reference:
Files: update
Edit:
You don't want to use googleapis.
You want to overwrite a Google Document with a text value without using googleapis.
That is what I understood from your comments. If my understanding is correct, how about this sample script? When you run it, please confirm the following points.
About the script,
Please set privateKey and clientEmail from JSON file of Service Account.
Please set the file ID of the existing Google Document you want to overwrite.
Drive API is enabled at API console.
The Service Account can be used.
The Google Document is shared with the Service Account.
The version of googleapis is the latest one.
Sample script:
const cryptor = require('crypto');
const request = require('request');
// Get access token using Service Account
function getAccessToken(serviceAccount) {
const scopes = ["https://www.googleapis.com/auth/drive"];
const url = "https://www.googleapis.com/oauth2/v4/token";
const header = {
alg: "RS256",
typ: "JWT",
};
const now = Math.floor(Date.now() / 1000);
const claim = {
iss: serviceAccount.clientEmail,
scope: scopes.join(" "),
aud: url,
exp: (now + 3600).toString(),
iat: now.toString(),
};
const signature = Buffer.from(JSON.stringify(header)).toString('base64') + "." + Buffer.from(JSON.stringify(claim)).toString('base64');
var sign = cryptor.createSign('RSA-SHA256');
sign.update(signature);
const jwt = signature + "." + sign.sign(serviceAccount.privateKey, 'base64');
return new Promise(function(resolve, reject) {
request({
method: "post",
url: url,
body: JSON.stringify({
assertion: jwt,
grant_type: "urn:ietf:params:oauth:grant-type:jwt-bearer",
}),
}, (err, res, body) => {
if (err) {
console.log(err);
return;
}
const obj = JSON.parse(body);
resolve(obj.access_token);
});
});
}
// Overwrite file in Google Drive
function overWriting(object) {
const metadata = {
mimeType: "application/vnd.google-apps.document",
};
const url = "https://www.googleapis.com/upload/drive/v3/files/" + object.googleDocumentFileId + "?uploadType=multipart";
const boundary = "xxxxxxxxxxx";
var data = "--" + boundary + "\r\n";
data += "Content-Disposition: form-data; name=\"metadata\"\r\n";
data += "Content-Type: application/json; charset=UTF-8\r\n\r\n";
data += JSON.stringify(metadata) + "\r\n";
data += "--" + boundary + "\r\n";
data += "Content-Disposition: form-data; name=\"file\"; filename=\"sample.txt\"\r\n";
data += "Content-Type: text/plain" + "\r\n\r\n";
const payload = Buffer.concat([
Buffer.from(data, "utf8"),
Buffer.from(object.textData, 'binary'), // Buffer.from() replaces the deprecated new Buffer()
Buffer.from("\r\n--" + boundary + "--", "utf8"),
]);
const options = {
method: 'patch',
url: url,
headers: {
"Content-Type": "multipart/related; boundary=" + boundary,
'Authorization': 'Bearer ' + object.accessToken,
},
body: payload,
};
request(options, (error, response, body) => {
console.log(body);
});
}
async function main() {
const serviceAccount = {
privateKey: "###", // private_key of JSON file retrieved by creating Service Account
clientEmail: "###", // client_email of JSON file retrieved by creating Service Account
};
var object = {
googleDocumentFileId: "###", // Set the file ID of the existing Google Document
textData: "hi there",
};
const accessToken = await getAccessToken(serviceAccount);
if (accessToken) {
object.accessToken = accessToken;
overWriting(object);
}
}
main();