Unable to upload large files using nodejs/axios - node.js

I am writing a Node.js client that uploads files (binary or text) from my local dev machine to my server, which is written in Java and which I cannot reconfigure. I am using the following code to upload files. It works fine for files up to 2 GB, but beyond that it throws the error shown below. You might think the server does not allow files larger than 2 GB, but I have successfully uploaded files of up to 10 GB to the same instance using REST clients like Postman and Insomnia.
const fs = require("fs");
const path = require("path");
const axios = require("axios");
const FormData = require("form-data");

function uploadAxios({ filePath }) {
  let formData;
  try {
    formData = new FormData();
    formData.append("filedata", fs.createReadStream(filePath));
  } catch (e) {
    console.error(e);
  }
  axios
    .post(`https://myinstance.com`, formData, {
      headers: {
        ...formData.getHeaders(),
        "Content-Type": "multipart/form-data",
        Authorization: "Basic xyz==",
      },
      maxContentLength: Infinity,
      maxBodyLength: Infinity,
      // maxContentLength: 21474836480,
      // maxBodyLength: 21474836480, // I have tried setting these values with both numbers and the keyword Infinity but nothing works
    })
    .then(console.log)
    .catch(console.error);
}

const filePath = "C:\\Users\\phantom007\\Documents\\BigFiles\\3gb.txt";
uploadAxios({ filePath });
Error I get:
#
# Fatal error in , line 0
# API fatal error handler returned after process out of memory
#
<--- Last few GCs --->
[7844:0000023DC49CE190]    47061 ms: Mark-sweep 33.8 (41.8) -> 33.8 (41.8) MB, 417.2 / 0.1 ms (+ 947.1 ms in 34029 steps since start of marking, biggest step 431.0 ms, walltime since start of marking 15184 ms) finalize incremental marking via stack guard
[7844:0000023DC49CE190]    48358 ms: Mark-sweep 34.4 (41.8) -> 31.8 (40.5) MB, 1048.4 / 0.0 ms (+ 0.0 ms in 1 steps since start of marking, biggest step 0.0 ms, walltime since start of marking 1049 ms) finalize incremental marking via task GC in old space requested
<--- JS stacktrace --->
==== JS stack trace =========================================
Security context: 000002E294C255E9 <JSObject>
0: builtin exit frame: new ArrayBuffer(aka ArrayBuffer)(this=0000022FFFF822D1 <undefined>,65536)
1: _read [fs.js:~2078] [pc=0000004AD942D301](this=0000039E67337641 <ReadStream map = 000002F26D804989>,n=65536)
2: read [_stream_readable.js:454] [bytecode=000002A16EB59689 offset=357](this=0000039E67337641 <ReadStream map = 000002F26D804989>,n=0)
3: push [_stream_readable.js:~201]...
FATAL ERROR: Committing semi space failed. Allocation failed - process out of memory
It looks like the error occurs because the process exceeds its memory limit. I know I can work around this by passing the --max-old-space-size flag, but I want this to be scalable and not hardcode an upper limit.
PS: My dev machine has 12 GB free memory
Edit: I added the error trace.
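One way to avoid buffering the whole request body in memory is to let form-data stream the multipart body itself over a plain https.request instead of handing it to axios. Below is a minimal sketch along those lines, reusing the placeholder endpoint and credentials from the question; it is an untested suggestion, not a verified fix for this particular server:

const fs = require("fs");
const https = require("https");
const FormData = require("form-data");

function uploadStreaming(filePath) {
  const formData = new FormData();
  formData.append("filedata", fs.createReadStream(filePath));

  const req = https.request({
    method: "POST",
    host: "myinstance.com", // placeholder host from the question
    path: "/",
    headers: {
      ...formData.getHeaders(),
      Authorization: "Basic xyz==", // placeholder credentials
    },
  });

  // Pipe the multipart body straight to the socket; only the current chunk
  // is held in memory, never the whole file.
  formData.pipe(req);

  req.on("response", (res) => console.log("Status:", res.statusCode));
  req.on("error", console.error);
}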

I'm using multer to define the limit; see the following code:
app.use(multer({
  storage: storage,
  dest: path.join(pathApp),
  limits: {
    fileSize: 5000000
  },
  fileFilter: function fileFilter(req, file, cb) {
    var filetypes = /json/;
    var mimetype = filetypes.test(file.mimetype);
    var extname = filetypes.test(path.extname(file.originalname));
    if (mimetype && extname) {
      console.log("Port ".concat(app.get('port')) + " - Uploading file " + file.originalname);
      return cb(null, true, req);
    }
    cb(JSON.stringify({
      "success": false,
      "payload": {
        "app": "upload",
        "function": "upload"
      },
      "error": {
        "code": 415,
        "message": 'File type not valid'
      }
    }));
  }
}).single('file1'));

Related

node js - Download pdf via HTTP post and upload to azure blob storage

I have an online API that returns PDF files via HTTP POST requests.
Once I get back the response with the file, I would like to upload it to Azure Blob Storage.
I've tried everything I could find here and I'm unable to get it to work.
Here is where I am now:
async function getPDFfile(idSession) {
  let connectionJson = {
    "DeviceId": "device-id",
    "SessionId": idSession,
    "ContainerId": contaierID
  }
  axios({
    url: 'https://URL/exportPDF/',
    method: 'POST',
    Headers: connectionJson,
    responseType: "arraybuffer",
    responseEncoding: "binary"
  }).then((response) => {
    console.log(response.data)
  });
}
If I print this I get a <Buffer, but as I cannot really see what's inside, I cannot use it.
If I use axios without params:
const response = await axios
  .post('https://URL/exportPDF/', connectionJson)
return response.data
Here I get loads of Unicode characters with some information. I've tried to upload this, but the resulting PDF file is only a few bytes and obviously does not work.
I've tried to get the response as a blob, but that wasn't working either.
Could you please help me figure this out?
This should run on an Azure Function. Thank you for your time.
---- Edit
While the first method doesn't return anything in the data, the second one (without parameters) returns a very long string containing:
%PDF-1.4
%´┐¢´┐¢´┐¢´┐¢
1 0 obj
<<
/CreationDate(D:20220423222622+01'00')
/Creator(empira MigraDoc 1.50.5147 \(www.migradoc.com\))
/Title(TrustID)
/Producer(PDFsharp 1.50.5147-gdi \(www.pdfsharp.com\))
>>
endobj
2 0 obj
<<
/Type/Catalog
/Pages 3 0 R
>>
endobj
3 0 obj
<<
/Type/Pages
/Count 3
/Kids[4 0 R 23 0 R 28 0 R]
>>
endobj
4 0 obj
<<
/Type/Page
/MediaBox[0 0 841.89 595.276]
/Parent 3 0 R
/Contents 5 0 R
/Resources
<<
/ProcSet [/PDF/Text/ImageB/ImageC/ImageI]
/ExtGState
<<
/GS0 6 0 R
/GS1 10 0 R
>>
/XObject
<<
/I0 9 0 R
>>
/Font
<<
/F0 14 0 R
/F1 18 0 R
/F2 22 0 R
>>
>>
/Group
<<
/CS/DeviceRGB
/S/Transparency
>>
>>
endobj
5 0 obj
<<
/Length 2094
/Filter/FlateDecode
>>
stream
[... FlateDecode-compressed binary stream data omitted ...]
If I try to save response.data to a file on my disk, it creates a 3-page PDF with some correct metadata showing in the PDF reader's tab, but the pages are blank:
const response = await axios
  .post('https://APIURL/exportPDF/', connectionJson)
//console.log(((response.data).toString()))
fs.writeFile('c:/temp/my.pdf', response.data, (err) => {
  // if (err) throw err;
  console.log('The file has been saved!');
});
};
As I've been reading that the file might not be completely downloaded, I've tried:
const finishedDownload = util.promisify(stream.finished);
const writer = fs.createWriteStream('c:/temp/myfile.pdf');

let connectionJson = {
  "Username": "",
  "Password": "",
}

const response = await axios({
  method: 'post',
  url: 'https://APIURL/exportPDF/',
  responseType: 'stream',
  headers: connectionJson
});

response.data.pipe(writer);
await finishedDownload(writer);
but this writes a 0-byte file.
Fixed it by including responseEncoding: "binary" in the request config and passing the 'binary' encoding option when writing the file.
Please read the comments in the code!
async function axaios(idSession) {
  let connectionJson = {
    "DeviceId": "device-id",
    "SessionId": idSession,
    "ContainerId": contaierID,
  }
  // important! this option is needed
  let conf = {
    "responseType": "arraybuffer",
    "responseEncoding": "binary"
  }
  const response = await axios
    .post('https://APIULR/exportPDF/', connectionJson, conf)
  //console.log(response.data)
  // 'binary' needed as option!
  await fs.writeFile('c:/temp/my.pdf', response.data, 'binary', (err) => {
    if (err) throw err;
    console.log('The file has been saved!');
  });
}
Thank you all for your suggestions!
Thanks a lot KJ
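For reference, with responseType: "arraybuffer" axios in Node hands back a Buffer, so an equivalent sketch using fs/promises can skip the 'binary' encoding flag entirely (the URL and payload are the placeholders from the question, not tested against this API):

const fs = require("fs/promises");
const axios = require("axios");

async function exportPdf(connectionJson) {
  // With responseType "arraybuffer", response.data is a Node Buffer.
  const response = await axios.post("https://APIURL/exportPDF/", connectionJson, {
    responseType: "arraybuffer",
  });

  // Writing the Buffer directly preserves the binary PDF content.
  await fs.writeFile("c:/temp/my.pdf", Buffer.from(response.data));
  console.log("The file has been saved!");
}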

http-proxy (reverse proxy) server hangs up in a random time

Problem: When I send a request after this (memory-related) log appears, I get a "socket hang up (connResetException)" error and the Node.js server stops working.
[1:0x330e8a0] 22724 ms: Mark-sweep 16.4 (25.2) -> 10.6 (29.9) MB, 3.3 / 0.0 ms (+ 0.1 ms in 2 steps since start of marking, biggest step 0.1 ms, walltime since start of marking 7 ms) (average mu = 0.999, current mu = 1.000) finalize incremental marking via task GC in old space requested
Architecture: I use http-proxy as a reverse proxy. It enables SSO (Single Sign-On) and forwards traffic to the application.
Error log in the proxy server (only happens in production, which has more traffic):
/node_modules/http-proxy/lib/http-proxy/index.js:120
throw err;
^
Error: socket hang up
at connResetException (internal/errors.js:561:14)
at Socket.socketCloseListener (_http_client.js:380:25)
at Socket.emit (events.js:214:15)
at TCP.<anonymous> (net.js:658:12) {
code: 'ECONNRESET'
}
Since "socket hang up" can happen for many reasons, I researched and experimented with various cases, and I'm now thinking a memory leak in my Node process could be the issue.
Running node --trace_gc src/index.js prints 2-3 allocation failures per request.
Weird logs:
[1:0x449d780] 20364 ms: Scavenge 11.9 (13.2) -> 11.3 (13.2) MB, 3.2 / 0.0 ms (average mu = 0.961, current mu = 0.961) allocation failure
Source Code
var apiProxy = httpProxy.createProxyServer();

app.use(cookieParser());
app.use(bodyParser.urlencoded({ extended: false }));
app.use(bodyParser.json());
app.use(passport.initialize({}));
app.use(passport.session({}));
app.use(session({
  secret: 'secret',
  resave: false,
  saveUninitialized: true,
}));

app.get('/source*',
  function(req, res, next) {
    req.query.RelayState = req.url;
    if (req.user) {
      apiProxy.web(req, res, { target: xx });
    }
  });
Problem: [http-proxy expressjs] hangs up at a random time. It usually works for about two days, then the server starts returning 502 errors and I need to restart the container manually to restore it.
Any idea is welcome... please help 🙏
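One thing worth ruling out (an assumption, since the snippet above doesn't register one): http-proxy deliberately re-throws proxy errors when no 'error' listener is attached, which is exactly the throw err at http-proxy/index.js:120 in the trace. Registering a handler keeps an upstream ECONNRESET from killing the whole process; a minimal sketch:

var apiProxy = httpProxy.createProxyServer();

// Without an 'error' listener, http-proxy re-throws the error and the process
// crashes; with one, the single request fails but the server keeps running.
apiProxy.on('error', function (err, req, res) {
  console.error('proxy error:', err.code || err.message);
  if (res && !res.headersSent) {
    res.writeHead(502, { 'Content-Type': 'text/plain' });
  }
  if (res) {
    res.end('Bad gateway');
  }
});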

AWS Lambda - Not able to access JS file bundled in Layer (Node.js)

I have created a Lambda function on Node.js and attached a layer to it. But when I execute my Lambda function, it is not reading the JS bundled in the layer. Please let me know if I am making any mistake here.
I followed this post, created a layer 'my-utility', and uploaded nodejs.zip.
AWS Console > Lambda > Layers > Create Layer
Layer Structure
my-utility
- nodejs
  - node_modules
  - myutil.js
  - package.json
  - package-lock.json
- nodejs.zip
myutil.js
function myFun(name) {
  console.log("Hello.. " + name);
}
Lambda Code (Node.js 10.x)
const myutil = require('/opt/nodejs/myutil.js');

exports.handler = async (event) => {
  const response = {
    statusCode: 200,
    body: JSON.stringify('Hello from Lambda!'),
  };
  console.log('Layer Code :', myutil);
  return response;
};
Testing the above Lambda gives the following result:
Response:
{
  "statusCode": 200,
  "body": "\"Hello from Lambda!\""
}
Request ID:
"5dbfd4ab-04d2-47eb-89dd-c9c6a650cbb0"
Function Logs:
START RequestId: 5dbfd4ab-04d2-47eb-89dd-c9c6a650cbb0 Version: $LATEST
2019-06-18T18:35:35.125Z 5dbfd4ab-04d2-47eb-89dd-c9c6a650cbb0 INFO Layer Code : {}
END RequestId: 5dbfd4ab-04d2-47eb-89dd-c9c6a650cbb0
REPORT RequestId: 5dbfd4ab-04d2-47eb-89dd-c9c6a650cbb0 Duration: 133.56 ms Billed Duration: 200 ms Memory Size: 128 MB Max Memory Used: 26 MB
Notice that when I try to print the 'myutil' constant, it is printed as empty. That means the layer code is not being picked up during Lambda execution.
INFO Layer Code : {}
I reached out to the AWS support team and got this issue resolved. I had to write the function in myutil.js as below. This link also helped.
module.exports = {
  myFun: function (name) {
    console.log("Hello.. " + name);
    return 'narendra';
  }
};
I could see that the Lambda called the JS function from the layer and printed the return value properly.
Lambda Function Logs
START RequestId: 39bfa864-9a31-4c0c-b9d3-ce7c2b3d1aaf Version: $LATEST
2019-06-18T21:28:06.505Z 39bfa864-9a31-4c0c-b9d3-ce7c2b3d1aaf INFO Hello.. narendra
2019-06-18T21:28:06.505Z 39bfa864-9a31-4c0c-b9d3-ce7c2b3d1aaf INFO Layer Code : narendra
END RequestId: 39bfa864-9a31-4c0c-b9d3-ce7c2b3d1aaf
REPORT RequestId: 39bfa864-9a31-4c0c-b9d3-ce7c2b3d1aaf Duration: 85.38 ms Billed Duration: 100 ms Memory Size: 128 MB Max Memory Used: 26 MB
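For reference, the handler side that produces those logs would look roughly like this (a sketch; the original post only shows the change to myutil.js):

const myutil = require('/opt/nodejs/myutil.js');

exports.handler = async (event) => {
  // Calls the function exported by the layer module; it logs "Hello.. narendra"
  // and returns the string 'narendra'.
  const result = myutil.myFun('narendra');
  console.log('Layer Code :', result);

  return {
    statusCode: 200,
    body: JSON.stringify('Hello from Lambda!'),
  };
};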
If you are trying to access static files inside a Lambda layer using Serverless, first make sure they are actually being packaged by downloading the layer's .zip from the AWS Layers section. If they are there, you can print the contents of the /opt folder to your CloudWatch log to confirm your files are present:
const fs = require('fs');

console.log('/opt/');
fs.readdirSync('/opt/').forEach(file => {
  console.log(file);
});

How to load very large csv files in nodejs?

I'm trying to load 2 big CSV files into Node.js; the first one is 257,597 KB and the second one 104,330 KB. I'm using the filesystem (fs) and csv modules; here's my code:
fs.readFile('path/to/my/file.csv', (err, data) => {
  if (err) console.error(err)
  else {
    csv.parse(data, (err, dataParsed) => {
      if (err) console.error(err)
      else {
        myData = dataParsed
        console.log('csv loaded')
      }
    })
  }
})
And after ages (1-2 hours) it just crashes with this error message :
<--- Last few GCs --->
[1472:0000000000466170] 4366473 ms: Mark-sweep 3935.2 (4007.3) -> 3935.2 (4007.3) MB, 5584.4 / 0.0 ms last resort GC in old space requested
[1472:0000000000466170] 4371668 ms: Mark-sweep 3935.2 (4007.3) -> 3935.2 (4007.3) MB, 5194.3 / 0.0 ms last resort GC in old space requested
<--- JS stacktrace --->
==== JS stack trace =========================================
Security context: 000002BDF12254D9 <JSObject>
1: stringSlice(aka stringSlice) [buffer.js:590] [bytecode=000000810336DC91 offset=94](this=000003512FC822D1 <undefined>,buf=0000007C81D768B9 <Uint8Array map = 00000352A16C4D01>,encoding=000002BDF1235F21 <String[4]: utf8>,start=0,end=263778854)
2: toString [buffer.js:664] [bytecode=000000810336D8D9 offset=148](this=0000007C81D768B9 <Uint8Array map = 00000352A16C4D01>,encoding=000002BDF1...
FATAL ERROR: CALL_AND_RETRY_LAST Allocation failed - JavaScript heap out of memory
1: node::DecodeWrite
2: node_module_register
3: v8::internal::FatalProcessOutOfMemory
4: v8::internal::FatalProcessOutOfMemory
5: v8::internal::Factory::NewRawTwoByteString
6: v8::internal::Factory::NewStringFromUtf8
7: v8::String::NewFromUtf8
8: std::vector<v8::CpuProfileDeoptFrame,std::allocator<v8::CpuProfileDeoptFrame> >::vector<v8::CpuProfileDeoptFrame,std::allocator<v8::CpuProfileDeoptFrame> >
9: v8::internal::wasm::SignatureMap::Find
10: v8::internal::Builtins::CallableFor
11: v8::internal::Builtins::CallableFor
12: v8::internal::Builtins::CallableFor
13: 00000081634043C1
The bigger file loads, but Node runs out of memory for the other one. It's probably easy to allocate more memory, but the main issue here is the loading time: it seems very long given the size of the files. So what is the correct way to do it? For comparison, Python loads these CSVs really fast with pandas (3-5 seconds).
Streaming works perfectly; it took only 3-5 seconds:
var fs = require('fs')
var csv = require('csv-parser')
var data = []

fs.createReadStream('path/to/my/data.csv')
  .pipe(csv())
  .on('data', function (row) {
    data.push(row)
  })
  .on('end', function () {
    console.log('Data loaded')
  })
fs.readFile loads the entire file into memory, whereas fs.createReadStream reads the file in chunks of a size you specify. This prevents it from running out of memory.
You may want to stream the CSV, instead of reading it all at once:
csv-parse has streaming support: http://csv.adaltas.com/parse/
or, you may want to take a look at csv-stream: https://www.npmjs.com/package/csv-stream
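For the csv-parse route mentioned above, a minimal streaming sketch might look like this (the import style depends on the csv-parse version; recent versions export a named parse function):

const fs = require('fs');
const { parse } = require('csv-parse');

const rows = [];
fs.createReadStream('path/to/my/data.csv')
  .pipe(parse({ columns: true }))          // emit each record as an object keyed by the header row
  .on('data', (row) => rows.push(row))
  .on('end', () => console.log('Loaded ' + rows.length + ' rows'))
  .on('error', console.error);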

node js upload size unit is strange

I am working on the upload module of my server and I set up file uploads with multiparty. I am currently trying to limit the upload size; to keep it simple I am doing something like this:
req.on("data", function(dt) {
bytes += dt.length;
if (bytes > 2048) {
req.connection.destroy();
console.log("connection destroyed due to huge file size");
}
console.log(bytes);
});
I thought this length was in bytes and tried to limit it at 2 MB, but I noticed the unit is a bit strange. For testing, I uploaded a 148 KB file, but the value my counter reaches is 421; it is neither in bits nor in bytes. Why is it such a strange number? Where does this extra ~300k come from?
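As a side note on the limit itself: dt.length is in bytes, and 2048 bytes is 2 KB, not 2 MB. A 2 MB cutoff would be 2 * 1024 * 1024 bytes; a small sketch of the same check with that limit spelled out:

// Same byte-counting approach, with the 2 MB limit made explicit.
const MAX_BYTES = 2 * 1024 * 1024; // 2 MB = 2,097,152 bytes
let bytes = 0;

req.on("data", function (dt) {
  bytes += dt.length; // dt is a Buffer; length is a byte count
  if (bytes > MAX_BYTES) {
    req.connection.destroy();
    console.log("connection destroyed: body larger than 2 MB");
  }
});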
Did you try the filesystem module for checking the size of the file? E.g.:
var fs = require("fs");
var stats = fs.statSync("myfile.txt");
var fileSizeInBytes = stats.size;
