NestJS upload is not saving file - nestjs

I'm following the NestJS documentation for file upload. My endpoint is receiving the file, but the file is not being stored.
I'm using the same configuration as the NestJS docs:
@Post('upload')
@UseInterceptors(FileInterceptor('file'))
uploadFile(@UploadedFile() file) {
  console.log(file);
}
In my app.module file, I added the import for:
MulterModule.register({
  dest: './uploads'
})
But the file is not stored in the directory uploads. The complete log is:
undefined
{
fieldname: 'file',
originalname: 'nopornimage.png',
encoding: '7bit',
mimetype: 'image/png',
buffer: <Buffer 89 50 4e 47 0d 0a 1a 04 d00 01 73 52 47 42 00 ae ce 04 ... 20087 more bytes>,
size: 20137
}
(Yes, including the undefined)
What am I doing wrong?

You should specify the destination in the multer options. Without a destination, multer keeps the upload in memory, which is why you only get a buffer and nothing is written to disk:
@Post('upload')
@UseInterceptors(FileInterceptor('file', {
  dest: 'uploads/'
}))
uploadFile(@UploadedFile() file) {
  console.log(file);
}
Or you can use createWriteStream from the fs module to save the file yourself:
import { createWriteStream } from 'fs';

@Post('upload')
@UseInterceptors(FileInterceptor('file'))
uploadFile(@UploadedFile() file) {
  // with the default in-memory storage, the upload is available as file.buffer
  const ws = createWriteStream('custom_filename');
  ws.write(file.buffer);
  ws.end(); // close the stream so the data is flushed to disk
  console.log(file);
}
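If you prefer promises, here is a minimal sketch using fs/promises, assuming the default in-memory storage (so file.buffer is set) and that an ./uploads directory already exists:
import { writeFile } from 'fs/promises';

@Post('upload')
@UseInterceptors(FileInterceptor('file'))
async uploadFile(@UploadedFile() file: Express.Multer.File) {
  // write the in-memory buffer to disk under the original file name
  await writeFile(`./uploads/${file.originalname}`, file.buffer);
  return { filename: file.originalname, size: file.size };
}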

You should register the Multer module in your domain module, not in the app module (unless the app module is the only module in your project). For example, if your controller belongs to the task domain, you should register Multer in the task module, not in the app module:
@Module({
  imports: [
    TypeOrmModule.forFeature([TaskRepository]),
    MulterModule.register({ dest: './upload' }),
  ],
  controllers: [TaskController],
  providers: [TaskService],
})
export class TaskModule {}
Create the upload directory and set its path in the Multer registration.
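With the module-level registration above, a controller in the same module can use FileInterceptor without repeating the options; a minimal sketch (TaskController is just the illustrative name from the module above):
import { Controller, Post, UseInterceptors, UploadedFile } from '@nestjs/common';
import { FileInterceptor } from '@nestjs/platform-express';

@Controller('tasks')
export class TaskController {
  @Post('upload')
  @UseInterceptors(FileInterceptor('file'))
  uploadFile(@UploadedFile() file: Express.Multer.File) {
    // with dest configured, multer writes the file to disk and sets file.path/file.filename
    console.log(file.path);
  }
}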

@Post('upload')
@UseInterceptors(FileInterceptor('file', { dest: 'uploads/' }))
uploadFile(@UploadedFile() file: Express.Multer.File) {
  console.log(file);
}
You can use the 'dest' option directly in the decorator.

Related

Uploading from AWS Lambda (Nodejs Express with multer) to S3 Bucket, returning white small box

I'm trying to upload from my Lambda (Node.js Express) to an S3 bucket. But whenever I upload and then look for my uploaded file in S3, it only shows a small white box. [1]
I already tried converting my file buffer to base64, but it's still not working.
My uploaded file only shows up if I upload it using my local API (localhost).
Here's my code:
// multer middleware
const multer = require("multer");
const helpers = require("../helpers/image-upload-helper");

const storage = multer.memoryStorage();
let upload = multer({
  storage: storage,
  fileFilter: helpers.imageFilter,
}).any();
// controller
try {
  if (req.files) {
    for (const file of req.files) {
      const ImageName = randomImageNameGenerator.randomImageName()
      const params = {
        Bucket: process.env.BUCKET_NAME,
        Key: ImageName,
        Body: file.buffer,
        ContentType: file.mimetype,
      }
      const command = new PutObjectCommand(params)
      const myData = await s3.send(command)
    }
  }
//log of my command
PutObjectCommand {
middlewareStack: {
add: [Function: add],
addRelativeTo: [Function: addRelativeTo],
clone: [Function: clone],
use: [Function: use],
remove: [Function: remove],
removeByTag: [Function: removeByTag],
concat: [Function: concat],
applyToStack: [Function: cloneTo],
identify: [Function: identify],
resolve: [Function: resolve]
},
input: {
Bucket: 'orex-product-images',
Key: 'b465138efab90aba02e5376ef247f536cfb1e7e32e34877bf21ab1bd655b3749',
Body: <Buffer 89 50 4e 47 0d 0a 1a 0a 00 00 00 0d 49 48 44 52 00 00 01 f8 00 00 00 f5 08 06 00 00 00 bc c1 e7 15 00 00 00 01 73 52 47 42 00 ae ce 1c e9 00 00 20 00 ... 10640 more bytes>,
ContentType: 'image/png'
}
}
// log of myData
{
'$metadata': {
httpStatusCode: 200,
requestId: '6C1EM009PP420NRK',
extendedRequestId: 'ZfGR4AR4mElYOSGes68YqEegptyO5PY5iPCvplP89wr1nqT4DZHwo0D0bl5qyZ7aAB0HaDaTAKU=',
cfId: undefined,
attempts: 1,
totalRetryDelay: 0
},
ETag: '"96425366df243451e35a941524b2a019a6ad2b"',
ServerSideEncryption: 'ABDS256',
VersionId: 'rpgj.L5AwGNCcKVzatIY5zHf_SYVNWt0'
}
Note: I didn't see any errors in my CloudWatch logs.
[1] Example of what the white box looks like (image)
For those who arrived here as I did with the same issue, this answer solved it for me:
Using Lambda to get image from S3 returns a white box in Python
And for those using serverless, this is also relevant:
Serverless I image upload to S3 broken after deploy, local worked only

Javascript on client side: how to upload a zip file to AWS S3 bucket through pre-signed URL?

My web app allows the user to drag and drop a zip file and upload it to an AWS S3 bucket. The steps are:
The user drags and drops a zip file onto the drop zone of the UI;
The user clicks send;
A request is made to an AWS Lambda function, which generates a pre-signed URL that allows the user to upload any file.
An axios PUT request is made to the pre-signed S3 URL to upload the file.
I used local node.js code to test the pre-signed S3 URL:
const fileToUpload = fs.readFileSync(test_file_path);
console.log("fileToUpload: type: ", typeof fileToUpload, ", content: ", fileToUpload);
try {
  const uploadResponse = await axios({
    method: 'PUT',
    url: presignedUrl,
    data: fileToUpload,
    headers: {
      'Content-Type': '',
    },
    maxContentLength: Infinity,
    maxBodyLength: Infinity
  });
  return uploadResponse.data;
} catch (error) {
  console.error('Error while uploading object to S3:', error.message);
}
And it works well, which proves that the generated pre-signed URL is valid.
However, on the client side in React:
console.log(`formState.file: type: ${formState.file}, content: ${formState.file}`);
const uploadResponse = await axios({
  method: 'PUT',
  url: presignedS3Url,
  data: formState.file,
  headers: {
    'Content-Type': ''
  },
  maxContentLength: Infinity,
  maxBodyLength: Infinity
});
It fails and the request ends in a 403 Forbidden error.
The difference is that in the Node.js code, the fileToUpload is:
type: object, content: <Buffer 50 4b 03 04 14 00 08 00 08 00 78 84 cb 50 00 00 00 00 00 00 00 00 24 ae 12 01 3e 00 20 00 31 2e 32 2e 38 34 30 2e 31 31 33 35 36 34 2e 31 30 2e 31 2e ... 10573784 more bytes>
Whereas on the client side, formState.file is initialized by the react-dropzone lib, has the type [object File], and its content is:
path: "1.2.840.113564.10.1.312260962047571814316520322884140128208155.zip"
lastModified: 1625164188712
lastModifiedDate: Fri Jul 02 2021 03:29:48 GMT+0900 (Japan Standard Time) {}
name: "1.2.840.113564.10.1.312260962047571814316520322884140128208155.zip"
size: 10573834
type: "application/zip"
webkitRelativePath: ""
[[Prototype]]: File
length: 1
I am not entirely sure that this is the cause. A few thoughts:
fs.readFileSync() is Node.js only and is not available on the client side in React.
On the client side, should I get the zip file in the form of <Buffer ...>, and if so, how should I do it? (a rough sketch follows below)
Or is it OK with the current [object File] type on the client side? Maybe there is another way to upload it to the S3 bucket?
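For reference, if you do want raw bytes on the client similar to the Node Buffer, the standard File/Blob API can provide them; a rough sketch only, with the same request shape as above:
// inside the async submit handler: read the dropped File into raw bytes first
const fileBytes = await formState.file.arrayBuffer(); // standard Blob/File API

const uploadResponse = await axios({
  method: 'PUT',
  url: presignedS3Url,
  data: fileBytes, // ArrayBuffer body, analogous to the Buffer used in the Node test
  headers: {
    'Content-Type': ''
  },
  maxContentLength: Infinity,
  maxBodyLength: Infinity
});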

i18next - Loading translations from a JSON file

I'm trying to load translations from a JSON file using the i18next library on Node. The path of the JSON file points to the correct location.
I'm getting the following error:
i18next::translator: missingKey en translation test test
import i18next from 'i18next';
import Backend from 'i18next-fs-backend';

const instance = i18next.use(Backend).createInstance({
  lng: config.language,
  debug: true,
  fallbackLng: 'en',
  initImmediate: false,
  backend: {
    loadPath: join(__dirname, `${config.language}.json`),
  },
}, (error, t) => {
  console.log(t('foo'));
});
JSON file:
{
"foo": "bar"
}
Specifying the translations directly in createInstance using resources property works perfectly.
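For comparison, that working inline approach looks roughly like this (a sketch using the same key):
// works: translations supplied inline via the resources option
const inlineInstance = i18next.createInstance({
  lng: 'en',
  fallbackLng: 'en',
  resources: {
    en: {
      translation: { foo: 'bar' },
    },
  },
}, (error, t) => {
  console.log(t('foo')); // "bar"
});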
I tried everything I could think of, with no success.
Found the solution! The options and callback need to be passed to init() rather than to createInstance():
import i18next from 'i18next';
import Backend from 'i18next-fs-backend';

const instance = i18next.use(Backend).createInstance();

instance.init({
  lng: config.language,
  debug: true,
  fallbackLng: 'en',
  initImmediate: false,
  backend: {
    loadPath: join(__dirname, `${config.language}.json`),
  },
}, (error, t) => {
  console.log(t('foo'));
});

ECONNRESET Error while uploading more than 10 files using multer in node js

I need to upload 100 files at a time using multer's multiple file upload. Currently 10 files can be uploaded easily, but if I try to add more than 10 files it throws an error like the one below:
Error: read ECONNRESET
at TLSWrap.onStreamRead (internal/stream_base_commons.js:205:27) {
message: 'read ECONNRESET',
errno: 'ECONNRESET',
code: 'NetworkingError',
syscall: 'read',
region: 'xx-xx-1',
hostname: 'xxxxxxx.s3.amazonaws.com',
retryable: true,
time: 2020-08-05T05:52:39.950Z,
statusCode: 400,
storageErrors: []
}
Can you guys please help? I also need to know the maximum number of files that multer accepts for upload, and whether there is a way to set a timeout for the upload function only.
router.post('/files/fileuploads', upload.fields([{
  name: 'multi-files', maxCount: 100
}]), methods.multipleUpload)
const upload = multer({
  storage: multerS3({
    s3: s3,
    bucket: 'xxxxxx',
    metadata: function (req, file, cb) {
      cb(null, { fieldName: file.originalname });
    },
    key: function (req, file, cb) {
      cb(null, file.originalname)
    }
  }),
  fileFilter: fileFilter
});
If anyone needs help with this in the future, here's a blog post on how to upload 100 files at the same time through Node, even though the frontend is using Vue. The backend should be the same.
In the end what I think you should change is simply the backend route to be something like:
router.post("/files/fileuploads", upload.array('file', 101), async (req, res) => {
  return res.status(200).send('OK')
})
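Regarding the maximum file count: multer does not enforce a fixed cap of its own, but you can set one explicitly through its limits option (forwarded to busboy). A sketch reusing the multerS3 storage from the question, with example values:
const upload = multer({
  storage: multerS3({
    s3: s3,
    bucket: 'xxxxxx',
    metadata: (req, file, cb) => cb(null, { fieldName: file.originalname }),
    key: (req, file, cb) => cb(null, file.originalname),
  }),
  fileFilter: fileFilter,
  limits: {
    files: 100,                 // reject requests carrying more than 100 files
    fileSize: 10 * 1024 * 1024, // per-file cap in bytes (example value)
  },
});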

cloudant couchdb multipart insert throwing function_clause error

I am trying to insert a media document into Cloudant CouchDB using the code below.
var uuid = require('uuid').v4;

fs.readFile('./test.jpeg', function(err, data) {
  if (!err) {
    var newAttachmentObj = {
      type: 'media',
      media_mime: 'image/jpeg',
      media_type: 'Photo',
      media_filename: 'rabbit1'
    }
    var filename = 'rabbit1';
    var media_mime = 'image/jpeg';
    var attachment_id = uuid();
    var media_data = data;
    console.log(data);
    console.log(newAttachmentObj);
    console.log(attachment_id);
    db.multipart.insert(newAttachmentObj,
      [{ name: filename, data: media_data, content_type: media_mime }], attachment_id, function (err, body) {
        console.log(body);
        if (err) {
          console.log('Error: Creating a media doc in cloudant.');
          console.log(err);
          // console.log(JSON.stringify(err));
        } else {
          console.log(body);
        }
      })
  }
});
The document is getting created in Cloudant CouchDB and we can also view the uploaded attachment, but the callback function returns an error as shown below.
{ Error: function_clause
at Request._callback (/home/boatman/anoop/forwarding-module/node_modules/cloudant-nano/lib/nano.js:248:15)
at Request.self.callback (/home/boatman/anoop/forwarding-module/node_modules/request/request.js:188:22)
at emitTwo (events.js:125:13)
at Request.emit (events.js:213:7)
at Request.<anonymous> (/home/boatman/anoop/forwarding-module/node_modules/request/request.js:1171:10)
at emitOne (events.js:115:13)
at Request.emit (events.js:210:7)
at IncomingMessage.<anonymous> (/home/boatman/anoop/forwarding-module/node_modules/request/request.js:1091:12)
at Object.onceWrapper (events.js:314:30)
at emitNone (events.js:110:20)
at IncomingMessage.emit (events.js:207:7)
at endReadableNT (_stream_readable.js:1045:12)
at _combinedTickCallback (internal/process/next_tick.js:138:11)
at process._tickDomainCallback (internal/process/next_tick.js:218:9)
name: 'Error',
error: 'unknown_error',
reason: 'function_clause',
ref: 944644368,
scope: 'couch',
statusCode: 500,
request:
{ method: 'PUT',
headers: { 'content-type': 'multipart/related' },
uri: 'https://XXXXXX:XXXXXX#account_id-bluemix.cloudant.com/db_media/a73d3788-d661-4944-964b-bcffce0286bd',
multipart: [ [Object], [Object] ] },
headers:
{ 'cache-control': 'must-revalidate',
'content-type': 'application/json',
date: 'Tue, 17 Apr 2018 08:37:28 GMT',
'x-couch-request-id': '5097b3e876',
'x-couch-stack-hash': '944644368',
'x-frame-options': 'DENY',
'strict-transport-security': 'max-age=31536000',
'x-content-type-options': 'nosniff',
'x-cloudant-request-class': 'write',
'x-cloudant-backend': 'bm-cc-uk-04',
via: '1.1 lb1.bm-cc-uk-04 (Glum/1.50.4)',
statusCode: 500,
uri: 'https://XXXXXX:XXXXXX#account_id-bluemix.cloudant.com/db_media/a73d3788-d661-4944-964b-bcffce0286bd' },
errid: 'non_200',
description: 'couch returned 500' }
Please find the image below.
I tried to reproduce your conditions with CouchDB 2.1.1, not Cloudant, so it may not be an exact reproduction, but I thought I'd share my results.
I created the following server.js file based on your code with a little modification. I used the nano package for CouchDB instead of cloudant-nano since I'm not using Cloudant:
process.env.NODE_TLS_REJECT_UNAUTHORIZED = "0"; // Ignore rejection, because the CouchDB SSL certificate is self-signed
var uuid = require('uuid').v4;
const fs = require('fs')
const nano = require('nano')('https://admin:****@192.168.1.106:6984');
// Database 'reproduce' is already created in CouchDB 2.1.1
const db = nano.db.use('reproduce');

fs.readFile('./test.jpeg', function(err, data) {
  if (err) {
    console.log('err -> ', err)
  } else if (!err) {
    var newAttachmentObj = {
      type: 'media',
      media_mime: 'image/jpeg',
      media_type: 'Photo',
      media_filename: 'rabbit1'
    }
    var filename = 'rabbit1';
    var media_mime = 'image/jpeg';
    var attachment_id = uuid();
    var media_data = data;
    console.log(data);
    console.log(newAttachmentObj);
    console.log(attachment_id);
    db.multipart.insert(newAttachmentObj,
      [{ name: filename, data: media_data, content_type: media_mime }], attachment_id, function (err, body) {
        console.log(body);
        if (err) {
          console.log('Error: Creating a media doc in cloudant.');
          console.log(err);
          // console.log(JSON.stringify(err));
        } else {
          console.log(body);
        }
      })
  }
});
I placed a sample test.jpeg in the same directory as my server.js file. Then I ran the code with the command $ node server.js and got the following results:
$ node server.js
<Buffer ff d8 ff e1 00 32 45 78 69 66 00 00 49 49 2a 00 08 00 00 00 01 00 98 82 02 00 0e 00 00 00 1a 00 00 00 00 00 00 00 52 61 77 70 69 78 65 6c 20 4c 74 64 ... >
{ type: 'media',
media_mime: 'image/jpeg',
media_type: 'Photo',
media_filename: 'rabbit1' }
ec6a36d1-952e-4d86-9865-3587c6079fb5
{ ok: true,
id: 'ec6a36d1-952e-4d86-9865-3587c6079fb5',
rev: '1-896eca9e9980509aeaa8539b281c3257' }
{ ok: true,
id: 'ec6a36d1-952e-4d86-9865-3587c6079fb5',
rev: '1-896eca9e9980509aeaa8539b281c3257' }
Obviously, I don't receive the errors you're getting.
