I'm trying to find a way to save a file in a LoopBack application that was sent as a base64-encoded string.
So I have a model with all possible fields and also a base64-encoded image, “receipt”.
{
...modelData,
"receipt": "datadata:image/jpeg;base64,whatever=="
}
So... what I'm thinking of is this
// Sketch of a LoopBack 2/3 remote hook that runs before the built-in
// `create` endpoint (POST /Models).
Model.beforeRemote('create', function(ctx, model, next) {
// The base64-encoded file arrives as an ordinary string property on the
// JSON body, reachable through the remoting context.
const file = ctx.args.data.receipt;
// NOTE(review): pseudocode — the open question is what arguments the
// storage component's upload() expects here.
app.models.Attachment.upload(...)
// and replace base64 with returned filename
})
But I don't clearly understand how and what should I pass to that upload method. Or maybe it has to be some different approach.
Thanks!
This isn't really a remote hook but rather a remote method, yet I believe you can adapt the code to your own needs.
We basically create a remote method called "upload" for Documents model and it takes care of uploading it to a predefined container name.
const CONTAINERS_URL = '/api/container/';
const fs = require('fs');
const path = require('path');
const encodedFileContainer = 'files';
module.exports = function(Document) {
Document.upload = function(encodedFile, fileName, cb) {
let uploadStream = Document.app.models.container.uploadStream(
encodedFileContainer,
fileName
);
uploadStream.end(encodedFile, 'base64', async err => {
if (err) return cb(err);
let newRecord = await Document.create(
{
fileName,
url: CONTAINERS_URL + encodedFileContainer + '/download/' + fileName,
container: encodedFileContainer
},
function(err, obj) {
if (err !== null) {
cb(err);
} else {
cb(null, obj);
}
}
);
});
};
Document.remoteMethod('upload', {
description: 'Uploads a file',
accepts: [
{
arg: 'endcodedFile',
type: 'string',
required: true,
description: 'base64 encoded file',
http: {source: 'form'}
},
{
arg: 'fileName',
type: 'string',
required: true,
description: 'file name',
http: {source: 'form'}
}
],
returns: {
arg: 'fileObject',
type: 'object',
root: true
},
http: {verb: 'post'}
});
Related
In my Express backend, I have set up a connection with S3 Bucket for uploading images, and it works.
However additionally, I would like to be able to store a reference link (S3 url) of the saved image in my Mongo Database.
I have been trying to play around with the req.file object but somehow I cannot get req.file.location, whereas req.file.buffer works okay (as in the example below in itemController.js). Is there any problem in my s3.js configuration? Or perhaps would I need a different approach to get req.file.location instead of the buffer?
Below my bucket configuration s3.js
// s3.js
const AWS = require('aws-sdk')
// s3 bucket configuration
const awsConfig = {
accessKeyid : process.env.S3_ACCESS_KEY,
secretAccessKey : process.env.S3_ACCESS_SECRET,
region : process.env.S3_REGION
}
const S3 = new AWS.S3(awsConfig)
//s3 bucket upload function
const uploadToS3 = (fileData) => {
return new Promise ((resolve, reject) =>{
const params = {
Bucket : process.env.S3_BUCKET_NAME,
Key: `${Date.now().toString()}.jpg`,
Body: fileData
}
S3.upload(params, (err, data) =>{
if(err){
console.log(err)
reject(err)
}
console.log(data)
return resolve(data)
})
})
}
module.exports = {
uploadToS3
}
Here is my itemController.js
const Item = require('../models/itemModel')
const Worker = require('../models/workerModel')
const mongoose = require('mongoose')
const multer = require('multer')
const { uploadToS3 } = require('../s3')
//! Multer configuration
const multerConfig = {
  // BUG FIX: `limits` must be an options object; multer ignores a bare
  // number. This caps each uploaded file at 5 MB.
  limits: { fileSize: 1024 * 1024 * 5 },
  // Accept only JPEG/PNG images; anything else is rejected with a
  // (Polish) error message.
  fileFilter: function (req, file, done) {
    if (file.mimetype === "image/jpg" || file.mimetype === "image/png" || file.mimetype === 'image/jpeg') {
      done(null, true)
    } else {
      done("Niewłaściwy plik, użyj .jpg .jpeg .png", false)
    }
  }
}
const upload = multer(multerConfig)
//! CREATE new item
const createItem = async (req, res) => {
  // multer middleware that handles file upload
  upload.single("image")(req, res, async (uploadError) => {
    // BUG FIX: multer reports upload/filter failures through this
    // callback's first argument; the original ignored it entirely.
    if (uploadError) {
      return res.status(400).json({ error: String(uploadError.message || uploadError) })
    }
    // destructuring from req.body
    const {
      title,
      model,
      producer,
      serialNumber,
      yearOfProduction,
      atEmployee,
      seller,
      warrantyDate,
      purchaseDate,
      image,
    } = req.body
    if (!title) {
      return res.status(400).json({error:'Błąd! Wymagane jest podanie chociaż nazwy narzędzia.'})
    }
    try {
      // BUG FIX: upload to S3 *first* and store the resulting URL in the
      // schema's `image` field. The original (a) stored the raw file
      // buffer in a String field, (b) read `result.location` — the AWS
      // SDK returns `Location` with a capital L — and (c) wrote to
      // `item.imageUrl`, a field the schema does not define, so it was
      // never persisted.
      let imageValue = image
      if (req.file) {
        const result = await uploadToS3(req.file.buffer)
        imageValue = result.Location
      }
      const item = await Item.create({
        title,
        model,
        producer,
        serialNumber,
        yearOfProduction,
        atEmployee,
        seller,
        warrantyDate,
        purchaseDate,
        image: imageValue,
      })
      res.status(200).json(item)
    } catch (error) {
      res.status(400).json({ error: error.message })
    }
  })
}
...
And here is my ItemModel.js
const mongoose = require('mongoose')
//mongoose function to create new model Schema
const Schema = mongoose.Schema
// Mongoose schema for a tool/equipment item.
// Only `title` is required; `timestamps: true` adds createdAt/updatedAt.
const itemSchema = new Schema ({
  // Display name of the item — the only mandatory field.
  title: {
    type: String,
    required: true,
  },
  producer: {
    type: String,
    required: false,
  },
  model: {
    type: String,
    required: false,
  },
  serialNumber: {
    type: String,
    required: false,
  },
  yearOfProduction:{
    type: Number,
    required: false
  },
  seller:{
    type: String,
    required: false
  },
  // Defaults to the moment the record is created.
  purchaseDate: {
    type: Date,
    default: Date.now
  },
  warrantyDate: {
    type: Date,
    required: false,
  },
  //Linking Worker model to an Item
  atEmployee: {
    type: mongoose.Schema.Types.ObjectId,
    required: false,
    ref:'Worker',
  },
  // NOTE(review): the controller stores either the raw upload buffer or an
  // S3 URL here, depending on the version — presumably the URL; confirm.
  image: {
    type: String,
    required: false,
  }
}, { timestamps: true })
module.exports = mongoose.model('Item', itemSchema)
This is how I actually solved it
itemController.js
const Item = require('../models/itemModel')
const Worker = require('../models/workerModel')
const mongoose = require('mongoose')
const multer = require('multer')
const { uploadToS3 } = require('../s3')
//! Multer configuration
const multerConfig = {
  // BUG FIX: `limits` must be an options object; multer ignores a bare
  // number. This caps each uploaded file at 5 MB.
  limits: { fileSize: 1024 * 1024 * 5 },
  // Accept only JPEG/PNG images; anything else is rejected with a
  // (Polish) error message.
  fileFilter: function (req, file, done) {
    if (file.mimetype === "image/jpg" || file.mimetype === "image/png" || file.mimetype === 'image/jpeg') {
      done(null, true)
    } else {
      done("Niewłaściwy plik, użyj .jpg .jpeg .png", false)
    }
  }
}
const upload = multer(multerConfig)
//! CREATE new item
const createItem = async (req, res) => {
  // multer middleware that handles file upload
  upload.single("image")(req, res, async (uploadError) => {
    // BUG FIX: multer reports upload/filter failures through this
    // callback's first argument; the original ignored it entirely.
    if (uploadError) {
      return res.status(400).json({ error: String(uploadError.message || uploadError) })
    }
    // destructuring from req.body
    const {
      title,
      model,
      producer,
      serialNumber,
      yearOfProduction,
      atEmployee,
      seller,
      warrantyDate,
      purchaseDate,
    } = req.body
    if (!title) {
      return res.status(400).json({error:'Błąd! Wymagane jest podanie chociaż nazwy narzędzia.'})
    }
    try {
      // Build the record once and only attach `image` when a file was
      // actually uploaded — the original duplicated the whole Item.create
      // call in both branches.
      const payload = {
        title,
        model,
        producer,
        serialNumber,
        yearOfProduction,
        atEmployee,
        seller,
        warrantyDate,
        purchaseDate,
      }
      if (req.file) {
        // upload file to S3 and store the URL in the database if image has been uploaded
        const result = await uploadToS3(req.file.buffer)
        payload.image = result.Location
      }
      const item = await Item.create(payload)
      res.status(200).json(item)
    } catch (error) {
      res.status(400).json({ error: error.message })
    }
  })
}
I'm trying to convert a Google Sheet into a PDF file. That part seems OK, but I can't save the PDF directly into a specific folder...
Can you help me ?
// Fetch the spreadsheet's metadata (getSpreadSheetData wraps the Sheets API)
// so its webview URL can be turned into a PDF-export URL.
const getData = await getSpreadSheetData(newSpreadsheetsId);
if (!getData) {
  // nop
  return;
}
let url = getData.data.spreadsheetUrl;
if (!url) {
  // nop
  return
}
// Rewrite ".../edit" into the endpoint that renders the sheet as a PDF.
url = url.replace(/edit$/, '');
const url_ext = 'export?exportFormat=pdf&format=pdf&portrait=true'
url = url + url_ext;
// NOTE(review): `url` is built but never used below — the export goes
// through the Drive API instead; presumably leftover from an earlier attempt.
const dest = fs.createWriteStream('test.pdf');
// Export the spreadsheet as a PDF and stream the bytes into test.pdf on
// local disk (the asker's goal is to avoid local disk — see the answer).
await g.drive.files.export(
  {
    fileId: `${newSpreadsheetsId}`, // Please set the file ID of Google Docs.
    mimeType: "application/pdf"
  },
  { responseType: "stream" }, function(err, response) {
    if (err) {
      console.log(err);
      return;
    }
    if (!response) {
      // nop
      return
    }
    response.data
      .on("end", function() {
        console.log("Done.");
      })
      .on("error", function(err) {
        console.log("Error during download", err);
        return process.exit();
      })
      .pipe(dest);
  })
getSpreadSheetData retrieve me all the data from one spreadsheetID
I'm not an expert with pipe etc ...
I have trying some options like this link :
Github - google Drive export pdf in Landscape
And i don't want this file on my server, or transiting by my server ... :/
after few hours there is the solution :
// Authenticated googleapis client (g.drive is the Drive v3 surface).
g = auth
// Export the sheet as a PDF *stream* and, without touching local disk,
// pipe that stream straight into a new Drive file created inside the
// target folder (folderParentId).
const exportAsPdfInFolder = await g.drive.files.export(
  {
    fileId: fileId,
    mimeType: 'application/pdf',
    alt: 'media',
  },
  { responseType: 'stream' },
  async (err, result) => {
    if (err) console.log(err);
    else {
      // The exported PDF byte stream becomes the media body of the new file.
      const media = {
        mimeType: 'application/pdf',
        body: result?.data,
      };
      await g.drive.files.create(
        {
          requestBody: {
            name: newTitlePDF,
            parents: [folderParentId], // destination folder
          },
          media: media,
          fields: 'id',
        },
        async (err: any, file: any) => {
          if (err) {
            // Handle error
            console.error(err);
          } else {
            console.log('File Id: ', file.data.id);
          }
        },
      );
    }
  },
);
Reference:
Files: create
Here I'm having some difficulty: whenever a CSV file like the one below is uploaded,
how can I create a new record for each of its rows?
Please help!
This is my Csv file.
name,companyId,engineId,colorId
car1,123,123,123
car2,456,456,456
uploadAvatar: function (req, res) {
let fileInputName = 'assets/excel/carData.csv';
let fileOutputName = 'myOutputFile.json';
req.file('excel').upload({
// don't allow the total upload size to exceed ~10MB
dirname: require('path').resolve(sails.config.appPath, './assets/excel'),
saveAs: 'carData.csv',
maxBytes: 10000000
}, function whenDone(err, uploadedFiles) {
if (err) {
return res.serverError(err);
}
// If no files were uploaded, respond with an error.
if (uploadedFiles.length === 0) {
return res.badRequest('No file was uploaded');
}
csvToJson.generateJsonFileFromCsv(fileInputName, fileOutputName);
csvToJson.fieldDelimiter(',') .getJsonFromCsv(fileInputName);
let json = csvToJson.getJsonFromCsv("assets/excel/carData.csv");
csvToJson.formatValueByType().getJsonFromCsv(fileInputName);
//var result = [];
var name = "";
var comapanyId = "";
var engineId = "";
var colorId = "";
for (let i = 0; i < json.length; i++) {
console.log(json[i]);
**// How to create new record ?**
}
return res.json({
message: uploadedFiles.length + ' file(s) uploaded successfully!',
});
});
},
This is my model file
// Sails/Waterline model definition for the Car table.
module.exports = {
  tableName: 'Car',
  // `schema: true` rejects any attribute not declared below.
  schema: true,
  attributes: {
    // Car name, taken from the CSV's `name` column.
    name: {
      type: 'STRING'
    },
    // NOTE(review): companyId/engineId/colorId are all associations to the
    // same `Master` model — presumably a generic lookup table; confirm.
    companyId: {
      model: 'Master',
      columnName: 'companyId'
    },
    engineId: {
      model: 'Master',
      columnName: 'engineId'
    },
    colorId: {
      model: 'Master',
      columnName: 'colorId'
    },
    // One-to-many: Order records pointing back at this car via Order.carId.
    car: {
      collection: 'Order',
      via: 'carId'
    },
  },
};
You have to call the .createEach([cars]) method.
I'm having a problem parsing an ".xlsx" or ".xls" file with SheetJS ("xlsx" on npm). I don't know what I'm doing wrong, but I always get the same output
[
{
"__EMPTY": "i"
},
{
"__EMPTY":"«Z.7¦§dÞZµìe°
I'm using an empty controller in LoopBack 4, in case you recognize the syntax, but the problem doesn't seem to be a LoopBack issue, since I'm able to save the file on the server and open it without a problem.
It seems that xlsx module it's unable to parse my files for some reason, can anyone take a look and see if something it's wrong?
Here it's my code:
import { inject } from '#loopback/context';
import { ParamsDictionary, RequestHandler } from 'express-serve-static-core';
import * as multer from "multer";
import { unlinkSync } from "fs";
import * as xlsx from "xlsx"
import {
requestBody,
RestBindings,
RequestBodyObject,
post,
Request,
Response
} from '#loopback/rest';
// OpenAPI request-body spec for multipart uploads. The 'x-parser': 'stream'
// extension tells LoopBack 4 to skip its own body parsing so multer can
// consume the raw request stream itself.
const MULTIPART_FORM_DATA: RequestBodyObject = {
  description: 'multipart/form-data value.',
  required: true,
  content: {
    'multipart/form-data': {
      // Skip body parsing
      'x-parser': 'stream',
      schema: { type: 'object' },
    },
  },
}
export class TemplateActionsController {
constructor() { }
#post('/parse-template', {
responses: {
200: {
content: {
'multipart/form-data': {
schema: {
type: 'object',
},
},
},
description: '',
},
},
})
async parseTemplate(
#requestBody(MULTIPART_FORM_DATA)
request: Request,
#inject(RestBindings.Http.RESPONSE) response: Response,
): Promise<object> {
//const storage = multer.memoryStorage();
const storage = multer.diskStorage({
filename: (req: Request<ParamsDictionary>, file: Express.Multer.File, callback: (error: Error | null, filename: string) => void) => {
callback(null, file.originalname);
}
});
const upload = multer.default({ storage });
return new Promise<object>((resolve, reject) => {
let middleware: RequestHandler<ParamsDictionary> = upload.any();
console.log('----------------------------------------------------------');
//console.log(request);
middleware(request as any, response as any, (err: any) => {
if (err) return reject(err);
let arrFiles: Express.Multer.File[];
arrFiles = (request as Express.Request).files as Express.Multer.File[];
console.log('----------------------------------------------------------');
console.log(arrFiles[0]);
let workbook: xlsx.WorkBook = xlsx.read(arrFiles[0].path);
var sheet_name_list: string[] = workbook.SheetNames;
let firstSheet: xlsx.WorkSheet = workbook.Sheets[sheet_name_list[0]]
let strResult: any = xlsx.utils.sheet_to_json(firstSheet);
console.log('----------------------------------------------------------');
console.log(sheet_name_list);
console.log('----------------------------------------------------------');
console.log(strResult);
try {
unlinkSync(arrFiles[0].path);
} catch (e) {
//error deleting the file
}
resolve(strResult);
});
});
}
}
the line that parses the file it's this one:
let strResult: any = xlsx.utils.sheet_to_json(firstSheet);
My input excel file (template.xlsx) only has simple data in the first sheet:
I can't find any other issue that looks like this anywhere.
If anyone can help please tell me.
Much appreciated.
Omar
It seems that I was using:
let workbook: xlsx.WorkBook = xlsx.read(arrFiles[0].path);
instead of:
let workbook: xlsx.WorkBook = xlsx.readFile(arrFiles[0].path);
It was my mistake, now everything it's working fine.
I'm trying to get progress status values while uploading files to Google Drive using Node.js.
controller.js
exports.post = (req, res) => {
//file content is stored in req as a stream
// 1qP5tGUFibPNaOxPpMbCQNbVzrDdAgBD is the folder ID (in google drive)
googleDrive.makeFile("file.txt","1qP5tGUFibPNaOxPpMbCQNbVzrDdAgBD",req);
};
googleDrive.js
...
makeFile: function (fileName, root,req) {
var fileMetadata = {
'name': fileName,
'mimeType': 'text/plain',
'parents': [root]
};
var media = {
mimeType: 'text/plain',
body: req
};
var r = drive.files.create({
auth: jwToken,
resource: fileMetadata,
media: media,
fields: 'id'
}, function (err, file) {
if (err) {
// Handle error
console.error(err);
} else {
// r => undefined
console.log("Uploaded: " + r);
}
});
},
...
I followed this link but always got an undefined value.
How about this modification?
Modification point:
It used onUploadProgress.
Modified script:
makeFile: function (fileName, root,req) {
var fileMetadata = {
'name': fileName,
'mimeType': 'text/plain',
'parents': [root]
};
var media = {
mimeType: 'text/plain',
body: req
};
var r = drive.files.create({
auth: jwToken,
resource: fileMetadata,
media: media,
fields: 'id'
}, {
onUploadProgress: function(e) {
process.stdout.clearLine();
process.stdout.cursorTo(0);
process.stdout.write(e.bytesRead.toString());
},
}, function (err, file) {
if (err) {
// Handle error
console.error(err);
} else {
console.log("Uploaded: " + file.data.id);
}
});
},
Note:
If you want to show the progression as "%", please use the file size.
It was confirmed that this script worked at googleapis#33.0.0.
References:
axios
test of google/google-api-nodejs-client
In my environment, I'm using the script like above. But if this didn't work in your environment and if I misunderstand your question, I'm sorry.