Basically I am using Firebase Functions and Hosting with Node and React. I can upload an image courtesy of How to perform an HTTP file upload using express on Cloud Functions for Firebase (multer, busboy),
but how do you upload an image and data at the same time?
export const addProduct = (product, imageUrl) => {
return (dispatch) => {
return new Promise((resolve, reject) => {
const fileData = new FormData();
fileData.append("imageUrl", imageUrl);
fileData.append("productData", product);
axios({
method: "post",
url: "/api/products/add-product",
data: fileData,
headers: {
"Content-Type": "multipart/form-data",
},
});
});
};
};
NodeJS
const router = express.Router();
const Busboy = require("busboy");
router.post("/api/products/add-product", async (req, res, next) => {
if (req.method === "POST") {
const busboy = new Busboy({ headers: req.headers });
const uploads = {};
busboy.on("file", (fieldname, file, filename, encoding, mimetype) => {
console.log(
`File [${fieldname}] filename: ${filename}, encoding: ${encoding}, mimetype: ${mimetype}`
);
});
}
});
Your client-side code looks OK.
On the server-side you can tell busboy to extract fields as well as files:
const fields = {};
const files = [];
const busboy = new Busboy({headers: req.headers});
busboy.on("field", (key, value) => (fields[key] = value));
busboy.on("file", (fieldname, file, filename, encoding, mimetype) => {/*...*/});
busboy.end(req.rawBody);
This way you can access fields["productData"] later in your code.
Note that you need to use rawBody to access the unparsed body in Cloud Functions: https://firebase.google.com/docs/functions/http-events#read_values_from_the_request
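One extra note (mine, not part of the original answer): FormData converts plain objects to the string "[object Object]", so if product is an object you should JSON.stringify it on the client and JSON.parse it on the server. A minimal sketch, reusing the field names from above:
// client side
fileData.append("productData", JSON.stringify(product));
// server side, after busboy has collected every field
busboy.on("finish", () => {
const productData = JSON.parse(fields["productData"] || "{}");
// ...save productData together with the uploaded file...
});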
Related
I have a Lambda Node function which is called by a webhook from a third-party server. The third-party server sends a file download URL and some other data.
The download URL is temporary, so I need to push the file to S3 for long-term storage.
The rudimentary function below downloads the file and then tries to upload it to S3.
This works when the file is plain text, but images, PDFs, etc. are corrupted by the time they reach S3.
const AWS = require("aws-sdk");
const https = require('https');
const path = require('path');
const s3 = new AWS.S3({apiVersion: '2006-03-01'});
exports.handler = async (event, context, callback) => {
var payload = event.body;
const url_host = payload.host;
const url_path = payload.path; //URL of file which needs to be downloaded
const get_params = {
host: url_host,
path: url_path,
port: 443,
method: 'GET',
headers: { }
};
var resp = await https_get_processor(get_params); //File downloaded here
var uploadParams = {
Bucket: "bucket_name",
Key: '',
Body: resp //Preparing to upload the received file
};
uploadParams.Key = path.basename(url_path); //Generating filename
s3.upload (uploadParams, function (err, data) {
if (err) {
console.log("Error", err);
} if (data) {
console.log("Upload Success", data.Location);
}
});
response = {...} //Generic Response
return response;
};
async function https_get_processor(get_params)
{
return await new Promise((resolve, reject) =>
{
var data = "";
const req = https.request(get_params, res => {
res.on('data', chunk => { data += chunk })
res.on('end', () =>
{
resolve(data);
})
});
req.on('error', (err) => {
reject(err);
});
req.end();
});
}
The response is binary in this case, and concatenating the chunks into a string corrupts it. Change the request processing to push each chunk into an array, then merge the Buffer chunks and pass the result on. Try this:
var data = [];
const req = https.request(get_params, res => {
res.on('data', chunk => data.push(chunk))
res.on('end', () => {
resolve(Buffer.concat(data));
});
});
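A side note (my assumption, not part of the original answer): because the handler passes a callback to s3.upload and returns right away, the Lambda may finish before the upload completes. Awaiting the promise form avoids that:
// await the upload so the async handler doesn't return before it finishes
const data = await s3.upload(uploadParams).promise();
console.log("Upload Success", data.Location);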
I use the Expo document picker to pick a file, and multer on the back end to save it into a folder.
When I send the image, I get a 200 status response but the image is not saved.
What am I doing wrong?
React native code :
const pickPicture = async () => {
let result = await DocumentPicker.getDocumentAsync({});
console.log(result.file);
console.log(result);
const data = new FormData();
data.append("avatar", {
uri: result.uri,
type: result.type,
name: result.fileName,
file: result.file,
});
axios
.post(
`http://localhost:8000/uploadAvatar`,
data,
{
withCredentials: true,
headers: {
"Content-Type": "multipart/form-data",
},
transformRequest: (infos, error) => {
return data;
},
}
)
.catch(function (error) {
console.log(error);
})
.then(function (res) {
console.log(res);
});
Node code :
const storage = multer.diskStorage({
destination: (req, file, callback) => {
callback(null, "avatars")
},
filename: (req, file, callback) => {
callback(null, Date.now() + path.extname(file.originalname));
}
});
const upload = multer({storage: storage});
app.use("/uploadAvatar", upload.single("avatar"), (req, res) => {
res.status(200).json("Image enregistrée !")
})
The path result.file returned by DocumentPicker is a reference to the image in device storage, not the actual image data.
You need to fetch the raw image data as a Blob and upload that Blob data to the server.
let result = await DocumentPicker.getDocumentAsync({});
console.log(result.file);
console.log(result);
const imageData = await new Promise((resolve, reject) => {
const xhr = new XMLHttpRequest();
xhr.onload = function () {
resolve(xhr.response);
};
xhr.onerror = function (e) {
reject(new TypeError("Network request failed"));
};
xhr.responseType = "blob";
xhr.open("GET",result.file, true);
xhr.send(null);
});
const data = new FormData();
data.append("avatar", {
uri: result.uri,
type: result.type,
name: result.fileName,
file: imageData,
});
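Depending on the platform (an assumption on my part, not from the original answer), you may also be able to append the Blob directly and let FormData build the file part instead of wrapping it in an object:
// alternative sketch: append the Blob itself so multer receives real file data
const data = new FormData();
data.append("avatar", imageData, result.name); // third argument is the filename; adjust to whatever your picker returns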
I'm trying to implement an API endpoint that allows for multiple file uploads.
I don't want to write any file to disk, but to buffer them and pipe to S3.
Here's my code for uploading a single file. Once I attempt to post multiple files to the endpoint in route.js, it doesn't work.
route.js - I'll keep this as framework agnostic as possible
import Busboy from 'busboy'
// or const Busboy = require('busboy')
const parseForm = async req => {
return new Promise((resolve, reject) => {
const form = new Busboy({ headers: req.headers })
let chunks = []
form.on('file', (field, file, filename, enc, mime) => {
file.on('data', data => {
chunks.push(data)
})
})
form.on('error', err => {
reject(err)
})
form.on('finish', () => {
const buf = Buffer.concat(chunks)
resolve({
fileBuffer: buf,
fileType: mime,
fileName: filename,
fileEnc: enc,
})
})
req.pipe(form)
})
}
export default async (req, res) => {
// or module.exports = async (req, res) => {
try {
const { fileBuffer, ...fileParams } = await parseForm(req)
const result = uploadFile(fileBuffer, fileParams)
res.status(200).json({ success: true, fileUrl: result.Location })
} catch (err) {
console.error(err)
res.status(500).json({ success: false, error: err.message })
}
}
upload.js
import S3 from 'aws-sdk/clients/s3'
// or const S3 = require('aws-sdk/clients/s3')
export default (buffer, fileParams) => {
// or module.exports = (buffer, fileParams) => {
const params = {
Bucket: 'my-s3-bucket',
Key: fileParams.fileName,
Body: buffer,
ContentType: fileParams.fileType,
ContentEncoding: fileParams.fileEnc,
}
return s3.upload(params).promise()
}
I couldn't find a lot of documentation for this but I think I've patched together a solution.
Most implementations appear to write the file to disk before uploading it to S3, but I wanted to be able to buffer the files and upload to S3 without writing to disk.
I created this implementation that could handle a single file upload, but when I attempted to provide multiple files, it merged the buffers together into one file.
The one limitation I can't seem to overcome is the field name. For example, you could setup the FormData() like this:
const formData = new FormData()
formData.append('file[]', form.firstFile[0])
formData.append('file[]', form.secondFile[0])
formData.append('file[]', form.thirdFile[0])
await fetch('/api/upload', {
method: 'POST',
body: formData,
})
This structure is laid out in the FormData.append() MDN example. However, I'm not certain how to process that on the server. In the end, I set up my FormData() like this:
Form Data
const formData = new FormData()
formData.append('file1', form.firstFile[0])
formData.append('file2', form.secondFile[0])
formData.append('file3', form.thirdFile[0])
await fetch('/api/upload', {
method: 'POST',
body: formData,
})
As far as I can tell, this isn't explicitly wrong, but it's not the preferred method.
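For what it's worth, busboy itself copes fine with repeated field names such as file[]; a sketch (my own variation, untested, using the same files array as the updated parseForm below) is to collect the chunks per file inside the 'file' handler instead of keying them by field name:
form.on('file', (field, file, filename, enc, mime) => {
const chunks = [] // local to this file, so repeated field names don't collide
file.on('data', data => chunks.push(data))
file.on('end', () => {
files.push({ fileBuffer: Buffer.concat(chunks), fileType: mime, fileName: filename, fileEnc: enc })
})
})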
Here's my updated code
route.js
import Busboy from 'busboy'
// or const Busboy = require('busboy')
const parseForm = async req => {
return new Promise((resolve, reject) => {
const form = new Busboy({ headers: req.headers })
const files = [] // create an empty array to hold the processed files
const buffers = {} // create an empty object to contain the buffers
form.on('file', (field, file, filename, enc, mime) => {
buffers[field] = [] // add a new key to the buffers object
file.on('data', data => {
buffers[field].push(data)
})
file.on('end', () => {
files.push({
fileBuffer: Buffer.concat(buffers[field]),
fileType: mime,
fileName: filename,
fileEnc: enc,
})
})
})
form.on('error', err => {
reject(err)
})
form.on('finish', () => {
resolve(files)
})
req.pipe(form) // pipe the request to the form handler
})
}
export default async (req, res) => {
// or module.exports = async (req, res) => {
try {
const files = await parseForm(req)
const fileUrls = []
for (const file of files) {
const { fileBuffer, ...fileParams } = file
const result = await uploadFile(fileBuffer, fileParams)
fileUrls.push({ filename: result.Key, url: result.Location })
}
res.status(200).json({ success: true, fileUrls })
} catch (err) {
console.error(err)
res.status(500).json({ success: false, error: err.message })
}
}
upload.js
import S3 from 'aws-sdk/clients/s3'
// or const S3 = require('aws-sdk/clients/s3')
const s3 = new S3() // instantiate the client used by s3.upload below
export default (buffer, fileParams) => {
// or module.exports = (buffer, fileParams) => {
const params = {
Bucket: 'my-s3-bucket',
Key: fileParams.fileName,
Body: buffer,
ContentType: fileParams.fileType,
ContentEncoding: fileParams.fileEnc,
}
return s3.upload(params).promise()
}
I have a REST API that upload images to s3 and returns the response. The API works perfectly using Postman.
The problem arises when calling the API from the frontend. I am using Angular 6.
I am getting an Error: Unsupported content type: application/json error, although I am setting the headers properly.
Here is my Angular 6 code.
export class UploadComponent {
percentDone: number;
uploadSuccess: boolean;
constructor(private http: HttpClient) {}
upload(file: File) {
this.singleBasicUpload(file);
}
singleBasicUpload(file: File) {
const headers = new HttpHeaders({
'Content-Type': 'multipart/form-data',
});
const options = { headers: headers };
this.http.post(`${BASE_URL}/upload`, file, options).subscribe(response => {
console.log('response', response);
});
}
}
And here is my S3 code in backend Node.js
AWS.config.update({
accessKeyId: constants.IAM_USER_KEY,
secretAccessKey: constants.IAM_USER_SECRET,
});
const BUCKET_NAME = constants.BUCKET_NAME;
const ACL = 'public-read';
const S3 = new AWS.S3();
export async function S3Upload(req, res) {
const chunks = [];
let fname;
let fileType;
let fileEncodingType;
const busboy = new Busboy({
headers: req.headers,
});
busboy.on('file', (fieldname, file, filename, encoding, mimetype) => {
fname = filename.replace(/ /g, '_');
fileType = mimetype;
fileEncodingType = encoding;
file.on('data', data => {
// chunks arrive here; push each one into an array and concat them later
console.log(chunks.length);
chunks.push(data);
});
file.on('end', () => {
console.log(`File [${filename}] Finished`);
});
});
busboy.on('finish', () => {
const userId = UUID();
const params = {
Bucket: BUCKET_NAME, // your s3 bucket name
Key: `${userId}-${fname}`,
Body: Buffer.concat(chunks), // concatenating all chunks
ACL,
ContentEncoding: fileEncodingType, // optional
ContentType: fileType, // required
};
// we are sending buffer data to s3.
S3.upload(params, (err, s3res) => {
if (err) {
res.send({
err,
status: 'error',
});
} else {
return res.send({
data: s3res,
message: 'Image successfully uploaded.',
});
}
});
});
req.pipe(busboy);
}
I am working on an iOS app which sends images and text to my Firebase server using a multipart/form-data URLRequest. In order to process the data in my cloud function, I am using the method mentioned in the documentation to parse the multipart/form-data into JSON format, and here is my code:
const Busboy = require('busboy');
exports.test = functions.https.onRequest((req, res) => {
console.log("start");
console.log(req.rawBody.toString());
if (req.method === 'POST') {
var busboy = new Busboy({ headers: req.headers});
busboy.on('field', (fieldname, val, fieldnameTruncated, valTruncated, encoding, mimetype) => {
console.log('field');
});
busboy.on('finish', function() {
console.log('finish');
res.json({
data: null,
error: null
});
});
req.pipe(busboy);
} else {
console.log('else...');
}
});
However, the above code doesn't seem to work, and here is the output from console:
Function execution started
start
--Boundary-43F22E06-B123-4575-A7A3-6C144C213D09
Content-Disposition: form-data; name="json"
{"name":"Alex","age":"24","friends":["John","Tom","Sam"]}
--Boundary-43F22E06-B123-4575-A7A3-6C144C213D09--
finish
Function execution took 517 ms, finished with status code: 200
As you can see, the on('field') handler never executes. What did I miss?
Also, here is the code in swift for sending httpRequest:
var request = URLRequest(url: myCloudFunctionURL)
request.httpMethod = "POST"
request.setValue("multipart/form-data; boundary=myBoundary", forHTTPHeaderField: "Content-Type")
request.addValue(userToken, forHTTPHeaderField: "Authorization")
request.httpBody = myHttpBody
let session = URLSession.shared
session.dataTask(with: request) { (data, response, requestError) in
// callback
}.resume()
You will have to call busboy.end(req.rawBody); instead of req.pipe(busboy), as described in the example in the documentation. I don't know why .pipe doesn't work, but calling .end produces the same result in a different way.
const Busboy = require('busboy');
exports.helloWorld = functions.https.onRequest((req, res) => {
const busboy = new Busboy({ headers: req.headers });
let formData = {};
busboy.on('field', (fieldname, val, fieldnameTruncated, valTruncated, encoding, mimetype) => {
// We're just going to capture the form data in a JSON document.
formData[fieldname] = val;
console.log('Field [' + fieldname + ']: value: ' + val)
});
busboy.on('finish', () => {
res.send(formData);
});
// The raw bytes of the upload will be in req.rawBody.
busboy.end(req.rawBody);
});
Enjoy this simple Express middleware, which converts any Content-Type: multipart/form-data request into req.body in JSON format :)
const Busboy = require('busboy');
const expressJsMiddleware = (req, res, next) => {
const busboy = new Busboy({ headers: req.headers });
let formData = {};
busboy.on(
"field",
(fieldname, val, fieldnameTruncated, valTruncated, encoding, mimetype) => {
formData = { ...formData, [fieldname]: val };
},
);
busboy.on("finish", () => {
req.body = formData;
next();
});
req.pipe(busboy);
};
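Usage follows the standard Express pattern (a sketch based on my own assumptions about your app setup):
const express = require("express");
const app = express();
// run the busboy middleware only on routes that receive multipart/form-data
app.post("/api/products/add-product", expressJsMiddleware, (req, res) => {
res.json(req.body); // the form fields are now available as plain JSON
});
Note that this middleware uses req.pipe(busboy), which works on a plain Express server; on Cloud Functions you would call busboy.end(req.rawBody) instead, as in the previous answer.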