I've been trying to save uploaded image files to IPFS in Node.js. It seems Pinata pins them, but the files are pretty much gibberish (after downloading, the images are broken).
My code:
// Node.js route.
exports.postImage = async (req, res, next) => {
  // Using multer to get the file.
  fileUploadMiddleware(req, res, async (err) => {
    // Getting a bunch of data from the query string.
    let meta = {
      origin,
      originid,
      context,
      ownerid,
      format
    } = req.query;
    if (!meta.format || !req.files) {
      return next(new ErrorResponse("File format not specified", 404));
    }
    if (!meta.originid) {
      meta.originid = uuidv4();
    }
    // NOTE: is this the right way to get the data of the file?
    const buffer = req.files[0].buffer;
    const filename = `${meta.origin}_${meta.originid}.${meta.format}`;
    let stream;
    try {
      stream = Readable.from(buffer);
      // HACK to make Pinata work.
      stream.path = filename;
    } catch (e) {
      logger.logError(e);
      return false;
    }
    const options = {
      pinataMetadata: {
        name: filename,
        keyvalues: {
          context: meta.context,
          ownerid: meta.ownerid
        }
      },
      pinataOptions: {
        cidVersion: 0
      }
    };
    try {
      const result = await pinata.pinFileToIPFS(stream, options);
      console.log("SUCCESS ", result);
      return result;
    } catch (e) {
      logger.logError(e);
      return null;
    }
    // NOTE: unreachable -- both branches above return before this line runs.
    res.status(200).json({
      success: true,
      data: 'You got access'
    });
  });
};
So basically I'm creating a stream from the uploaded file's buffer and sending it off to Pinata. Where am I going wrong?
const buffer = req.files[0].buffer;
The buffer property is only available if you used memoryStorage. It is not available with diskStorage, because that saves the file locally instead:
const storage = multer.memoryStorage()
const upload = multer({ storage: storage })
Also, I think it's not req.files[0] but rather:
const buffer = req.file.buffer;
After I get the buffer, I convert it to form data using the form-data npm package:
import FormData from "form-data";
const formData = new FormData();
formData.append("file", buffer, {
contentType,
filename: fileName + "-" + uuidv4(),
});
Then you send a POST request to Pinata:
const url = `https://api.pinata.cloud/pinning/pinFileToIPFS`;
const fileRes = await axios.post(url, formData, {
maxBodyLength: Infinity,
headers: {
// formData.getBoundary() is specific to the form-data npm package; the native JavaScript FormData does not have this method
"Content-Type": `multipart/form-data; boundary=${formData.getBoundary()}`,
pinata_api_key: pinataApiKey,
pinata_secret_api_key: pinataSecretApiKey,
},
});
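Putting the pieces together, here is a minimal sketch of a corrected route (untested; identifiers such as ErrorResponse, uuidv4, pinataApiKey and the field name "image" are assumptions carried over from the question and this answer):
// Hypothetical end-to-end route: multer memoryStorage exposes the upload as
// req.file.buffer, which is appended to a form-data body and posted to
// Pinata's pinFileToIPFS endpoint, replacing the Readable/stream.path hack.
const axios = require('axios');
const multer = require('multer');
const FormData = require('form-data');

const upload = multer({ storage: multer.memoryStorage() });

exports.postImage = [
  upload.single('image'),
  async (req, res, next) => {
    const { origin, originid, context, ownerid, format } = req.query;
    if (!format || !req.file) {
      return next(new ErrorResponse("File format not specified", 404));
    }
    const filename = `${origin}_${originid || uuidv4()}.${format}`;
    const formData = new FormData();
    // Append the raw buffer; form-data needs an explicit filename here.
    formData.append('file', req.file.buffer, { filename });
    try {
      const { data } = await axios.post(
        'https://api.pinata.cloud/pinning/pinFileToIPFS',
        formData,
        {
          maxBodyLength: Infinity,
          headers: {
            'Content-Type': `multipart/form-data; boundary=${formData.getBoundary()}`,
            pinata_api_key: pinataApiKey,
            pinata_secret_api_key: pinataSecretApiKey,
          },
        }
      );
      res.status(200).json({ success: true, data });
    } catch (e) {
      next(e);
    }
  },
];
Express accepts an array of handlers, so the multer middleware and the route logic stay together in one export.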
Do the Kuzzle or Minio development teams have a working example of using the Kuzzle S3 plugin with Minio? I have the following, but my file isn't being uploaded and the pre-signed URL refers to https://your-s3-bucket.s3.eu-west-3.amazonaws.com/
const fs = require("fs");
const fsPromises = require('fs').promises;
// Create a JS File object instance from a local path using Node.js
const fileObject = require("get-file-object-from-local-path");
// Promise based HTTP client for the browser and node.js
const axios = require('axios');
// Loads the Kuzzle SDK modules
const {
Kuzzle,
WebSocket
} = require('kuzzle-sdk');
var start = new Date();
const webSocketOptionsObject = {
"autoReconnect": true,
"ssl": true,
"port": 443
};
const kuzzle = new Kuzzle(new WebSocket('myurl.com', webSocketOptionsObject));
const credentials = { username: 'xyz123', password: 'fithenmgjtkj' };
const path = __dirname + "\\" + "yellow_taxi_data.csv"; // the "\\" is for Windows path
var fileData = {};
// check file exists
fs.access(path, fs.constants.F_OK, (err) => {
if (err) {
console.error(err)
return
}
fileData = new fileObject.LocalFileData(path);
// Adds a listener to detect connection problems
kuzzle.on('networkError', error => {
console.error('Network Error:', error);
});
});
const connectToKuzzle = async () => {
// Connects to the Kuzzle server
await kuzzle.connect();
return await kuzzle.auth.login('local', credentials);
// console.log('jwt auth token: ', jwt);
}
const disConnectFromKuzzle = async () => {
console.log('Disconnected from Kuzzle');
kuzzle.disconnect();
var time = new Date() - start;
// sec = Math.floor((time/1000) % 60);
console.log('Execution time in milliseconds: ', time);
}
const presignedURL = async () => {
// Get a Presigned URL
const result = await kuzzle.query({
controller: 's3/upload',
action: 'getUrl',
uploadDir: 'proxybucket', // directory name inside the Bucket specified in the s3 plugin bucket name
filename: fileData.name
});
console.log("result: ", result);
return result;
}
const loadFileStream = async () => {
console.log('getting file: ', path);
let targetFile = null;
await fs.promises.readFile(path)
.then(function (result) {
console.log("file loaded------", result.length);
targetFile = result;
})
.catch(function (error) {
console.log(error);
return;
});
return targetFile;
}
const kuzzleValidate = async (kuzzleResource) => {
// console.log("kuzzleResource: ", kuzzleResource.result.fileKey);
// validate
// Validate and persist a previously uploaded file.
// https://docs.kuzzle.io/official-plugins/s3/2/controllers/upload/validate/
const Presult = await kuzzle.query({
// Kuzzle API params
"controller": "s3/upload",
"action": "validate",
// File key in S3 bucket
"fileKey": kuzzleResource.result.fileKey
});
console.log('validate: ', Presult.result.fileUrl);
}
const uploadFile = async (fileBuffer, kuzzleResource, jwt) => {
// options at https://github.com/axios/axios
const axiosOptions = {
headers: {
'Content-Type': fileData.type
},
maxBodyLength: 200000000 // 200,000,000 bytes 200 Mb
};
// PUT the fileBuffer to the Kuzzle S3 endpoint
// https://github.com/axios/axios
axios.defaults.headers.common['Authorization'] = jwt;
const response = await axios.put(kuzzleResource.result.uploadUrl, fileBuffer, axiosOptions)
.then((response) => {
console.log('file uploaded......');
})
.catch(function (error) {
console.log("File upload error: ", error);
return;
});
return "Upload successful";
}
if (fileData) {
connectToKuzzle().then((jwt) => {
console.log(jwt);
// upload(jwt);
presignedURL().then((kuzzleResource) => {
loadFileStream().then((fileBuffer) => {
uploadFile(fileBuffer, kuzzleResource, jwt).then((doneMessage) => {
console.log("doneMessage: ", doneMessage);
}).then(() => {
kuzzleValidate(kuzzleResource).then(() => {
disConnectFromKuzzle();
});
});
});
});
});
}
I'm looking to upload to a Minio bucket and obtain a pre-signed URL so I can store it in a document later.
You can change the endpoint configuration to point to a different S3-compatible endpoint, which can be a Minio one.
This configuration can be changed under the plugins.s3.endpoint key. You should also force path-style access, since Minio serves buckets on URL paths rather than on subdomains.
Example:
app.config.set('plugins.s3.endpoint', 'https://minio.local');
app.config.set('plugins.s3.s3ClientOptions.s3ForcePathStyle', true);
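For reference, a minimal sketch of where these calls could live in a Kuzzle application's startup file (an assumption that you apply the overrides before app.start(); adjust names to your deployment):
// Hypothetical startup: apply the S3 plugin overrides before starting Kuzzle.
const { Backend } = require('kuzzle');

const app = new Backend('my-app');

app.config.set('plugins.s3.endpoint', 'https://minio.local');
app.config.set('plugins.s3.s3ClientOptions.s3ForcePathStyle', true);

app.start().then(() => app.log.info('Kuzzle started with a Minio S3 endpoint'));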
I'm trying to use Multer to upload an array of images. On the client side I have a FormData field called pictures.
The pictures array, built from react-native-image-picker:
const [pictures, setPictures] = useState([]);
const imagePickerCallBack = data => {
const picturesData = [...pictures];
const index = picturesData.length;
const image = {
image: data.uri,
fileName: data.fileName,
type: data.type,
index: index,
};
picturesData.push(image);
setPictures(picturesData);
setLoad(false);
};
Step 1 - Create formData with all images:
const data = new FormData();
pictures.forEach(pic => {
data.append('pictures', {
fileName: pic.fileName,
uri: pic.image,
type: pic.type,
});
});
const headers = {
'Content-Type': 'multipart/form-data',
'x-access-token': token,
};
const diaryUpdatePost = await post(`diary/uploadPictures/${diary}`, data, {
headers,
});
Step 2 - Get the request on the server side. I'm setting up Multer and the router:
const router = express.Router();
const multer = require('multer');
const storage = multer.diskStorage({
destination(req, file, cb) {
cb(null, 'uploads/');
},
filename(req, file, cb) {
cb(null, `${file.fieldname}-${Date.now()}`);
},
});
const upload = multer({ storage, limits: { fieldSize: 25 * 1024 * 1024 } });
// Multer with the same FormData (client)
router.post('/uploadPictures/:name', upload.array('pictures'), diaryController.uploadDiaryPictures);
And finally my diaryController, where I need to get all the files:
exports.uploadDiaryPictures = async (req, res) => {
// Logging []. I can't access the files from here
console.log(`files ${req.files}...`);
};
I already tried express-fileupload, but req.files returns undefined. Any ideas? Thanks.
You need to give a count of files you expect to upload:
upload.array('pictures', <number_of_pictures>)
Or if it is allowed to be any number:
upload.any()
You should also add the file itself to your form data
data.append('pictures', {
name: pic.fileName,
file: pic.image,
type: pic.type,
});
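For completeness, a minimal sketch of what the controller can then read from req.files (the response shape is an assumption, not from the original question):
// Hypothetical controller: with upload.array('pictures', N) wired into the
// route, multer populates req.files with one object per uploaded picture.
exports.uploadDiaryPictures = async (req, res) => {
  // With diskStorage, each entry carries fieldname, originalname, path,
  // size and mimetype.
  const files = req.files.map(f => ({ name: f.originalname, path: f.path }));
  res.json({ count: files.length, files });
};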
None of the answers here helped. The solution for me was to iteratively append each file object from the files array to the same field name given in Multer, instead of appending the files array itself to that field name.
So from this:
export const addFiles= createAsyncThunk(
"addFiles",
async (payload: any, thunkApi) => {
const formData = new FormData();
// Here was the problem -- I was appending the array itself
// to the "files" field
formData.append("files", payload.files);
formData.append("data", JSON.stringify(payload?.data || {}));
const response = await axios.post('/user/products/files', formData);
if(response){
return response;
}
return thunkApi.rejectWithValue("");
}
);
I did this:
export const addFiles= createAsyncThunk(
"addFiles",
async (payload: any, thunkApi) => {
const formData = new FormData();
// The following loop was the solution
for (const file of payload.files) {
formData.append("files", file);
}
formData.append("data", JSON.stringify(payload?.data || {}));
const response = await axios.post('/user/products/files', formData);
if(response){
return response;
}
return thunkApi.rejectWithValue("");
}
);
This was my Multer configuration:
multer({ dest: "/uploads" }).array("files")
The files posted to my endpoint were then available to me at:
req.files
PS: Although the accepted answer kind of does this, it does not mention that you cannot append the entire array at once, which was the main problem for me.
var title = "this is title";
var content = "this is content";
const config = { headers: { 'Accept': 'application/json', 'Content-Type': 'multipart/form-data' } };
const form = new FormData()
let file = event.target.files[0]
form.append('file', file)
form.append('title', title)
form.append('content', content)
Axios.post("http://localhost:3001/article/get/123", form, config).then((res) => {
  console.log(res.data)
})
In Node I have used Multer for uploading an image or anything else.
Below is the code for the upload, which I have used as middleware.
const util = require("util");
const path = require("path");
const multer = require("multer");
const storage = multer.diskStorage({
destination: function (req, file, cb) {
cb(null, "./Uploads") // folder path where to upload
},
filename: function (req, file, cb) {
cb(null, file.originalname + "-" + Date.now() + path.extname(file.originalname))
}
});
const maxSize = 1 * 20000 * 20000; // file size validation
const uploadFiles = multer({ storage: storage, limits: { fileSize: maxSize } }).array("myfiles", 10); // key name should be myfiles in postman while upload
const uploadFilesMiddleware = util.promisify(uploadFiles);
module.exports = uploadFilesMiddleware;
Below is the function I created in the controller for the upload and file check.
fileUpload = async (req, res) => {
try {
let userCode = req.headers.user_code;
await upload(req, res);
if (req.files.length <= 0) {
return res.status(httpStatusCode.OK).send(responseGenerators({}, httpStatusCode.OK, 'Kindly select a file to upload..!!', true));
}
let response = [];
for (const element of req.files) {
let data = await service.addFileData(element, userCode);
response.push(data); // for file path to be stored in database
}
if (response && response.length > 0) {
return res.status(httpStatusCode.OK).send(responseGenerators(response, httpStatusCode.OK, 'File uploaded successfully..!!', false));
} else {
return res.status(httpStatusCode.OK).send(responseGenerators({}, httpStatusCode.OK, 'Failed to upload file kindly try later..!!', true));
}
} catch (error) {
logger.warn(`Error while fetching post data. Error: %j %s`, error, error)
return res.status(httpStatusCode.INTERNAL_SERVER_ERROR).send(responseGenerators({}, httpStatusCode.INTERNAL_SERVER_ERROR, 'Error while uploading file data', true))
}
}
And the route goes like this:
router.post('/upload/file', fileUploadController.fileUpload);
And be sure to use the same field name ("myfiles") in Postman when uploading as in the middleware.
The above code is in React. I want to do the same in Node.js, where the file will be uploaded from the public folder. The main issue is how to get an image file in the same format that event.target.files[0] gives us on the frontend.
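A minimal sketch of one way to do that in Node.js (the ./public path, field names and URL are assumptions; a fs.createReadStream stands in for the browser File object):
// Hypothetical Node.js equivalent of the browser snippet above: read a file
// from the public folder and post it as multipart form data.
const fs = require('fs');
const path = require('path');
const axios = require('axios');
const FormData = require('form-data');

async function uploadFromPublic(fileName) {
  const filePath = path.join(__dirname, 'public', fileName);
  const form = new FormData();
  // The readable stream plays the role of event.target.files[0].
  form.append('file', fs.createReadStream(filePath), { filename: fileName });
  form.append('title', 'this is title');
  form.append('content', 'this is content');
  const res = await axios.post('http://localhost:3001/article/get/123', form, {
    headers: form.getHeaders(),
  });
  console.log(res.data);
}

uploadFromPublic('example.png').catch(console.error);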
I want to stream a file upload request in multipart/form-data to another server and change some fields name at the same time.
I don't want to store temporarily a file on disk and don't want to store the file completely in memory either.
I tried to use multer, busboy and multiparty. I think I got closer by using custom Transform streams, but it is not working yet.
const express = require('express');
const request = require('request');
const { Transform } = require('stream');
const router = express.Router();
class TransformStream extends Transform {
_transform(chunk, encoding, callback) {
// here I tried to manipulate the chunk
this.push(chunk);
callback();
}
_flush(callback) {
callback();
}
}
router.post('/', function pipeFile(req, res) {
const transformStream = new TransformStream();
req.pipe(transformStream).pipe(request.post('http://somewhere.com'));
res.sendStatus(204);
});
I tried to manipulate the chunks in _transform without success (EPIPE). It feels quite hacky; are there any better solutions?
Here is a solution using replacestream along with content-disposition; the multipart body streams straight through, so the file is never written to disk or held fully in memory.
const replaceStream = require('replacestream');
const contentDisposition = require('content-disposition');
router.post('/', function pipeFile(req, res) {
let changeFields = replaceStream(/Content-Disposition:\s+(.+)/g, (match, p1) => {
// Parse header
let {type, parameters} = contentDisposition.parse(p1);
// Change the desired field
parameters.name = "foo";
// Prepare replacement
let ret = `Content-Disposition: ${type}`;
for(let key in parameters) {
ret += `; ${key}="${parameters[key]}"`;
}
return ret;
})
req.pipe(changeFields)
.pipe(request.post('http://somewhere.com'))
.on('end', () => {
res.sendStatus(204);
});
});
This worked for a single file multipart upload using express, multiparty, form-data, pump and got.
const stream = require('stream');
const express = require('express');
const multiparty = require("multiparty");
const got = require("got");
const FormData = require('form-data');
const pump = require('pump');
const app = express();
app.post('/upload', (req, res) => {
const url = "<<multipart image upload endpoint>>";
var form = new multiparty.Form();
form.on("part", function(formPart) {
var contentType = formPart.headers['content-type'];
var formData = new FormData();
formData.append("file", formPart, {
filename: formPart.filename,
contentType: contentType,
knownLength: formPart.byteCount
});
const resultStream = new stream.PassThrough();
try {
// Pipe the formdata to the image upload endpoint stream and the result to the result stream
pump(formData, got.stream.post(url, {headers: formData.getHeaders(), https:{rejectUnauthorized: false}}), resultStream, (err) =>{
if(err) {
res.send(err);
}
else {
// Pipe the result of the image upload endpoint to the response when there are no errors.
resultStream.pipe(res);
}
resultStream.destroy();
});
}
catch(err) {
resultStream.destroy();
console.log(err);
}
});
form.on("error", function(error){
console.log(error);
})
form.parse(req);
});
I am trying to upload chunks of base64 to a Node.js server and save those chunks into one file.
let chunks = [];
app.post('/api', (req, res) => {
let {blob} = req.body;
//converting chunks of base64 to buffer
chunks.push(Buffer.from(blob, 'base64'));
res.json({gotit:true})
});
app.post('/finish', (req, res) => {
let buf = Buffer.concat(chunks);
fs.writeFile('finalvideo.webm', buf, (err) => {
console.log('Ahh....', err)
});
console.log('SAVED')
res.json({save:true})
});
The problem with the above code is that the video is not playable, and I don't know why. Am I really doing something wrong? I've also tried writable streams, and that is not working either.
UPDATE - I
Instead of sending blobs, I've now implemented sending binary, but I am facing a problem: TypeError: First argument must be a string, Buffer, ArrayBuffer, Array, or array-like object.
client.js
postBlob = async blob => {
let arrayBuffer = await new Response(blob).arrayBuffer();
let binary = new Uint8Array(arrayBuffer)
console.log(binary) // logging typed Uint8Array
axios.post('/api',{binary})
.then(res => {
console.log(res)
})
};
server.js
let chunks = [];
app.post('/api', (req, res) => {
let {binary} = req.body;
let chunkBuff = Buffer.from(binary) // this line throws the error
chunks.push(chunkBuff);
console.log(chunkBuff)
res.json({gotit:true})
});
//Somehow combine those chunks into one file
app.post('/finish', (req, res) => {
console.log('Combinig the files',chunks.length);
let buf = Buffer.concat(chunks);
console.log(buf) //empty buff
fs.writeFile('save.webm', buf, (err) => {
console.log('Ahh....', err)
});
res.json({save:true})
});
UPDATE - II
I am able to receive the binary chunks and append them to a stream, but in the final video only the first chunk plays; I don't know what happened to the other chunks, and the video just ends.
code
const writeMyStream = fs.createWriteStream(__dirname+'/APPENDED.webm', {flags:'a', encoding:null});
app.post('/api', (req, res) => {
let {binary} = req.body;
let chunkBuff = Buffer.from(new Uint8Array(binary));
writeMyStream.write(chunkBuff);
res.json({gotit:true})
});
UPDATE - III
My client code. Note: the other ways I tried to upload the blobs are commented out:
customRecordStream = stream => {
let recorder = new MediaStreamRecorder(stream);
recorder.mimeType = 'video/webm;codecs=vp9';
recorder.ondataavailable = this.postBlob
recorder.start(INT_REC)
};
postBlob = async blob => {
let arrayBuffer = await new Response(blob).arrayBuffer();
let binary = new Uint8Array(arrayBuffer)
axios.post('/api',{binary})
.then(res => {
console.log(res)
})
// let binaryUi8 = new Uint8Array(arrayBuffer);
// let binArr = Array.from(binaryUi8);
// // console.log(new Uint8Array(arrayBuffer))
//
// console.log(blob);
// console.log(binArr)
// let formData = new FormData();
// formData.append('fname', 'test.webm')
// formData.append("file", blob);
//
// console.log(formData,'Checjk Me',blob)
// axios({
// method:'post',
// url:'/api',
// data:formData,
// config: { headers: {'Content-Type': 'multipart/form-data' }}
// }).then(res => {
// console.log(res,'FROM SERBER')
//
// })
//
//
// .then(res => {
// console.log(res)
// })
// this.blobToDataURL(blob, (blobURL) => {
//
// axios.post('/api',{blob:blobURL})
// .then(res => {
// console.log(res)
// })
// })
};
I was able to get this working by converting to base64 encoding on the front-end with the FileReader API. On the backend, create a Buffer from the data chunk sent and write it to a file stream. Some key things about my code sample:
I'm using fetch because I didn't want to pull in axios.
When using fetch, you have to make sure you use bodyParser on the backend
I'm not sure how much data you're collecting in your chunks (i.e. the duration value passed to the start method on the MediaRecorder object), but you'll want to make sure your backend can handle the size of the data chunk coming in. I set mine really high to 50MB, but this may not be necessary.
I never close the write stream explicitly... you could potentially do this in your /final route (see the sketch after the backend example). Otherwise, createWriteStream defaults to autoClose, so the Node process will do it automatically.
Full working example below:
Front End:
const mediaSource = new MediaSource();
mediaSource.addEventListener('sourceopen', handleSourceOpen, false);
let mediaRecorder;
let sourceBuffer;
function customRecordStream(stream) {
  // should actually check to see if the given mimeType is supported on the browser here.
  let options = { mimeType: 'video/webm;codecs=vp9' };
  mediaRecorder = new MediaRecorder(stream, options);
  mediaRecorder.ondataavailable = postBlob;
  mediaRecorder.start(INT_REC);
};
function postBlob(event){
if (event.data && event.data.size > 0) {
sendBlobAsBase64(event.data);
}
}
function handleSourceOpen(event) {
sourceBuffer = mediaSource.addSourceBuffer('video/webm; codecs="vp8"');
}
function sendBlobAsBase64(blob) {
const reader = new FileReader();
reader.addEventListener('load', () => {
const dataUrl = reader.result;
const base64EncodedData = dataUrl.split(',')[1];
console.log(base64EncodedData)
sendDataToBackend(base64EncodedData);
});
reader.readAsDataURL(blob);
};
function sendDataToBackend(base64EncodedData) {
const body = JSON.stringify({
data: base64EncodedData
});
fetch('/api', {
method: 'POST',
headers: {
'Content-Type': 'application/json',
},
body
}).then(res => {
return res.json()
}).then(json => console.log(json));
};
Back End:
const fs = require('fs');
const path = require('path');
const express = require('express');
const bodyParser = require('body-parser');
const app = express();
const server = require('http').createServer(app);
app.use(bodyParser.urlencoded({ extended: true }));
app.use(bodyParser.json({ limit: "50MB", type:'application/json'}));
app.post('/api', (req, res) => {
try {
const { data } = req.body;
const dataBuffer = Buffer.from(data, 'base64'); // Buffer.from() replaces the deprecated new Buffer()
const fileStream = fs.createWriteStream('finalvideo.webm', {flags: 'a'});
fileStream.write(dataBuffer);
console.log(dataBuffer);
return res.json({gotit: true});
} catch (error) {
console.log(error);
return res.json({gotit: false});
}
});
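Regarding the note above about never closing the write stream: a minimal sketch of a /final route that ends it explicitly (the route name and the module-level stream handle are assumptions; the example above creates a new stream per request instead):
// Hypothetical variant: keep one write stream open across chunk uploads and
// close it explicitly when the client signals the end of the recording.
const videoStream = fs.createWriteStream('finalvideo.webm', { flags: 'a' });

app.post('/api', (req, res) => {
  videoStream.write(Buffer.from(req.body.data, 'base64'));
  res.json({ gotit: true });
});

app.post('/final', (req, res) => {
  // end() flushes pending writes and closes the file descriptor.
  videoStream.end(() => res.json({ closed: true }));
});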
Inspired by @willascend's answer:
Backend-side:
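// Note: express.raw() defaults to type 'application/octet-stream' with a
// 100kb body limit; pass { limit: '50mb' } if your chunks are larger.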
app.use(express.raw());
app.post('/video-chunck', (req, res) => {
fs.createWriteStream('myvideo.webm', { flags: 'a' }).write(req.body);
res.sendStatus(200);
});
Frontend-side:
mediaRecorder.ondataavailable = event => {
if (event.data && event.data.size > 0) {
fetch(this.serverUrl + '/video-chunck', {
method: 'POST',
headers: {'Content-Type': 'application/octet-stream'},
body: event.data
});
}
};
My express version is 4.17.1
I faced the same problem today.
As a solution, on the back end I used fs.appendFile:
fs.appendFile(Path, rawData, function (err) {
if (err) throw err;
console.log('Chunk saved!');
})
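For context, a minimal sketch of the route around that call (the path and the base64 body field are assumptions):
// Hypothetical Express route wrapping the fs.appendFile call above; each
// chunk is assumed to arrive as base64 in a JSON body and is appended to
// one growing file.
app.post('/video-chunk', (req, res) => {
  const rawData = Buffer.from(req.body.data, 'base64');
  fs.appendFile('finalvideo.webm', rawData, (err) => {
    if (err) return res.status(500).json({ saved: false });
    console.log('Chunk saved!');
    res.json({ saved: true });
  });
});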