How to upload files to IPFS using Node.js

I've tried to upload files to IPFS, but the upload doesn't seem to work. I get a hash back after adding the file, but the file isn't accessible at https://ipfs.io/ipfs/<hash>, although I can access it at localhost:8080/ipfs/<hash>.
What am I doing wrong? How do I make files available through https://ipfs.io/ipfs?
Here is my app.js:
const express = require("express");
const fs = require("fs"); // used below for readFileSync/unlink
const app = express();
const ipfsClient = require("ipfs-http-client");
const ipfs = new ipfsClient();
const expFileUpload = require("express-fileupload");

app.use(expFileUpload());

app.post("/upload", (req, res) => {
    let fileObj = {};
    if (req.files.inputFile) {
        const file = req.files.inputFile;
        const fileName = file.name;
        const filePath = __dirname + "/files/" + fileName;
        file.mv(filePath, async (err) => {
            if (err) {
                console.log("Error: failed to save file.");
                return res.status(500).send(err);
            }
            const fileHash = await addFile(fileName, filePath);
            console.log("File hash received -->", fileHash);
            fs.unlink(filePath, (err) => {
                if (err) {
                    console.log("Error: unable to delete file.", err);
                }
            });
            fileObj = {
                file: file,
                name: fileName,
                path: filePath,
                hash: fileHash
            };
            res.render("transfer", { fileObj });
        });
    }
});

const addFile = async (fileName, filePath) => {
    const file = fs.readFileSync(filePath);
    const filesAdded = await ipfs.add({ path: fileName, content: file }, {
        progress: (len) => console.log("Uploading file..." + len)
    });
    console.log(filesAdded);
    const fileHash = filesAdded.cid.string;
    return fileHash;
};

app.listen(3000);
Need help. Thank you.

As @deltab said, your local IPFS node must be reachable from the gateway. It's not possible to push or upload files to the IPFS gateway: when you make an HTTP request to the gateway, it looks up the content on the IPFS network for you.
Your local IPFS node is hosting the data you added to it. If the gateway's IPFS nodes can't connect to your local node, they won't be able to find the data for the hash you requested (unless other nodes are also hosting it... co-hosting FTW \o/).
If your local IPFS node is running, it may be stuck behind a NAT or firewall. If you run ipfs id, you'll see an array of Addresses your node is listening on. If one of them looks like a public IP address, grab the IP and port and check whether the port is open with an online service like https://portchecker.co/
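If you'd rather check this from Node than from the CLI, the same ipfs-http-client used in app.js can report the node's identity. A minimal sketch, assuming the client connects to the default local API address:
const ipfsClient = require("ipfs-http-client");
const ipfs = new ipfsClient();

const showAddresses = async () => {
    // Same information as `ipfs id` on the command line
    const { id, addresses } = await ipfs.id();
    console.log("Peer ID:", id);
    addresses.forEach((addr) => console.log(addr.toString()));
};

showAddresses().catch(console.error);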
The IPFS docs at https://docs.ipfs.io have good articles on:
IPFS and NATs: https://docs.ipfs.io/how-to/nat-configuration/
IPFS gateways: https://docs.ipfs.io/concepts/ipfs-gateway/

Related

How to fix cloudinary error "Must supply api_key"

I'm building an image upload app using multer and cloudinary. I've configured my environment variables properly using dotenv. However, when I try to upload files using Cloudinary I get the error "Must supply api_key".
The Cloudinary API credentials are correctly supplied and provisioned as in the code below:
cloudinary.js
const cloudinary = require('cloudinary');

cloudinary.config({
    cloud_name: process.env.CLOUDINARY_CLOUD_NAME,
    api_key: process.env.CLOUDINARY_API_KEY,
    api_secret: process.env.CLOUDINARY_API_SECRET
})

exports.uploads = (file, folder) => {
    return new Promise(resolve => {
        cloudinary.uploader.upload(file, (result) => {
            resolve({
                url: result.url,
                id: result.public_id
            })
        }, {
            resource_type: "auto",
            folder: folder
        })
    })
}
.env
CLOUDINARY_CLOUD_NAME='my_cloudinary_cloud_name'
CLOUDINARY_API_KEY='my_cloudinary_api_key'
CLOUDINARY_API_SECRET='my_cloudinary_api_secret'
The .env file is also required correctly in my app.js file:
app.js
require('dotenv').config({
path: './app/config/.env'
});
If I console.log any of the Cloudinary config variables inside the cloudinary.js file, I get the expected output; but when I try to use Cloudinary in my upload route, I get the error that I must supply api_key. Can someone help me point out what I'm doing wrong? I don't want to write the Cloudinary config values directly into cloudinary.js, because the code is published to GitHub.
Here is my post route using cloudinary:
const express = require('express'),
    Product = require('../app/models/product'),
    upload = require('../app/utils/multer'),
    cloudinary = require('../app/utils/cloudinary'),
    fs = require('fs'),
    router = express.Router();

router.post('/products', upload.array('image'), async (req, res) => {
    const uploader = async (path) => await cloudinary.uploads(path, 'Images');
    const urls = []
    const files = req.files
    for (const file of files) {
        const { path } = file;
        const newPath = await uploader(path)
        urls.push(newPath)
        fs.unlinkSync(path)
    }
    const name = req.body.name
    const product = new Product({
        name: name,
        imageone: urls[0].url,
        imagetwo: urls[1].url,
        imagethree: urls[2].url,
        imagefour: urls[3].url
    })
    product.save(function (err, prod) {
        if (err) {
            throw err
        } else {
            req.flash('success', "You have added a new product.")
            res.redirect("/dashboard")
        }
    })
})

module.exports = router;
Kóyo, @awesome-bassey!
Following the Cloudinary docs for Node.js and its official repo, I'd advise you to import v2 of the Cloudinary API. If your issue still remains, please share the stack trace with us.
Using v2, the client can be declared as shown in the following sample from the installation setup document (i.e. require('cloudinary').v2):
var cloudinary = require('cloudinary').v2;

cloudinary.config({
    cloud_name: '<YOUR_CLOUD_NAME>',
    api_key: '<YOUR_API_KEY>',
    api_secret: '<YOUR_API_SECRET>',
    secure: true
});

cloudinary.uploader.upload("my_image.jpg",
    function(error, result) {
        console.log(result, error);
    });
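If you'd rather keep the credentials in .env, as in the question, the same v2 setup can read them from process.env. A sketch, assuming dotenv has been loaded before this module is required:
// cloudinary.js: assumes require('dotenv').config(...) already ran in app.js
const cloudinary = require('cloudinary').v2;

cloudinary.config({
    cloud_name: process.env.CLOUDINARY_CLOUD_NAME,
    api_key: process.env.CLOUDINARY_API_KEY,
    api_secret: process.env.CLOUDINARY_API_SECRET,
    secure: true
});

module.exports = cloudinary;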

Send Blob file from HTML form to Express server so it can be uploaded to cloud

So I'm trying to make the HTML form:
<form action="blahblah" encblah="multipart/form-data" whatever>
That's not the problem. I need to make that form send the blob to Express:
app.post('/upload/avatars', async (req, res) => {
    const body = req.body;
    console.log(req.file);
    console.log(body);
    res.send(body);
});
So I can access the blob, create a read stream, pipe it to the cloud, and bam: the file is uploaded without storing anything on the Express server itself.
Is that possible?
If yes, please tell me how.
If no, please tell me other alternatives.
On the client we do a basic multipart form upload. This example is set up for a single image, but you could call uploadFile in sequence for each image.
//client.ts
const uploadFile = (file: File | Blob) => {
    const formData = new FormData();
    formData.append("image", file);
    return fetch("/upload", {
        method: "post",
        body: formData,
    });
};

const handleUpload = (event: any) => {
    return event.target.files.length ? uploadFile(event.target.files[0]) : null;
};
On the server we can use multer to read the file without persisting it to disk.
//server.js
const express = require("express");
const app = express();
const multer = require("multer");
const upload = multer(); // no storage configured, so files stay in memory as buffers

app.post(
    "/upload",
    upload.fields([{ name: "image", maxCount: 1 }]),
    (req, res, next) => {
        console.log("/upload", req.files);
        if (req.files.image.length) {
            const image = req.files.image[0]; // { buffer, originalname, size, ... }
            // Pipe the image.buffer where you want.
            res.send({ success: true, name: image.originalname });
        } else {
            res.send({ success: false, message: "No files sent." });
        }
    }
);
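To actually pipe the in-memory buffer somewhere, you can wrap it in a readable stream first. A minimal sketch using Node's built-in stream module; the destination here is a placeholder for whatever writable stream your cloud SDK provides:
const { Readable } = require("stream");

// `destination` is hypothetical: any writable stream (cloud SDK upload stream, fs, ...)
const pipeBuffer = (buffer, destination) => {
    // Readable.from emits a whole Buffer as a single chunk (Node 12+)
    Readable.from(buffer).pipe(destination);
};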
For larger uploads I recommend socket.io, but this method works for reasonably sized images.
It is possible, but with a lot of traffic it would overwhelm your Express server (in case you are uploading videos or big files); for small images (profile pictures, etc.) you're fine. Either way you can use the Multer npm package.
I'd recommend client-side uploading to e.g. an S3 bucket, which returns a link you can then store.
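For example, with client-side uploads to S3 the server only hands out a short-lived presigned URL and never touches the file itself. A sketch assuming aws-sdk v2 with configured credentials; the bucket and key names are placeholders:
const AWS = require("aws-sdk");
const s3 = new AWS.S3();

app.get("/upload-url", (req, res) => {
    // The browser PUTs the file straight to S3 using this URL
    const url = s3.getSignedUrl("putObject", {
        Bucket: "my-bucket", // placeholder
        Key: `avatars/${Date.now()}.png`, // placeholder
        Expires: 60 // seconds the URL stays valid
    });
    res.send({ url });
});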

Download file from Google Cloud Storage directly to client in Node.js

I want to send a file downloaded from Google Cloud Storage directly to the client, rather than first saving it on my server and then serving the saved copy. Saving it first makes the process slow, because the file is downloaded twice: from Google Cloud to my server, and then from my server to the client.
router.get("/:filename", async (req, res) => {
    try {
        // Grab filename from request parameter
        const fetchURL = req.params.filename;
        const file = await File.findOne({ fetchURL });
        const srcFileName = file.originalname;
        // Download the file from GCS to a local path, then send that local copy
        storage
            .bucket(bucketName)
            .file(srcFileName)
            .download({
                destination: path.join(process.cwd(), "downloads", srcFileName)
            })
            .then(() =>
                res.download(path.join(process.cwd(), "downloads", srcFileName), err =>
                    err ? console.log(err) : null
                )
            )
            .catch(err => res.status(400).json({
                message: err.message
            }));
    } catch (err) {
        res.status(res.statusCode).json({
            message: `There was an error downloading your file. ${err.message}`
        });
    }
});
This works for me in a Node.js + Express server:
const { Storage } = require('@google-cloud/storage');
const storage = new Storage({ projectId, keyFilename });

router.get('/:id', async function (req, res) {
    const fileName = 'test.jpg'; // for example
    const contentType = 'image/jpg'; // for example
    res.writeHead(200, {
        'Content-Disposition': `attachment;filename=${fileName}`,
        'Content-Type': contentType
    });
    await storage
        .bucket('my-bucket')
        .file(`Images/${req.params.id}/${fileName}`)
        .createReadStream() // stream is created
        .pipe(res);
});
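One thing worth adding is an error handler on the read stream, so a missing object doesn't leave the response hanging. A sketch on top of the answer above:
storage
    .bucket('my-bucket')
    .file(`Images/${req.params.id}/${fileName}`)
    .createReadStream()
    .on('error', (err) => {
        // e.g. the object doesn't exist; end the response instead of stalling
        console.error(err);
        res.end();
    })
    .pipe(res);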

Node.js: How to zip an array of images and force the browser to download it

As the title says: I have an app that collects all image URLs from a user-supplied URL. Now I want to be able to zip them up, so that when the user presses the download button, a request containing the array of image URLs is fired to download.js, which processes the download.
In addition, I am using Express and React; Express is launched on port 5000.
Someone sent me a working sample: https://repl.it/@chiKaRau/picture-packer-4-rex-1
However, that code creates its own server on port 3000. I want to process the download on my current port 5000, where Express is already running.
So I changed some code; however, once I press the download button, nothing happens (no error and no download).
Would anyone tell me how to solve this? Thanks.
download.js
const express = require('express');
let router = express.Router();
const fetch = require('node-fetch'); // to get the images
const JSZip = require('jszip'); // to zip them up
const micro = require('micro'); // to serve them

router.post('/download-pics', (req, res) => {
    const files = [
        {
            url: "https://jeremyliberman.com/static/489f2e7cf7df14bc2c8ac2bc8c76aa59/cb864/avatar.png",
            file: 'avatar.png'
        },
        {
            url: "https://jeremyliberman.com/static/489f2e7cf7df14bc2c8ac2bc8c76aa59/cb864/avatar.png",
            file: 'avatar1.png'
        },
        {
            url: "https://jeremyliberman.com/static/489f2e7cf7df14bc2c8ac2bc8c76aa59/cb864/avatar.png",
            file: 'avatar2.png'
        }
    ]

    // Create an in-memory zip file
    var zip = new JSZip();

    // Fetch each image source
    const request = async () => {
        for (const { file, url } of files) {
            const response = await fetch(url);
            const buffer = await response.buffer();
            zip.file(file, buffer);
        }
    }
    request();

    // Set the name of the zip file in the download
    res.setHeader('Content-Disposition', 'attachment; filename="pictures.zip"')

    // Send the zip file
    zip.generateNodeStream({ type: 'nodebuffer', streamFiles: true })
        .pipe(res).on('finish', function () {
            console.log("out.zip written.");
        })
})

// export this router to use in our index.js
module.exports = router;
Function request returns a promise. You need to wrap the rest of the code after request() in then(() => {})
request()
    .then(() => {
        // Set the name of the zip file in the download
        res.setHeader('Content-Disposition', 'attachment; filename="pictures.zip"')
        // Send the zip file
        zip.generateNodeStream({ type: 'nodebuffer', streamFiles: true })
            .pipe(res).on('finish', function () {
                console.log("out.zip written.");
            })
    })
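Equivalently, you can make the route handler itself async and await the fetches before streaming the zip. A sketch of the same route, with files defined as above:
router.post('/download-pics', async (req, res) => {
    const zip = new JSZip();
    for (const { file, url } of files) {
        const response = await fetch(url);
        zip.file(file, await response.buffer()); // node-fetch v2 exposes buffer()
    }
    res.setHeader('Content-Disposition', 'attachment; filename="pictures.zip"');
    zip.generateNodeStream({ type: 'nodebuffer', streamFiles: true }).pipe(res);
});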

Nodejs upload base64 image to azure blob storage using .createBlockBlobFromLocalFile()

I want to upload the profile picture of a user, sent from the web app and the mobile app in Base64 form.
On the POST request they need to send a JSON body that looks something like this:
{
    "name": "profile-pic-123.jpg",
    "file": "data:image/jpeg;base64,/9j/4AAQSkZJRgABAQAAAQABAAD/2wCEAAkGBxQTEhIUEhIUFBUV…K9rk8hCAEkjFMUYiEAI+nHIpsQh0AkisDYRTOiCAbWVtgCtI6IlkHh7LDTQXLH0EIQBj//2Q==" // the base64 image
}
Now on the server side, using Node and Express, I used an npm module called azure-storage, which offers a nice way of uploading files to Azure Blob Storage through its web service.
But there's something I cannot understand here. Below is part of the code from my controller. I successfully created all the necessary connections, keys, and whatnot to get a working blobService:
controllers.upload = function(req, res, next) {
    // ...
    // generated some sastoken up here
    // etc.
    // ...
    var uploadOptions = {
        container: 'mycontainer',
        blob: req.body.name, // I'm not sure about this
        path: req.body.file // I'm not sure about this either
    }
    sharedBlobService.createBlockBlobFromLocalFile(uploadOptions.container, uploadOptions.blob, uploadOptions.path, function(error, result, response) {
        if (error) {
            res.send(error);
        }
        console.log("result", result);
        console.log("response", response);
    });
}
I'm getting this error:
{
    "errno": 34,
    "code": "ENOENT",
    "path": "iVBORw0KGgoAAAANSUhEUgAAABgAAAAYCAIAAAB..."
}
If you use the JavaScript SDK v12, you can use this sample code. It's just that simple. I have it implemented in a function, and it works great when all I need is to trigger it from an HTTP event.
index.js
const file = await require('./file')();

uploadOptions = {
    container: 'mycontainer',
    blob: req.body.name,
    text: req.body.file
}

const fileUploader = await file(uploadOptions.text, uploadOptions.blob, uploadOptions.container);
You can use a separate module for your logic and call it from the index.js above:
file.js
const { BlobServiceClient } = require("@azure/storage-blob");
const blobServiceClient = BlobServiceClient.fromConnectionString(process.env.AZURE_STORAGE_CONNECTION_STRING);
const Promise = require('bluebird');

module.exports = Promise.method(async function() {
    return async function (data, fileName, container) {
        const containerClient = await blobServiceClient.getContainerClient(container);
        const blockBlobClient = containerClient.getBlockBlobClient(fileName);
        // Strip the data-URI prefix and decode the base64 payload
        const matches = data.match(/^data:([A-Za-z-+\/]+);base64,(.+)$/);
        const buffer = Buffer.from(matches[2], 'base64'); // Buffer.from instead of the deprecated new Buffer()
        return await blockBlobClient.upload(buffer, buffer.byteLength);
    };
});
In this case you should not use createBlockBlobFromLocalFile. Instead, use createBlockBlobFromText, because you are not uploading a local file but content from the request body.
Here is the code:
var uploadOptions = {
    container: 'mycontainer',
    blob: req.body.name,
    text: req.body.file
}

sharedBlobService.createBlockBlobFromText(uploadOptions.container,
    uploadOptions.blob,
    uploadOptions.text,
    {
        contentType: 'image/jpeg',
        contentEncoding: 'base64'
    },
    function(error, result, response) {
        if (error) {
            res.send(error);
        }
        console.log("result", result);
        console.log("response", response);
    });
The blob is just the file name, "profile-pic-123.jpg" in this case, and path is the local path to a file. Since you are not storing the file locally on the server side, path is meaningless here.
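Depending on how the client sends the payload, you may also want to strip the data:image/jpeg;base64, prefix before calling createBlockBlobFromText, otherwise the prefix ends up inside the stored blob. A sketch reusing the regex from the v12 answer above:
// Keep only the raw base64 payload from a data URI
const matches = req.body.file.match(/^data:([A-Za-z-+\/]+);base64,(.+)$/);
const base64Data = matches ? matches[2] : req.body.file;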
