I am trying to upload a file to IPFS. My code is below:
var express = require('express')
var fs = require('fs')
const { create, globSource } = require('ipfs-http-client')
const ipfs = create()
....
const filesAdded = await ipfs.add(
  { path: fileName, content: file },
  {
    progress: (len) => console.log('Uploading file...' + len),
  },
)
console.log(filesAdded)
const fileHash = filesAdded.cid.toString()
This code returns a hash value, but I can't see my file (an image) at https://ipfs.io/ipfs/{hash}.
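A note on what is usually going on here: the add itself typically succeeds, and the problem is that the public gateway can only serve the file once it can reach a node that provides the block. A local daemon behind NAT may never be found, or only after a long delay. A quick sanity check is to read the CID back from your own node and pin it so it isn't garbage-collected. A minimal sketch, assuming the same ipfs client and fileHash as above:
// Read the content back from the local node to confirm the block exists
const chunks = []
for await (const chunk of ipfs.cat(fileHash)) {
  chunks.push(chunk)
}
console.log('Read back ' + Buffer.concat(chunks).length + ' bytes')
// Pin the CID so this node keeps announcing it to the network
await ipfs.pin.add(fileHash)
If the content reads back fine locally, the usual fixes are keeping the daemon running and reachable, or pinning the CID to a remote pinning service so a publicly reachable node provides it.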
I'm playing around with Google Vision AutoML in React, and I'm trying to figure out a way to change fs.readFileSync to read from an online URL. I've tried https, request, and even url in Node.js, but I still can't figure out a way to make it read from a URL source. Any help is appreciated, thank you.
const projectId = "abc"
const location = "abc"
const modelId = "abc"
const filePath = "./test-pics/7.jpg"
const { PredictionServiceClient } = require("@google-cloud/automl").v1
const https = require("https")
const request = require("request")
const url = require("node:url")
const fs = require("fs")
const client = new PredictionServiceClient({
  keyFilename: "./vision-private-key.json",
})
const imageURL = "https://cdn.filestackcontent.com/IJ0kViHQQyiwQTVaFH66"
// Read the file content for prediction.
const content = fs.readFileSync(filePath)
// Neither attempt below yields the image bytes: https.get() returns a
// ClientRequest and request.get() returns a stream, not a Buffer.
const content1 = https.get(imageURL)
const content2 = request.get(
  "https://cdn.filestackcontent.com/IJ0kViHQQyiwQTVaFH66"
)
async function predict() {
  const request = {
    name: client.modelPath(projectId, location, modelId),
    payload: {
      image: {
        imageBytes: content,
      },
    },
  }
  const [response] = await client.predict(request)
  for (const annotationPayload of response.payload) {
    console.log(`type of vehicle: ${annotationPayload.displayName}`)
    const carType = annotationPayload.displayName
    console.log(carType)
  }
}
predict()
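Since fs.readFileSync just yields a Buffer and the payload only needs the raw bytes, one approach is to download the URL into a Buffer before calling predict. https.get does not return the body; you have to collect the response chunks yourself. A minimal sketch, reusing imageURL and client from above and assuming the URL serves the image directly (no redirect):
// Collect an HTTP response body into a single Buffer
function fetchImageBytes(url) {
  return new Promise((resolve, reject) => {
    https.get(url, (res) => {
      const chunks = []
      res.on("data", (chunk) => chunks.push(chunk))
      res.on("end", () => resolve(Buffer.concat(chunks)))
    }).on("error", reject)
  })
}
async function predictFromUrl() {
  // Download the image bytes instead of reading them from disk
  const content = await fetchImageBytes(imageURL)
  const request = {
    name: client.modelPath(projectId, location, modelId),
    payload: { image: { imageBytes: content } },
  }
  const [response] = await client.predict(request)
  for (const annotationPayload of response.payload) {
    console.log(`type of vehicle: ${annotationPayload.displayName}`)
  }
}
predictFromUrl()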
I need to download files for my project. This is my current code:
const fs = require("fs")
const https = require("https")
const url = "https://f52.workupload.com/download/YYHAXY6t9VK"
https.get(url, (res) => {
  // The file will be stored at this path
  const path = `${__dirname}/clayent.zip`;
  const filePath = fs.createWriteStream(path);
  res.pipe(filePath);
  filePath.on('finish', () => {
    filePath.close();
    console.log('Download Completed');
  })
})
It works with PNGs, but not with zip files. Is there a way to make it work?
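One likely cause: https.get does not follow redirects, and download hosts often answer a zip request with a 302 (or an HTML page), so what gets saved isn't a zip at all. A sketch that follows redirects and checks the status code before piping, under the assumption that the redirect target is also https:
function download(url, dest, redirectsLeft = 5) {
  https.get(url, (res) => {
    // Follow 3xx redirects to the Location header
    if (res.statusCode >= 300 && res.statusCode < 400 && res.headers.location && redirectsLeft > 0) {
      return download(res.headers.location, dest, redirectsLeft - 1)
    }
    if (res.statusCode !== 200) {
      console.error('Request failed with status ' + res.statusCode)
      return res.resume()
    }
    const filePath = fs.createWriteStream(dest)
    res.pipe(filePath)
    filePath.on('finish', () => {
      filePath.close()
      console.log('Download Completed')
    })
  })
}
download(url, `${__dirname}/clayent.zip`)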
I am using Resemble.js to compare two images in Node. I am getting the error "error while reading from input stream" when fetching an external URL image.
The code works fine when I access the images locally. Thanks in advance.
const http = require('http-server');
const compareImages = require('resemblejs/compareImages');
const resemble = require('resemblejs');
const fs = require("mz/fs");
const request = require('request');
const FileReader = require('filereader');
const stream = require('stream');
var file = "https://static-cdn.jtvnw.net/previews-ttv/live_user_combatgo-640x360.jpg";
var file2 = "https://static-cdn.jtvnw.net/previews-ttv/live_user_combatgo-640x360.jpg";
// var file3 = "./kritika.png";
// var file4 = "./some gal.png"
async function getDiff() {
  const options = {
    output: {
      errorColor: {
        red: 255,
        green: 0,
        blue: 255
      },
      errorType: 'movement',
      transparency: 0.3,
      largeImageThreshold: 1200,
      useCrossOrigin: false,
      outputDiff: true
    },
    scaleToSameSize: true,
    ignore: ['nothing', 'less', 'antialiasing', 'colors', 'alpha'],
  };
  // request.get(file, (err, res) => {
  //   this.newFile = file;
  //   console.log("SAMPLE RECEPT", newFile);
  //   return newFile;
  // });
  var diff = resemble(file2).compareTo(file2).onComplete(
    (data) => {
      console.log("Data", data);
    }
  );
  // await fs.writeFile('./output.png', data.getBuffer());
  // The parameters can be Node Buffers
  // data is the same as usual with an additional getBuffer() function
}
getDiff();
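Since the comment above notes that the parameters can be Node Buffers, one way around the input-stream error is to download both images into Buffers and hand those to compareImages. A minimal sketch, assuming the options object above is hoisted into scope and that const https = require('https'); is added alongside the existing requires:
// Download a URL into a Buffer
function fetchBuffer(url) {
  return new Promise((resolve, reject) => {
    https.get(url, (res) => {
      const chunks = [];
      res.on('data', (c) => chunks.push(c));
      res.on('end', () => resolve(Buffer.concat(chunks)));
    }).on('error', reject);
  });
}
async function getDiffFromUrls() {
  // Fetch both images as Buffers, then compare them
  const [buf1, buf2] = await Promise.all([fetchBuffer(file), fetchBuffer(file2)]);
  const data = await compareImages(buf1, buf2, options);
  await fs.writeFile('./output.png', data.getBuffer());
  console.log('Data', data);
}
getDiffFromUrls();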
I'm trying to create an image on the Google Firebase server with node-canvas and store it in Firebase Storage.
const functions = require('firebase-functions');
const admin = require('firebase-admin');
const gcs = require('@google-cloud/storage')();
const path = require('path');
const Canvas = require('canvas-prebuilt');
const env = require('dotenv').config();
try {admin.initializeApp(functions.config().firebase);} catch(e) {}
//Trigger on creation of a new post
exports.default = functions.database.ref('/posts/{postId}').onCreate(event => {
  //Get the postId
  const postId = event.params.postId;
  console.log('We have a new post ' + postId);
  //Get data from the postId
  return admin.database().ref('/posts/' + postId).once('value').then(function(snapshot) {
    const text = snapshot.val().text;
    const canvas = new Canvas(1024, 512);
    const ctx = canvas.getContext('2d');
    //Draw Background
    ctx.fillStyle = '#000';
    ctx.fillRect(0, 0, 1024, 512);
    //Draw Text
    ctx.font = 'bold 66px Arial';
    ctx.textAlign = 'center';
    ctx.fillStyle = '#fff';
    ctx.fillText(text, 120, 256, 784);
    //Use the postId to name the file Ex: -L1rVJUAtSbc_FampT0D.png
    var filename = postId + '.png';
    //Create the file metadata
    var metadata = {
      contentType: 'image/png'
    };
    const bucket = gcs.bucket('images');
    const filePath = 'images/' + filename;
    return canvas.toDataURL('image/png', function(err, png) {
      //Save on Firebase Storage
      return bucket.upload(png, {
        destination: filePath,
        metadata: metadata
      }).then(() => {
        console.log('Image uploaded to Storage at ', filePath);
      });
    });
  });
});
But when I try to save it with toDataURL, I get this error:
ENAMETOOLONG: name too long, stat 'data:image/png;base64,iVBORw0 ...'
And when I try with toBuffer, I get this one:
TypeError: Path must be a string. Received
    at assertPath (path.js:7:11)
    at Object.basename (path.js:1362:5)
    at Bucket.upload (/user_code/node_modules/@google-cloud/storage/src/bucket.js:2259:43)
    at /user_code/node_modules/@google-cloud/storage/node_modules/@google-cloud/common/src/util.js:777:22
    at Bucket.wrapper [as upload] (/user_code/node_modules/@google-cloud/storage/node_modules/@google-cloud/common/src/util.js:761:12)
    at /user_code/sendTweet.js:107:21
I also tried toBlob, but that function doesn't exist server-side in node-canvas.
Does anyone know how I should save the image server-side before transferring it to Firebase Storage?
Thanks!
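For context, bucket.upload() expects a path to a local file, which explains both errors: the data URL string was treated as an overlong file name, and a Buffer isn't a string path at all. One way out is File#save(), which accepts a Buffer directly, so nothing has to touch the file system. A sketch of the end of the .then() callback above, assuming node-canvas's synchronous toBuffer() and the same bucket, filePath and metadata:
// Render the canvas to a PNG Buffer instead of a data URL
const buffer = canvas.toBuffer();
// File#save writes a Buffer straight to the bucket object
return bucket.file(filePath).save(buffer, { metadata: metadata }).then(() => {
  console.log('Image uploaded to Storage at ', filePath);
});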
I am trying to use a Google Cloud Function in Node.js to download a file from the WordPress plugin repo and then send the file into a Google Cloud bucket. I have the WordPress file downloading, but it fails to write to the Google bucket.
function writeToBucket(jsonObject) {
  /*
   * Google API authentication
   */
  var gcs = require('@google-cloud/storage')({
    projectId: 'wp-media-cdn',
    keyFilename: 'wp-media-cdn-d9d7c61bfad9.json'
  });
  /*
   * name the bucket object after the plugin
   */
  var pluginUrl = "https://downloads.wordpress.org/plugin/bbpress.2.5.14.zip";
  var newPluginName = "bbpress";
  /*
   * Read plugin into stream, upload to bucket
   */
  var request = require('request');
  var fs = require('fs'); //used for createWriteStream()
  var myBucket = gcs.bucket('test_buckyy'); //PUT BUCKET NAME HERE
  var file = myBucket.file(newPluginName);
  // Meant to skip the upload if the file is already in the bucket, but note
  // that file.exists() resolves to a Promise, so this check is always truthy
  if (file.exists()) {
    return `https://storage.googleapis.com/test_buckyy/${newPluginName}`;
  }
  // pipe the downloaded data into the bucket's write stream
  var fileStream = myBucket.file(newPluginName).createWriteStream();
  request(pluginUrl).pipe(fileStream)
    .on('error', function(err) {
      console.log('upload failed');
    })
    .on('finish', function() {
      console.log('file uploaded');
    });
  /*
   * return file url
   * use getSignedUrl
   */
  return `https://storage.googleapis.com/test_buckyy/${newPluginName}`;
}
I just replicated your use case scenario, and I successfully downloaded the file into the temporary folder of a Cloud Function and copied it from there into a bucket.
In order to achieve this, I downloaded the file using createWriteStream into the /tmp folder, since that is the only folder where we can store files in a Cloud Function, as stated in the Cloud Functions Execution Environment documentation.
After that, I just copied the file to a bucket, following the Cloud Storage Uploading Objects documentation.
You can take a look at my sample function:
index.js
const {Storage} = require('@google-cloud/storage');
exports.writeToBucket = (req, res) => {
  const http = require('http');
  const fs = require('fs');
  // Download into /tmp, the only writable directory in a Cloud Function
  const file = fs.createWriteStream("/tmp/yourfile.jpg");
  http.get("YOUR_URL_TO_DOWNLOAD_A_FILE", function(response) {
    response.pipe(file);
    // Wait until the download has finished before uploading
    file.on('finish', function() {
      file.close();
      console.log('file downloaded');
      // Creates a client
      const storage = new Storage();
      const bucketName = 'YOUR_BUCKET_NAME';
      const filename = '/tmp/yourfile.jpg';
      // Uploads the local file to the bucket
      storage.bucket(bucketName).upload(filename, {
        gzip: true,
        metadata: {
          cacheControl: 'no-cache',
        },
      }).then(function() {
        res.status(200).send(`${filename} uploaded to ${bucketName}.`);
      });
    });
  });
};
package.json
{
  "name": "sample-http",
  "version": "0.0.1",
  "dependencies": {
    "@google-cloud/storage": "^3.0.3"
  }
}
Using Chris32's answer, I've created a similar version that avoids downloading the image to the tmp folder. Hope it's useful!
'use strict';
const http = require('http');
const {Storage} = require('@google-cloud/storage');
exports.http = (request, response) => {
  const imageUrl = request.body.url;
  const fileName = imageUrl.substring(imageUrl.lastIndexOf('/') + 1);
  const storage = new Storage({keyFilename: "keyfile.json"});
  const bucket = storage.bucket('MY_BUCKET_NAME');
  const file = bucket.file(fileName);
  console.log('Uploading image');
  http.get(imageUrl, function(res) {
    res.pipe(
      file.createWriteStream({
        resumable: false,
        public: true,
        metadata: {
          contentType: res.headers["content-type"]
        }
      })
    ).on('finish', () => {
      // Respond only once the write stream has flushed to the bucket
      console.log('Image uploaded');
      response.status(201).send('Image successfully uploaded!');
    });
  });
};
exports.event = (event, callback) => {
  callback();
};