I am using a Node.js function on Google Cloud Functions to save Pub/Sub messages to GCS (Storage), but it randomly gives me the following error for some messages (most of the messages are successfully written):
"Error: Could not authenticate request. Could not load the default credentials. Browse to https://developers.google.com/accounts/docs/application-default-credentials for more information."
It doesn't make sense to me: the runtime uses the same service account (which has the proper permissions) for every message, and all messages come from the same source and go to the same destination. Can someone enlighten me on what I could do?
I'm using google-cloud/storage version 0.8.0.
/**
 * Triggered from a message on a Cloud Pub/Sub topic.
 *
 * @param {!Object} event The Cloud Functions event.
 * @param {!Function} callback The callback function.
 */
const bucketName = 'backup-queue-bucket';
const util = require('util');
const gcs = require('@google-cloud/storage')();
const crypto = require('crypto');

exports.backupQueue = function backupQueue(event, callback) {
  // The Cloud Pub/Sub Message object.
  const timestamp = event.timestamp;
  const resources = event.resource.split('/');
  const pubsubMessage = event.data;
  const messageContent = Buffer.from(pubsubMessage.data, 'base64').toString();
  // We're just going to log the message to prove that it worked.
  var queueName = resources[resources.length - 1];
  console.log(`Message received: ${messageContent} in queue ${queueName}`);
  const filename = timestamp + '_' + crypto.createHash('md5').update(messageContent).digest('hex');
  const bucket = gcs.bucket(bucketName);
  const file = bucket.file(queueName + '/' + filename);
  const fs = file.createWriteStream({});
  fs.on('finish', function () {
    console.log(`Message ${filename} successfully written to file.`);
  });
  fs.on('error', function (error) {
    console.warn(`Message ${filename} could not be written to file. Retry will be called. Error: ${error.message}`);
    // Pass a function reference so the retry runs after the delay,
    // rather than invoking backupQueue immediately.
    setTimeout(function () { backupQueue(event, callback); }, 1000);
  });
  fs.write(messageContent);
  fs.end();
  callback();
};
EDIT:
I opened an issue on google-cloud-node and they confirmed it as a bug. It should be fixed in the next release.
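For reference, newer releases of @google-cloud/storage (the thread does not say exactly which version contains the fix) expose file.save(), which performs the same upload through the same default credentials. A minimal sketch of the write from the function above, assuming an upgraded client:
// Minimal sketch with a newer @google-cloud/storage client; bucket and
// path names are taken from the function above.
const { Storage } = require('@google-cloud/storage');
const storage = new Storage();

async function writeBackup(queueName, filename, messageContent) {
  await storage
    .bucket('backup-queue-bucket')
    .file(`${queueName}/${filename}`)
    .save(messageContent);
  console.log(`Message ${filename} successfully written to file.`);
}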
Related
I have created a Cloud Function that connects to a third-party MQTT broker (the Mosquitto MQTT broker) and sends the data to the Firebase Realtime Database every time the broker receives data from the machine. I am using the GCP console to write and deploy the function. I deployed the function without any errors, but when I test it from the GCP console, it starts sending data and then stops after the time specified in the timeout. I have tried timeout values from 60 to 540 seconds, but it still stops after the specified time. I have also increased the allocated memory, but that hasn't resolved the issue and I keep getting the same timeout error.
This is my code:
const Admin = require("firebase-admin");
const functions = require("firebase-functions");
const mqtt = require('mqtt');

const clientId = 'mqtt_googleserver_********7';
const topic = '#';
const serviceAccount = require("./service.json");

Admin.initializeApp({
  credential: Admin.credential.cert(serviceAccount),
  databaseURL: "https://***************firebaseio.com/"
});

exports.rtdb_mains = functions.https.onRequest((_request, _response) => {
  const client = mqtt.connect('mqtt://**.**.**.****.***', {
    clientId,
    clean: true,
    connectTimeout: 4000,
    username: '******',
    password: '********',
    reconnectPeriod: 1000,
  });
  const db = Admin.database();

  client.addListener('connect', () => {
    console.log('Connected');
    client.subscribe([topic], { qos: 1 });
    console.log(`Subscribe to topic '${topic}'`);
  });

  client.on('message', async (topic, payload) => {
    console.log('Received Message:', topic, payload.toString());
    if (payload.toString() !== "" && topic !== "") {
      const ref = db.ref("All_machines");
      const childref = ref.child(topic.toString());
      await childref.set(payload.toString());
      const topicDetails = topic.split("/");
      const machineId = topicDetails[1];
      const machineParameter = topicDetails[2];
      if (machineParameter === "BoardID") {
        const ref = db.ref(machineParameter);
        await ref.set(machineId);
      }
    }
  });
});
Can anyone please help me with this problem?
You don't need to specify a service.json if you deploy the Cloud Function on Firebase; you can use the default configuration directly.
You can simply do this:
admin.initializeApp();
Secondly, the way you are using the MQTT client inside a Cloud Function is not correct.
You are listening and waiting for a message inside a function that is triggered only by a POST or GET request.
I suggest using the Pub/Sub API for this kind of workflow, which gives you a proper implementation for sending and receiving messages.
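As a rough sketch of that Pub/Sub approach (assuming a hypothetical topic named machine-data that receives the MQTT payloads, and the same RTDB layout as your code), a Pub/Sub-triggered function could look like this:
// Hedged sketch: 'machine-data' is a hypothetical topic, and the MQTT topic
// is assumed to be forwarded as a message attribute by whatever bridges
// MQTT into Pub/Sub.
const functions = require("firebase-functions");
const Admin = require("firebase-admin");

Admin.initializeApp();

exports.rtdb_mains = functions.pubsub
  .topic("machine-data")
  .onPublish(async (message) => {
    // message.data is the base64-encoded payload published to the topic.
    const payload = Buffer.from(message.data, "base64").toString();
    const mqttTopic = (message.attributes && message.attributes.mqttTopic) || "";
    if (payload !== "" && mqttTopic !== "") {
      const db = Admin.database();
      await db.ref("All_machines").child(mqttTopic).set(payload);
    }
  });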
If you really need to listen for MQTT messages directly, you will need another provider than Cloud Functions, or you will have to bridge the MQTT messages into something Cloud Functions can consume natively, such as Pub/Sub.
https://cloud.google.com/functions/docs/calling/pubsub
https://www.googlecloudcommunity.com/gc/Serverless/Can-a-cloud-function-subscribe-to-an-MQTT-topic/m-p/402965
I have a Google Cloud Function that takes a screenshot of a URL, and I need to upload that image to a Google Cloud Storage bucket from the Cloud Function.
I have tried uploading it with the code given below, but it does not work and gives an error.
/**
 * Responds to any HTTP request.
 *
 * @param {!express:Request} req HTTP request context.
 * @param {!express:Response} res HTTP response context.
 */
exports.takeScreenshot = (req, res) => {
  const Screenshot = require('url-to-screenshot')
  const Storage = require('@google-cloud/storage')
  const fs = require('fs')
  const bucketName = "screenshot_bucket_mujahid"
  new Screenshot('http://ghub.io/')
    .width(800)
    .height(600)
    .capture()
    .then(img =>
      Storage
        .bucket(bucketName)
        .upload(img, "mujahid.jpg"))
      console.log('open example.png')
    })
  let message = req.query.message || req.body.message || 'Hello World!';
  res.status(200).send(message);
};
Your code appears to have a syntax error. Instead of:
.then(img =>
  Storage
    .bucket(bucketName)
    .upload(img, "mujahid.jpg"))
  console.log('open example.png')
})
it might be correct to code:
.then(img => {
  Storage
    .bucket(bucketName)
    .upload(img, "mujahid.jpg");
  console.log('open example.png');
});
Try something like this:
const { Storage } = require('@google-cloud/storage');

const storage = new Storage();
const bucketName = ' something '; // bucket you want to upload to
const fileName = ' some path ';   // path of the file

await storage.bucket(bucketName).upload(fileName, {
  gzip: true,
  metadata: {
    cacheControl: 'no-cache'
  },
});
Be aware that you must pass the full path of the file you want to upload to the upload function.
If the upload still does not work, check whether you have any permission issues on your GCS bucket; try making the data public and retry the upload. But most likely, if your file is not uploaded and you get no error, something is getting stuck in this portion of code:
new Screenshot('http://ghub.io/')
  .width(800)
  .height(600)
  .capture()
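Putting the two answers together, here is a minimal sketch (assuming url-to-screenshot resolves with an image Buffer, and using file.save() so no temporary file or local path is needed):
// Hedged sketch: bucket, URL, and file names are taken from the question;
// the error handling is illustrative only.
const { Storage } = require('@google-cloud/storage');
const Screenshot = require('url-to-screenshot');

const storage = new Storage();
const bucketName = 'screenshot_bucket_mujahid';

exports.takeScreenshot = (req, res) => {
  new Screenshot('http://ghub.io/')
    .width(800)
    .height(600)
    .capture()
    .then((img) => {
      // img is assumed to be a Buffer; file.save() writes it directly to GCS.
      return storage.bucket(bucketName).file('mujahid.jpg').save(img);
    })
    .then(() => res.status(200).send('Screenshot uploaded'))
    .catch((err) => {
      console.error(err);
      res.status(500).send(err.message);
    });
};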
I am trying to integrate AWS X-Ray with my Node.js API hosted on AWS Lambda (Serverless).
X-Ray works as intended for the API when using the Express middleware, and I am able to see traces in the AWS Console.
For async functions without the Express framework, I am facing issues with the integration.
I tried enabling manual mode, but I am getting an error saying Lambda does not support manual mode.
I referred to the "Developing custom solutions for automatic mode" section, but had no luck.
Can someone help me out with this?
'use strict';

const AWSXRay = require('aws-xray-sdk-core');
const Aws = AWSXRay.captureAWS(require('aws-sdk'));
const capturePostgres = require('aws-xray-sdk-postgres');
const { Client } = capturePostgres(require('pg'));

module.exports.test = async (event, context) => {
  var ns = AWSXRay.getNamespace();
  const segment = new AWSXRay.Segment('Notifications_push');
  ns.enter(ns.createContext());
  AWSXRay.setSegment(segment);
  ....
};
So, when running in Lambda, the SDK creates a placeholder (facade) segment automatically. There is a more in-depth explanation here: https://github.com/aws/aws-xray-sdk-node/issues/148
All you need is:
const AWSXRay = require('aws-xray-sdk-core');

// Let's patch the AWS SDK.
const AWS = AWSXRay.captureAWS(require('aws-sdk'));

module.exports.test = async (event, context) => {
  // All capturing will work out of the box.
  var sqs = new AWS.SQS({ apiVersion: '2012-11-05' });
  var params = {...}

  // No need to add code, just a regular SQS call.
  sqs.sendMessage(params, function (err, data) {
    if (err) {
      console.log("Error", err);
    } else {
      console.log("Success", data.MessageId);
    }
  });

  // If you want to create subsegments manually, simply do:
  const seg = AWSXRay.getSegment();
  const subseg = seg.addNewSubsegment('mynewsubsegment');
  subseg.close();
  // No need to close the Lambda segment.
};
Additional documentation here: https://docs.aws.amazon.com/lambda/latest/dg/nodejs-tracing.html
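If you also want to trace your own awaited calls (for example the pg client from the question) as subsegments, a hedged sketch using captureAsyncFunc under automatic mode might look like this; tracedQuery and 'query-users' are illustrative names, not part of the SDK:
// Hedged sketch: wraps an awaited operation in its own subsegment.
// Under automatic mode in Lambda, the facade segment is picked up from
// the X-Ray context, so no manual segment creation is needed.
const AWSXRay = require('aws-xray-sdk-core');

function tracedQuery(client, sql) {
  return new Promise((resolve, reject) => {
    AWSXRay.captureAsyncFunc('query-users', (subsegment) => {
      client.query(sql)
        .then((result) => {
          subsegment.close();
          resolve(result);
        })
        .catch((err) => {
          subsegment.close(err);
          reject(err);
        });
    });
  });
}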
I am trying to send a POST form including (raw) files, and these files are located in a Google Cloud Storage bucket.
This code runs in a Firebase Cloud Function. Instead of downloading the storage file to the Cloud Function instance and then uploading it via the form (which works), I would like to pass the stream to the form directly.
async function test() {
  const rp = require('request-promise');
  const path = require('path');
  const { Storage } = require('@google-cloud/storage');

  const storage = new Storage();
  const bucketName = 'xxx';
  const bucket = storage.bucket(bucketName);
  const fileAPath = path.join('aaa', 'bbb.jpg');

  let formData = {
    fileA: bucket.file(fileAPath).createReadStream(),
  };

  return rp({
    uri: uri,
    method: 'POST',
    formData: formData,
  });
}
The POST works as intended if we download the file first (to a temp file on the Cloud Functions instance) and then use fs.createReadStream(fileAPath_tmp).
The POST fails (i.e. the endpoint does not receive the file in the same way, if at all) when using the code above (no temp download) with bucket.file(fileAPath).createReadStream().
Based on the docs for Google Cloud Storage's createReadStream, you need to treat the read stream as an event emitter to populate a buffer to return to the end user. You should be able to use the .pipe() method to pipe it directly to the HTTP response, similar to your existing source code.
remoteFile.createReadStream()
  .on('error', function (err) {})
  .on('response', function (response) {
    // Server connected and responded with the specified status and headers.
  })
  .on('end', function () {
    // The file is fully downloaded.
  })
  .pipe(.....);
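As a hedged aside: one difference from fs.createReadStream is that a GCS read stream carries no file path, so the form-data layer cannot infer a filename or content type from it. request's formData accepts those explicitly, for example:
// Hypothetical sketch reusing the bucket/fileAPath names from the question;
// the filename and contentType values here are assumptions.
let formData = {
  fileA: {
    value: bucket.file(fileAPath).createReadStream(),
    options: {
      filename: 'bbb.jpg',
      contentType: 'image/jpeg',
    },
  },
};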
I have a weird error using google-cloud/datastore in my nodejs app
const googleDatastoreFactory = () => {
  console.log('inside the factory');
  const Datastore = require('@google-cloud/datastore');
  console.log('imported datastore stuff');
  return new Datastore();
};

export default googleDatastoreFactory;
The above factory method hangs at this line:
const Datastore = require('@google-cloud/datastore');
It literally just sits there (I know this because of my custom console.log logging). There is no logging or error message from google-cloud/datastore.