I am having a problem when sending a request to the gRPC server from Vue.js. What should I do?
How can I create a nodejs grpc server and use vue js as a client to send a request and get a response.
MyServer Code here;
// gRPC server exposing the User service defined in user.proto.
const grpc = require("grpc");
// The package name is "@grpc/proto-loader" — "#grpc/proto-loader" is a typo
// (Stack Overflow markdown mangles leading "@" in package names).
const loader = require("@grpc/proto-loader");
const packageDef = loader.loadSync("user.proto", {});
const object = grpc.loadPackageDefinition(packageDef);
const userPackage = object.userPackage;

// In-memory user store; ids are assigned sequentially starting at 1.
const users = [];

/**
 * Unary handler for User.createUser.
 * @param {object} call - gRPC call; call.request is a UserItem message.
 * @param {function} callback - node-style callback(error, UserItem).
 */
function createUser(call, callback) {
  const userItem = {
    id: users.length + 1,
    name: call.request.name,
    lastname: call.request.lastname,
  };
  users.push(userItem);
  callback(null, userItem);
}

const server = new grpc.Server();
server.addService(userPackage.User.service, {
  createUser,
});
server.bind('0.0.0.0:5000', grpc.ServerCredentials.createInsecure());
console.log('Server running at http://127.0.0.1:5000');
server.start();
My user.proto here;
// user.proto — service and message definitions for the User gRPC service.
syntax = "proto3";
package userPackage;
service User{
// Unary RPC: request and response are both a UserItem message.
rpc createUser(UserItem) returns (UserItem); //unary
}
message UserItem{
int32 id = 1;
string name= 2;
string lastname= 3;
}
and My Client Code here;
<script>
import {UserClient} from './proto/proto-gen/user_grpc_web_pb'
import { UserItem} from './proto/proto-gen/user_pb'
created(){
this.client=new UserClient('http://0.0.0.0:5000',{})
const item=new UserItem()
item.setId(1)
item.setName('aaa')
item.setLastname('bbb')
this.client.createUser(item,{'Access-Control-Allow-Origin': '*'},(err,response)=>{
console.log(`error`, err)
console.log(`response`, response)
})
},
</script>
Error is here;
error {code: 2, message: "Http response at 400 or 500 level", metadata: {…}}
http://0.0.0.0:5000/userPackage.User/createUser net::ERR_ADDRESS_INVALID
Related
I have a NodeJS Express web server that serves files from AWS S3. Most of the time this exact code works correctly and serves files for a wide variety of applications with large numbers of requests in production. The NodeJS web server is running across multiple nodes on a Docker Swarm server.
After about 2-3 weeks this stops working. There is no response from S3Client's GetObjectCommand; there is no error returned or anything. This starts working again only after restarting the NodeJS Docker container.
I read the S3 SDK docs that indicate a that the SDK will retry automatically.
Each AWS SDK implements automatic retry logic.
Questions:
How can we make this code more resilient and not need a restart?
Is the error handling correct? I'm wondering why there is seemingly no response or error returned at all in this situation.
Is it necessary to configure the re-try settings?
NodeJS version: node:lts-alpine
Module: #aws-sdk/client-s3
Controllers
AWS Controller
const consoleLogger = require('../logger/logger.js').console;
// The package name is "@aws-sdk/client-s3" — "#aws-sdk/client-s3" is a typo.
const { S3Client, GetObjectCommand } = require('@aws-sdk/client-s3');

// S3 client configuration.
// NOTE(review): credentials here are hard-coded placeholders — load real
// credentials from the environment or an IAM role, never from source.
const config = {
  "credentials": {
    "accessKeyId": "example",
    "secretAccessKey": "example"
  },
  "endpoint": "example",
  // NOTE(review): "sslEnabled" is an SDK v2 option; confirm it has any
  // effect under @aws-sdk/client-s3 (v3).
  "sslEnabled": true,
  "forcePathStyle": true,
  // Bound the SDK's built-in retries so a wedged request surfaces as an
  // error instead of retrying indefinitely.
  "maxAttempts": 3
};
const s3client = new S3Client(config);
const awsCtrl = {};
/**
 * Fetches an object from the S3 bucket.
 * @param {string} key - object key within the bucket.
 * @returns {Promise<object>} GetObjectCommand output; Body is a ReadableStream.
 * @throws {{name: ?string, status: number}} plain error object consumed by
 *   the files controller (interface kept for existing callers).
 */
awsCtrl.getObject = async (key) => {
  try {
    // Abort the request if S3 never answers — without a deadline a hung
    // connection yields "no response, no error" until the process restarts.
    // (AbortSignal.timeout requires Node >= 17.3.)
    const data = await s3client.send(
      new GetObjectCommand({ Bucket: "example", Key: key }),
      { abortSignal: AbortSignal.timeout(30000) }
    );
    return data;
  } catch (e) {
    consoleLogger.error("AWS S3 error: ", e);
    const awsS3Error = {
      name: e.name || null,
      // Optional chaining: network/abort errors have no $metadata, and the
      // original `e.$metadata.httpStatusCode` would itself throw here.
      status: e.$metadata?.httpStatusCode || 500
    };
    throw awsS3Error;
  }
}
Files Controller
const queryString = require('query-string');
const consoleLogger = require('../logger/logger.js').console;
const httpCtrl = require('./http.ctrl');
const jwtCtrl = require('./jwt.ctrl');
const awsCtrl = require('./aws.ctrl');
/**
 * Streams an S3 object to the HTTP client as an attachment.
 * NOTE(review): `filePath`, `result` and `lookupResponse` are defined
 * earlier in this controller (not visible in this excerpt).
 */
filesCtrl.deliverFile = async (req, res) => {
  /* Get object from AWS S3 */
  let fileObjectStream;
  try {
    fileObjectStream = await awsCtrl.getObject(filePath);
  } catch (e) {
    consoleLogger.error(`Unable to get object from AWS S3`, e);
    if (e.status === 404) {
      result.error = `Not found`;
      result.status = 404;
      return res.status(result.status).json(result);
    }
    return res.status(e.status || 500).json(result);
  }
  const filename = lookupResponse.data.filename;
  // Set response header: Content-Disposition
  res.attachment(filename);
  // If the S3 stream fails mid-transfer, tear down the response instead of
  // leaving the client hanging (headers may already be sent, so we cannot
  // switch to a JSON error at this point).
  fileObjectStream.Body.on('error', (err) => {
    consoleLogger.error(`S3 stream error while piping`, err);
    res.destroy(err);
  });
  // API response object stream download to client
  return fileObjectStream.Body.pipe(res);
}
API
const express = require('express');
const router = express.Router();
const filesCtrl = require('../../controllers/files.ctrl');
const filesValidation = require('../validation/files');

// GET /:fileId — validate the request, then stream the file to the client.
// deliverFile is async: forward any rejection to Express' error handler via
// next() instead of leaving an unhandled promise rejection.
router.get('/:fileId', [filesValidation.getFile], (req, res, next) => {
  return filesCtrl.deliverFile(req, res).catch(next);
});
I am getting the following errors when running my application in Elastic Beanstalk: [error] 3636#0: *295 upstream prematurely closed connection while reading response header from upstream and [error] 3636#0: *295 connect() failed (111: Connection refused) while connecting to upstream. It's strange because if I hit those routes independently it works fine. It only appears to error when firing those routes from my Vuex action.
The following is the log from the AWS elastic beanstalk.
The following is the network tab when it hits my FFmpeg route:
The following is the generate video action as fired from vuex.
// Vuex action: builds a webm from captured canvas frames (Whammy), uploads
// it, requests a server-side FFmpeg conversion, then prepares a user-facing
// message (or exports to the banner service for banner-role users).
// NOTE(review): this snippet appears truncated — the function's closing
// brace is not visible here.
async [GENERATE_VIDEO]({state, rootState, dispatch, commit}){
const username = rootState.user.currentUser.username;
const s3Id = rootState.templates.currentVideo.stock_s3_id;
// Preferred output format; falls back to the local default.
const type = rootState.dataClay.fileFormat || state.type;
// Assemble the captured frames into a 30fps webm.
const vid = new Whammy.fromImageArray(state.captures, 30);
vid.lastModifiedDate = new Date();
vid.name = "canvasVideo.webm";
const data = new FormData();
// Unique upload id: "<username>_<epoch ms>".
const id = `${username}_${new Date().getTime()}`;
data.append("id", id);
data.append("upload", vid);
const projectId = await dispatch(INSERT_PROJECT);
await dispatch(UPLOAD_TEMP_FILE, data);
// Server returns the S3 key of the converted file.
// NOTE(review): if CONVERT_FILE_TYPE swallows its error, `key` is
// undefined here and state.file becomes "/api/files/undefined".
const key = await dispatch(CONVERT_FILE_TYPE, { id, username, type, projectId});
const role = rootState.user.currentUser.role;
state.file = `/api/files/${key}`;
let message;
if(role!='banner'){
message =`<p>Your video is ready.</p> Download`;
} else {
message = `<p>Your video is ready. You may download your file from your banner account</p>`;
// Banner exports also need the target resolution from the data matrix.
const resolution = rootState.dataClay.matrix[0];
await dispatch(EXPORT_TO_BANNER, { s3Id, fileUrl: key, extension: `.${type}`, resolution});
}
And here are the api routes called in the actions.
async [UPLOAD_TEMP_FILE]({ commit }, data) {
try {
const response = await axios.post("/api/canvas-editor/upload-temp", data);
return response.data;
} catch (error) {
console.log(error);
}
},
async [CONVERT_FILE_TYPE]({commit}, data) {
try{
const response = await axios.post("/api/canvas-editor/ffmpeg", data);
return response.data;
} catch(error){
console.log(error);
}
}
}
As I said all my routes work and the application runs as expected on localhost however when uploaded to aws I receive unexpected errors.
After some digging I found out that I did not set the ffmpeg path.
Once this was done it worked great.
// Shared fluent-ffmpeg instance with the bundled binary's path configured.
// Without setFfmpegPath, hosts lacking a system ffmpeg (e.g. Elastic
// Beanstalk) fail at runtime even though localhost works.
const ffmpeg = require('fluent-ffmpeg');
// The package name is "@ffmpeg-installer/ffmpeg" — "#..." is a typo.
const ffmpegPath = require('@ffmpeg-installer/ffmpeg').path;
ffmpeg.setFfmpegPath(ffmpegPath);
module.exports = ffmpeg;
I have a simple Express/ gRPC project that's supposed to print a hard coded JSON object to the browser & console. The problem is that the object isn't returned to the client in time to receive it from the gRPC server.
My project is actually a modification of this Codelabs project. I've only implemented the listBooks method though, and have changed Go to Express. I was able to successfully get a response from the server in the Codelabs project.
At first I thought about avoiding callbacks, and trying promises (promises, promisfy...) instead. But I haven't had success. Also, from this answer, I now know that Node gRPC isn't implemented to have sync:
"Node gRPC does not have synchronous calls." - murgatroid99
With that said, the output is showing that the data isn't being received. What do I need to do to have the client wait for the data to be available to be received?
products.proto
// products.proto — service and message definitions for the product catalogue.
syntax = "proto3";
package products;
service ProductService {
// Unary RPC: no request data, returns the full product list.
rpc ListProduct (Empty) returns (ProductList) {}
}
// Empty request message (stand-in for "no arguments").
message Empty {}
message Product {
int32 id = 1;
string name = 2;
string price = 3;
}
// Response wrapper: a single repeated field — handlers must send
// an object shaped { products: [...] }, not a bare array.
message ProductList {
repeated Product products = 1;
}
server.js
// server.js — gRPC server bootstrap for ProductService (products.proto).
var PROTO_PATH = __dirname + '/products.proto';
var grpc = require('grpc');
// The package name is "@grpc/proto-loader" — "#grpc/proto-loader" is a typo.
var protoLoader = require('@grpc/proto-loader');
var packageDefinition = protoLoader.loadSync(
    PROTO_PATH,
    {
      keepCase: true,
      longs: String,
      enums: String,
      defaults: true,
      oneofs: true
    });
var productsProto = grpc.loadPackageDefinition(packageDefinition).products;

// Hard-coded catalogue returned by ListProduct.
var products = [{
  id: 123,
  name: 'apple',
  price: '$2'
}];
/**
 * Unary handler for ProductService.ListProduct.
 * The response type ProductList is a message with one repeated `products`
 * field, so the payload must be shaped { products: [...] }. Passing the
 * bare array (as before) serialises as an empty ProductList — the client
 * saw `{ products: [] }`.
 * @param {object} call - gRPC call (request is Empty).
 * @param {function} callback - node-style callback(error, ProductList).
 */
function listProduct(call, callback){
  console.log(products);
  callback(null, { products: products });
}
// Bootstraps the gRPC server: registers the ListProduct handler and starts
// listening (insecure) on port 50051.
function main(){
  var server = new grpc.Server();
  var handlers = { ListProduct: listProduct };
  server.addService(productsProto.ProductService.service, handlers);
  server.bind('0.0.0.0:50051', grpc.ServerCredentials.createInsecure());
  console.log("server started");
  server.start();
}
main();
client.js
// client.js — gRPC client stub for ProductService (same proto options as
// the server so message field handling matches).
var PROTO_PATH = __dirname + '/products.proto';
var grpc = require('grpc');
// The package name is "@grpc/proto-loader" — "#grpc/proto-loader" is a typo.
var protoLoader = require('@grpc/proto-loader');
var packageDefinition = protoLoader.loadSync(
    PROTO_PATH,
    {
      keepCase: true,
      longs: String,
      enums: String,
      defaults: true,
      oneofs: true
    });
var products = grpc.loadPackageDefinition(packageDefinition).products;
var client = new products.ProductService('localhost:50051', grpc.credentials.createInsecure());
// Logs the RPC outcome: the error when one occurred, otherwise the response.
function printResponse(error, response){
  if (!error) {
    console.log(response);
    return;
  }
  console.log('Error: ', error);
}
// Issues the ListProduct RPC and prints whatever comes back.
function listProducts() {
  const handle = function(error, products){
    printResponse(error, products);
  };
  client.listProduct({}, handle);
}
exports.listProducts = listProducts;
client-server.js
const express = require('express');
const app = express();
var client = require('./client');
// GET / — triggers the RPC and responds immediately.
// NOTE(review): listProducts() is fire-and-forget — res.send runs before the
// gRPC callback fires, so the RPC result can never appear in this HTTP
// response; it is only printed to the server console.
app.get('/', (req, res) => {
console.log('Hello World!');
client.listProducts();
res.send('Hello World!');
});
app.listen(3000, () =>
console.log('Example app listening on port 3000!'),
);
Actual Result
The gRPC server obviously already has the object, but I print it to console just for fun. The client prints its callback result to the console, but is only printing an empty object.
server.js Output
[ { id: 123, name: 'apple', price: '$2' } ]
client.js Output
Hello World!
{ products: [] }
To Reproduce
Open 2 terminals, cd to project
Run node server.js in one terminal
Run node client-server.js in the other terminal
Open a browser to localhost:3000
The problem here has nothing to do with synchronous vs asynchronous actions.
The object you are trying to send from your server request handler does not match the response message type you declared in your products.proto file. The response message type ProductList is a message with a single field products that is a repeated list of ProductObjects. So to match that the object you send should be an object with a products field that contains an array of objects structured like ProductObject messages. The code you have is almost there. You have the array, so your server handler code should look like this:
// ListProduct handler — the reply must be shaped like the ProductList
// message: an object with a single repeated `products` field.
function listProduct(call, cb){
  console.log(products);
  const reply = { products: products };
  cb(null, reply);
}
The output you already have hints towards this. The object your client receives is {products: []}, which is the structure of the objects your server should be sending.
Websocket server not connecting after deployment on heroku:
I already went through the documentation and a heroku tutorial.
I am able to run the application locally but running into problems on heroku deployment. The UI part is working, but doesn't seem to be connecting to the back end. Procfile as been updated.
The client connection url would need to be changed to a localhost:8989 to work locally. When I hard coded the url in the client before deployment, the error in console is the same as when I don't run the websocket server running with npm start locally.
https://github.com/kbventures/chatTest
Front end:
import * as types from '../constants/ActionTypes';
import { addUser, messageReceived, populateUsersList } from '../actions';
// Opens the chat WebSocket and wires incoming messages to Redux actions.
// Returns the socket so callers can send on it later.
const setupSocket = (dispatch, username) => {
  const socket = new WebSocket('wss://boiling-brook-76849.herokuapp.com');
  //const HOST = location.origin.replace(/^http/, 'ws');
  //const socket = new WebSocket(HOST);
  // Announce this user as soon as the connection is established.
  socket.onopen = () => {
    const hello = JSON.stringify({
      type: types.ADD_USER,
      name: username
    });
    socket.send(hello);
  };
  // Translate server-pushed events into the matching Redux dispatch;
  // unknown event types are ignored.
  socket.onmessage = (event) => {
    const data = JSON.parse(event.data);
    if (data.type === types.ADD_MESSAGE) {
      dispatch(messageReceived(data.message, data.author));
    } else if (data.type === types.ADD_USER) {
      dispatch(addUser(data.name));
    } else if (data.type === types.USERS_LIST) {
      dispatch(populateUsersList(data.users));
    }
  };
  return socket;
};
export default setupSocket;
Back end:
const WebSocket = require('ws');
// Heroku injects the port via $PORT; fall back to 8989 for local runs.
const port = process.env.PORT || 8989;
const wss = new WebSocket.Server({ port });
// Connected users as { name, id } records, in connection order.
const users = [];
// Sends `data` (JSON-encoded) to every open client except the sender `ws`.
const broadcast = (data, ws) => {
  wss.clients.forEach((client) => {
    const isOpen = client.readyState === WebSocket.OPEN;
    const isOther = client !== ws;
    if (isOpen && isOther) {
      client.send(JSON.stringify(data));
    }
  });
};
// Per-connection chat protocol: ADD_USER registers the user and pushes the
// roster; ADD_MESSAGE relays chat messages; disconnects remove the user.
wss.on('connection', ws => {
  // The user record created by this socket's ADD_USER message. Tracked by
  // identity rather than array index: the original remembered `index`,
  // which goes stale (and splices the wrong user) once any earlier user
  // disconnects and shifts the array.
  let user = null;
  ws.on('message', message => {
    let data;
    try {
      data = JSON.parse(message);
    } catch (err) {
      // A malformed frame would otherwise throw inside the handler and
      // crash the server; ignore it.
      return;
    }
    switch (data.type) {
      case 'ADD_USER': {
        user = { name: data.name, id: users.length + 1 };
        users.push(user);
        // Send the roster to the new user, then to everyone else.
        ws.send(
          JSON.stringify({
            type: 'USERS_LIST',
            users
          })
        );
        broadcast(
          {
            type: 'USERS_LIST',
            users
          },
          ws
        );
        break;
      }
      case 'ADD_MESSAGE':
        broadcast(
          {
            type: 'ADD_MESSAGE',
            message: data.message,
            author: data.author
          },
          ws
        );
        break;
      default:
        break;
    }
  });
  ws.on('close', () => {
    // Remove this socket's user by identity and announce the new roster.
    const i = users.indexOf(user);
    if (i !== -1) {
      users.splice(i, 1);
    }
    broadcast(
      {
        type: 'USERS_LIST',
        users
      },
      ws
    );
  });
});
Locally the user list would populate, update and there would no errors in the browser console.
In order to reproduce error go to :
https://boiling-brook-76849.herokuapp.com/
Type a message and then check the console for the following error:
index.js:2178 uncaught at handleNewMessage at handleNewMessage
at takeEvery
at
Error: Failed to execute 'send' on 'WebSocket': Still in CONNECTING state
...
index.js:5 WebSocket connection to 'wss://boiling-brook-76849.herokuapp.com/' failed: Error during WebSocket handshake: Unexpected response code: 503
I am using the npm package react-native-fetch-blob.
I have followed all the steps from the git repository to use the package.
I then imported the package using the following line:
var RNFetchBlob = require('react-native-fetch-blob');
I am trying to request a BLOB containing an image from the server.
This is my main method.
// Fetches an attachment blob from the API via react-native-fetch-blob.
// NOTE(review): the enclosing object and `config` are defined elsewhere in
// this file.
fetchAttachment: function(attachment_uri) {
var authToken = 'youWillNeverGetThis!'
var deviceId = '123';
// Device-scoped auth token in the form "<deviceId>#<authToken>".
var xAuthToken = deviceId+'#'+authToken
//Authorization : 'Bearer access-token...',
// send http request in a new thread (using native code)
RNFetchBlob.fetch('GET', config.apiRoot+'/app/'+attachment_uri, {
'Origin': 'http://10.0.1.23:8081',
'X-AuthToken': xAuthToken
})
// when response status code is 200
.then((res) => {
// the conversion is done in native code
let base64Str = res.base64()
// the following conversions are done in js, it's SYNC
let text = res.text()
let json = res.json()
// NOTE(review): base64Str/text/json are computed but never used or
// returned — the fetched data is currently dropped.
})
// Status code is not 200
.catch((errorMessage, statusCode) => {
// NOTE(review): Promise .catch receives a single rejection value, so
// `statusCode` is presumably always undefined here — confirm against
// the library's error shape.
// error handling
});
}
I keep receiving the following error:
"Possible Unhandled Promise Rejection (id: 0): TypeError: RNFetchBlob.fetch is not a function".
Any ideas?
The issue is you are using ES5 style require statements with a library written against ES6/ES2015. You have two options:
ES5:
var RNFetchBlob = require('react-native-fetch-blob').default
ES6:
import RNFetchBlob from 'react-native-fetch-blob'
My import looks like this: import RNFetchBlob from 'rn-fetch-blob';
but I've got an error: TypeError: RNFetchBlob.scanFile is not a function
My code:
// Downloads a meditation audio track with rn-fetch-blob and, on success,
// asks the media scanner to index the saved file.
// NOTE(review): `id`, `track`, `Platform` and `Alert` come from the
// enclosing scope (not visible in this excerpt).
const downloadAudio = async () => {
const { config, fs } = RNFetchBlob;
// Platform-specific destination directory for this meditation's files.
const meditationFilesPath =
Platform.OS == 'android'
? `${fs.dirs.DownloadDir}/meditations/${id}`
: `${fs.dirs.DocumentDir}/meditations/${id}`;
let audio_URL = track;
let options = {
fileCache: true,
path: meditationFilesPath + `/${id}.mp3`,
addAndroidDownloads: {
// Related to the Android only
useDownloadManager: true,
notification: true,
path: meditationFilesPath + `/${id}.mp3`,
description: 'Audio',
},
};
try {
const resAudio = await config(options).fetch('GET', audio_URL.uri);
if (resAudio) {
// NOTE(review): rn-fetch-blob's fs.scanFile is Android-only; on iOS it
// is undefined, which matches the reported "scanFile is not a
// function" error — guard with Platform.OS before calling. TODO
// confirm against the rn-fetch-blob API docs.
const audio = await RNFetchBlob.fs.scanFile([
{ path: resAudio.path(), mime: 'audio/mpeg' },
]);
console.log('res -> ', audio);
Alert.alert('Audio Downloaded Successfully.');
}
} catch (error) {
console.error('error from downloadAudio', error);
}
};