I am trying to use node.js thrift client. I am getting error on server side
TSimpleServer exception: N6apache6thrift8protocol18TProtocolExceptionE: TProtocolException: Invalid data
How to fix this issue?
My sample .thrift file is:
struct Person{
1: required string name_;
2: required map<i64,string> attribute1_;
3: required map<i64,i64> attribute2_;
4: required map<i64,string> attribute3_;
}
service ProcessPerson {
void DoPerson(
1: required list<Person> person_array
)
}
node.js client code is:
var thrift = require('thrift');
var ttransport = require('./node_modules/thrift/lib/thrift/transport.js');
var tprotocol = require('./node_modules/thrift/lib/thrift/protocol.js');
var b_conn = thrift.createConnection('localhost', 9090, {transport: ttransport.TBufferedTransport ,protocol: tprotocol.TBinaryProtocol});
var ServicePerson = require('./person_js/ProcessPerson.js');
var type = require('./person_js/person_types');
b_conn.on('error', function(err) {
console.error("error");
console.error(err);
});
b_conn.on('connect', function(data) {
console.log('on conect');
var client = thrift.createClient(ServicePerson, b_conn);
var person_list = new Array();
var person_obj = new type.Person({name_:"aa", attribute1_:"",attribute2_:"",attribute3_: "" });
console.log(person_obj);
person_list.push(person_obj);
client.DoPerson(person_list, function() {
console.log("Hi");
});
});
I am using skeleton file at server side.
I have seen this problem before.
The cause is that the struct message that received is not valid because it doesn't set the field which is "required" in thrift!
Please be sure that the client set all the required fields.
PS: I don't know how to check the node js code. I write code in C++ which I can check with __isset mechanism
Related
well as i mentioned in the title when i'm sending message through socket to the server then save it in database mongodb with mongoose, then i returned the the message and send it back in the socket,
now when i tried to print it on console in the server right before i send it to the client with socket,
i got the object i wanted to send, but when i checked the object i got in the client, then i got diffrent object seems related to mongo probably(pretty sure) and i'm not sure what should i do to fix it.this is what i get in the server right before i send it back to the client
this what i get in the client when i recieve new message
const addMessage = async (newMessage) => {
try {
if (newMessage.type === 2) {
const audioBlob = new Buffer.from(newMessage.message).toString("base64");
newMessage.message = new Binary(audioBlob, Binary.SUBTYPE_BYTE_ARRAY);
}
const newMsg = new Message(newMessage);
await newMsg.save();
newMsg.message = Buffer.from(newMsg.message.buffer, "base64")
return newMsg;
} catch (error) {
console.log(error);
errorHandler(error);
}
};
i expected to get the same object i see in the server so in the client too
you should always, provide code snippets while asking a question.
Th probable reason for the above problem is, you are printing the console.log(result._docs) and sending to the databse the complete result object. Try sending result._docs to your database as well.
P.S: result is just a variable to which your assigning the data. This variable may be different in your code.
(If this does not work, edit your question and attach code)
well I found the problem, though i'm not sure how to describe it but the problem was here:
const newMsg = new Message(newMessage);
await newMsg.save();
newMsg.type === 2
? Buffer.from(newMsg.message.buffer, "base64")
: newMsg.message;
return newMsg;
so i took the newMessage and inserted newMsg.toJSON() to it;
if (newMessage.type === 2) {
const audioBlob = new Buffer.from(newMessage.message).toString("base64");
newMessage.message = new Binary(audioBlob, Binary.SUBTYPE_BYTE_ARRAY);
}
const newMsg = new Message(newMessage);
await newMsg.save();
newMessage = newMsg.toJSON();
if (newMessage.type === 2) {
newMessage.message = Buffer.from(newMessage.message.buffer, "base64");
}
return newMessage;
and now it's working!
Here is my code snippet
var sendgrid = require('sendgrid')('xxxxxx', 'xxxxxx');
var email = new sendgrid.Email();
email.addTo('xyz#gmail.com');
email.setFrom('xyz#gmail.com');
email.setSubject('welcome to send grid');
email.setHtml('<html><body>HELLO evryone ...,</body></html>');
sendgrid.send(email, function(err, json) {
if(!err)
{
console.log("mail sent successssss");
res.send({"status":0,"msg":"failure","result":"Mail sent successfully"});
}
else
{
console.log("error while sending mail")
res.send({"status":1,"msg":"failure","result":"Error while sending mail."});
}
});
Installed sendgrid throgh npm also.am getting "TypeError: object is not a function" error.MAy i know why.??
Version:--
sendgrid#3.0.8 node_modules\sendgrid
└── sendgrid-rest#2.2.1
It looks like you're using sendgrid#3.0.8 but trying to call on the sendgrid#2.* api.
v2 implementation: https://sendgrid.com/docs/Integrate/Code_Examples/v2_Mail/nodejs.html
v3 implementation:
https://sendgrid.com/docs/Integrate/Code_Examples/v3_Mail/nodejs.html
Give the v3 a go.
As for the type error:
v2
var sendgrid = require("sendgrid")("SENDGRID_APIKEY");
you're invoking a function
however you have v3 installed
require('sendgrid').SendGrid(process.env.SENDGRID_API_KEY)
and it's now an object
REQUESTED UPDATE:
I don't know too much about the keys given, but since they have tons of different supported libraries, it's completely possible that some of them use both while others use only one. If you really only have a USER_API_KEY nad PASSWORD_API_KEY, just use the user_api_key
Here is their source for the nodejs implementation module SendGrid:
function SendGrid (apiKey, host, globalHeaders) {
var Client = require('sendgrid-rest').Client
var globalRequest = JSON.parse(JSON.stringify(require('sendgrid-rest').emptyRequest));
globalRequest.host = host || "api.sendgrid.com";
globalRequest.headers['Authorization'] = 'Bearer '.concat(apiKey)
globalRequest.headers['User-Agent'] = 'sendgrid/' + package_json.version + ';nodejs'
globalRequest.headers['Accept'] = 'application/json'
if (globalHeaders) {
for (var obj in globalHeaders) {
for (var key in globalHeaders[obj] ) {
globalRequest.headers[key] = globalHeaders[obj][key]
}
}
}
The apiKey is attached to the header as an auth, and it looks like that's all you need.
Try following their install steps, without your own implementation,
1) (OPTIONAL) Update the development environment with your SENDGRID_API_KEY, for example:
echo "export SENDGRID_API_KEY='YOUR_API_KEY'" > sendgrid.env
echo "sendgrid.env" >> .gitignore
source ./sendgrid.env
========
2) Make this class and if you did the above use process.env.SENDGRID_API_KEY else put your USER_API_KEY
var helper = require('sendgrid').mail
from_email = new helper.Email("test#example.com")
to_email = new helper.Email("test#example.com")
subject = "Hello World from the SendGrid Node.js Library!"
content = new helper.Content("text/plain", "Hello, Email!")
mail = new helper.Mail(from_email, subject, to_email, content)
//process.env.SENDGRID_API_KEY if above is done
//else just use USER_API_KEY as is
var sg = require('sendgrid').SendGrid(process.env.SENDGRID_API_KEY)
var requestBody = mail.toJSON()
var request = sg.emptyRequest()
request.method = 'POST'
request.path = '/v3/mail/send'
request.body = requestBody
sg.API(request, function (response) {
console.log(response.statusCode)
console.log(response.body)
console.log(response.headers)
})
I'm building a simple, STARTTLS capable POP3 Proxy in Node.JS and I'm having quite a hard time.
The proxy serves as a front-end for many back-end servers, so it must load their certificates dynamically, depending on the Client's connection.
I'm trying to use the SNICallback, which brings me the servername the client uses, but I can't set the right certificate after this, because I need one certificate before I have this call, when I create the secure context.
The code is as bellow:
// Load libraries
var net = require('net');
var tls = require('tls');
var fs = require('fs');
// Load certificates (created with openssl)
var certs = [];
for (var i = 1; i <= 8; i++) {
var hostName = 'localhost' + i;
certs[hostName] = {
key : fs.readFileSync('./private-key.pem'),
cert : fs.readFileSync('./public-cert' + i + '.pem'),
}
}
var server = net.createServer(function(socket) {
socket.write('+OK localhost POP3 Proxy Ready\r\n');
socket.on('data', function(data) {
if (data == "STLS\r\n") {
socket.write("+OK begin TLS negotiation\r\n");
upgradeSocket(socket);
} else if (data == "QUIT\r\n") {
socket.write("+OK Logging out.\r\n");
socket.end();
} else {
socket.write("-ERR unknown command.\r\n");
}
});
}).listen(10110);
and upgradeSocket() is as follows:
function upgradeSocket(socket) {
// I need this 'details' or handshake will fail with a message:
// SSL routines:ssl3_get_client_hello:no shared cipher
var details = {
key : fs.readFileSync('./private-key.pem'),
cert : fs.readFileSync('./public-cert1.pem'),
}
var options = {
isServer : true,
server : server,
SNICallback : function(serverName) {
return tls.createSecureContext(certs[serverName]);
},
}
sslcontext = tls.createSecureContext(details);
pair = tls.createSecurePair(sslcontext, true, false, false, options);
pair.encrypted.pipe(socket);
socket.pipe(pair.encrypted);
pair.fd = socket.fd;
pair.on("secure", function() {
console.log("TLS connection secured");
});
}
It handshakes correctly but the certificate I use is the static one in 'details', not the one I get in the SNICallback.
To test it I'm running the server and using gnutls-cli as a Client:
~$ gnutls-cli -V -s -p 10110 --crlf --insecure -d 5 localhost3
STLS
^D (Control+D)
The above command is supposed to get me the 'localhost3' certificate but it's getting the 'localhost1' because it's defined in 'details' var;
There are just too many examples throughout the internet with HTTPS or for TLS Clients, which it's a lot different from what I have here, and even for Servers as well but they're not using SNI. Any help will be appreciated.
Thanks in advance.
The answer is quite simple using tls.TLSSocket, though there is a gotcha with the listeners.
You have to remove all the listeners from the regular net.Socket you have, instantiate a new tls.TLSSocket using your net.Socket and put the listeners back on the tls.TLSSocket.
To achieve this easily, use a wrapper like Haraka's tls_socket pluggableStream over the regular net.Socket and replace the "upgrade"
function to something like:
pluggableStream.prototype.upgrade = function(options) {
var self = this;
var socket = self;
var netSocket = self.targetsocket;
socket.clean();
var secureContext = tls.createSecureContext(options)
var tlsSocket = new tls.TLSSocket(netSocket, {
// ...
secureContext : secureContext,
SNICallback : options.SNICallback
// ...
});
self.attach(tlsSocket);
}
and your options object would have the SNICallback defined as:
var options {
// ...
SNICallback : function(serverName, callback){
callback(null, tls.createSecureContext(getCertificateFor(serverName));
// ...
}
}
i wrote a simple chat room demo with grpc lib, but only one client can connect to the server at one time, weird. Here is my code, can anyone give my some advise on it?
full code Link from Github
client:
const chat_proto = grpc.load(__dirname + '/chat.proto').demo;
const client = new chat_proto.Chat('localhost:50051');
server:
let chatServer = getService();
chatServer.bind('localhost:50051');
chatServer.listen();
chat.proto:
syntax = "proto3";
package demo;
service Chat {
rpc sayHi(userName) returns (welMessage) {}
rpc ChatWith(stream chatNote) returns (stream chatNote) {}
}
message userName {
string name = 1;
}
message welMessage {
string message = 1;
}
message chatNote {
string message = 1;
}
Error:
client return Error at /Users/chopper/WebstormProjects/grpcexamples/node_modules/grpc/src/client.js:461:23 which means response.status.code !== grpc.status.OK
server return Segmentation fault: 11
I have a nodejs (node v0.10.26) MQTT code which publishes strings to a topic on a remote MQTT server. Now I am trying to publish a byte array and a buffer to the same topic, but I get an error
The code :
var ProtoBuf = require("protobufjs");
var mqtt = require('mqtt');
var builder = ProtoBuf.loadProtoFile('event.proto'),
as2 = builder.build('as2'),
Message=as2.Message;
var message = new Message({
"message_type": "EMOTICON"
});
console.log("message : "+message);
var buffer_message= message.encode();
client = mqtt.createClient(1883,'hostNameOfMQTT',{ encoding: 'binary' });
client.publish('NewTopic',buffer_message);
client.on('connect', function() {
console.log("Message published : "+buffer_message);
client.end();
});
and I get this error when I execute
node.exe sampleMqtt.js
C:\node>node.exe SimpleMQTT.js
message : .as2.Message
C:\node\node_modules\mqtt\lib\generate.js:178
length += Buffer.byteLength(payload);
^
TypeError: Argument must be a string
at Object.module.exports.publish (C:\node\node_modules\mqtt\lib\
generate.js:178:22)
at Connection.eval [as publish] (eval at <anonymous> (C:\node\no
de_modules\mqtt\lib\connection.js:58:29), <anonymous>:2:26)
at MqttClient._sendPacket (C:\node\node_modules\mqtt\lib\client.
js:430:20)
at MqttClient.<anonymous> (C:\node\node_modules\mqtt\lib\client.
js:105:12)
at MqttClient.EventEmitter.emit (events.js:117:20)
at MqttClient._handleConnack (C:\node\node_modules\mqtt\lib\clie
nt.js:498:10)
at Connection.<anonymous> (C:\node\node_modules\mqtt\lib\client.
js:191:10)
at Connection.EventEmitter.emit (events.js:95:17)
at Connection._write (C:\node\node_modules\mqtt\lib\connection.j
s:187:12)
at doWrite (_stream_writable.js:226:10)
However When I try to publish buffer_message.tostring(), I get the object details as a string but not the actual ArrayBuffer??
client.publish('AnkitTopic',buffer_message.toString());
Output with toString() -> ByteBuffer(offser=0,markedOffSet=-1,length=2,capacity=16)
I dont want a to string, I want the actual byteBuffer to be transmitted via MQTT.
Someone, Please suggest how can I do this!! Im guessing its not too hard to get, It's possible in java, then why not in nodeJS?
Can you try to send another kind of binary content like a file to see that it works?
Latest MQTT.js versions supports binary payloads https://github.com/adamvr/MQTT.js/issues/109
I found the solution to this problem and hope it helps other :
We can attach a header with every payload which we send to MQ, encode it and the final payload should look something similar (this is how we convert it into byte buffer) :
var headerLengthBuffer = new ByteBuffer();
var headerBuffer = header.encode();
var messageLengthBuffer = new ByteBuffer();
var messageBuffer = event1.encode();
headerLengthBuffer.writeVarint32(headerBuffer.length);
messageLengthBuffer.writeVarint32(messageBuffer.length);
//Create the payload object
var payload = headerLengthBuffer.toBuffer() + headerBuffer.toBuffer()+ messageLengthBuffer.toBuffer() + messageBuffer.toBuffer() ;
return payload;
The above payload created can be published easily and is accepted by MQ as well.
the protobufjs package supports native node.js Buffers.
According to protobufjs/message API you just have to call message.toBuffer().
Here is a working example:
var ProtoBuf = require("protobufjs");
var mqtt = require('mqtt');
var builder = ProtoBuf.loadProtoFile('company.proto'),
Company = builder.build('Company'),
Employee = Company.Employee;
var employee = new Employee("James Bond", 007);
var buffer_message = employee.toBuffer();
var client = mqtt.connect('mqtt://test.mosquitto.org');
client.on('connect', function() {
client.publish('MyCompany', buffer_message);
client.end();
});
Using the following company.proto file:
package Company;
message Employee{
optional string name = 1;
optional uint32 id = 2;
}