How to convert incoming buffer data from serial port to nodejs - node.js

Hello, I'm using an Arduino with Node.js.
I send and receive data, but the data coming from the Arduino looks like this:
<Buffer 00 00 00 e0 e0 e0 00 e0 e0 e0>
<Buffer e0 e0 e0 e0 00 e0 e0 00 e0 00 e0 e0 e0>
How can I decode this to UTF-8?
arduino
// Arduino sketch: forwards every byte received on the serial port back to the host.
int incomingByte = 0;
void setup() {
Serial.begin(9600); // opens serial port, sets data rate to 9600 bps
}
void loop() {
if (Serial.available() > 0) {
incomingByte = Serial.read(); // read the incoming byte:
// NOTE(review): Serial.print(int) sends the DECIMAL TEXT of the value,
// not the raw byte; if the goal is to relay raw bytes unchanged, use
// Serial.write(incomingByte) instead — confirm intent.
Serial.print(incomingByte);
}
}

You can use readable.setEncoding method from Node SerialPort class:
const SerialPort = require('serialport');
// setEncoding('utf8') makes 'data' events deliver decoded strings instead of
// raw Buffer chunks; it returns the stream itself, so chaining works here.
var port = new SerialPort("COM3").setEncoding('utf8');

In node.js you can use toString:
console.log(incomeBuffer.toString('utf8'))

incomingByte.toString()
encoding The character encoding to decode to. Default: 'utf8'
look at here

I used the Buffer class to do various conversions to get printable hex data.
const { SerialPort } = require('serialport')
const { Buffer } = require('buffer')

// FIX: the original used `port` without ever defining it; open it explicitly
// (serialport v10+ object-argument API, matching the destructured require above).
const port = new SerialPort({ path: 'COM3', baudRate: 9600 })

// Raw chunks collected from the port; each 'data' event delivers a Buffer.
let buffer = []
port.on('data', function (data) {
  // Buffer.from(buffer) copies the chunk; the original's 'ascii' encoding
  // argument is ignored when the input is already a Buffer, so it is dropped.
  buffer.push(Buffer.from(data))
}) // FIX: was `}})` — stray closing brace made the snippet a syntax error

// Button handler or something else
let buffer_ascii = ''
buffer.forEach(chunk => {
  buffer_ascii += chunk.toString('hex')
})
console.log(buffer_ascii)

Related

How do I get an iBeacon on a Raspberry Pi working with an iPhone?

I have a Raspberry Pi with a Cambridge Silicon Radio (CSR) USB Bluetooth adapter, on which I've run the following:
sudo hciconfig hci0 up
sudo hciconfig hci0 leadv 3
sudo hcitool -i hci0 cmd 0x08 0x0008 1E 02 01 06 1A FF 4C 00 02 15 C7 C1 A1 BF BB 00 4C AD 87 04 9F 2D 29 17 DE D2 00 00 00 00 C8 00
I have written a MacOS Cocoa App as follows:
import Cocoa
import CoreBluetooth
// Entry view controller: starts BLE scanning as soon as the view loads.
class ViewController: NSViewController {
override func viewDidLoad() {
super.viewDidLoad()
// The scanner is not stored in a property; it presumably stays alive because
// its repeating Timer retains it as the timer's target — TODO confirm.
BeaconScanner(type: BeaconType.iBeacon).scanEverySecond()
}
}
// Kinds of beacon advertisement the scanner understands.
// Only Apple's iBeacon layout is implemented at the moment.
enum BeaconType {
case iBeacon
}
// Wraps one discovered advertisement and decodes iBeacon fields from the
// manufacturer-data blob.
struct Beacon {
var uuid: NSUUID!
var major: UInt16!
var minor: UInt16!
var power: Int8!
var peripheral: CBPeripheral!
var advertisementData: NSDictionary!
var RSSI: NSNumber!
init(peripheral: CBPeripheral, advertisementData: NSDictionary, RSSI: NSNumber) {
self.peripheral = peripheral
self.advertisementData = advertisementData
self.RSSI = RSSI
}
// Copies `count` raw bytes starting at offset `from` out of the NSData blob.
private func getBytes(data: Any?, from: Int, count: Int) -> [CUnsignedChar] {
var bytes = [CUnsignedChar](repeating: 0, count: count)
(data! as AnyObject).getBytes(&bytes, range: NSMakeRange(from, count))
return bytes
}
// Reads 2 bytes at offset `from` and byte-swaps them with NSSwapShort
// (the payload's 16-bit fields arrive in the opposite byte order — confirm).
private func getBytesAsUshort(data: Any?, from: Int) -> ushort {
var bytes = ushort()
(data! as AnyObject).getBytes(&bytes, range: NSMakeRange(from, 2))
return NSSwapShort(bytes)
}
// Reads one signed byte at offset `from` (used for the TX power field).
private func getBytesAsInt8(data: Any?, from: Int) -> Int8 {
var bytes = Int8()
(data! as AnyObject).getBytes(&bytes, range: NSMakeRange(from, 1))
return bytes
}
// Looks for the 4-byte prefix 4C 00 02 15 in the manufacturer data and,
// when found, decodes UUID (bytes 4-19), major (20-21), minor (22-23) and
// power (24), then prints them with an estimated distance.
func checkAdvertisementDataForPossibleiBeaconData() {
let beaconStandard: [CUnsignedChar] = [0x4c, 0x00, 0x02, 0x15]
var data = advertisementData.value(forKey: "kCBAdvDataManufacturerData")
if(data != nil) {
data = data as! Data
var beaconStandardBytes = [CUnsignedChar](repeating: 0, count: 4)
(data! as AnyObject).getBytes(&beaconStandardBytes, range: NSMakeRange(0,4))
// The fields above end at byte 24, so anything shorter than 25 bytes is skipped.
if((data! as AnyObject).length >= 25 && beaconStandardBytes.elementsEqual(beaconStandard)) {
let uuid = NSUUID.init(uuidBytes: getBytes(data: data, from: 4, count: 16)) as UUID
let major = getBytesAsUshort(data: data, from: 20)
let minor = getBytesAsUshort(data: data, from: 22)
let power = getBytesAsInt8(data: data, from: 24)
let distance = calculateBeaconDistance(Int(power), RSSI: Int(truncating: RSSI))
print("BeaconStandard=\(beaconStandard), UUID=\(uuid.uuidString), major=\(major), minor=\(minor), power=\(power), RSSI=\(String(describing: RSSI)), Distance=\(String(format: "%.2f", distance))m")
}
}
}
// Rough RSSI-to-metres estimate from the RSSI/power ratio; the constants
// appear to be an empirical calibration curve — TODO confirm their source.
func calculateBeaconDistance(_ power: Int, RSSI: Int) -> Double {
let ratio = Double(RSSI) * 1.0 / Double(power)
return ratio < 1.0 ? pow(ratio, 10.0) : round(10*((0.89976) * pow(ratio, 7.7095) + 0.111)) / 10
}
}
// Owns a CBCentralManager and re-triggers a peripheral scan once per second,
// handing every discovered advertisement to Beacon for iBeacon parsing.
class BeaconScanner: NSObject, CBCentralManagerDelegate {
    var cbManager: CBCentralManager!
    var timer: Timer!
    var type: BeaconType!

    init(type: BeaconType) {
        self.type = type
    }

    /// Creates the central manager and schedules a repeating 1 s scan.
    /// The scheduled Timer retains `self` as its target, so the scanner
    /// stays alive even if the caller drops its reference.
    func scanEverySecond() {
        cbManager = CBCentralManager(delegate: self, queue: nil)
        self.timer = Timer.scheduledTimer(timeInterval: TimeInterval(1), target: self, selector: #selector(scan), userInfo: nil, repeats: true)
    }

    // FIX: was `#objc`, which is not valid Swift; the Timer selector needs
    // the `@objc` attribute to be exposed to the Objective-C runtime.
    @objc func scan() {
        // Scanning before the manager reports .poweredOn is an API error.
        if cbManager != nil && cbManager.state == .poweredOn {
            cbManager.scanForPeripherals(withServices: nil, options: nil)
        }
    }

    /// Delegate callback for every advertisement seen while scanning.
    func centralManager(_ central: CBCentralManager, didDiscover peripheral: CBPeripheral, advertisementData: [String : Any], rssi RSSI: NSNumber) {
        Beacon(peripheral: peripheral, advertisementData: advertisementData as NSDictionary, RSSI: RSSI).checkAdvertisementDataForPossibleiBeaconData()
    }

    /// Bails out entirely if Bluetooth is unavailable or switched off.
    func centralManagerDidUpdateState(_ central: CBCentralManager) {
        if central.state != .poweredOn {
            // FIX: typo "user" -> "use" in the user-facing message.
            print("In order to use this tool, Bluetooth must be switched on\n")
            exit(EXIT_FAILURE)
        }
    }
}
This seems to work just fine. I get the following:
BeaconStandard=[76, 0, 2, 21], UUID=C7C1A1BF-BB00-4CAD-8704-9F2D2917DED2, major=0, minor=0, power=-56, RSSI=Optional(-74), Distance=7.80m
BeaconStandard=[76, 0, 2, 21], UUID=C7C1A1BF-BB00-4CAD-8704-9F2D2917DED2, major=0, minor=0, power=-56, RSSI=Optional(-76), Distance=9.60m
I then built an iOS app simply copying the ViewController code replacing
import Cocoa
import CoreBluetooth
class ViewController: NSViewController {
override func viewDidLoad() {
super.viewDidLoad()
BeaconScanner(type: BeaconType.iBeacon).scanEverySecond()
}
}
with
import UIKit
import CoreBluetooth
class ViewController: UIViewController {
override func viewDidLoad() {
super.viewDidLoad()
BeaconScanner(type: BeaconType.iBeacon).scanEverySecond()
}
}
I ran the code for about 5 minutes on iOS but got no hits.
I added some debug code and I do see hits from other Bluetooth devices, but none from the PI.
MacOS is running 10.14.6. iOS is running 12.3.1 and XCode is running 10.3.
Is this a hardware limitation? I know the CSR Bluetooth is pretty cheap. Or do I have a problem in the code?

Properly parse MAC Address

When I use tools such as snmpwalk or snmpget to query an OID with a return type of MacAddress, it always parses the data as a hex string and displays it properly. It still works even when the MIBs are not loaded.
bash#snmpwalk -v 2c -c public 10.1.2.3 1.3.6.1.4.1.14179.2.2.1.1
SNMPv2-SMI::enterprises.14179.2.2.1.1.1.16.189.24.206.212.64 = Hex-STRING: 10 BD 18 CE D4 40
SNMPv2-SMI::enterprises.14179.2.2.1.1.1.100.233.80.151.114.192 = Hex-STRING: 64 E9 50 97 72 C0
However, I can't seem to get the same result from Lextm.SharpSnmpLib (11.2.0). Data types of MacAddress don't get decoded correctly and it's a manual process to convert it to a proper MAC.
/// Walks the entire 1.3.6.1.4.1.14179.2.2.1.1 table over SNMP v2c and
/// prints each returned variable using its default ToString() rendering.
public void WalkTable()
{
    const string baseOid = "1.3.6.1.4.1.14179.2.2.1.1"; //The entire table
    const string community = "public";

    var endpoint = new IPEndPoint(IPAddress.Parse("10.1.2.3"), 161);
    var variables = new List<Variable>();

    // 60-second timeout, restricted to the subtree rooted at baseOid.
    Messenger.Walk(VersionCode.V2, endpoint, new OctetString(community), new ObjectIdentifier(baseOid), variables, 60000, WalkMode.WithinSubtree);

    foreach (var variable in variables)
    {
        Console.WriteLine(variable.Data.ToString());
    }
}
Am I doing something wrong or is this just how the library works?
You are outputting the MAC Address as ASCII instead of Hex. Here's a quick method I put together that will detect non-ascii characters and output as hex if any are found.
/// Walks the table and prints OctetString values through DecodeOctetString
/// (ASCII when printable, hex otherwise); all other types use ToString().
public void WalkTable()
{
    const string baseOid = "1.3.6.1.4.1.14179.2.2.1.1"; //The entire table
    const string community = "public";
    var ep = new IPEndPoint(IPAddress.Parse("10.1.2.3"), 161);
    var results = new List<Variable>();
    Messenger.Walk(VersionCode.V2, ep, new OctetString(community), new ObjectIdentifier(baseOid), results, 60000, WalkMode.WithinSubtree);
    foreach (var v in results)
    {
        //If the result is an OctetString, check for ascii, otherwise use ToString()
        // FIX: the original was missing the trailing semicolon here and had an
        // extra closing brace after the method, which broke compilation.
        Console.WriteLine(v.Data.TypeCode.ToString() == "OctetString" ? DecodeOctetString(v.Data.ToBytes()) : v.Data.ToString());
    }
}
/// Renders a raw SNMP OctetString either as ASCII text (when every payload
/// byte is 7-bit) or as dash-separated hex pairs (e.g. "10-BD-18-CE-D4-40"),
/// matching how net-snmp displays MAC addresses.
/// <param name="raw">BER-encoded value as returned by ISnmpData.ToBytes().</param>
/// <returns>The decoded ASCII string, or an uppercase hex dump.</returns>
public string DecodeOctetString(byte[] raw)
{
    //First 2 bytes are the Type, so remove them
    // NOTE(review): BER lengths above 127 use a multi-byte length field, so
    // the header can exceed 2 bytes for long strings — confirm for your data.
    byte[] bytes = new byte[raw.Length - 2];
    Array.Copy(raw, 2, bytes, 0, bytes.Length);

    // FIX: check the bytes directly — a byte >= 0x80 cannot be 7-bit ASCII.
    // The original round-tripped through UTF-8 decoding, which is both more
    // work and subtly dependent on how invalid sequences are replaced.
    bool ascii = true;
    foreach (byte b in bytes)
    {
        if (b >= 128)
        {
            ascii = false;
            break; // one non-ASCII byte is enough; no need to scan the rest
        }
    }

    //If it's all ascii, return as ascii, else convert to hex
    return ascii ? Encoding.ASCII.GetString(bytes) : BitConverter.ToString(bytes);
}

How to manage several Serial messages in processing

I am reading the UID of my RFID card and storing it in a variable called myUID.
After that I am authorizing to the card with the factory key and read block number 4 (which has been written to earlier) and store it in a string readBlock.
On the Arduino, I print out the variables onto the serial interface like so.
Serial.println(myUID);
Serial.println(readBlock);
On the client side, I use a Java program that reads in serial data. My program uses the Processing Library.
// Processing sketch: logs everything arriving on the first serial port to data.txt.
Serial mySerial;
PrintWriter output;
void setup() {
output = createWriter( "data.txt" );
mySerial = new Serial( this, Serial.list()[0], 9600 );
mySerial.bufferUntil('\n');
}
void draw(){
// NOTE(review): readString() drains whatever bytes happen to have arrived,
// so a single println() from the Arduino can be split across several reads —
// likely the cause of the fragmented output shown below. bufferUntil('\n')
// presumably only affects serialEvent(), which this sketch does not define.
while (mySerial.available() > 0) {
String inBuffer = mySerial.readString();
if (inBuffer != null)
output.println(inBuffer);
}
}
void keyPressed() { // Press a key to save the data
output.flush(); // Write the remaining data
output.close(); // Finish the file
exit(); // Stop the program
}
Now my data.txt is expected to look like
xxx xxx xxx xxx (uid of card)
00 00 00 00 00 00 00 00 ... (read block from card)
but looks like
237 63 58 1
07
37 37 95
37
97 98 50 54 37 5
4 55 102 55 52
45 98
I have tried several things like readStringUntil('\n'); in the Processing Library but without success.
For everyone interested, I have fixed the problem myself with many hours of searching Google, so maybe this will help someone in the future:
I could fix it with this code:
import processing.serial.*;
int count = 0;
String input = "";
String fileName = dataPath("SET FILEPATH HERE");
Serial mySerial;
import java.io.*;
// Opens the first serial port and arranges for serialEvent() to fire once
// per received newline; also starts each run with a fresh output file.
void setup() {
mySerial = new Serial(this, Serial.list()[0], 9600);
mySerial.bufferUntil('\n');
// Delete any leftover file from a previous run.
File f = new File(fileName);
if (f.exists()) {
f.delete();
}
}
// All work happens in serialEvent(); draw() must still exist for the sketch to run.
void draw(){}
// listen to serial events happening
// Fires once per buffered newline; each complete line is appended to the file.
void serialEvent(Serial mySerial){
  input = mySerial.readStringUntil('\n');
  // FIX: readStringUntil() can return null when no complete line is buffered
  // yet; skip those events instead of writing the literal text "null".
  if (input != null) {
    write(input, count);
    count++;
  }
}
// function for writing the data to the file
// Appends the first two serial lines of a record (line 1: card UID, line 2:
// read block); on the third call it resets the counter and overwrites the
// file, starting a new record.
void write(String inputString, int counter) {
  // should new data be appended or replace any old text in the file?
  boolean append = false;
  // just for my purpose, because I have got two lines of serial which need to get written to the file
  //(Line 1: UID of card, Line 2: Read block of card)
  if(counter < 2){
    append = true;
  }
  else{
    count = 0;
  }
  try {
    // NOTE(review): setup() deletes the file at `fileName`, but this writes
    // to a different hard-coded path — confirm which file is intended.
    File file = new File("D:/xampp/htdocs/pizza/src/rfid/data.txt");
    if (!file.exists()) {
      file.createNewFile();
    }
    // FIX: try-with-resources guarantees the writer is closed even when the
    // write throws; the original leaked the FileWriter on any exception
    // raised between construction and pw.close().
    try (PrintWriter pw = new PrintWriter(new BufferedWriter(new FileWriter(file, append)))) {
      pw.write(inputString + '\n');
    }
  }
  catch(IOException ioe) {
    System.out.println("Exception ");
    ioe.printStackTrace();
  }
}

Error 0x6f00 in Sign Method in javacard 2.2.1

I wrote this code and debugged it with JCIDE. I get error 0x6F00 at the line signature.sign(...). I sent the APDU "00 80 00 00 04 01 02 03 04" for the signing operation
.
My key is RSA -1024
// 1024-bit RSA key pair used for signing. NO_EXTERNAL_ACCESS is presumably a
// local boolean passed as buildKey's keyEncryption flag — confirm its value.
RSAPrivateKey thePrivateKey = (RSAPrivateKey) KeyBuilder.buildKey(KeyBuilder.TYPE_RSA_PRIVATE, KeyBuilder.LENGTH_RSA_1024, NO_EXTERNAL_ACCESS);
RSAPublicKey thePublickKey = (RSAPublicKey) KeyBuilder.buildKey(KeyBuilder.TYPE_RSA_PUBLIC, KeyBuilder.LENGTH_RSA_1024, NO_EXTERNAL_ACCESS);
/**
 * APDU handler: SHA-1 hashes the command data field and signs the digest with
 * the RSA private key selected by P1, returning the signature in the response.
 * Requires a validated PIN; throws SW_SECURITY_STATUS_NOT_SATISFIED otherwise.
 */
public void generatesignature(APDU apdu)
{
    if(!Pin.isValidated())
        ISOException.throwIt (ISO7816.SW_SECURITY_STATUS_NOT_SATISFIED);
    byte[] buffer=apdu.getBuffer();
    // data field of the command APDU
    // FIX: mask with 0xFF so Lc values above 127 do not read as negative.
    short numdata=(short) (buffer[ISO7816.OFFSET_LC] & 0xFF);
    // P1 selects which key pair from the key arrays to use.
    byte p1=(byte)buffer[ISO7816.OFFSET_P1];
    thePrivateKey=(RSAPrivateKey)PrivateKeyArray[p1];
    thePublickKey=(RSAPublicKey)PublicKeyArray[p1];
    // receive data starting from the offset
    // ISO.OFFSET_CDATA
    short inputlength= (short) apdu.setIncomingAndReceive();
    // it is an error if the number of data bytes
    // read does not match the number in Lc byte
    if (inputlength == 0)
        ISOException.throwIt(ISO7816.SW_WRONG_LENGTH);
    try
    {
        //convert input to hash
        MessageDigest digest=MessageDigest.getInstance(MessageDigest.ALG_SHA,false );
        short hashlength=digest.doFinal(buffer,ISO7816.OFFSET_CDATA,numdata,Input_Hash,(short)0);
        // NOTE(review): ALG_RSA_SHA_PKCS1 hashes its input internally, so
        // signing a precomputed digest double-hashes the data; if a single
        // SHA-1 is expected, sign the raw APDU data instead (see the
        // working card example elsewhere in this file's thread).
        Signature signature=Signature.getInstance(Signature.ALG_RSA_SHA_PKCS1,false);
        signature.init(thePrivateKey,Signature.MODE_SIGN);
        // FIX: the original redeclared `hashlength` here, a compile error;
        // the signature length belongs in `signLength`, which is used below.
        signLength=signature.sign(Input_Hash,(short)0,hashlength,Input_Sign, (short)0);
        Util.arrayCopy(Input_Sign,(short)0, buffer, (short)0, signLength);
        apdu.setOutgoingAndSend((short)0, ((short)signLength));
    }
    catch (CryptoException c) {
        short reason = c.getReason();
        ISOException.throwIt(reason); // for check
    }
}
Can anybody help me? The problem occurs exactly at the line short hashlength=signature.sign(Input_Hash,(short)0,hashlength,Input_Sign, (short)0);
I use a try/catch block, but execution never reaches the catch.
my result in apdutool on real gemalto 2.2.1 card
// Working version (result from apdutool on a real Gemalto 2.2.1 card): sign
// the raw APDU data directly and let ALG_RSA_SHA_PKCS1 hash it internally.
byte[] buffer=apdu.getBuffer();
short numdata=(short) buffer[ISO7816.OFFSET_LC];
short inputlength= (short) apdu.setIncomingAndReceive();
if (inputlength == 0)
ISOException.throwIt(ISO7816.SW_WRONG_LENGTH);
//sign
Signature signature=Signature.getInstance(Signature.ALG_RSA_SHA_PKCS1,false);
signature.init(thePrivateKey,Signature.MODE_SIGN);
// Signs in place: reads the command data at OFFSET_CDATA and writes the
// signature back to the start of the same APDU buffer.
signLength=signature.sign(buffer,(short)(ISO7816.OFFSET_CDATA & 0xFF), inputlength, buffer, (short)(0));
apdu.setOutgoingAndSend((short)0,signLength);

How to convert Unicode[Marathi] string in to byte array?

I am trying to convert a Unicode (Marathi) string into a byte array, but when I convert and print it, the output shows " ? ? ? ? ? ? " as below.
using System;
using System.Text;
// Demonstrates the reported problem: encoding Devanagari text with ASCII.
public class Example
{
public static void Main()
{
// Input string.
const string input = "पांडुरंग";
// Invoke GetBytes method.
// ... You can store this array as a field!
// NOTE: Encoding.ASCII covers only 7-bit characters; the Devanagari
// characters fall outside that range and are each replaced by '?'
// (byte 63), which is exactly the output shown below.
byte[] array = Encoding.ASCII.GetBytes(input);
// Loop through contents of the array.
foreach (byte element in array)
{
Console.WriteLine("{0} = {1}", element, (char)element);
}
Console.ReadLine();
}
}
.
Output:
63 = ?
63 = ?
63 = ?
63 = ?
63 = ?
63 = ?
63 = ?
63 = ?
If anyone has an idea about this, please help us. Thanks in advance.
Try using Encoding.Unicode instead of Encoding.ASCII:
byte[] array = Encoding.Unicode.GetBytes(input);
Also, you are converting each byte to a separate char, whereas actually each character is represented by two bytes. The easiest way to get the chars from the byte array is to convert the bytes back to a string using the same encoding, and calling ToCharArray() on the resulting string:
// Decode the UTF-16 bytes back into characters and print each one alongside
// its numeric code unit value.
foreach (char element in Encoding.Unicode.GetString(array).ToCharArray())
{
    // FIX: the original was missing the closing parenthesis on the foreach
    // and declared the loop variable as byte, but ToCharArray() yields chars.
    Console.WriteLine("{0} = {1}", (int)element, element);
}

Resources