Crossbar authentication with proxy - Node.js

We are reaching the point where we need to scale Crossbar across multiple CPU cores to improve performance. Because a realm can only be served by a single router worker (so it is limited to one core), the only way to scale is to use proxy workers.
I tried to add a proxy worker to our Crossbar configuration but can't get it working.
The Crossbar documentation is very poor; I can only try to figure out how to configure it from the examples provided here: https://github.com/crossbario/crossbar-examples/tree/master/proxy/.crossbar
So far, my apps register successfully with Crossbar without any changes on the app side. But when the app is accessed from the browser, there is also a direct WebSocket call from the browser, and that call is failing. Inspecting it in the browser shows the error "explicit role <None> on realm <realm1> configured for dynamic authenticator does not exist". I think it's probably related to the dynamic authentication.
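For context, organisation.auth.authenticate is registered as a WAMP procedure from Node.js. The sketch below is a simplified, hypothetical version of such an authenticator (not our actual code); the URL, authid and secret are placeholders. The point is that Crossbar calls it with (realm, authid, details) and the role it returns has to be one of the roles defined on realm1.
// Simplified, hypothetical sketch of the dynamic authenticator, registered over WAMP from
// Node.js with autobahn-js (URL, authid and secret below are placeholders). Crossbar calls
// the procedure as authenticate(realm, authid, details); the returned role must be one of
// the roles defined on realm1 ("backend", "authenticated", "anonymous"), otherwise the join
// fails with an "explicit role ... does not exist" style error like the one above.
const autobahn = require('autobahn');

const connection = new autobahn.Connection({
  url: 'ws://localhost:8080/ws',
  realm: 'realm1',
  authmethods: ['wampcra'],
  authid: '1',                                              // the backend user from the config
  onchallenge: (session, method, extra) =>
    autobahn.auth_cra.sign('********', extra.challenge),    // placeholder secret
});

connection.onopen = (session) => {
  session.register('organisation.auth.authenticate', (args) => {
    const [realm, authid, details] = args;
    // ...validate details.ticket against the user store (omitted)...
    return { role: 'authenticated' };                       // must be a role configured on realm1
  });
};

connection.open();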
Here's my original configuration with one worker and my updated configuration with one worker and one proxy.
original config with one worker:
{
"controller": {},
"version": 2,
"workers": [
{
"realms": [
{
"name": "realm1",
"roles": [
{
"name": "backend",
"permissions": [
{
"allow": {
"call": true,
"publish": true,
"register": true,
"subscribe": true
},
"cache": false,
"disclose": {
"caller": true,
"publisher": true
},
"match": "prefix",
"uri": ""
}
]
},
{
"name": "authenticated",
"permissions": [
{
"allow": {
"call": false,
"publish": false,
"register": false,
"subscribe": true
},
"cache": false,
"disclose": {
"caller": true,
"publisher": true
},
"match": "prefix",
"uri": "organisation.file.provision"
},
{
"allow": {
"call": false,
"publish": false,
"register": false,
"subscribe": true
},
"cache": false,
"disclose": {
"caller": true,
"publisher": true
},
"match": "prefix",
"uri": "organisation.notifications.updates"
},
{
"allow": {
"call": true,
"publish": false,
"register": false,
"subscribe": false
},
"cache": false,
"disclose": {
"caller": true,
"publisher": true
},
"match": "prefix",
"uri": "organisation.model"
},
{
"allow": {
"call": true,
"publish": true,
"register": false,
"subscribe": true
},
"cache": false,
"disclose": {
"caller": true,
"publisher": true
},
"match": "prefix",
"uri": "public."
}
]
},
{
"name": "anonymous",
"permissions": [
{
"allow": {
"call": true,
"publish": false,
"register": false,
"subscribe": false
},
"cache": true,
"disclose": {
"caller": true,
"publisher": true
},
"match": "exact",
"uri": "organisation.app.meta"
},
{
"allow": {
"call": true,
"publish": false,
"register": false,
"subscribe": false
},
"cache": false,
"disclose": {
"caller": true,
"publisher": true
},
"match": "prefix",
"uri": "anon."
}
]
}
]
}
],
"transports": [
{
"endpoint": {
"port": 8080,
"type": "tcp"
},
"options": {
"access_log": true,
"hsts": true
},
"paths": {
"/": {
"directory": "../web",
"type": "static"
},
"ws": {
"auth": {
"anonymous": {
"role": "anonymous",
"type": "static"
},
"ticket": {
"authenticator": "organisation.auth.authenticate",
"type": "dynamic"
},
"wampcra": {
"type": "static",
"users": {
"1": {
"role": "backend",
"secret": "********"
},
"monitor": {
"role": "devops",
"secret": "******"
}
}
}
},
"debug": false,
"options": {
"auto_fragment_size": 65536,
"auto_ping_interval": 10000,
"auto_ping_size": 4,
"auto_ping_timeout": 5000,
"close_handshake_timeout": 1000,
"echo_close_codereason": true,
"enable_webstatus": false,
"fail_by_drop": true,
"max_frame_size": 1048576,
"max_message_size": 1048576,
"open_handshake_timeout": 2500
},
"type": "websocket"
}
},
"type": "web"
}
],
"type": "router"
}
]
}
one worker and one proxy:
{
"controller": {},
"version": 2,
"workers": [
{
"realms": [
{
"name": "realm1",
"roles": [
{
"name": "backend",
"permissions": [
{
"allow": {
"call": true,
"publish": true,
"register": true,
"subscribe": true
},
"cache": true,
"disclose": {
"caller": true,
"publisher": true
},
"match": "prefix",
"uri": ""
}
]
},
{
"name": "authenticated",
"permissions": [
{
"allow": {
"call": false,
"publish": false,
"register": false,
"subscribe": true
},
"cache": false,
"disclose": {
"caller": true,
"publisher": true
},
"match": "prefix",
"uri": "organisation.file.provision"
},
{
"allow": {
"call": false,
"publish": false,
"register": false,
"subscribe": true
},
"cache": false,
"disclose": {
"caller": true,
"publisher": true
},
"match": "prefix",
"uri": "organisation.notifications.updates"
},
{
"allow": {
"call": true,
"publish": false,
"register": false,
"subscribe": false
},
"cache": false,
"disclose": {
"caller": true,
"publisher": true
},
"match": "prefix",
"uri": "organisation.model"
},
{
"allow": {
"call": true,
"publish": true,
"register": false,
"subscribe": true
},
"cache": false,
"disclose": {
"caller": true,
"publisher": true
},
"match": "prefix",
"uri": "public."
}
]
},
{
"name": "anonymous",
"permissions": [
{
"allow": {
"call": true,
"publish": false,
"register": false,
"subscribe": false
},
"cache": true,
"disclose": {
"caller": true,
"publisher": true
},
"match": "exact",
"uri": "organisation.app.meta"
},
{
"allow": {
"call": true,
"publish": false,
"register": false,
"subscribe": false
},
"cache": false,
"disclose": {
"caller": true,
"publisher": true
},
"match": "prefix",
"uri": "anon."
}
]
}
]
}
],
"transports": [
{
"serializer": "cbor",
"type": "rawsocket"
"endpoint": {
"type": "unix",
"path": "router.sock"
},
"options": {
"access_log": true,
"hsts": true
},
"auth": {
"cryptosign-proxy": {
"type": "dynamic",
"authenticator": "organisation.auth.authenticate"
},
"options": {
"max_message_size": 1048576
},
}
],
"type": "router"
},
{
"type": "proxy",
"connections": {
"conn1": {
"transport": {
"type": "rawsocket",
"endpoint": {
"type": "unix",
"path": "router.sock"
},
"url": "ws://localhost",
"serializer": "cbor"
}
},
"auth": {
"cryptosign-proxy": {
"type": "static"
}
}
},
"routes": {
"realm1": {
"anonymous": "conn1",
"backend": "conn1",
"authenticated": "conn1"
}
},
"transports": [
{
"type": "web",
"endpoint": {
"port": 8080,
"type": "tcp",
"shared": true,
"backlog": 1024
},
"options": {
"access_log": true,
"hsts": true
},
"paths": {
"/": {
"directory": "../web",
"type": "static"
},
"ws": {
"auth": {
"anonymous": {
"role": "anonymous",
"type": "static"
},
"ticket": {
"authenticator": "organisation.auth.authenticate",
"type": "dynamic"
},
"wampcra": {
"type": "static",
"users": {
"1": {
"role": "backend",
"secret": "*******n"
},
"monitor": {
"role": "devops",
"secret": "*****"
}
}
}
},
"debug": false,
"options": {
"auto_fragment_size": 65536,
"auto_ping_interval": 10000,
"auto_ping_size": 13,
"auto_ping_timeout": 5000,
"close_handshake_timeout": 1000,
"echo_close_codereason": true,
"enable_webstatus": false,
"fail_by_drop": true,
"max_frame_size": 1048576,
"max_message_size": 1048576,
"open_handshake_timeout": 2500
},
"type": "websocket"
},
"info": {
"type": "nodeinfo"
}
}
}
]
}
]
}

Azure ForEach activity failing with: The function 'length' expects its parameter to be an array or a string. The provided value is of type 'Integer'

How do I convert my value to an integer?
Here's some added context, if helpful:
My pipeline should get the column count of a blob CSV and pass that count to a ForEach activity. A Switch activity is embedded in the ForEach, but the pipeline is failing at the ForEach with this error: 'The function 'length' expects its parameter to be an array or a string. The provided value is of type 'Integer'.'
Metadata output:
{
"columnCount": 52,
"effectiveIntegrationRuntime": "AutoResolveIntegrationRuntime (Central US)",
"executionDuration": 1,
"durationInQueue": {
"integrationRuntimeQueue": 0
},
"billingReference": {
"activityType": "PipelineActivity",
"billableDuration": [
{
"meterType": "AzureIR",
"duration": 0.016666666666666666,
"unit": "Hours"
}
]
}
}
ForEach input:
{
"items": "#activity('Get Metadata1').output.columnCount",
"activities": [
{
"name": "Switch1",
"type": "Switch",
"dependsOn": [],
"userProperties": [],
"typeProperties": {
"on": "#item()",
"cases": [
{
"value": "44",
"activities": [
{
"name": "Copy data1_copy1",
"type": "Copy",
"dependsOn": [],
"policy": {
"timeout": "0.12:00:00",
"retry": 0,
"retryIntervalInSeconds": 30,
"secureOutput": false,
"secureInput": false
},
"userProperties": [],
"typeProperties": {
"source": {
"type": "DelimitedTextSource",
"storeSettings": {
"type": "AzureBlobStorageReadSettings",
"recursive": false,
"enablePartitionDiscovery": false
},
"formatSettings": {
"type": "DelimitedTextReadSettings"
}
},
"sink": {
"type": "AzureSqlSink",
"writeBehavior": "insert",
"sqlWriterUseTableLock": false
},
"enableStaging": false,
"translator": {
"type": "TabularTranslator",
"typeConversion": true,
"typeConversionSettings": {
"allowDataTruncation": true,
"treatBooleanAsNumber": false
}
}
},
"inputs": [
{
"referenceName": "ten_eighty_split_CSV",
"type": "DatasetReference",
"parameters": {
"FileName": "#pipeline().parameters.SourceFile"
}
}
],
"outputs": [
{
"referenceName": "ten_eighty_split_10_15_SQL",
"type": "DatasetReference",
"parameters": {}
}
]
}
]
},
{
"value": "52",
"activities": [
{
"name": "Copy data2_copy1",
"type": "Copy",
"dependsOn": [],
"policy": {
"timeout": "0.12:00:00",
"retry": 0,
"retryIntervalInSeconds": 30,
"secureOutput": false,
"secureInput": false
},
"userProperties": [],
"typeProperties": {
"source": {
"type": "DelimitedTextSource",
"storeSettings": {
"type": "AzureBlobStorageReadSettings",
"recursive": false,
"enablePartitionDiscovery": false
},
"formatSettings": {
"type": "DelimitedTextReadSettings"
}
},
"sink": {
"type": "AzureSqlSink",
"writeBehavior": "insert",
"sqlWriterUseTableLock": false
},
"enableStaging": false,
"translator": {
"type": "TabularTranslator",
"typeConversion": true,
"typeConversionSettings": {
"allowDataTruncation": true,
"treatBooleanAsNumber": false
}
}
},
"inputs": [
{
"referenceName": "ten_eighty_split_CSV",
"type": "DatasetReference",
"parameters": {
"FileName": "#pipeline().parameters.SourceFile"
}
}
],
"outputs": [
{
"referenceName": "ten_eighty_split_15_20_SQL",
"type": "DatasetReference",
"parameters": {}
}
]
}
]
},
{
"value": "60",
"activities": [
{
"name": "Copy data3_copy1",
"type": "Copy",
"dependsOn": [],
"policy": {
"timeout": "0.12:00:00",
"retry": 0,
"retryIntervalInSeconds": 30,
"secureOutput": false,
"secureInput": false
},
"userProperties": [],
"typeProperties": {
"source": {
"type": "DelimitedTextSource",
"storeSettings": {
"type": "AzureBlobStorageReadSettings",
"recursive": false,
"enablePartitionDiscovery": false
},
"formatSettings": {
"type": "DelimitedTextReadSettings"
}
},
"sink": {
"type": "AzureSqlSink",
"writeBehavior": "insert",
"sqlWriterUseTableLock": false
},
"enableStaging": false,
"translator": {
"type": "TabularTranslator",
"typeConversion": true,
"typeConversionSettings": {
"allowDataTruncation": true,
"treatBooleanAsNumber": false
}
}
},
"inputs": [
{
"referenceName": "ten_eighty_split_CSV",
"type": "DatasetReference",
"parameters": {
"FileName": "#pipeline().parameters.SourceFile"
}
}
],
"outputs": [
{
"referenceName": "ten_eighty_split_25_30_SQL",
"type": "DatasetReference",
"parameters": {}
}
]
}
]
},
{
"value": "68",
"activities": [
{
"name": "Copy data4_copy1",
"type": "Copy",
"dependsOn": [],
"policy": {
"timeout": "0.12:00:00",
"retry": 0,
"retryIntervalInSeconds": 30,
"secureOutput": false,
"secureInput": false
},
"userProperties": [],
"typeProperties": {
"source": {
"type": "DelimitedTextSource",
"storeSettings": {
"type": "AzureBlobStorageReadSettings",
"recursive": false,
"enablePartitionDiscovery": false
},
"formatSettings": {
"type": "DelimitedTextReadSettings"
}
},
"sink": {
"type": "AzureSqlSink",
"writeBehavior": "insert",
"sqlWriterUseTableLock": false
},
"enableStaging": false,
"translator": {
"type": "TabularTranslator",
"typeConversion": true,
"typeConversionSettings": {
"allowDataTruncation": true,
"treatBooleanAsNumber": false
}
}
},
"inputs": [
{
"referenceName": "ten_eighty_split_CSV",
"type": "DatasetReference",
"parameters": {
"FileName": "#pipeline().parameters.SourceFile"
}
}
],
"outputs": [
{
"referenceName": "ten_eighty_split_30_35_SQL",
"type": "DatasetReference",
"parameters": {}
}
]
}
]
}
]
}
}
]
}
ForEach output:
{}
Not sure how to resolve this error. Thanks!
Failure type: User configuration issue
Details: The function 'length' expects its parameter to be an array or a string. The provided value is of type 'Integer'.
You are getting this error because you pass an integer value (columnCount) as the input to the ForEach activity. ForEach is for iterating over an array of values; it only makes sense when you have an array and want to run the inner activities once per element. In this case, you can use the Switch activity directly after the Get Metadata activity, without the ForEach. In the Switch activity, the expression is given within braces {...} so that the integer is interpolated into a string.
Expression:
@{activity('Get Metadata1').output.columnCount}
I tried this in my environment and got the same error when I gave the expression without braces. Once the braces were added, it worked. Below are the steps.
A Get Metadata activity is added, with Column count selected in its field list.
Output of the Get Metadata activity:
{
"columnCount": 2,
"effectiveIntegrationRuntime": "AutoResolveIntegrationRuntime (West US)",
"executionDuration": 1,
"durationInQueue": {
"integrationRuntimeQueue": 0
},
"billingReference": {
"activityType": "PipelineActivity",
"billableDuration": [
{
"meterType": "AzureIR",
"duration": 0.016666666666666666,
"unit": "Hours"
}
]
}
}
Then a Switch activity is added, and the expression and cases are configured.
Expression: @{activity('Get Metadata1').output.columnCount}
When the pipeline is debugged, it executes successfully without errors.
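Applied to the pipeline in the question, the ForEach is dropped and the Switch is wired directly after Get Metadata. A trimmed sketch of the resulting Switch activity is shown below; the Copy activities inside each case are omitted here and stay exactly as in the question.
{
  "name": "Switch1",
  "type": "Switch",
  "typeProperties": {
    "on": "@{activity('Get Metadata1').output.columnCount}",
    "cases": [
      { "value": "44", "activities": [] },
      { "value": "52", "activities": [] },
      { "value": "60", "activities": [] },
      { "value": "68", "activities": [] }
    ]
  }
}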

Receiving RequestMalformed error when doing Typesense upsert

I have the following interface in TypeScript:
export interface TypesenseAtlistedProEvent {
// IDs
id: string;
proId: string;
eventId: string;
startTime: Number;
stopTime: Number;
eventRate: Number;
remainingSlots: Number;
displayName: string;
photoURL: string;
indexOptions: string;
location: Number[];
}
and the following schema in Typesense:
{
"created_at": 1665530883,
"default_sorting_field": "location",
"fields": [
{
"facet": false,
"index": true,
"infix": false,
"locale": "",
"name": "proId",
"optional": false,
"sort": false,
"type": "string"
},
{
"facet": false,
"index": true,
"infix": false,
"locale": "",
"name": "eventId",
"optional": false,
"sort": false,
"type": "string"
},
{
"facet": false,
"index": true,
"infix": false,
"locale": "",
"name": "startTime",
"optional": false,
"sort": true,
"type": "int64"
},
{
"facet": false,
"index": true,
"infix": false,
"locale": "",
"name": "stopTime",
"optional": false,
"sort": true,
"type": "int64"
},
{
"facet": false,
"index": true,
"infix": false,
"locale": "",
"name": "eventRate",
"optional": false,
"sort": true,
"type": "float"
},
{
"facet": false,
"index": true,
"infix": false,
"locale": "",
"name": "remainingSlots",
"optional": false,
"sort": true,
"type": "int32"
},
{
"facet": false,
"index": true,
"infix": false,
"locale": "",
"name": "displayName",
"optional": false,
"sort": false,
"type": "string"
},
{
"facet": false,
"index": true,
"infix": false,
"locale": "",
"name": "photoURL",
"optional": false,
"sort": false,
"type": "string"
},
{
"facet": false,
"index": true,
"infix": false,
"locale": "",
"name": "indexOptions",
"optional": false,
"sort": false,
"type": "string"
},
{
"facet": false,
"index": true,
"infix": false,
"locale": "",
"name": "location",
"optional": false,
"sort": true,
"type": "geopoint"
}
],
"name": "atlistedProEventIndex",
"num_documents": 0,
"symbols_to_index": [],
"token_separators": []
}
I attempt the upsert as in the following:
const indexedDoc: TypesenseAtlistedProEvent = {
id: proId + eventId,
proId: proId,
eventId: eventId,
startTime: publicEvent.startTime.seconds,
stopTime: publicEvent.stopTime.seconds,
eventRate: publicEvent.eventRate,
remainingSlots: publicEvent.remainingSlots,
displayName: tpi.displayName,
photoURL: tpi.photoURL,
indexOptions: tpi.indexOptions,
location: [tpi.lat, tpi.lng],
};
return await typesenseClient
.collections("atlistedProEventIndex")
.documents()
.upsert(indexedDoc)
.then(() => {
return {success: true, exit: 0};
})
I am getting the following error when the upsert runs:
RequestMalformed: Request failed with HTTP code 400 | Server said: [json.exception.type_error.302] type must be number
I am passing location as Number[] and trying to get that to update the geopoint in Typesense. This is not working, so it would be useful if:
I was able to locate the logs to go through. I would particularly like the logs from Typesense Cloud, and I'm at a loss that I cannot find them.
I could pass in the geopoint as the right type in TypeScript. Right now, as you can see above, location is of type Number[], which, from the examples I saw, I assumed was right. It may also be the case that another field is off and I'm just missing it. Either way, I could really use some kind of server-side logging from Typesense Cloud.
The error message is a little confusing, but the core of the issue is that default_sorting_field can only be a numeric field, whereas it is currently set to a geopoint field (location); that is what the error is trying to convey.
So if you create a new collection without default_sorting_field, the error should not show up.
If you want to sort by geo location, you want to use the sort_by parameter: https://typesense.org/docs/0.23.1/api/geosearch.html#searching-within-a-radius
let searchParameters = {
'q' : '*',
'query_by' : 'title',
'filter_by' : 'location:(48.90615915923891, 2.3435897727061175, 5.1 km)',
'sort_by' : 'location(48.853, 2.344):asc'
}
client.collections('companies').documents().search(searchParameters)
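For example, the collection could be recreated without default_sorting_field roughly like this (a sketch using the Typesense JavaScript client from the question; only some of the fields from the schema above are shown, and it assumes an async context):
// Hedged sketch: recreate the collection without "default_sorting_field".
// "location" stays a sortable geopoint, so sort_by: 'location(lat, lng):asc' keeps working.
await typesenseClient.collections().create({
  name: 'atlistedProEventIndex',
  fields: [
    { name: 'proId', type: 'string' },
    { name: 'startTime', type: 'int64', sort: true },
    { name: 'eventRate', type: 'float', sort: true },
    { name: 'remainingSlots', type: 'int32', sort: true },
    { name: 'location', type: 'geopoint', sort: true },
    // ...remaining string fields unchanged from the schema above...
  ],
  // note: no "default_sorting_field" here
});
On the TypeScript side, typing location as [number, number] ([lat, lng]) instead of Number[] also matches what the geopoint field expects more closely.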

Apm-server transaction not recorded in Node.js agent (express)

Describe the bug
Transactions are not recorded when I hit an endpoint, but the apm-server does receive metric data.
To Reproduce
I start the apm-server connection in the first file, named server.ts:
const apm = elasticApmNode.start({
serviceName: 'Elk Stack',
serverUrl: '${process.env.APM_SERVER_HOST}',
captureBody: 'all',
});
I create Express routes using decorators and implement the routing using app.use():
router.route('${route.path}')[route.method]([...middlewares.before, handler, ...middlewares.after, ...middlewares.error])
app.use('${uses}${prefix}', router);
And I have tried to solve this problem by creating a transaction manually, following the apm.startTransaction code example:
res.locals.apm.startTransaction(`${req.method.toUpperCase()} ${req.originalUrl}`, 'HTTP Request');
res.locals.apm.setTransactionName(`${req.method.toUpperCase()} ${req.originalUrl}`, 'HTTP Request');
But the transaction is still not recorded. Actually, the spans of the transaction are recorded, but the transaction object only has a transaction.id element. This is the data:
{
"_index": "apm-7.4.0-span-000002",
"_type": "_doc",
"_id": "qE6CsW4B-ATTlniHYhaa",
"_version": 1,
"_score": null,
"_source": {
"parent": {
"id": "e7a181e4404a3b3b"
},
"observer": {
"hostname": "c25837d06ef9",
"id": "25003f85-d836-49cc-b242-36241e59c3b8",
"ephemeral_id": "19bd8d86-e90a-40d2-8d3b-c4a2cfe3e182",
"type": "apm-server",
"version": "7.4.0",
"version_major": 7
},
"agent": {
"name": "nodejs",
"version": "3.2.0"
},
"trace": {
"id": "7da976d15b45dd660f47f21f60155902"
},
"#timestamp": "2019-11-28T10:15:39.471Z",
"ecs": {
"version": "1.1.0"
},
"service": {
"environment": "production",
"name": "Elk Stack"
},
"processor": {
"name": "transaction",
"event": "span"
},
"transaction": {
"id": "e7a181e4404a3b3b"
},
"timestamp": {
"us": 1574936139471892
},
"span": {
"duration": {
"us": 5802
},
"stacktrace": [
{
"library_frame": true,
"exclude_from_grouping": false,
"filename": "node_modules/sequelize/lib/dialects/postgres/query.js",
"abs_path": "/var/www/app/node_modules/sequelize/lib/dialects/postgres/query.js",
"line": {
"number": 59
},
"function": "parameters.length.Promise"
},
{
"library_frame": true,
"exclude_from_grouping": false,
"filename": "node_modules/bluebird/js/release/debuggability.js",
"abs_path": "/var/www/app/node_modules/bluebird/js/release/debuggability.js",
"line": {
"number": 411
},
"function": "Promise._execute"
},
{
"library_frame": true,
"exclude_from_grouping": false,
"filename": "node_modules/bluebird/js/release/promise.js",
"abs_path": "/var/www/app/node_modules/bluebird/js/release/promise.js",
"line": {
"number": 518
},
"function": "Promise._resolveFromExecutor"
},
{
"library_frame": true,
"exclude_from_grouping": false,
"filename": "node_modules/bluebird/js/release/promise.js",
"abs_path": "/var/www/app/node_modules/bluebird/js/release/promise.js",
"line": {
"number": 103
},
"function": "Promise"
},
{
"exclude_from_grouping": false,
"library_frame": true,
"filename": "node_modules/sequelize/lib/dialects/postgres/query.js",
"abs_path": "/var/www/app/node_modules/sequelize/lib/dialects/postgres/query.js",
"line": {
"number": 59
},
"function": "run"
},
{
"library_frame": true,
"exclude_from_grouping": false,
"abs_path": "/var/www/app/node_modules/sequelize/lib/sequelize.js",
"filename": "node_modules/sequelize/lib/sequelize.js",
"line": {
"number": 645
},
"function": "runHooks.then.then"
},
{
"library_frame": true,
"exclude_from_grouping": false,
"filename": "node_modules/bluebird/js/release/util.js",
"abs_path": "/var/www/app/node_modules/bluebird/js/release/util.js",
"line": {
"number": 16
},
"function": "tryCatcher"
},
{
"library_frame": true,
"exclude_from_grouping": false,
"filename": "node_modules/bluebird/js/release/promise.js",
"abs_path": "/var/www/app/node_modules/bluebird/js/release/promise.js",
"line": {
"number": 547
},
"function": "Promise._settlePromiseFromHandler"
},
{
"exclude_from_grouping": false,
"library_frame": true,
"filename": "node_modules/bluebird/js/release/promise.js",
"abs_path": "/var/www/app/node_modules/bluebird/js/release/promise.js",
"line": {
"number": 604
},
"function": "Promise._settlePromise"
},
{
"library_frame": true,
"exclude_from_grouping": false,
"filename": "node_modules/bluebird/js/release/promise.js",
"abs_path": "/var/www/app/node_modules/bluebird/js/release/promise.js",
"line": {
"number": 649
},
"function": "Promise._settlePromise0"
},
{
"library_frame": true,
"exclude_from_grouping": false,
"filename": "node_modules/bluebird/js/release/promise.js",
"abs_path": "/var/www/app/node_modules/bluebird/js/release/promise.js",
"line": {
"number": 729
},
"function": "Promise._settlePromises"
},
{
"library_frame": true,
"exclude_from_grouping": false,
"filename": "node_modules/bluebird/js/release/async.js",
"abs_path": "/var/www/app/node_modules/bluebird/js/release/async.js",
"line": {
"number": 93
},
"function": "_drainQueueStep"
},
{
"library_frame": true,
"exclude_from_grouping": false,
"filename": "node_modules/bluebird/js/release/async.js",
"abs_path": "/var/www/app/node_modules/bluebird/js/release/async.js",
"line": {
"number": 86
},
"function": "_drainQueue"
},
{
"library_frame": true,
"exclude_from_grouping": false,
"filename": "node_modules/bluebird/js/release/async.js",
"abs_path": "/var/www/app/node_modules/bluebird/js/release/async.js",
"line": {
"number": 102
},
"function": "Async._drainQueues"
},
{
"exclude_from_grouping": false,
"library_frame": true,
"filename": "node_modules/bluebird/js/release/async.js",
"abs_path": "/var/www/app/node_modules/bluebird/js/release/async.js",
"line": {
"number": 15
},
"function": "Async.drainQueues"
},
{
"exclude_from_grouping": false,
"library_frame": true,
"filename": "timers.js",
"abs_path": "timers.js",
"line": {
"number": 810
},
"function": "runCallback"
},
{
"exclude_from_grouping": false,
"library_frame": true,
"filename": "timers.js",
"abs_path": "timers.js",
"line": {
"number": 768
},
"function": "tryOnImmediate"
},
{
"library_frame": true,
"exclude_from_grouping": false,
"filename": "timers.js",
"abs_path": "timers.js",
"line": {
"number": 745
},
"function": "processImmediate"
}
],
"subtype": "postgresql",
"name": "INSERT INTO \"user\"",
"action": "query",
"id": "02a2368953f0f483",
"type": "db",
"sync": false,
"db": {
"statement": "INSERT INTO \"user\" (\"id\",\"name\",\"username\",\"email\",\"password\",\"birthdate\",\"phone\",\"createdDate\",\"updatedDate\") VALUES (DEFAULT,$1,$2,$3,$4,$5,$6,$7,$8) RETURNING *;",
"type": "sql"
}
}
},
"fields": {
"#timestamp": [
"2019-11-28T10:15:39.471Z"
]
},
"sort": [
1574936139471
]
}
Expected behavior
The APM transactions page should look like this (screenshot not included).
The transaction data should look like this (example using Loopback 2; screenshot not included).
Environment (please complete the following information)
OS: Linux (Docker in Windows)
Node.js version: 8
APM Server version: 7.4.0
Agent version: 3.2.0
Express version: 4.17.1
How are you starting the agent? (please tick one of the boxes)
[x] Calling agent.start() directly (e.g. require('elastic-apm-node').start(...))
[ ] Requiring elastic-apm-node/start from within the source code
[ ] Starting node with -r elastic-apm-node/start
I have fixed this case: I moved the apm-server connection (the agent start call) to the very top of my server.ts. The import of 'localenv' before the agent start was the culprit.
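For reference, the ordering that makes the agent instrument Express looks roughly like this (a sketch rather than the exact project file; only elastic-apm-node, localenv and express come from the issue, everything else is illustrative):
// server.ts (compiled to CommonJS) -- start the agent before anything else is loaded,
// so that express/http/pg are patched and transactions get recorded.
const apm = require('elastic-apm-node').start({
  serviceName: 'Elk Stack',
  serverUrl: process.env.APM_SERVER_HOST,  // note: wrapping this in single quotes would send the literal '${...}' string
  captureBody: 'all',
});

require('localenv');                       // previously this ran before the agent was started
const express = require('express');

const app = express();
// ...routes registered via the decorator-based router as before...
app.listen(3000);                          // port is illustrative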

Common connection profile is missing this client's organization and certificate authority

I'm getting Failed to register user user1: Error: Common connection profile is missing this client's organization and certificate authority when trying to register a user on a Hyperledger Fabric network. I'm using the fabcar sample. I can enroll the admin:
$ ts-node src/enrollAdmin.ts
Wallet path: /Users/<home>/ws/hlf/fabcar-api/wallet
Successfully enrolled admin user "admin" and imported it into the wallet
When I try to register a new user:
$ ts-node src/registerUser.ts
Wallet path: /Users/<home>/ws/hlf/fabcar-api/wallet
Failed to register user user1: Error: Common connection profile is missing this client's organization and certificate authority
Following the sample connection profile and the question "Common connection profile missing error while adding user for organization", I've constructed my network connection profile:
{
"name": "fabcar-network",
"version": "1.0.0",
"client": {
"tlsEnable": false,
"organization": "Org1",
"connection": {
"timeout": {
"peer": {
"endorser": "300"
},
"orderer": "300"
}
}
},
"channels": {
"public-channel": {
"orderers": [
"ord1-hlf-ord.orderers.svc.cluster.local"
],
"peers": {
"peer0-hlf-peer.org1.svc.cluster.local": {
"endorsingPeer": true,
"chaincodeQuery": true,
"ledgerQuery": true,
"eventSource": true,
"discover": true
},
"peer0-hlf-peer.org2.svc.cluster.local": {
"endorsingPeer": true,
"chaincodeQuery": true,
"ledgerQuery": true,
"eventSource": true,
"discover": true
},
"peer0-hlf-peer.org3.svc.cluster.local": {
"endorsingPeer": true,
"chaincodeQuery": true,
"ledgerQuery": true,
"eventSource": true,
"discover": true
}
}
}
},
"organizations": {
"Org1": {
"mspid": "Org1MSP",
"peers": [
"peer0-hlf-peer.org1.svc.cluster.local"
],
"certificateAuthorities": [
"ca.org1.svc.cluster.local"
],
"adminPrivateKey": {
"path": "../crypto-config/peerOrganizations/org1.svc.cluster.local/users/Admin#org1.svc.cluster.local/msp/keystore/13d6b4ac07ce51716e94cb1139cfd2807307a8b908768e3297a7fedac0e08e5e_sk"
},
" signedCert": {
"path": "../crypto-config/peerOrganizations/org2.svc.cluster.local/users/Admin#org2.svc.cluster.local/msp/signcerts/Admin#org2.svc.cluster.local-cert.pem"
}
},
"Org2": {
"mspid": "Org2MSP",
"peers": [
"peer0-hlf-peer.org2.svc.cluster.local"
],
"certificateAuthorities": [
"ca.org2.svc.cluster.local"
],
"adminPrivateKey": {
"path": "../crypto-config/peerOrganizations/org2.svc.cluster.local/users/Admin#org2.svc.cluster.local/msp/keystore/98299178c3957967864dbe555e7acf677d329cdce3d7593a5f750bb3f002afb2_sk"
},
" signedCert": {
"path": "../crypto-config/peerOrganizations/org2.svc.cluster.local/users/Admin#org2.svc.cluster.local/msp/signcerts/Admin#org2.svc.cluster.local-cert.pem"
}
},
"Org3": {
"mspid": "Org3MSP",
"peers": [
"peer0-hlf-peer.org3.svc.cluster.local"
],
"certificateAuthorities": [
"ca.org3.svc.cluster.local"
],
"adminPrivateKey": {
"path": "../crypto-config/peerOrganizations/org3.svc.cluster.local/users/Admin#org3.svc.cluster.local/msp/keystore/4f62c790b545b1ff10f42a2ceb629f3f2f1068fe5ea1c78255d6332ed3f4a465_sk"
},
" signedCert": {
"path": "../crypto-config/peerOrganizations/org3.svc.cluster.local/users/Admin#org3.svc.cluster.local/msp/signcerts/Admin#org3.svc.cluster.local-cert.pem"
}
}
},
"orderers": {
"ord1-hlf-ord.orderers.svc.cluster.local": {
"url": "grpc://0.0.0.0:7050"
}
},
"peers": {
"peer0-hlf-peer.org1.svc.cluster.local": {
"url": "grpc://0.0.0.0:7051",
"events": "grpc://0.0.0.0:7053"
},
"peer0-hlf-peer.org2.svc.cluster.local": {
"url": "grpc://0.0.0.0:8051",
"events": "grpc://0.0.0.0:8053"
},
"peer0-hlf-peer.org3.svc.cluster.local": {
"url": "grpc://0.0.0.0:9051",
"events": "grpc://0.0.0.0:9053"
}
},
"certificateAuthorities": {
"ca.org1.svc.cluster.local": {
"url": "http://0.0.0.0:7054",
"httpOptions": {
"verify": false
},
"tlsCACerts": {
"path": ""
},
"caName": "ca.org1.svc.cluster.local",
"registrar": [
{
"enrollId": "admin",
"enrollSecret": "adminpw"
}
]
},
"ca.org2.svc.cluster.local": {
"url": "http://0.0.0.0:8054",
"httpOptions": {
"verify": false
},
"tlsCACerts": {
"path": ""
},
"caName": "ca.org2.svc.cluster.local",
"registrar": [
{
"enrollId": "admin",
"enrollSecret": "adminpw"
}
]
},
"ca.org3.svc.cluster.local": {
"url": "http://0.0.0.0:9054",
"httpOptions": {
"verify": false
},
"tlsCACerts": {
"path": ""
},
"caName": "ca.org3.svc.cluster.local",
"registrar": [
{
"enrollId": "admin",
"enrollSecret": "adminpw"
}
]
}
}
}
Though it may be irrelevant, the network is running on Kubernetes using the stable Helm charts. I can execute all operations from the CLI.
For the connection profile, under "organizations", you could try using "org3" instead of "Org3".
I still can't figure out why the o is lower case for "org3" while it is upper case for "Org1" and "Org2"; it just so happens that it works for me!
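In other words, the SDK takes client.organization, looks for a key with exactly that (case-sensitive) name under organizations, and then expects that organization's certificateAuthorities entry to exist under the top-level certificateAuthorities. A quick sanity check along these lines (a hedged sketch; the file name is illustrative) makes the broken link visible:
// check-ccp.js -- hedged sketch: verify that the pieces the SDK looks up actually line up.
const fs = require('fs');

const ccp = JSON.parse(fs.readFileSync('./connection.json', 'utf8')); // path is illustrative

const orgName = ccp.client.organization;               // e.g. "Org1"
const org = ccp.organizations[orgName];
if (!org) throw new Error(`organizations["${orgName}"] not found (keys are case-sensitive)`);

const caName = (org.certificateAuthorities || [])[0];  // e.g. "ca.org1.svc.cluster.local"
const ca = (ccp.certificateAuthorities || {})[caName];
if (!ca) throw new Error(`certificateAuthorities["${caName}"] not found`);

console.log(`client org "${orgName}" -> CA "${caName}" at ${ca.url}`);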

Mongoose find document by key whose value is a complex object

I am using Mongoose to do a search for documents in a collection based on the criteria described below:
Document:
{
"_id": {
"$oid": "5a60621e20205641281f7c2f"
},
"key1": [
{
"available": true,
"required": true,
"name": "Name-1"
},
{
"available": true,
"required": true,
"name": "Name-2"
},
{
"available": true,
"required": true,
"name": "Name-3"
}
],
"__v": 0
}
I want to perform a search based on the property key1. Basically, I want to pass the JSON array below as the search pattern and get the document above back in return:
[
{
"available": true,
"required": true,
"name": "Name-1"
},
{
"available": true,
"required": true,
"name": "Name-2"
},
{
"available": true,
"required": true,
"name": "Name-3"
}
]
Is there a way that I can achieve this?
You can check the $eq operator in the MongoDB docs.
const selector = [
{
"available": true,
"required": true,
"name": "Name-1"
},
{
"available": true,
"required": true,
"name": "Name-2"
},
{
"available": true,
"required": true,
"name": "Name-3"
}
];
Model.find({key1: {$eq: selector}}, (error, result) => {
if (!error) {
res.send(result);
}
});
Model is the Mongoose model for the collection you are querying.
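Note that $eq against an array field matches the stored array as a whole, so selector has to contain the same elements, in the same order, as key1. The same query with async/await (a small hedged sketch, using the Model and selector defined above) would be:
// Hedged sketch: the exact-array match above, using promises instead of a callback.
const docs = await Model.find({ key1: { $eq: selector } }).exec();
res.send(docs);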
