winston-elasticsearch creates callback error when logging - node.js

When using 'winston-elasticsearch' I am getting this error when logging a message:
TypeError: callback is not a function
My code:
const winston = require("winston");
const logger = new winston.Logger();
...
if (process.env.ELASTIC_SEARCH_LOGGING_URL) {
  var elasticsearch = require('elasticsearch');
  var client = new elasticsearch.Client({
    host: process.env.ELASTIC_SEARCH_LOGGING_URL,
    log: 'info'
  });
  logger.add(
    require('winston-elasticsearch'),
    {
      client
    }
  );
}
// this causes the error
logger.info("hi")
I am seeing this:
clock_1 | TypeError: callback is not a function
clock_1 | at Elasticsearch.log (/usr/app/node_modules/winston-elasticsearch/index.js:105:5)
clock_1 | at transportLog (/usr/app/node_modules/winston/lib/winston/logger.js:234:15)
clock_1 | at /usr/app/node_modules/winston/node_modules/async/lib/async.js:157:13
I am using node 8.9, winston 2.4.1 and winston-elasticsearch 0.7.0. The ELASTIC_SEARCH_LOGGING_URL environment variable is set correctly.
The error occurs here in the library:
log(info, callback) {
  const level = info[LEVEL];
  const { message } = info;
  let meta = info[SPLAT];
  if (meta !== undefined) {
    // eslint-disable-next-line prefer-destructuring
    meta = meta[0];
  }
  setImmediate(() => {
    this.emit('logged', level);
  });
  const logData = {
    message,
    level,
    meta,
    // timestamp: this.opts.timestamp()
  };
  const entry = this.opts.transformer(logData);
  this.bulkWriter.append(
    this.getIndexName(this.opts),
    this.opts.messageType,
    entry
  );
  callback();
}
It invokes callback(), but no callback function is being passed in.
Am I misconfiguring something?
Are there better ways to send application logs to Elasticsearch via Winston?

Needs winston version 3.0 or higher.
const winston = require("winston"); //"winston": "~3",
const logger = winston.createLogger();
if( process.env.ELASTIC_SEARCH_LOGGING_URL ){
var elasticsearch = require('elasticsearch');
var winston_elasticsearch = require('winston-elasticsearch');
var client = new elasticsearch.Client({
host: process.env.ELASTIC_SEARCH_LOGGING_URL,
log: 'info'
});
logger.add( new winston_elasticsearch({
client,
index:"logging"
}));
}
Solution from the winston-elasticsearch developers:
https://github.com/vanthome/winston-elasticsearch/issues/69#issuecomment-430124467
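For context on why the original setup fails: winston 2.x calls each transport as transport.log(level, msg, meta, callback) (the transportLog frame in the stack trace), while winston-elasticsearch 0.7.0 implements the winston 3 Transport interface shown in the question, whose signature is log(info, callback). Under winston 2.4.1 the transport therefore receives the level string where it expects the info object and the message string where it expects the callback, so callback() is not a function. The following is only a minimal sketch of the winston 3 setup, assuming the constructor-style export used in the answer above; the index name and the extra Console transport are illustrative choices, not part of the original code.
const winston = require("winston");
const elasticsearch = require("elasticsearch");
const WinstonElasticsearch = require("winston-elasticsearch");

// Keep a local Console transport as a fallback so logs are visible even
// when ELASTIC_SEARCH_LOGGING_URL is not set.
const logger = winston.createLogger({
  transports: [new winston.transports.Console()]
});

if (process.env.ELASTIC_SEARCH_LOGGING_URL) {
  const client = new elasticsearch.Client({
    host: process.env.ELASTIC_SEARCH_LOGGING_URL,
    log: "info"
  });
  logger.add(new WinstonElasticsearch({ client, index: "logging" }));
}

logger.info("hi"); // winston 3 passes (info, callback) to the transport, so this no longer throws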

Related

Unable to deploy NFT in terminal

I already deployed my smart contract to my wallet and connected it to my Alchemy account.
Here is my code (note that my contract address, PUBLIC_KEY, PRIVATE_KEY, API_URL and Alchemy address have been redacted for security purposes).
mint-nft.js
require('dotenv').config();
const API_URL = process.env.API_URL;
const PUBLIC_KEY = process.env.PUBLIC_KEY;
const PRIVATE_KEY = process.env.PRIVATE_KEY;
const { createAlchemyWeb3 } = require("@alch/alchemy-web3");
const web3 = createAlchemyWeb3(API_URL);
const contract = require("../artifacts/contracts/MyNFT.sol/MyNFT.json");
const contractAddress = "My_Contract_Adress";
const nftContract = new web3.eth.Contract(contract.abi, contractAddress);

async function mintNFT(tokenURI) {
  const nonce = await web3.eth.getTransactionCount(PUBLIC_KEY, 'latest'); // get latest nonce
  // the transaction
  const tx = {
    'from': PUBLIC_KEY,
    'to': contractAddress,
    'nonce': nonce,
    'gas': 500000,
    'data': nftContract.methods.mintNFT(PUBLIC_KEY, tokenURI).encodeABI()
  };
  const signPromise = web3.eth.accounts.signTransaction(tx, PRIVATE_KEY);
  signPromise.then((signedTx) => {
    web3.eth.sendSignedTransaction(signedTx.rawTransaction, function(err, hash) {
      if (!err) {
        console.log("The hash of your transaction is: ", hash, "\nCheck Alchemy's Mempool to view the status of your transaction!");
      } else {
        console.log("Something went wrong when submitting your transaction:", err)
      }
    });
  }).catch((err) => {
    console.log(" Promise failed:", err);
  });
}

mintNFT("https://gateway.pinata.cloud/ipfs/My_NFT_Picture_Hash");
alchemyContext.js
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.makeAlchemyContext = void 0;
var tslib_1 = require("tslib");
var sturdy_websocket_1 = tslib_1.__importDefault(require("sturdy-websocket"));
var websocket_1 = require("websocket");
var jsonRpc_1 = require("../util/jsonRpc");
var version_1 = require("../version");
var alchemySendHttp_1 = require("./alchemySendHttp");
var alchemySendWebSocket_1 = require("./alchemySendWebSocket");
var httpProvider_1 = require("./httpProvider");
var sendPayload_1 = require("./sendPayload");
var webSocketProvider_1 = require("./webSocketProvider");
var NODE_MAX_WS_FRAME_SIZE = 100 * 1024 * 1024; // 100 MB
function makeAlchemyContext(url, config) {
var makePayload = jsonRpc_1.makePayloadFactory();
if (/^https?:\/\//.test(url)) {
var alchemySend = alchemySendHttp_1.makeHttpSender(url);
var _a = sendPayload_1.makePayloadSender(alchemySend, config), sendPayload = _a.sendPayload, setWriteProvider = _a.setWriteProvider;
var senders = jsonRpc_1.makeSenders(sendPayload, makePayload);
var provider = httpProvider_1.makeAlchemyHttpProvider(sendPayload);
return { provider: provider, senders: senders, setWriteProvider: setWriteProvider };
}
else if (/^wss?:\/\//.test(url)) {
var protocol = isAlchemyUrl(url) ? "alchemy-web3-" + version_1.VERSION : undefined;
var ws = new sturdy_websocket_1.default(url, protocol, {
wsConstructor: getWebSocketConstructor(),
});
var alchemySend = alchemySendWebSocket_1.makeWebSocketSender(ws);
var _b = sendPayload_1.makePayloadSender(alchemySend, config), sendPayload = _b.sendPayload, setWriteProvider = _b.setWriteProvider;
var senders = jsonRpc_1.makeSenders(sendPayload, makePayload);
var provider = new webSocketProvider_1.AlchemyWebSocketProvider(ws, sendPayload, senders);
return { provider: provider, senders: senders, setWriteProvider: setWriteProvider };
}
else {
throw new Error("Alchemy URL protocol must be one of http, https, ws, or wss. Recieved: " + url);
}
}
exports.makeAlchemyContext = makeAlchemyContext;
function getWebSocketConstructor() {
return isNodeEnvironment()
? function (url, protocols) {
return new websocket_1.w3cwebsocket(url, protocols, undefined, undefined, undefined, {
maxReceivedMessageSize: NODE_MAX_WS_FRAME_SIZE,
maxReceivedFrameSize: NODE_MAX_WS_FRAME_SIZE,
});
}
: WebSocket;
}
function isNodeEnvironment() {
return (typeof process !== "undefined" &&
process != null &&
process.versions != null &&
process.versions.node != null);
}
function isAlchemyUrl(url) {
return url.indexOf("alchemyapi.io") >= 0;
}
.env
API_URL = "https://eth-rinkeby.alchemyapi.io/v2/KEY"
PRIVATE_KEY = "MY_PRIVATE_KEY"
PUBLIC_KEY = "MY_PUBLIC_KEY"
But when I tried to deploy my NFT with the metadata using mint-nft.js, I got these errors.
Can anyone please tell me what the error is about?
Your issue may be with dotenv not reading in the values in your .env.
If you add console.log(API_URL), is it correct or is it undefined?
If it is undefined, I was able to resolve the issue by adding the path to my .env like so:
require('dotenv').config({path:"../.env"});
(In my case my mint-nft.js was in scripts/mint-nft.js
and .env is in the root directory.)
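To catch this early rather than deep inside the transaction, a small fail-fast guard at the top of mint-nft.js can verify that dotenv actually populated the values. This is only a sketch of the check suggested above; the variable names match the .env shown in the question, and the "../.env" path is just an example for the scripts/ layout described.
// Adjust or omit the path option if .env sits in the directory you run the script from.
require('dotenv').config({ path: "../.env" });

const required = ["API_URL", "PUBLIC_KEY", "PRIVATE_KEY"];
for (const name of required) {
  if (!process.env[name]) {
    throw new Error(`Missing ${name} - check the path passed to dotenv.config()`);
  }
}
console.log("API_URL =", process.env.API_URL); // should print the Alchemy URL, not undefined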

Error: 4 DEADLINE_EXCEEDED: Deadline exceeded on CloudRUN

I have a Node.js Express app deployed on Google Cloud Run.
I would like to use some shared data that is relevant to all users and load it into global parameters.
My code uses Google Secret Manager to retrieve database connection info, which is then used to load that data into the global parameters.
When the app is cold, the first user gets an error message but triggers the async function that loads the data into the global parameters, so subsequent calls are served correctly.
Everything was working fine, but recently I noticed an error when trying to connect to Google Secret Manager:
Error: 4 DEADLINE_EXCEEDED: Deadline exceeded
at Object.callErrorFromStatus (/usr/src/app/node_modules/@google-cloud/secret-manager/node_modules/@grpc/grpc-js/build/src/call.js:30:26)
at Object.onReceiveStatus (/usr/src/app/node_modules/@google-cloud/secret-manager/node_modules/@grpc/grpc-js/build/src/client.js:175:52)
at Object.onReceiveStatus (/usr/src/app/node_modules/@google-cloud/secret-manager/node_modules/@grpc/grpc-js/build/src/client-interceptors.js:341:141)
at Object.onReceiveStatus (/usr/src/app/node_modules/@google-cloud/secret-manager/node_modules/@grpc/grpc-js/build/src/client-interceptors.js:304:181)
at Http2CallStream.outputStatus (/usr/src/app/node_modules/@google-cloud/secret-manager/node_modules/@grpc/grpc-js/build/src/call-stream.js:116:74)
at Http2CallStream.maybeOutputStatus (/usr/src/app/node_modules/@google-cloud/secret-manager/node_modules/@grpc/grpc-js/build/src/call-stream.js:155:22)
at Http2CallStream.endCall (/usr/src/app/node_modules/@google-cloud/secret-manager/node_modules/@grpc/grpc-js/build/src/call-stream.js:141:18)
at Http2CallStream.cancelWithStatus (/usr/src/app/node_modules/@google-cloud/secret-manager/node_modules/@grpc/grpc-js/build/src/call-stream.js:457:14)
at Timeout.<anonymous> (/usr/src/app/node_modules/@google-cloud/secret-manager/node_modules/@grpc/grpc-js/build/src/deadline-filter.js:59:28)
at listOnTimeout (internal/timers.js:554:17) {
My code is something like this:
const { SecretManagerServiceClient } = require('@google-cloud/secret-manager');
const smClient = new SecretManagerServiceClient();
const express = require('express');
const bodyParser = require('body-parser');
const { Client } = require('pg'); // pg client used in getPgClient() below
let jsonParser = bodyParser.json();
const pkg = require('./package');

// ============== Express App ========
const app = express();

// Serve the files in /assets at the URI /assets
app.use('/assets', express.static('assets'));

// Params
global.Param1 = null;
global.Param2 = null;
let PgPwd = null;

app.post('/Test', jsonParser, async (req, res) => {
  try {
    // Kick off loading in the background; this first (cold) request returns an error.
    loadParams();
    common.errors_client.report("Params were not loaded"); // error-reporting client defined elsewhere
    throw new Error("Params were not loaded");
  }
  catch (err) {
    return res.status(400);
  }
});

async function loadParams() {
  try {
    const promise1 = loadDataFromPostgresqlToParam1();
    const promise2 = loadDataFromPostgresqlToParam2();
    await Promise.all([promise1, promise2]);
  }
  catch (err) {
    throw new Error(err);
  }
}

async function loadDataFromPostgresqlToParam1() {
  //=============== PostgresSQL ==================
  let pg_client = await getPgClient();
  await pg_client.connect();
  const resPg = await pg_client.query('select * from tabel1');
  await pg_client.end();
  global.Param1 = JSON.parse(JSON.stringify(resPg)); // store in the global parameter
}

async function loadDataFromPostgresqlToParam2() {
  //=============== PostgresSQL ==================
  let pg_client = await getPgClient();
  await pg_client.connect();
  const resPg = await pg_client.query('select * from tabel2');
  await pg_client.end();
  global.Param2 = JSON.parse(JSON.stringify(resPg)); // store in the global parameter
}

async function getPgClient() {
  var ret_client;
  if (PgPwd == null) {
    await getSecret().then((localPwd) => {
      PgPwd = localPwd; // assign to global variable
    });
  }
  ret_client = new Client({
    user: process.env.pgUser,
    host: process.env.pgHost,
    database: process.env.pgDataBase,
    password: PgPwd,
    port: process.env.pgPort
  });
  return ret_client;
}

async function getSecret() {
  const [version] = await smClient.accessSecretVersion({
    name: process.env.pgpwdlocation
  });
  const pwd = version.payload.data.toString();
  return pwd;
}
Not sure what could be the reason for that.
The error is not consistent.
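One pattern that may help with the cold-start scenario described above is to memoize the in-flight secret lookup, so that concurrent requests hitting a cold container share a single accessSecretVersion call rather than each starting their own. This is only a sketch of that idea, reusing the same smClient and pgpwdlocation as the code above; it is an assumption-level mitigation, not a confirmed fix for the DEADLINE_EXCEEDED error.
// Sketch: share one in-flight Secret Manager call across concurrent cold-start requests.
let pwdPromise = null;

function getSecretOnce() {
  if (!pwdPromise) {
    pwdPromise = smClient
      .accessSecretVersion({ name: process.env.pgpwdlocation })
      .then(([version]) => version.payload.data.toString())
      .catch((err) => {
        pwdPromise = null; // let the next request retry if this attempt failed
        throw err;
      });
  }
  return pwdPromise;
}

// getPgClient() could then use: password: await getSecretOnce()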

Dialogflow webhook fulfillment parameter not accessible

I want to read the value of a parameter in my webhook fulfillment.
Here is my code:
const bodyParser = require('body-parser')
var request = require('request-promise-native');
const { dialogflow } = require('actions-on-google');

const assistant = dialogflow({
  clientId: "30xxxxx08407-rv9kxxxxxxxxuuq8f9ul2eg.apps.googleusercontent.com"
});

module.exports = (app) => {
  const logger = console;
  assistant.intent('Sales', conv => {
    const pcode = agent.parameters['PCODE'];
    console.log(pcode)
    const token = '3369708919812376';
    const serviceID = '502';
    const P_STATE_CD = 'ALL';
    const P_FO_CD = 'ALL';
    const P_DISTT_CD = 'ALL';
    const P_DATE = '16/12/2019';
    const P_PRD_GROUP = 'UREA';
    const P_PERSONAL_NO = '106296';
    var data = { "token": token, "serviceID": serviceID, "P_STATE_CD": P_STATE_CD, "P_FO_CD": P_FO_CD, "P_DISTT_CD": P_DISTT_CD, "P_DATE": P_DATE, "P_PRD_GROUP": P_PRD_GROUP, "P_PERSONAL_NO": P_PERSONAL_NO };
    var sdata = JSON.stringify(data);
    const options = {
      method: 'POST',
      uri: 'http://Webservice/resources/webservice/service',
      body: JSON.parse(sdata),
      json: true
    }
    return request(options)
      .then(body => {
        var unit = body
        console.log(body)
        unit.intent = "Sales"
        unit.value1 = unit.saleInfo[0].QMTD
        unit.value2 = unit.saleInfo[0].QYTD
        unit.value3 = unit.saleInfo[0].O_UOM
        unit.value4 = null
        unit.value5 = null
        delete unit.saleInfo
        var unit2 = JSON.stringify(unit)
        console.log(unit2)
        conv.ask(unit2);
      })
      .catch(err => {
        console.error(err);
        conv.ask('Something went wrong. What should I do now?');
      });
  })
}
I tried const pcode = agent.parameters.PCODE but it is not working. It gives me this error:
ReferenceError: agent is not defined
at assistant.intent.conv (/home/dbalounge/GoogleDF/service.js:15:16)
at Function.<anonymous> (/home/dbalounge/GoogleDF/node_modules/actions-on-google/dist/service/dialogflow/dialogflow.js:151:27)
at Generator.next (<anonymous>)
at /home/dbalounge/GoogleDF/node_modules/actions-on-google/dist/service/dialogflow/dialogflow.js:22:71
at new Promise (<anonymous>)
at __awaiter (/home/dbalounge/GoogleDF/node_modules/actions-on-google/dist/service/dialogflow/dialogflow.js:18:12)
at Function.handler (/home/dbalounge/GoogleDF/node_modules/actions-on-google/dist/service/dialogflow/dialogflow.js:85:16)
at Object.<anonymous> (/home/dbalounge/GoogleDF/node_modules/actions-on-google/dist/assistant.js:55:32)
at Generator.next (<anonymous>)
at /home/dbalounge/GoogleDF/node_modules/actions-on-google/dist/assistant.js:22:71
agent is not defined anywhere in your code, which is why you're getting:
ReferenceError: agent is not defined
In any case, assistant.parameters won't work either. Dialogflow intent parameters can be accessed through the second argument of the .intent callback.
assistant.intent('Sales', (conv, params) => {
  const pcode = params.PCODE;
  /* ... */
})
For more info you can check the docs
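Since the module in the question receives the Express app but never mounts the handler, the wiring could look roughly like the sketch below. The '/fulfillment' route path is an assumption; the dialogflow() app object itself can be used as an Express request handler together with body-parser's JSON middleware.
// Sketch of mounting the fulfillment handler; the route path is an assumption.
const bodyParser = require('body-parser');
const { dialogflow } = require('actions-on-google');

const assistant = dialogflow();

assistant.intent('Sales', (conv, params) => {
  const pcode = params.PCODE; // parameters arrive as the second callback argument
  conv.ask(`Sales info requested for ${pcode}`);
});

module.exports = (app) => {
  app.post('/fulfillment', bodyParser.json(), assistant);
};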

Return the specific contents from a field of data in the result-set of a select query

I have a select query that pulls a row of data from a large object stored in a PostgreSQL table. The particular piece of data in the field is an HTML file.
I can output the field of data to the console, where it appears as 16543 (for 16543 kB).
So, my burning question is how I can return the actual contents (HTML) so that I can subsequently export it as one object and send it to the browser.
I am using Node and Express. Here's my source code so far:
var database = require('../database/postgresDB.js');
var pg = require('pg');

var html = {};

var connectionString = "postgres://dabladmin:dabldem@localhost:5432/dablpatient";
var client = new pg.Client(connectionString);
client.connect();

var query = client.query('SELECT * FROM htmlfiles WHERE id = 1', function(err, result) {
  console.log(JSON.stringify(result));
  console.log(result.rows[0].htmlfile);
  html = result.rows[0].htmlfile;
  //return result.rows[0].htmlfile;
  //console.dir(html);
});

module.exports = html;
This cannot be done directly. You need to export a function which will return the promise.
Following is an idea of how it can be done. Note: The code is not tested.
// htmlfile.model.js
const promise = require('bluebird'); // or any other Promise/A+ compatible library;

const initOptions = {
  promiseLib: promise // overriding the default (ES6 Promise);
};

const pgp = require('pg-promise')(initOptions);

// Database connection details;
const cn = {
  host: 'localhost', // 'localhost' is the default;
  port: 5432, // 5432 is the default;
  database: 'myDatabase',
  user: 'myUser',
  password: 'myPassword'
};

const db = pgp(cn); // database instance;

const getHtml = id => db.oneOrNone('SELECT * FROM htmlfiles WHERE id = $1', id);

module.exports = { getHtml }; // exported as an object so the controller can call html_model.getHtml(id)
Inside some.controller.js:
const html_model = require('./htmlfile.model.js');

html_model.getHtml(1)
  .then(data => {
    if (data) {
      // record found
      res.send(data.htmlfile);
    } else {
      // record not found, do something else
    }
  })
  .catch(error => {
    // an error occurred
  });
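If you would rather keep the plain pg driver from the question instead of switching to pg-promise, the same idea of exporting a function that returns a promise looks roughly like the sketch below. The use of a connection pool and the single-column SELECT are assumptions; the connection string and table come from the question (with the @ before the host restored).
// htmlfile.model.js - sketch with the plain `pg` driver instead of pg-promise.
const { Pool } = require('pg');

const pool = new Pool({
  connectionString: "postgres://dabladmin:dabldem@localhost:5432/dablpatient"
});

// Resolves to the htmlfile column of the matching row, or null if there is none.
const getHtml = async (id) => {
  const result = await pool.query('SELECT htmlfile FROM htmlfiles WHERE id = $1', [id]);
  return result.rows.length ? result.rows[0].htmlfile : null;
};

module.exports = { getHtml };

// Usage in an Express route:
//   const { getHtml } = require('./htmlfile.model.js');
//   app.get('/html/:id', async (req, res) => res.send(await getHtml(req.params.id)));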

How to add prefix to socket.io logs redirected to winston?

In my Node.js application I successfully redirect the log messages produced by the socket.io library to the winston library:
var express = require('express')
, winston = require('winston')
, http = require('http');
var logger = new (winston.Logger)({
  transports: [
    // ... configuring transports ...
  ]
});
var app = express();
var server = http.createServer(app);
var io = require('socket.io').listen(server, {'logger': logger});
Now I would like to add a prefix (something like "socket.io: ") to all these redirected messages, to distinguish them from log messages produced by other parts of the application. Is there a way to achieve this?
Add a label in the logger transports.
var logger = new (winston.Logger)({
  transports: [
    new (winston.transports.Console)({
      json: false,
      timestamp: true,
      label: "socket.io:"
    })
  ]
});
Log messages will look like this -
2013-08-30T08:26:52.703Z - info: [socket.io:] socket.io started
2013-08-30T08:26:52.705Z - info: [socket.io:] hello
Check here for more logging options with winston: https://github.com/flatiron/winston
I solved it by adding the following function:
var path = require('path');
var winston = require('winston');

// Add prefix function
winston.prefix = function (filename) {
  var label = '[' + path.parse(filename).name + ']';
  var override = function (lvl) {
    return function () {
      var args = [].slice.call(arguments);
      if (args[0]) {
        if (typeof(args[0]) === 'string')
          args[0] = label + ' ' + args[0];
        else
          args.unshift(label);
      }
      winston[lvl].apply(null, args);
    };
  };
  var log = { };
  for (var lvl in winston.levels) {
    log[lvl] = override(lvl);
  }
  return log;
}
And then in each module
var log = require('winston').prefix(__filename);
log.debug('hello');
or
var log = require('winston').prefix('socket_io');
log.debug('hello');
This isn't Winston specific but a generalized JS solution to prefixing and middleware...
var log = console.log;

function x() { // optionally pass in post/pre fix here
  // caution: do not capture (var log = console.log) here, or the arguments will build up
  // [ arg1 ] -> [ [arg1] , arg2 ] -> [ [ [ arg1 ] , arg2 ] , arg3 ]
  console.log = function () {
    var args = Array.from(arguments);
    args.push('post fix');
    log.apply(console, args);
  }
}

new x()
console.log(1)
new x()
console.log(2)
OUTPUTS:
1 post fix
2 post fix
OR better yet...
const log = console.log;

export default function middleWare(event) { // optionally pass in post/pre fix here
  console.log = (...args) => {
    log(...args, event.id);
  }
}
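A usage sketch for the second variant above; the module file name and the shape of the event argument are assumptions, and event.id is simply whatever suffix you want appended to every log line.
// logMiddleware.js is assumed to contain the export-default snippet above.
import middleWare from './logMiddleware';

middleWare({ id: '[socket.io]' });
console.log('connection established'); // -> connection established [socket.io]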
