Google Speech application stops 3 seconds after start - node.js

I'm trying to run the Node.js Google Speech example, but when I start it with node recognize.js listen, it stops after 3 seconds without any output. There is no error output.
I've already tried other Node.js programs and they run without problems.
I'm using Node.js v8.4.0 and npm v5.3.0 on Ubuntu 14.04.5 LTS. I've tried this on Ubuntu 16.04 LTS and it worked fine.
This is the code I'm trying to run:
'use strict';
function streamingMicRecognize (encoding, sampleRateHertz, languageCode) {
const record = require('node-record-lpcm16');
const Speech = require('@google-cloud/speech');
const speech = Speech();
const request = {
config: {
encoding: encoding,
sampleRateHertz: sampleRateHertz,
languageCode: languageCode
},
interimResults: false // If you want interim results, set this to true
};
const recognizeStream = speech.streamingRecognize(request)
.on('error', console.error)
.on('data', (data) =>
process.stdout.write(
(data.results[0] && data.results[0].alternatives[0])
? `Transcription: ${data.results[0].alternatives[0].transcript}\n`
: `\n\nReached transcription time limit, press Ctrl+C\n`));
record
.start({
sampleRateHertz: sampleRateHertz,
threshold: 0,
verbose: false,
recordProgram: 'rec', // Try also "arecord" or "sox"
silence: '10.0'
})
.on('error', console.error)
.pipe(recognizeStream);
console.log('Listening, press Ctrl+C to stop.');
}
const cli = require(`yargs`)
.demand(1)
.command(
`listen`,
`Detects speech in a microphone input stream. This command requires that you have SoX installed and available in your $PATH. See https://www.npmjs.com/package/node-record-lpcm16#dependencies`,
{},
(opts) => streamingMicRecognize(opts.encoding, opts.sampleRateHertz, opts.languageCode)
)
.options({
encoding: {
alias: 'e',
default: 'LINEAR16',
global: true,
requiresArg: true,
type: 'string'
},
sampleRateHertz: {
alias: 'r',
default: 16000,
global: true,
requiresArg: true,
type: 'number'
},
languageCode: {
alias: 'l',
default: 'en-US',
global: true,
requiresArg: true,
type: 'string'
}
})
.example(`node $0 listen`)
.wrap(120)
.recommendCommands()
.epilogue(`For more information, see https://cloud.google.com/speech/docs`)
.help()
.strict();
if (module === require.main) {
cli.parse(process.argv.slice(2));
}

I figured out that it was the device option from node-record-lpcm16: I only had to add the option device: 'plughw:0,0' to the record.start call.
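For reference, the record.start call with that option added looks roughly like this (the ALSA device name 'plughw:0,0' is specific to my machine; check arecord -l for yours):
record
  .start({
    sampleRateHertz: sampleRateHertz,
    threshold: 0,
    verbose: false,
    recordProgram: 'rec',
    silence: '10.0',
    device: 'plughw:0,0' // explicitly pick the ALSA capture device
  })
  .on('error', console.error)
  .pipe(recognizeStream);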

Related

error enoent when assigning path to ffmpeg

I am trying to set up a relay from my RTMP server, and I'm using an npm module called node-media-server. I have set up the RTMP protocol successfully, but when I try to set up the relay, it expects me to add a path to my ffmpeg library. So I npm installed ffmpeg and supplied the node_modules path to the relay, but I keep getting the error uncaughtException Error: spawn C:\blah\blah\node_modules\ffmpeg ENOENT, even though the ffmpeg library definitely exists at the specified location. Why is this happening? Thanks in advance.
Link to the module I'm using: https://www.npmjs.com/package/node-media-server
const NodeMediaServer = require('node-media-server');
const path = require('path')
const ffmpegPath = path.join(__dirname, '..', 'node_modules', 'ffmpeg')
const config = {
rtmp: {
port: 1935,
chunk_size: 60000,
gop_cache: true,
ping: 30,
ping_timeout: 60
},
http: {
port: 8000,
allow_origin: '*'
},
relay: {
ffmpeg: ffmpegPath,
tasks: [{
app: 'live',
mode: 'push',
edge: 'rtmp://localhost:1936',
},
{
app: 'live',
mode: 'push',
edge: 'rtmp://localhost:1937',
}
]
}
};
var nms = new NodeMediaServer(config)
nms.run();
It seems like the RTMP server is not finding an ffmpeg installation: the relay needs the path to an actual ffmpeg executable, and node_modules/ffmpeg is just the npm package directory. I had to install @ffmpeg-installer/ffmpeg, a package that ships an ffmpeg binary as a node module. Then I just assigned ffmpeg.path to the ffmpeg key in the relay config. Hope this helps.
const NodeMediaServer = require('node-media-server');
const ffmpeg = require('@ffmpeg-installer/ffmpeg');
const config = {
rtmp: {
port: 1935,
chunk_size: 60000,
gop_cache: true,
ping: 30,
ping_timeout: 60
},
http: {
port: 8000,
allow_origin: '*'
},
relay: {
ffmpeg: ffmpeg.path,
tasks: [{
app: 'live',
mode: 'push',
edge: 'rtmp://localhost:1936',
},
{
app: 'live',
mode: 'push',
edge: 'rtmp://localhost:1937',
}
]
}
};
var nms = new NodeMediaServer(config)
nms.run();
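To double-check what will actually be spawned, the resolved binary path can be printed first (a quick sketch; the package has to be installed beforehand, e.g. with npm install @ffmpeg-installer/ffmpeg):
const ffmpeg = require('@ffmpeg-installer/ffmpeg');
// Logs the absolute path of the bundled ffmpeg binary, i.e. the value
// that ends up in the relay config's ffmpeg key above.
console.log(ffmpeg.path, ffmpeg.version);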

ytdl-core no stream data

I am making a Node.js music bot for Discord, and I suddenly encountered a problem. The bot properly joins the channel and lights up (indicating it is speaking), but there is no audio. After trying to find the root cause, I believe it is a problem with the ytdl() function from the ytdl-core module.
const stream = await ytdl(song.url, {
filter: 'audioonly',
type: 'opus',
highWaterMark: waterMark
});
Looking at the result of stream, I found this:
PassThrough {
_readableState: ReadableState {
objectMode: false,
highWaterMark: 524288,
buffer: BufferList { head: null, tail: null, length: 0 },
length: 0,
...
This means I am not getting any buffer/stream data. It is indeed "playing", but since there is nothing in the buffer, only silence can be heard.
I tried using pipe() and it worked just fine, but I can't play it that way through my music bot.
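The pipe() test I mentioned was roughly the following (the output file name is just a throwaway example):
const fs = require('fs');
// Dump the stream to a file to check whether any data arrives at all.
ytdl(song.url, { filter: 'audioonly' })
  .pipe(fs.createWriteStream('test.webm')) // arbitrary file name
  .on('finish', () => console.log('pipe finished'));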
The ytdl call behaves like a synchronous function here (I am on "ytdl-core": "^4.10.1"): const stream = await ytdl(song.url); pauses code execution until ytdl has finished downloading the stream. You can see this by logging around the call:
console.log("YTDL Start");
const stream = await ytdl(song.url, {
filter: 'audioonly',
type: 'opus',
highWaterMark: waterMark
});
console.log("Is it done?");
stream.on('close', () => {
console.log('Read stream closed');
});
stream.on('finish', () => {
console.log('Read stream Finished');
});

Can't send log from Hapi-Pino to Elasticsearch

I'm building a server for a website with hapi, and I use hapi-pino for logging.
I want to analyze the logs (status code, route, timestamp) for business purposes, using Elasticsearch and Kibana.
Between hapi-pino and Elasticsearch, I'm trying to use pino-elasticsearch to ship the logs. However, it doesn't work: Elasticsearch doesn't receive anything.
This is my code for registering hapi-pino:
const streamToElastic = PinoElasticsearch({
index: 'api',
type: 'log',
consistency: 'one',
node: 'http://elastic:changeme@localhost:9200',
'es-version': 6,
'bulk-size': 200,
ecs: true
});
await server.register({
plugin: HapiPino,
options: {
logPayload: true,
prettyPrint: process.env.NODE_ENV !== 'production',
redact: {
paths: ['req.headers', 'payload.user.password', 'payload.file'],
remove: true
},
stream: streamToElastic
}
});
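To rule out the hapi-pino options, a test entry could also be written through plain pino with the same stream (a sketch, reusing the streamToElastic instance from above; if this never reaches Elasticsearch either, the problem is in the pino-elasticsearch transport or connection):
const pino = require('pino');
// Bypasses hapi-pino entirely and logs straight through the same
// pino-elasticsearch stream used above.
const logger = pino({ level: 'info' }, streamToElastic);
logger.info({ route: '/healthcheck', statusCode: 200 }, 'test log entry');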
Thanks for any help, and sorry about my poor English!

prevent ffmpeg from opening console window

I have a Node/Express server which is used to serve streams from IP cameras to a website. Everything is working well. I run that web server with PM2 on a Windows server.
The problem: for each stream, a Windows console window opens with nothing logged in it. The console reopens when I try to close it.
Is there a way to prevent those consoles from opening?
Here is the related Node.js code:
const { NodeMediaServer } = require('node-media-server');
private _initiate_streams(): void{
DatabaseProvider.instance.camerasDao.getCamerasList().pipe(
take(1)
).subscribe(
(databaseReadOperationResult: DatabaseReadOperationResult<ICamera[]>) => {
if (databaseReadOperationResult.successful === true){
const cameras = databaseReadOperationResult.result;
const tasks = [];
cameras.forEach( camera => {
tasks.push(
{
app : config.get('media_server.app_name'),
mode: 'static',
edge: camera.rtsp_url,
name: camera.stream_name,
rtsp_transport: 'tcp'
}
)
});
const configMediaServer = {
logType: 3, // 3 - Log everything (debug)
rtmp: {
port: 1935,
chunk_size: 60000,
gop_cache: true,
ping: 60,
ping_timeout: 30
},
http: {
port: config.get('media_server.port'),
allow_origin: '*'
},
auth: {
play: true,
api: true,
publish: true,
secret: config.get('salt'),
api_user: 'user',
api_pass: 'password',
},
relay: {
ffmpeg: 'C:\\FFmpeg\\bin\\ffmpeg.exe',
tasks: tasks
}
};
var nms = new NodeMediaServer(configMediaServer)
nms.run();
} else {
// catch exception
}
}
);
}

Video streaming from file system

I am trying to make a video server. My scenario: users will upload their files, and I have a screen that will play all the files of the day, like the advertisements we usually watch on television.
So far I have achieved HLS and FLV streaming through node-media-server.
I have found that the best way to stream a file is through FFmpeg, but I am unable to stream multiple files to the same output URL (for example localhost:8000/index.m3u8). Can anyone suggest the right way to start streaming the next file when FFmpeg finishes streaming the previous one?
Thanks in advance. :)
My node-media-server config:
{
rtmp: {
port: 1935,
chunk_size: 60000,
gop_cache: true,
ping: 30,
ping_timeout: 60
},
http: {
port: 8000,
mediaroot: './media',
allow_origin: '*'
},
trans: {
ffmpeg: 'E:/Red Matrix/node-stream-master/node-stream-master/usr/bin/ffmpeg.exe',
tasks: [
{
app: 'live',
hls: true,
hlsFlags: '[hls_time=2:hls_list_size=3:hls_flags=delete_segments]',
dash: true,
dashFlags: '[f=dash:window_size=3:extra_window_size=5]'
}
]
}
};
FFmpeg stream function (using fluent-ffmpeg):
const ffmpeg = require('fluent-ffmpeg');

function ffmpegStream (inputPath) {
  ffmpeg()
    .input(inputPath)
    .on('error', function (err) {
      console.log('An error occurred: ' + err.message);
    })
    .on('end', function () {
      console.log('Processing finished!');
      // Start streaming the next file once the current one ends.
      ffmpegStream('D:/Videos/2.MP4');
    })
    .save('./media/live/abc/index.m3u8');
}
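A more general version of the same chaining idea, as a sketch (assuming fluent-ffmpeg; the playlist array and file paths are placeholders for the day's uploaded files):
const ffmpeg = require('fluent-ffmpeg');

// Placeholder playlist; in practice this would be built from the day's uploads.
const playlist = ['D:/Videos/1.MP4', 'D:/Videos/2.MP4'];

function streamPlaylist (index) {
  if (index >= playlist.length) {
    console.log('Playlist finished');
    return;
  }
  ffmpeg()
    .input(playlist[index])
    .inputOptions('-re') // read the input at its native frame rate, as for live streaming
    .on('error', (err) => console.log('An error occurred: ' + err.message))
    // When the current file ends, start the next one on the same output.
    .on('end', () => streamPlaylist(index + 1))
    .save('./media/live/abc/index.m3u8');
}

streamPlaylist(0);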
