I am making a Node.js music bot for Discord, and I suddenly encountered a problem. The bot properly joins the channel and lights up (indicating it is speaking), but then there's no audio. After trying to find the root cause, I believe it is a problem with the ytdl() function from the ytdl-core module.
const stream = await ytdl(song.url, {
    filter: 'audioonly',
    type: 'opus',
    highWaterMark: waterMark
});
Looking at the result of stream, I found this:
PassThrough {
  _readableState: ReadableState {
    objectMode: false,
    highWaterMark: 524288,
    buffer: BufferList { head: null, tail: null, length: 0 },
    length: 0,
    ...
This means I am not getting any buffer/stream data. The bot is indeed "playing", but since there is nothing in the stream, only silence is heard.
I tried using pipe() and it worked just fine, but I can't play the result through my music bot that way.
The ytdl() function behaves like a synchronous function: const stream = await ytdl(song.url); seems to pause code execution until ytdl has finished downloading the stream. I am using "ytdl-core": "^4.10.1". To check, I added logging around the call and listeners on the stream:
console.log("YTDL Start");
const stream = await ytdl(song.url, {
filter: 'audioonly',
type: 'opus',
highWaterMark: waterMark
});
console.log("Is it done?");
stream.on('close', () => {
console.log('Read stream closed');
});
stream.on('finish', () => {
console.log('Read stream Finished');
});
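For reference, ytdl() in ytdl-core v4 returns a Node Readable stream (not a Promise), so the stream fills up only as the download progresses. One common way to play such a stream is a minimal sketch like the following, assuming discord.js v13+ with @discordjs/voice (which may not match your setup); song and voiceChannel are placeholders:

const { joinVoiceChannel, createAudioPlayer, createAudioResource, StreamType } = require('@discordjs/voice');
const ytdl = require('ytdl-core');

// Join the voice channel (voiceChannel is a discord.js VoiceChannel).
const connection = joinVoiceChannel({
    channelId: voiceChannel.id,
    guildId: voiceChannel.guild.id,
    adapterCreator: voiceChannel.guild.voiceAdapterCreator,
});

// ytdl() hands back the stream immediately; data arrives as it downloads.
const stream = ytdl(song.url, { filter: 'audioonly', highWaterMark: 1 << 25 });

// Wrap the stream in an audio resource and let the player pull data as it buffers.
const resource = createAudioResource(stream, { inputType: StreamType.Arbitrary });
const player = createAudioPlayer();
player.play(resource);
connection.subscribe(player);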
How can Node.js stdio streams be wrapped into a Duplex?
I've tried something like
const pair = {
    readable: process.stdin,
    writable: process.stdout,
};

const duplex = Duplex.fromWeb(pair, { encoding: 'utf8', objectMode: true });
Still, it doesn't want to accept a ReadStream as a ReadableStream (nor the WriteStream as a WritableStream, I suppose):
node:internal/webstreams/adapters:596
throw new ERR_INVALID_ARG_TYPE(
^
TypeError [ERR_INVALID_ARG_TYPE]: The "pair.readable" property must be an instance of ReadableStream. Received an instance of ReadStream
So what is the way to wrap one into another?
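For reference, a minimal sketch of one direction this could take, assuming a Node version where stream.Readable.toWeb() and stream.Writable.toWeb() are available (they were added as experimental APIs): convert the Node streams into web streams first, then hand that pair to Duplex.fromWeb().

const { Duplex, Readable, Writable } = require('node:stream');

// Convert the Node streams into web streams so the pair has the types
// that Duplex.fromWeb() expects.
const pair = {
    readable: Readable.toWeb(process.stdin),
    writable: Writable.toWeb(process.stdout),
};

const duplex = Duplex.fromWeb(pair, { encoding: 'utf8' });

Depending on the Node version, stream.Duplex.from({ readable, writable }) may also accept the Node streams directly, which would avoid the web-stream round trip.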
Hello, I'm new to Elasticsearch and I'm trying to implement search in Node.js with Elasticsearch. I tried JSON.stringify(), but it still shows an error; the internet says the problem is with the data format. If you work with Elasticsearch in Node.js, can you help me with the code, please? In search.js I try to create an index and insert the JSON data in bulk. The JSON data comes from an API through my parser; I can post the parser code if you need it, but I think the error is in search.js.
My code (search.js):
const es = require('elasticsearch');

const search = async (search_keyword, data) => {
    const esClient = new es.Client({
        host: 'localhost:9200',
        log: 'trace'
    });

    await esClient.indices.create({
        index: 'test',
        body: data,
    });

    await esClient.search({
        index: 'test',
        body: {
            query: {
                multi_match: {
                    query: search_keyword,
                    fields: ["id", "name", "snippet"],
                    operator: "or"
                }
            }
        }
    });
};

module.exports = search;
server.js
const express = require("express");
const app = express();
const parser = require('./parser/parser');
const search = require('./elasticsearch/search');

(async () => {
    const result = await parser();
    for (const item of result) {
        const data = JSON.parse(item.data);
        const output = data.items.map(({id, name, snippet}) => ({
            id,
            name,
            snippet
        }));
        //console.log(output);
        try {
            await search("JS", output);
        } catch (err) {
            console.log(err);
        }
    }
})();

const server = app.listen(3001, () => {
    console.log("listening on port %s...", server.address().port);
});
Error:
StatusCodeError: [not_x_content_exception] Compressor detection can only be called on some xcontent bytes or compressed xcontent bytes
at respond (D:\Programming\test-task\node_modules\elasticsearch\src\lib\transport.js:349:15)
at checkRespForFailure (D:\Programming\test-task\node_modules\elasticsearch\src\lib\transport.js:306:7)
at HttpConnector.<anonymous> (D:\Programming\test-task\node_modules\elasticsearch\src\lib\connectors\http.js:173:7)
at IncomingMessage.wrapper (D:\Programming\test-task\node_modules\lodash\lodash.js:4949:19)
at IncomingMessage.emit (events.js:215:7)
at endReadableNT (_stream_readable.js:1183:12)
at processTicksAndRejections (internal/process/task_queues.js:80:21) {
status: 500,
displayName: 'InternalServerError',
message: '[not_x_content_exception] Compressor detection can only be called on some xcontent bytes or compressed xcontent bytes',
path: '/test',
query: {},
body: {
error: {
root_cause: [Array],
type: 'not_x_content_exception',
reason: 'Compressor detection can only be called on some xcontent bytes or compressed xcontent bytes'
},
status: 500
},
statusCode: 500,
response: '{"error":{"root_cause":[{"type":"not_x_content_exception","reason":"Compressor detection can only be called on some xcontent bytes or compressed xcontent bytes"}],"type":"not_x_content_exception","reason":"Compressor detection can only be called on some xcontent bytes or compressed xcontent bytes"},"status":500}',
toString: [Function],
toJSON: [Function]
}
I think the problematic part is
await esClient.indices.create({
    index: 'test',
    body: data,
});
because data is the output you're passing to the search function:
const output = data.items.map(({id, name, snippet}) => ({
    id,
    name,
    snippet
}));
which is guaranteed to be an array, and the client.indices.create function does not accept that. Check this valid .create usage and see if it helps you further.
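For comparison, here is a sketch of what a valid indices.create call might look like with the legacy elasticsearch client used in the question; the mappings are illustrative and assume Elasticsearch 7.x (older versions also require a mapping type):

// Create the index with (optional) field mappings -- no documents go here.
await esClient.indices.create({
    index: 'test',
    body: {
        mappings: {
            properties: {
                id: { type: 'keyword' },
                name: { type: 'text' },
                snippet: { type: 'text' }
            }
        }
    }
});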
P.S.: The search function does multiple things at once:
it instantiates the client
creates an index (with the hope of indexing docs -- which fails. Check out _bulk indexing instead.)
and performs the search.
All of this combined violates a principle called separation of concerns. See, the search function failed but it shouldn't have because there's nothing wrong with search per se. Even if it didn't fail, other people (or you in 6 months' time) reading the code will wonder why multiple unrelated actions are being performed there. So a bit of refactoring would be reasonable.
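To make the separation concrete, here is a rough sketch (not a drop-in replacement) of how indexing and searching could be split with the same legacy client; the index name test and the use of the document id as the Elasticsearch _id are assumptions:

const es = require('elasticsearch');

const esClient = new es.Client({
    host: 'localhost:9200',
    log: 'trace'
});

// Bulk-index the documents: the bulk body alternates an action line with the document itself.
const indexDocs = async (docs) => {
    const body = docs.flatMap((doc) => [{ index: { _index: 'test', _id: doc.id } }, doc]);
    await esClient.bulk({ body });
    // Make the freshly indexed documents visible to search.
    await esClient.indices.refresh({ index: 'test' });
};

// The search function now only searches.
const search = (searchKeyword) =>
    esClient.search({
        index: 'test',
        body: {
            query: {
                multi_match: {
                    query: searchKeyword,
                    fields: ["id", "name", "snippet"],
                    operator: "or"
                }
            }
        }
    });

module.exports = { indexDocs, search };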
I am trying to make a video server. My scenario is that users will upload their files, and I have a screen that will play all the files of the day, like the advertisements we usually watch on television.
So far I have achieved HLS and FLV streaming through Node-Media-Server.
I have found that the best way to stream a file is through FFmpeg, but I am unable to stream multiple files to the same output URL, for example localhost:8000/index.m3u8. Can anyone suggest the right way to start streaming the next file when FFmpeg finishes streaming the previous one?
Thanks in advance. :)
My Node-Media-Server config:
{
    rtmp: {
        port: 1935,
        chunk_size: 60000,
        gop_cache: true,
        ping: 30,
        ping_timeout: 60
    },
    http: {
        port: 8000,
        mediaroot: './media',
        allow_origin: '*'
    },
    trans: {
        ffmpeg: 'E:/Red Matrix/node-stream-master/node-stream-master/usr/bin/ffmpeg.exe',
        tasks: [
            {
                app: 'live',
                hls: true,
                hlsFlags: '[hls_time=2:hls_list_size=3:hls_flags=delete_segments]',
                dash: true,
                dashFlags: '[f=dash:window_size=3:extra_window_size=5]'
            }
        ]
    }
};
FFmpeg stream function:
function ffmpegStream(file) {
    var proc = ffmpeg()
        .input(file)
        .on('error', function (err) {
            console.log('An error occurred: ' + err.message);
        })
        .on('end', function () {
            console.log('Processing end !');
            // when the current file finishes, start streaming the next one
            ffmpegStream('D:/Videos/2.MP4');
        })
        .save('./media/live/abc/index.m3u8');
}
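One possible direction (a sketch based on the config above, not a verified setup): instead of writing index.m3u8 directly, push each file in sequence to the Node-Media-Server RTMP ingest and let the trans task generate the HLS playlist, so the output URL stays localhost:8000/live/abc/index.m3u8. The stream key abc and the playlist of files are assumptions; the next file is started from the previous one's end event:

const ffmpeg = require('fluent-ffmpeg');

// Hypothetical playlist of the day's uploaded files.
const files = ['D:/Videos/1.MP4', 'D:/Videos/2.MP4'];

function streamNext(index) {
    if (index >= files.length) {
        console.log('All files streamed.');
        return;
    }
    ffmpeg(files[index])
        .inputOptions('-re')                              // read at native frame rate, like a live source
        .outputOptions(['-c:v libx264', '-c:a aac', '-f flv'])
        .on('start', (cmd) => console.log('Started: ' + cmd))
        .on('error', (err) => console.log('An error occurred: ' + err.message))
        .on('end', () => streamNext(index + 1))           // chain the next file when this one ends
        .save('rtmp://localhost:1935/live/abc');          // Node-Media-Server RTMP ingest

streamNext(0);
}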
I'm trying to run the Node.js Google Speech example, but when I start it with node recognize.js listen it stops after 3 seconds without any output. There is no error output.
I've already tried other Node.js programs and they run without problems.
I'm using Node.js v8.4.0 and npm v5.3.0 on Ubuntu 14.04.5 LTS. I tried the same thing on Ubuntu 16.04 LTS and it worked fine.
This is the code I'm trying to run:
'use strict';

function streamingMicRecognize (encoding, sampleRateHertz, languageCode) {
  const record = require('node-record-lpcm16');
  const Speech = require('@google-cloud/speech');

  const speech = Speech();

  const request = {
    config: {
      encoding: encoding,
      sampleRateHertz: sampleRateHertz,
      languageCode: languageCode
    },
    interimResults: false // If you want interim results, set this to true
  };

  const recognizeStream = speech.streamingRecognize(request)
    .on('error', console.error)
    .on('data', (data) =>
      process.stdout.write(
        (data.results[0] && data.results[0].alternatives[0])
          ? `Transcription: ${data.results[0].alternatives[0].transcript}\n`
          : `\n\nReached transcription time limit, press Ctrl+C\n`));

  record
    .start({
      sampleRateHertz: sampleRateHertz,
      threshold: 0,
      verbose: false,
      recordProgram: 'rec', // Try also "arecord" or "sox"
      silence: '10.0'
    })
    .on('error', console.error)
    .pipe(recognizeStream);

  console.log('Listening, press Ctrl+C to stop.');
}
const cli = require(`yargs`)
  .demand(1)
  .command(
    `listen`,
    `Detects speech in a microphone input stream. This command requires that you have SoX installed and available in your $PATH. See https://www.npmjs.com/package/node-record-lpcm16#dependencies`,
    {},
    (opts) => streamingMicRecognize(opts.encoding, opts.sampleRateHertz, opts.languageCode)
  )
  .options({
    encoding: {
      alias: 'e',
      default: 'LINEAR16',
      global: true,
      requiresArg: true,
      type: 'string'
    },
    sampleRateHertz: {
      alias: 'r',
      default: 16000,
      global: true,
      requiresArg: true,
      type: 'number'
    },
    languageCode: {
      alias: 'l',
      default: 'en-US',
      global: true,
      requiresArg: true,
      type: 'string'
    }
  })
  .example(`node $0 listen`)
  .wrap(120)
  .recommendCommands()
  .epilogue(`For more information, see https://cloud.google.com/speech/docs`)
  .help()
  .strict();

if (module === require.main) {
  cli.parse(process.argv.slice(2));
}
I've figured out that it was the device option from node-record-lpcm16. I only had to add the option device: 'plughw:0,0' to the record.start() call.
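For clarity, this is the record.start() call from the example above with only the device option added ('plughw:0,0' is the ALSA capture device on my Ubuntu 14.04 machine; it may differ on other systems):

record
  .start({
    sampleRateHertz: sampleRateHertz,
    threshold: 0,
    verbose: false,
    recordProgram: 'rec', // Try also "arecord" or "sox"
    silence: '10.0',
    device: 'plughw:0,0'  // explicitly pick the capture device
  })
  .on('error', console.error)
  .pipe(recognizeStream);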
I've been stuck on this issue for a while; the answer might be really basic, but I fail to understand what the problem is. As far as I understand, it executes the function but doesn't trigger the callback, and I don't know why.
My script aims to be both a TCP server, so that a device (a Raspberry Pi) can connect over a TCP socket, and a client that connects to a WebSocket on a Sails.js app.
I managed to get both of these running with the following code. The problem is they only work separately (simultaneously, but separately): when I do a GET outside the TCP socket handler everything works fine, but when I do it inside, the io.socket object just piles up the GET requests in a requestQueue:
{ useCORSRouteToGetCookie: true,
url: 'http://localhost:1337',
multiplex: undefined,
transports: [ 'polling', 'websocket' ],
eventQueue: { 'sails:parseError': [ [Function] ] },
query:'__sails_io_sdk_version=0.11.0&__sails_io_sdk_platform=node&__sails_io_sdk_language=javascript',
_raw:
{ socket:
{ options: [Object],
connected: true,
open: true,
connecting: false,
reconnecting: false,
namespaces: [Object],
buffer: [],
doBuffer: false,
sessionid: '0xAlU_CarIOPQAGUGKQW',
closeTimeout: 60000,
heartbeatTimeout: 60000,
origTransports: [Object],
transports: [Object],
heartbeatTimeoutTimer: [Object],
transport: [Object],
connectTimeoutTimer: [Object],
'$events': {} },
name: '',
flags: {},
json: { namespace: [Circular], name: 'json' },
ackPackets: 0,
acks: {},
'$events':
{ 'sails:parseError': [Function],
connect: [Object],
disconnect: [Function],
reconnecting: [Function],
reconnect: [Function],
error: [Function: failedToConnect],
undefined: undefined } },
requestQueue:
[ { method: 'get', headers: {}, data: {}, url: '/', cb: [Function] },
{ method: 'get', headers: {}, data: {}, url: '/', cb: [Function] } ] }
The code is the following:
//library to connect to sailsjs websockets
var socketIOClient = require('socket.io-client');
var sailsIOClient = require('sails.io.js');

//library to create the tcp server
var net = require('net');

// Instantiate the socket client (`io`)
// (for now, you must explicitly pass in the socket.io client when using this library from Node.js)
var io = sailsIOClient(socketIOClient);

// Set some options:
// (you have to specify the host and port of the Sails backend when using this library from Node.js)
io.sails.url = 'http://localhost:1337';

var server = net.createServer(function(tcpSocket) { //'connection' listener
    //socket was successfully connected
    console.log('client connected');

    //notify on disconnection
    tcpSocket.on('end', function() {
        console.log('client disconnected');
    });

    // Handle incoming messages from clients.
    tcpSocket.on('data', function (data) {
        console.log(data.toString('utf8', 0, data.length));

        //if data is PING respond PONG
        if (data.toString('utf8', 0, 4) == 'PING') {
            console.log('I was pinged');
            tcpSocket.write('PONG\r\n');
        }

        console.log(io.socket); //debugging purposes

        //trigger a socket call on the sails app
        io.socket.get('/', function (body, JWR) {
            //display the result
            console.log('Sails responded with: ', body);
            console.log('with headers: ', JWR.headers);
            console.log('and with status code: ', JWR.statusCode);
        });
    });
});

server.listen(8124, function() { //'listening' listener
    console.log('server bound');
});
It looks like your socket isn't autoconnecting. Try connecting manually:
// Instantiate the socket client (`io`)
// (for now, you must explicitly pass in the socket.io client when using this library from Node.js)
var io = sailsIOClient(socketIOClient);

// Set some options:
// (you have to specify the host and port of the Sails backend when using this library from Node.js)
io.sails.url = 'http://localhost:1337';

var socket = io.sails.connect();

socket.on('connect', function() {
    // ... start the TCP server and continue ...
});
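Concretely, that could look something like the sketch below, reusing the code from the question. Setting io.sails.autoConnect = false and deferring server.listen until the socket is connected are assumptions about how you might wire it up:

var socketIOClient = require('socket.io-client');
var sailsIOClient = require('sails.io.js');
var net = require('net');

var io = sailsIOClient(socketIOClient);
io.sails.url = 'http://localhost:1337';
io.sails.autoConnect = false; // connect manually instead of relying on the default socket

var socket = io.sails.connect();

socket.on('connect', function() {
    // Only start accepting TCP clients once the Sails socket is up,
    // so the GET requests are no longer queued.
    var server = net.createServer(function(tcpSocket) {
        tcpSocket.on('data', function(data) {
            socket.get('/', function(body, JWR) {
                console.log('Sails responded with: ', body);
            });
        });
    });

    server.listen(8124, function() {
        console.log('server bound');
    });
});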
I found a solution: I just got rid of sails.io.js and used plain socket.io-client, and it now works as intended. Feel free to explain why it didn't work with sails.io.js, though.
//library to connect to sailsjs websockets
var socketIOClient = require('socket.io-client');
//var sailsIOClient = require('sails.io.js');

//library to create the tcp server
var net = require('net');

var socket = socketIOClient.connect('http://localhost:1337', {
    'force new connection': true
});

var server = net.createServer(function(tcpSocket) { //'connection' listener
    //socket was successfully connected
    console.log('client connected');

    //notify on disconnection
    tcpSocket.on('end', function() {
        console.log('client disconnected');
    });

    // Handle incoming messages from clients.
    tcpSocket.on('data', function (data) {
        console.log(data.toString('utf8', 0, data.length));

        //if data is PING respond PONG
        if (data.toString('utf8', 0, 4) == 'PING') {
            console.log('I was pinged');
            tcpSocket.write('PONG\r\n');
        }

        if (data.toString('utf8', 0, 4) == 'test') {
            socket.emit('test', {message: 'test'});
            //io.socket.disconnect();
        }
    });
});

server.listen(8124, function() { //'listening' listener
    console.log('server bound');
});