Converting audio with sox in nodejs - node.js

I have made sure that everything sox needs to run is installed, and I've also installed the sox package for Node. Yet the following does not seem to work:
function encode(file, destination, quality) {
    return new Promise((resolve, reject) => {
        console.log(destination)
        let job = sox.transcode(file, destination, {
            sampleRate: 44100,
            format: 'mp3',
            bitRate: quality * 1024,
        })
        job.on('src', function(info) {
            console.log(info)
        });
        job.on('progress', (amountDone, amountTotal) => {
            console.log("progress", amountDone, amountTotal);
        });
        job.on('error', (err) => {
            reject('Could not transcode mp3.')
        })
        job.on('end', () => {
            resolve('mp3 transcoded.')
        })
        console.log(job)
    })
}
The console.log at the end logs:
Transcode {
domain: null,
_events:
{ src: [Function],
progress: [Function],
error: [Function],
end: [Function] },
_eventsCount: 4,
_maxListeners: undefined,
inputFile: 'C:\\Users\\User\\Documents\\App\\Media\\media\\uploads\\audio/2016/269/1/0//10c746ef62374c6ab1f2ecfc36705618/original.mp3',
outputFile: 'C:\\Users\\User\\Documents\\App\\Media\\media\\uploads\\audio/2016/269/1/0//10c746ef62374c6ab1f2ecfc36705618/128.mp3',
options:
{ sampleRate: 44100,
format: 'mp3',
bitRate: 196608,
channelCount: 2,
compressionQuality: 5 } }
There are no errors, so I wonder if something here is failing silently. Identifying a file works just fine:
let original = (path + 'original.mp3')
sox.identify(original, function(err, info) {
    if (err) {
        console.log(err)
        throw(err)
    }
    if (info.format !== 'mp3') {
        throw('File must be mp3!')
    }
    console.log(info) // {format: 'mp3', ..}
});
The above code works just fine, but sox.transcode does not.

The example code mentions a job.start() that (presumably) starts the transcoding job. I don't see that being called in your code.
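For illustration, a sketch of the encode function with that call added (assuming, as the package's example suggests, that transcode jobs must be started explicitly):
function encode(file, destination, quality) {
    return new Promise((resolve, reject) => {
        let job = sox.transcode(file, destination, {
            sampleRate: 44100,
            format: 'mp3',
            bitRate: quality * 1024,
        })
        job.on('error', (err) => {
            reject('Could not transcode mp3.')
        })
        job.on('end', () => {
            resolve('mp3 transcoded.')
        })
        job.start() // without this, the job is created but never runs
    })
}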

Related

Can't get values for a table located inside a frame through Puppeteer

Below is the error I get when I try to apply the 'table[0].$$eval' method (see code snippet below):
Failed to execute 'querySelectorAll' on 'Element': '# 297d0e3 > table > tbody > tr:nth-child(1)' is not a valid selector
const puppeteer = require('puppeteer')
const scrape = async () => {
    const browser = await puppeteer.launch({headless: false, defaultViewport: null, args: [
        '--disable-web-security',
        '--disable-features=IsolateOrigins,site-per-process'
    ]});
    const page = await browser.newPage();
    await page.goto('https://dealers.carwow.co.uk/dealers/sign_in')
    await page.type('#dealer_email', 'email')
    await page.type('#dealer_password', 'password')
    await page.click('#new_dealer > p > input')
    await new Promise(resolve => setTimeout(resolve, 5000));
    let xpathArray = await page.$x('//*[@id="dealer-dashboard"]/div[3]/div/div/a')
    await xpathArray[0].click()
    await new Promise(resolve => setTimeout(resolve, 5000));
    const frameHandle = await page.$x('//*[@id="klipfolio-iframe"]');
    await new Promise(resolve => setTimeout(resolve, 5000));
    const frame = await frameHandle[0].contentFrame();
    await frame.waitForXPath('//*[@id="0297d0e3"]/table');
    const table = await frame.$x('//*[@id="0297d0e3"]/table');
    console.log(table)
    browser.close()
};
The above function returns an array containing an ElementHandle (below) rather than an element.
[
ElementHandle {
_disposed: false,
_context: ExecutionContext {
_client: [CDPSession],
_world: [DOMWorld],
_contextId: 17,
_contextName: ''
},
_client: CDPSession {
eventsMap: [Map],
emitter: [Object],
_callbacks: Map(0) {},
_connection: [Connection],
_targetType: 'page',
_sessionId: '326BCCF50B6BBE8CA175CB21AB46C382'
},
_remoteObject: {
type: 'object',
subtype: 'node',
className: 'HTMLTableElement',
description: 'table.layout-grid',
objectId: '3652992625290954585.17.4'
},
_page: Page {
eventsMap: Map(0) {},
emitter: [Object],
_closed: false,
_timeoutSettings: [TimeoutSettings],
_pageBindings: Map(0) {},
_javascriptEnabled: true,
_workers: Map(0) {},
_fileChooserInterceptors: Set(0) {},
_userDragInterceptionEnabled: false,
_client: [CDPSession],
_target: [Target],
_keyboard: [Keyboard],
_mouse: [Mouse],
_touchscreen: [Touchscreen],
_accessibility: [Accessibility],
_frameManager: [FrameManager],
_emulationManager: [EmulationManager],
_tracing: [Tracing],
_coverage: [Coverage],
_screenshotTaskQueue: [ScreenshotTaskQueue],
_viewport: null
},
_frameManager: FrameManager {
eventsMap: [Map],
emitter: [Object],
_frames: [Map],
_contextIdToContext: [Map],
_isolatedWorlds: [Set],
_client: [CDPSession],
_page: [Page],
_networkManager: [NetworkManager],
_timeoutSettings: [TimeoutSettings],
_mainFrame: [Frame]
}
}
]
I have tried iterating over the array and then applying the method (see below) to extract data from the table (see picture).
What exactly is an ElementHandle and how do I solve this issue?
table[0].$$eval('#\30 297d0e3 > table > tbody > tr:nth-child(1)', rows => {
    return Array.from(rows, row => {
        const columns = row.querySelectorAll('td');
        return Array.from(columns, column => column.innerText);
    });
});
If you need a selector with a number in its id, try this workaround:
table[0].$$eval('[id="the_number"] > table > tbody > tr:nth-child(1)', rows => {
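Filling that in with the same extraction logic as the question (a sketch; "the_number" stays a placeholder for the real id):
table[0].$$eval('[id="the_number"] > table > tbody > tr:nth-child(1)', rows => {
    return Array.from(rows, row => {
        const columns = row.querySelectorAll('td');
        return Array.from(columns, column => column.innerText);
    });
});
As an aside, since table[0] already resolves to the <table> element itself, it may be simpler to query the rows relative to that handle, e.g. table[0].$$eval('tbody > tr', ...), and skip the id selector entirely.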

Basic broadcast example using agora.io not working

I'm trying to get agora.io's audio broadcasting working on a webpage, following this example.
Everything works if I also broadcast video. If I broadcast audio only, nothing shows, nothing is heard, and no errors appear in the console.
Here's my HTML:
<div class="video-grid" id="video">
<div class="video-view">
<div id="local_stream" class="video-placeholder"></div>
<div id="local_video_info" class="video-profile hide"></div>
<div id="video_autoplay_local" class="autoplay-fallback hide"></div>
</div>
</div>
Here's my Agora code:
var rtc = {
    client: null,
    joined: false,
    published: false,
    localStream: null,
    remoteStreams: [],
    params: {}
};
// Options for joining a channel
// Self-generated token
var option = {
    appID: "{{myappid}}",
    channel: "event-2123",
    uid: "1",
    token: "{{mytoken}}"
}
rtc.client = AgoraRTC.createClient({mode: "live", codec: "h264"});
// Initialize the client
rtc.client.init(option.appID, function () {
    console.log("init success");
    // Join a channel
    rtc.client.join(option.token ? option.token : null, option.channel, option.uid ? +option.uid : null, function (uid) {
        console.log("join channel: " + option.channel + " success, uid: " + uid);
        rtc.params.uid = uid;
        rtc.client.setClientRole("host");
        rtc.localStream = AgoraRTC.createStream({
            streamID: rtc.params.uid,
            audio: true,
            video: true,
            screen: false,
        })
        // Initialize the local stream
        rtc.localStream.init(function () {
            console.log("--------");
            console.log("init local stream success");
            // play stream with html element id "local_stream"
            rtc.localStream.play("local_stream");
            // Publish the local stream
            rtc.client.publish(rtc.localStream, function (err) {
                console.log("publish failed");
                console.error(err);
            })
        }, function (err) {
            console.error("init local stream failed ", err);
        });
    }, function(err) {
        console.error("client join failed", err)
    })
}, (err) => {
    console.error(err);
});
This works (but not, it seems, on Safari). But if I change the stream parameters to this, nothing works:
rtc.localStream = AgoraRTC.createStream({
    streamID: rtc.params.uid,
    audio: true,
    video: false,
    screen: false,
})
I've noticed that in some browsers the video is muted by default. So if no interface elements are showing and muting is on, perhaps that is the source of the problem?
How can I make this work?
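One thing worth ruling out (a sketch only, reusing the createStream/init/play calls from the code above; the "start_audio" button id is hypothetical): browser autoplay policies can silently block un-muted audio until there has been a user gesture, and with video disabled there is nothing on screen to hint that playback is blocked. Starting playback from a click avoids that:
rtc.localStream = AgoraRTC.createStream({
    streamID: rtc.params.uid,
    audio: true,
    video: false,
    screen: false,
})
rtc.localStream.init(function () {
    // defer play() until a user gesture so autoplay policies cannot block the audio
    document.getElementById("start_audio").addEventListener("click", function () {
        rtc.localStream.play("local_stream");
    });
}, function (err) {
    console.error("init local stream failed ", err);
});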

CPU usage gets higher as more WebRTC peers are added?

I'm streaming video/audio from a server, using an Electron app to capture the desktop. When one user is connected, CPU usage on both cores is 30-50%. As more users join, the usage gets higher; with ~6 users it was a constant 100% on both cores and the video becomes laggy and poor.
It seems like it's encoding the video separately for each user that joins? How can I make it encode once and send that stream to everyone? That's my only guess as to why CPU usage climbs so much; maybe I'm wrong about the cause. Thank you for any help you can give! I'm open to other ways of doing this as well, since only the server needs to send video out.
Getting the video and audio:
function getAudio(audioID){
    navigator.mediaDevices.getUserMedia({
        video: false,
        audio: {
            deviceId: {exact: audioID},
            autoGainControl: false, channelCount: 2, echoCancellation: false,
            noiseSuppression: false, sampleRate: 44100, sampleSize: 16
        }
    })
    .then(function(stream) {
        console.log("audio got??");
        var audio = stream.getAudioTracks()[0];
        mediaStream.addTrack(audio);
    })
    .catch(function(err) {
        console.log(err.message);
    });
}

desktopCapturer.getSources({ types: ['screen'] })
    .then(async sources => {
        console.log(sources);
        let constraints2 = {
            audio: false,
            video: {
                mandatory: {
                    chromeMediaSource: 'desktop',
                    maxWidth: 1280,
                    maxHeight: 720
                }
            }
        }
        let constraints3 = {
            frameRate: {max: 24}
        }
        navigator.mediaDevices.getUserMedia(constraints2)
            .then(function(stream){
                mediaStream = stream;
                let track = stream.getVideoTracks()[0];
                track.applyConstraints(constraints3);
                setTimeout(function(){
                    getAudio(audioID, 0);
                }, 2000);
            })
            .catch(console.error);
    })
    .catch(console.error);
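For reference, the same capture step written with async/await instead of the 2-second timeout (a sketch only; it reuses the constraints above and assumes audioID is defined elsewhere):
async function getMediaStream(audioID) {
    // capture the desktop video first
    const videoStream = await navigator.mediaDevices.getUserMedia({
        audio: false,
        video: {
            mandatory: { chromeMediaSource: 'desktop', maxWidth: 1280, maxHeight: 720 }
        }
    });
    await videoStream.getVideoTracks()[0].applyConstraints({ frameRate: { max: 24 } });

    // then grab the audio track and merge it into the same stream
    const audioStream = await navigator.mediaDevices.getUserMedia({
        video: false,
        audio: {
            deviceId: { exact: audioID },
            autoGainControl: false, channelCount: 2, echoCancellation: false,
            noiseSuppression: false, sampleRate: 44100, sampleSize: 16
        }
    });
    videoStream.addTrack(audioStream.getAudioTracks()[0]);
    return videoStream;
}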
Calling the peers that join:
peer = new Peer(myPeerID, {
    host: 'selfhostedpeerjs.com', port: 9000, path: '/', key: 'peerjs',
    config: {'iceServers': [
        {url: 'stun:stun.l.google.com:19302'},
        {url: 'stun:stun1.l.google.com:19302'},
        {url: 'stun:stun2.l.google.com:19302'},
        {url: 'stun:stun3.l.google.com:19302'},
        {url: 'stun:stun4.l.google.com:19302'}
    ]}
});
peer.on('open', function(id){
    console.log("My peer id is: " + id);
});
peer.on('connection', function(conn)
{
    conn.on('open', function(){
        console.log("connection opened");
        var id = conn.peer;
        //conn.send('Hello!');
        console.log("Trying to call now");
        var call = peer.call(id, mediaStream);
        call.on('error', function(err){
            console.log('calling error');
            console.log(err);
        })
    });
});

Get camera stream on embedded system

I have an embedded system with a camera and gstreamer, and I'm trying to get the stream from my camera. I have a web application built with aurelia and electron.
I tried mediaDevices.getUserMedia but I get a NotFoundError; using enumerateDevices, however, I do get the devices I need.
Could the problem be that getUserMedia doesn't work properly with Gstreamer? If I run the same project on my PC it works perfectly.
Here is my HTML:
<video ref="videoPlayer" hide.bind="screenSharing" id="videoPlayer" autoplay muted></video>
And this is my js:
let j = 0;
navigator.mediaDevices.enumerateDevices()
    .then((deviceInfos) => {
        for (var i = 0; i !== deviceInfos.length; ++i) {
            console.log(deviceInfos[i]);
            if (deviceInfos[i].kind === 'videoinput') {
                this.deviceInfo[j] = deviceInfos[i];
                j++;
            }
        }
        if (this.deviceInfo.length > 1) {
            console.log(this.deviceInfo.length);
            this.constraints = {
                audio: true,
                video: {
                    deviceId: { exact: this.deviceInfo[1].deviceId }
                }
            };
        }
        else {
            console.log("Only one camera");
            this.constraints = {
                video: {
                    deviceId: { exact: this.deviceInfo[0].deviceId }
                },
                audio: true
            };
            console.log(this.constraints);
        }
    })
    .then(() => {
        navigator.mediaDevices.getUserMedia(this.constraints)
            .then((stream) => {
                console.log('Got mic+video stream', stream);
                this.localStream = stream;
                this.videoPlayer.srcObject = this.localStream;
            })
            .catch((err) => {
                console.error(err);
            });
    })
}
I've seen on the internet that there are some packages like livecam, but I have no idea how to use them.
Here is the output of mediaDevices.enumerateDevices:
console.log(navigator.mediaDevices.enumerateDevices())
Promise {[[PromiseStatus]]: "resolved", [[PromiseValue]]: Array(5)}
0: MediaDeviceInfo { deviceId: "default", groupId: "6dbae3b74e14f5e239133b5feea86e5ae7a9741a3e3fd21a86eab9273fe135aa", kind: "audioinput", label: "Default" }
1: MediaDeviceInfo { deviceId: "d415346fe3db142f8daa611ad3dedb298b5d94b70f4221c38e7e6582f45c3008", groupId: "8d82cc2495eebb4c40bb77a5e0287d4b365ac1de8205684eae39cb605a703f11", kind: "audioinput", label: "Built-in Audio Stereo" }
2: MediaDeviceInfo { deviceId: "82378e03eff67ac471305e50ac95e629ebf441c1ab1819d6a36aca137e37e89d", groupId: "", kind: "videoinput", label: "" }
3: MediaDeviceInfo { deviceId: "default", groupId: "default", kind: "audiooutput", label: "Default" }
4: MediaDeviceInfo { deviceId: "31a7efff94b610d3fce02b21a319cc43e2541d56d98b4138b6e3fe854b0df38c", groupId: "391b1de381c11ab437d507abc0543f288dd29d999717dbb0e949c006ef120935", kind: "audiooutput", label: "Built-in Audio Stereo" }
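Note that enumerateDevices() returns a Promise (which is why the console initially shows it as pending); to inspect the actual device list you have to wait for it, e.g.:
navigator.mediaDevices.enumerateDevices()
    .then(devices => devices.forEach(d => console.log(d.kind, d.label, d.deviceId)));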

aws Nodejs sdk:: autoscaling.describeAutoScalingGroups

I need to get the status of the autoscaling group processes (whether they're suspended or resumed). I've written the below script, which returns the properties for the given ASG, but the SuspendedProcesses value is "[Object]". How do I expand the object? The ASG I'm querying has the Terminate process suspended, so I'd expect to see this in the output of the script:
var AWS = require('aws-sdk');
var uuid = require('uuid');
AWS.config.update({ region: 'eu-west-1' });
AWS.config.apiVersions = {
    autoscaling: '2011-01-01',
};
var autoscaling = new AWS.AutoScaling();
var params = {
    AutoScalingGroupNames: ["myAutoScalingGroup"]
};
function status() {
    autoscaling.describeAutoScalingGroups(params, function(err, data) {
        if (err) console.log(err, err.stack); // an error occurred
        else console.log(data); // successful response
    });
}
status();
This is the response from the above script:
C:\Node.js\NodeJsSamples\package01>node SuspendProcess.js
{ ResponseMetadata: { RequestId: 'myRequestId' },
AutoScalingGroups:
[ { AutoScalingGroupName: 'myAutoScalingGroupName',
AutoScalingGroupARN: 'arn:aws:autoscaling:eu-west-1:myAccNumber:autoScalingGroup:myAutoScalingGroupName',
LaunchConfigurationName: 'myLaunchConfigurationName',
MinSize: 1,
MaxSize: 1,
DesiredCapacity: 1,
DefaultCooldown: 300,
AvailabilityZones: [Object],
LoadBalancerNames: [Object],
TargetGroupARNs: [],
HealthCheckType: 'ELB',
HealthCheckGracePeriod: 300,
Instances: [Object],
CreatedTime: 2017-11-02T08:08:31.364Z,
SuspendedProcesses: [Object],
VPCZoneIdentifier: 'subnet-########,subnet-########,subnet-########',
EnabledMetrics: [],
Tags: [Object],
TerminationPolicies: [Object],
NewInstancesProtectedFromScaleIn: false } ] }
How can I expand the [Object] values?
Thanks.
Use this snippet. It's native (built into Node, no extra dependency):
console.log('string', require('util').inspect(<variable>, 1, 10, 1));
in your code:
function status() {
    autoscaling.describeAutoScalingGroups(params, function(err, data) {
        if (err) console.log(err, err.stack); // an error occurred
        else console.log('output', require('util').inspect(data, 1, 10, 1)); // successful response
    });
}
You need to JSON.stringify(data) instead of printing data directly in the else clause.
If you print console.log(JSON.stringify(data)) in the else branch, you will get the full response.
Use JSON.stringify -
var obj = { "name":"John", "age":function () {return 30;}, "city":"New York"};
console.log(JSON.stringify(obj));
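If you also want the nested arrays and objects expanded with indentation, JSON.stringify takes an indent argument:
console.log(JSON.stringify(data, null, 2)); // pretty-print with 2-space indentation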
