==== SOLVED ====
To be 100% honest I'm not sure what I did, but I have a fully working audio bar visualizer based on desktop audio. I went back to the original codepen (link below) I started working off of and edited everything as needed to accept a media stream, and it works.
Full Code
// Electron (pre-v5 callback API): enumerate capture sources and request the
// desktop's loopback audio+video stream for the 'Entire screen' source.
const {desktopCapturer} = require('electron')
desktopCapturer.getSources({types: ['window', 'screen']}, (error, sources) => {
  if (error) throw error
  for (let i = 0; i < sources.length; ++i) {
    if (sources[i].name === 'Entire screen') {
      navigator.mediaDevices.getUserMedia({
        // chromeMediaSource: 'desktop' routes system audio/video capture.
        audio: { mandatory : { chromeMediaSource: 'desktop' }},
        video: { mandatory : { chromeMediaSource: 'desktop' }}
      })
        .then((stream) => handleStream(stream))
        // BUG FIX: the original promise chain had no rejection handler, so a
        // failed getUserMedia call was silently swallowed.
        .catch((err) => console.error('getUserMedia failed:', err))
      return
    }
  }
})
/**
 * Renders a bar-graph frequency visualizer for the given MediaStream.
 * Draws into the #canvas element, sized to the window, one frame per
 * requestAnimationFrame tick.
 * @param {MediaStream} stream - desktop capture stream (audio is analysed).
 */
function handleStream (stream)
{
  const context = new AudioContext()
  const src = context.createMediaStreamSource(stream)
  const analyser = context.createAnalyser()
  const canvas = document.getElementById("canvas")
  canvas.width = window.innerWidth
  canvas.height = window.innerHeight
  const ctx = canvas.getContext("2d")
  // Without this connection the analyser never receives samples.
  src.connect(analyser)
  analyser.fftSize = 256
  const bufferLength = analyser.frequencyBinCount // fftSize / 2 bins
  const dataArray = new Uint8Array(bufferLength)
  const WIDTH = canvas.width
  const HEIGHT = canvas.height
  const barWidth = (WIDTH / bufferLength) * 2.5
  function renderFrame()
  {
    requestAnimationFrame(renderFrame)
    analyser.getByteFrequencyData(dataArray) // 0..255 magnitude per bin
    ctx.fillStyle = "#000"
    ctx.fillRect(0, 0, WIDTH, HEIGHT)
    // IDIOM FIX: bar position and height are loop-local now; the original
    // kept them as outer mutable state and mixed `let` with `var`.
    let x = 0
    for (let i = 0; i < bufferLength; i++)
    {
      const barHeight = dataArray[i]
      // Colour shifts with bin index; red also scales with amplitude.
      const r = barHeight + (25 * (i / bufferLength))
      const g = 250 * (i / bufferLength)
      const b = 50
      ctx.fillStyle = `rgb(${r}, ${g}, ${b})`
      ctx.fillRect(x, HEIGHT - barHeight, barWidth, barHeight)
      x += barWidth + 1
    }
  }
  renderFrame()
}
Codepen I used as a starting point
https://codepen.io/nfj525/pen/rVBaab
Original Post
I'm working on setting up a desktop visualizer that will graph the users desktop audio.
desktopCapturer appears to be grabbing the media — sending it to a video tag displays the stream, along with an echo of the media. Since I only need the audio, I'm setting the MediaStream as an AudioContext MediaStreamSource, which also appears to be working, as I get an audio echo if I connect the analyser to the ctx destination. The issue I'm running into is that when I try to get the frequency data it returns an array of only 0s. Below is my current code
// Original (question) version of the capture bootstrap: enumerate desktop
// sources via Electron's callback-style desktopCapturer and request a
// desktop audio+video stream for the 'Entire screen' source.
const {desktopCapturer} = require('electron')
desktopCapturer.getSources({types: ['window', 'screen']}, (error, sources) => {
if (error) throw error
for (let i = 0; i < sources.length; ++i) {
if (sources[i].name === 'Entire screen') {
navigator.mediaDevices.getUserMedia({
audio: {
mandatory : {
chromeMediaSource: 'desktop'
}
},
video: {
mandatory: {
chromeMediaSource: 'desktop',
}
}
})
// NOTE(review): no .catch — a rejected getUserMedia is silently dropped.
.then((stream) => handleStream(stream))
return
}
}
})
// Original (broken) version: an analyser is created but the MediaStream
// source is never connected to it, so it has no input. The EDIT section
// below confirms source.connect(analyser) was the missing piece.
function handleStream (stream) {
let audioCtx = new AudioContext();
let source = audioCtx.createMediaStreamSource(stream);
let analyser = audioCtx.createAnalyser()
//uncommenting results in echo, but still all 0s
//analyser.connect(audioCtx.destination)
analyser.fftSize = 256
let bufferLength = analyser.frequencyBinCount
let dataArray = new Uint8Array(bufferLength)
// NOTE(review): dataArray is logged straight after allocation — nothing
// ever fills it (no getByte*Data call), so it prints all zeros.
console.log(dataArray)
}
===== EDIT =====
I've been able to get this working; kind of but am still running into a slight issue.
1) I had to connect the source and the analyser with
source.connect(analyser)
2) Had to fill the dataArray with time domain with
getByteTimeDomainData
3) Outstanding issue is when there is no media playing the dataArray is filled with values of 126, 127, & 128 making the bars "dance" at almost full height.
4) The FPS also seems extremely fast, but have some plans to fix that
Current somewhat working code :
// "Edit" version of the capture bootstrap — identical to the original:
// pick the 'Entire screen' source and hand the desktop stream to
// handleStream below.
const {desktopCapturer} = require('electron')
desktopCapturer.getSources({types: ['window', 'screen']}, (error, sources) => {
if (error) throw error
for (let i = 0; i < sources.length; ++i) {
if (sources[i].name === 'Entire screen') {
navigator.mediaDevices.getUserMedia({
audio: {
mandatory : {
chromeMediaSource: 'desktop'
}
},
video: {
mandatory: {
chromeMediaSource: 'desktop',
}
}
})
.then((stream) => handleStream(stream))
return
}
}
})
// "Somewhat working" version: source is now connected, but it samples the
// TIME domain, not the frequency domain.
function handleStream (stream) {
const audioCtx = new AudioContext()
let source = audioCtx.createMediaStreamSource(stream)
let analyser = audioCtx.createAnalyser()
source.connect(analyser) //Had To Connect Source To Analyser
analyser.fftSize = 128
let bufferLength = analyser.frequencyBinCount
let dataArray = new Uint8Array(bufferLength)
let canvas = document.getElementById("canvas")
canvas.width = window.innerWidth
canvas.height = window.innerHeight
let canvasCtx = canvas.getContext("2d")
let WIDTH = canvas.width;
let HEIGHT = canvas.height;
let barWidth = (WIDTH / bufferLength);
let barHeight;
// NOTE(review): this outer x is effectively unused — draw() declares its
// own `var x` below, which shadows it.
let x = 0;
function draw() {
var drawVisual = requestAnimationFrame(draw)
// NOTE(review): time-domain bytes center around ~127/128 during silence
// (the author reports values 126-128 with no media), which is why the
// bars "dance" at near full height; the solved version switches to
// getByteFrequencyData.
analyser.getByteTimeDomainData(dataArray) //added to the draw to fill the dataArray
canvasCtx.fillStyle = "#000";
canvasCtx.fillRect(0, 0, WIDTH, HEIGHT);
var x = 0;
for (let i = 0; i < bufferLength; i++) {
barHeight = dataArray[i];
let r = 50
let g = 250
let b = 50
canvasCtx.fillStyle = `rgb(${r}, ${g}, ${b})`
canvasCtx.fillRect(x, HEIGHT - barHeight, barWidth, barHeight);
x += barWidth + 1;
}
}
draw()
}
Related
I am using pdf-lib and html2canvas to capture the page, but when the page is captured the button labels come out wrong: "Buy Again" becomes "BuyA Gain".
Also when I scroll the page, the page captured gets cut, when it should not because it is being captured on the basis of elementID
/**
 * Captures the element identified by `elementId` with html2canvas, embeds
 * the snapshot into an A4-sized pdf-lib document, and opens the print
 * dialog. The print/download icons are hidden during the capture so they
 * do not appear in the snapshot.
 */
const onClickPrint = () => {
  const domElement = document.getElementById(elementId);
  const printButton = document.getElementById("printIcon");
  printButton.style.visibility = "hidden";
  // The download icon does not exist on every page; getElementById returns
  // null in that case (the original wrapped this in a no-op try/catch).
  const downloadButton = document.getElementById("downloadIcon");
  if (downloadButton != null) {
    downloadButton.style.visibility = "hidden";
  }
  const restoreButtons = () => {
    printButton.style.visibility = "visible";
    if (downloadButton != null) {
      downloadButton.style.visibility = "visible";
    }
  };
  html2canvas(domElement)
    .then(async (canvas) => {
      const pdfDoc = await PDFDocument.create();
      const imagePDF = await pdfDoc.embedPng(canvas.toDataURL("image/PNG"));
      const { width, height } = imagePDF;
      const page = pdfDoc.addPage([A4_PAGE.width, A4_PAGE.height]);
      // Scale to fit inside A4 while preserving the aspect ratio, then
      // center the image on the page.
      const widthRatio = A4_PAGE.width / width;
      const heightRatio = A4_PAGE.height / height;
      const ratio = Math.min(widthRatio, heightRatio);
      page.drawImage(imagePDF, {
        x: A4_PAGE.width / 2 - (width * ratio) / 2,
        y: A4_PAGE.height / 2 - (height * ratio) / 2,
        width: width * ratio,
        height: height * ratio,
      });
      const pdfBytes = await pdfDoc.save();
      const blob = new Blob([pdfBytes], { type: "application/pdf" });
      openPrintDialog(blob);
    })
    // BUG FIX: the original restored the buttons synchronously, before the
    // async capture finished (so they could reappear mid-capture), and it
    // never handled a rejected html2canvas/pdf promise.
    .catch((err) => console.error("print capture failed:", err))
    .finally(restoreButtons);
};
I have JSON file contain games objects, I want to get top 5 games that have the highest total playtime between users.
I tried to get all objects by reading the file using file system in nodejs :
/**
 * Loads and parses the games JSON file.
 * @returns {Promise<Object>} parsed contents of ../../games.json
 */
const queryGames = async () => {
  // BUG FIX: use the non-blocking promise API instead of readFileSync,
  // which blocked the event loop inside an async function.
  const data = await fs.promises.readFile(path.resolve(__dirname, '../../games.json'))
  return JSON.parse(data)
}
/**
 * Query for top games by play time.
 * @returns {Promise<QueryResult>}
 */
const selectTopByPlaytime = async () => {
// TODO(review): unimplemented stub — intended to resolve the five games
// with the highest total playTime (see the aggregate/sort answer below).
}
this is the json file : https://jsoneditoronline.org/#left=cloud.3b82169327044c04b7207fa186aee85b&right=local.tiniqu
something like this should work.
const gamePlayData = require('./gamePlay.json').data
/**
 * Aggregates total play time per game.
 * @param {Array<{game: string, playTime: number}>} [data=gamePlayData]
 *   play records to aggregate; defaults to the bundled JSON data.
 * @returns {Object<string, number>} e.g.
 * {
 *   'League of legends': 1650,
 *   'World of warcraft': 2300,
 *   'Dark Souls': 218,
 *   'The Witcher 3: Wild Hunt': 987,
 *   etc....
 * }
 */
const getGamePlayTimes = (data = gamePlayData) => {
  // BUG FIX: the accumulator was assigned without const/let, creating an
  // implicit global (and a ReferenceError in strict mode).
  const gamePlayTimes = {}
  for (const playData of data) {
    // ?? 0 starts a game at zero the first time it is seen.
    gamePlayTimes[playData.game] = (gamePlayTimes[playData.game] ?? 0) + playData.playTime
  }
  return gamePlayTimes;
}
// Converts a { gameName: totalPlayTime } map into a list of
// { game, playTime } records, preserving key order.
const getGamesAndTimesAsList = (playTimes) => {
  const records = [];
  for (const game in playTimes) {
    records.push({ game, playTime: playTimes[game] });
  }
  return records;
}
/**
 * Sorts an array of objects in place, descending by the numeric property
 * `par`, and returns the same array (matching the original contract).
 * @param {Object[]} a - array to sort (mutated).
 * @param {string} par - name of the numeric property to sort by.
 * @returns {Object[]} the input array, sorted high-to-low.
 */
const reverseBubbleSort = (a, par) => {
  // PERF/IDIOM FIX: the hand-rolled O(n^2) bubble sort is replaced by the
  // built-in Array.prototype.sort, which also sorts in place and (per the
  // ES spec) is stable, so equal keys keep their relative order just as
  // the original stable bubble sort did.
  return a.sort((x, y) => y[par] - x[par]);
}
// BUG FIX: the original called getGameAndPlayTimes(), which is not defined
// anywhere — the aggregator above is named getGamePlayTimes. Also declare
// sortedArr instead of creating an implicit global.
const sortedArr = reverseBubbleSort(getGamesAndTimesAsList(getGamePlayTimes()), 'playTime')
const top5 = sortedArr.slice(0, 5);
console.log(top5);
I am working on a WhatsApp chatbot where I receive an audio file (ogg format) URL from WhatsApp; I get the buffer and upload that file to S3 (sample.ogg). Now I want to use AWS Transcribe Streaming, so I am creating a read stream of the file and sending it to AWS Transcribe over a WebSocket, but I am receiving an empty response, and sometimes an "Mhm mm mm" response. Please can anyone tell me what I am doing wrong in my code?
const express = require('express')
const app = express()
const fs = require('fs');
const crypto = require('crypto'); // to sign our pre-signed URL
const v4 = require('./aws-signature-v4'); // to generate our pre-signed URL
// BUG FIX: the scoped-package names were mangled to "#aws-sdk/..." — the
// real npm packages live under the "@aws-sdk" scope.
const marshaller = require("@aws-sdk/eventstream-marshaller"); // for converting binary event stream messages to and from JSON
const util_utf8_node = require("@aws-sdk/util-utf8-node");
var WebSocket = require('ws') //for opening a web socket
// our converter between binary event streams messages and JSON
const eventStreamMarshaller = new marshaller.EventStreamMarshaller(util_utf8_node.toUtf8, util_utf8_node.fromUtf8);
// our global variables for managing state
let languageCode;
let region = 'ap-south-1';
let sampleRate;
let inputSampleRate;
let transcription = "";
let socket;
let micStream;
let socketError = false;
let transcribeException = false;
// let languageCode = 'en-us'
app.listen(8081, (error, data) => {
  if(!error) {
    // BUG FIX: the log message claimed port 8080 while listening on 8081.
    console.log(`running at 8081----->>>>`)
  }
})
// Logs the transcript text carried by a decoded Transcribe event message,
// if the message contains any result with at least one alternative.
let handleEventStreamMessage = function (messageJson) {
  const results = messageJson.Transcript.Results;
  if (results.length === 0) {
    return;
  }
  const alternatives = results[0].Alternatives;
  if (alternatives.length === 0) {
    return;
  }
  // fix encoding for accented characters
  const transcript = decodeURIComponent(escape(alternatives[0].Transcript));
  console.log(`Transcpted is----->>${transcript}`)
}
/**
 * Naively downsamples a float audio buffer by averaging sample windows.
 * @param {Float32Array|number[]} buffer - input samples.
 * @param {number} [inputSampleRate=44100] - source sample rate in Hz.
 * @param {number} [outputSampleRate=16000] - target sample rate in Hz.
 * @returns {Float32Array|number[]} downsampled samples (the input object
 *   itself is returned when the rates already match).
 */
function downsampleBuffer (buffer, inputSampleRate = 44100, outputSampleRate = 16000){
  if (outputSampleRate === inputSampleRate) {
    return buffer;
  }
  const sampleRateRatio = inputSampleRate / outputSampleRate;
  const newLength = Math.round(buffer.length / sampleRateRatio);
  const result = new Float32Array(newLength);
  let offsetResult = 0;
  let offsetBuffer = 0;
  while (offsetResult < result.length) {
    const nextOffsetBuffer = Math.round((offsetResult + 1) * sampleRateRatio);
    // Average every input sample that maps onto this output sample.
    let accum = 0;
    let count = 0;
    for (let i = offsetBuffer; i < nextOffsetBuffer && i < buffer.length; i++ ) {
      accum += buffer[i];
      count++;
    }
    // BUG FIX: the original divided by count unconditionally; a rounded
    // window can be empty at the tail, which produced NaN samples.
    result[offsetResult] = count > 0 ? accum / count : 0;
    offsetResult++;
    offsetBuffer = nextOffsetBuffer;
  }
  return result;
}
// Converts float samples in [-1, 1] to 16-bit little-endian signed PCM.
function pcmEncode(input) {
  const buffer = new ArrayBuffer(input.length * 2);
  const view = new DataView(buffer);
  input.forEach((sample, index) => {
    // Clamp to the valid range, then scale to the int16 range; negative
    // values use the full -32768 extent, positive values top out at 32767.
    const clamped = Math.max(-1, Math.min(1, sample));
    view.setInt16(index * 2, clamped < 0 ? clamped * 0x8000 : clamped * 0x7FFF, true);
  });
  return buffer;
}
// Wraps raw audio bytes in the event-stream envelope (headers + body)
// that the Transcribe streaming WebSocket expects.
function getAudioEventMessage(buffer) {
  const headers = {
    ':message-type': { type: 'string', value: 'event' },
    ':event-type': { type: 'string', value: 'AudioEvent' },
  };
  return { headers, body: buffer };
}
/**
 * Downsamples raw audio, PCM-encodes it, and wraps it in a marshalled
 * event-stream message ready to send over the Transcribe WebSocket.
 * @param {Float32Array|null} raw - raw audio samples; null/undefined
 *   yields undefined.
 * @returns {Uint8Array|undefined} binary event-stream message.
 */
function convertAudioToBinaryMessage(raw) {
  if (raw == null)
    return;
  // downsample and convert the raw audio bytes to PCM
  let downsampledBuffer = downsampleBuffer(raw, inputSampleRate);
  let pcmEncodedBuffer = pcmEncode(downsampledBuffer);
  // BUG FIX: removed a no-op `setTimeout(function() {}, 1)` — it scheduled
  // an empty callback and had no effect on this synchronous function.
  // add the right JSON headers and structure to the message
  let audioEventMessage = getAudioEventMessage(Buffer.from(pcmEncodedBuffer));
  // convert the JSON object + headers into a binary event stream message
  let binary = eventStreamMarshaller.marshall(audioEventMessage);
  return binary;
}
// Builds a pre-signed wss:// URL for the Transcribe streaming endpoint.
// NOTE(review): <AWS_KEY> / <AWS_SECRET_KEY> are placeholders, not valid
// JavaScript — they must be replaced with real credential values.
// NOTE(review): the endpoint and 'region' field hard-code us-east-1 while
// the module-level `region` is 'ap-south-1' — confirm which is intended.
// NOTE(review): the query string advertises sample-rate=8000, but
// downsampleBuffer above converts audio to 16000 Hz by default; this
// mismatch is a plausible cause of the garbled transcripts — confirm.
function createPresignedUrl() {
let endpoint = "transcribestreaming." + "us-east-1" + ".amazonaws.com:8443";
// get a preauthenticated URL that we can use to establish our WebSocket
return v4.createPresignedURL(
'GET',
endpoint,
'/stream-transcription-websocket',
'transcribe',
// SHA-256 of an empty payload, required by the SigV4 signing scheme used here.
crypto.createHash('sha256').update('', 'utf8').digest('hex'), {
'key': <AWS_KEY>,
'secret': <AWS_SECRET_KEY>,
'protocol': 'wss',
'expires': 15,
'region': 'us-east-1',
'query': "language-code=" + 'en-US' + "&media-encoding=pcm&sample-rate=" + 8000
}
);
}
// Prints an error message to the console with an "Error:" prefix.
function showError(message) {
  console.log("Error: ", message);
}
// GET /convert: opens a pre-signed Transcribe WebSocket, streams the local
// audio file to it chunk by chunk, and logs transcription events.
// NOTE(review): the handler never sends anything via `res`, so the HTTP
// request will hang — confirm whether a response is intended.
app.get('/convert', (req, res) => {
// NOTE(review): an .mp3 is compressed audio, but the pipeline below treats
// chunks as raw float samples — confirm the input format.
var file = 'recorded.mp3'
const eventStreamMarshaller = new marshaller.EventStreamMarshaller(util_utf8_node.toUtf8, util_utf8_node.fromUtf8);
let url = createPresignedUrl();
let socket = new WebSocket(url);
socket.binaryType = "arraybuffer";
let output = '';
const readStream = fs.createReadStream(file, { highWaterMark: 32 * 256 })
// NOTE(review): 'binary' (latin-1) encoding makes 'data' emit *strings*,
// which convertAudioToBinaryMessage then indexes as if they were float
// samples — a likely cause of the empty/garbage transcripts.
readStream.setEncoding('binary')
//let sampleRate = 0;
let inputSampleRate = 44100
readStream.on('end', function() {
console.log('finished reading----->>>>');
// write to file here.
// Send an empty frame so that Transcribe initiates a closure of the WebSocket after submitting all transcripts
// NOTE(review): `new Buffer([])` is deprecated; Buffer.from([]) (or
// Buffer.alloc(0)) is the modern equivalent.
let emptyMessage = getAudioEventMessage(Buffer.from(new Buffer([])));
let emptyBuffer = eventStreamMarshaller.marshall(emptyMessage);
// NOTE(review): 'end' may fire before the socket opens, in which case this
// send happens on a CONNECTING socket — confirm the ordering.
socket.send(emptyBuffer);
})
// when we get audio data from the mic, send it to the WebSocket if possible
socket.onopen = function() {
readStream.on('data', function(chunk) {
let binary = convertAudioToBinaryMessage(chunk);
if (socket.readyState === socket.OPEN) {
console.log(`sending to steaming API------->>>>`)
socket.send(binary);
}
});
// the audio stream is raw audio bytes. Transcribe expects PCM with additional metadata, encoded as binary
}
// the audio stream is raw audio bytes. Transcribe expects PCM with additional metadata, encoded as binary
socket.onerror = function () {
socketError = true;
showError('WebSocket connection error. Try again.');
};
// handle inbound messages from Amazon Transcribe
socket.onmessage = function (message) {
//convert the binary event stream message to JSON
// NOTE(review): Buffer(...) without `new`/`from` is deprecated; prefer
// Buffer.from(message.data).
let messageWrapper = eventStreamMarshaller.unmarshall(Buffer(message.data));
//console.log(`messag -->>${JSON.stringify(messageWrapper)}`)
let messageBody = JSON.parse(String.fromCharCode.apply(String, messageWrapper.body));
console.log("results:.. ",JSON.stringify(messageBody))
if (messageWrapper.headers[":message-type"].value === "event") {
handleEventStreamMessage(messageBody);
}
else {
transcribeException = true;
showError(messageBody.Message);
}
}
// NOTE(review): closeSocket is defined but never invoked in this handler.
let closeSocket = function () {
// NOTE(review): socket.OPEN is a constant (always truthy); the intended
// check is socket.readyState === socket.OPEN, as used above.
if (socket.OPEN) {
// Send an empty frame so that Transcribe initiates a closure of the WebSocket after submitting all transcripts
let emptyMessage = getAudioEventMessage(Buffer.from(new Buffer([])));
let emptyBuffer = eventStreamMarshaller.marshall(emptyMessage);
socket.send(emptyBuffer);
}
}
})
I use pixi.js v 3.0.0
My simple code is
// Question code (Pixi v3): create a canvas renderer, add a bunny sprite,
// and spin it every animation frame.
// BUG (per the answer below): PIXI.CanvasRenderer is invoked WITHOUT `new`,
// which produces "TypeError: this.initPlugins is not a function" in v3.
(function () {
document.addEventListener('DOMContentLoaded', function () {
var width = screen.availWidth;
var height = screen.availHeight;
var renderer = PIXI.CanvasRenderer(width, height, {
backgroundColor : 0x1099bb
});
document.body.appendChild(renderer.view);
var stage = new PIXI.Container();
var texture = PIXI.Texture.fromImage('asset/bunny.png');
var bunny = new PIXI.Sprite(texture);
// Rotate around the sprite's center.
bunny.anchor.x = 0.5;
bunny.anchor.y = 0.5;
bunny.position.x = 200;
bunny.position.y = 150;
stage.addChild(bunny);
animate();
// Render loop: rotate the bunny a little each frame and redraw the stage.
function animate() {
requestAnimationFrame(animate);
bunny.rotation += 0.1;
renderer.render(stage);
}
}, false);
}
());
But i get: TypeError: this.initPlugins is not a function if use CanvasRenderer but it works in other cases
Just add new keyword when creating the CanvasRenderer.
Here is my code regarding a tilesprite and a player. The player should not pass through the tilesprite, but unfortunately my code is not working; I tried every approach but couldn't achieve the desired result, and I don't know why the tile2.body.immovable = true; flag is not working.
// Phaser state object: scrolling background + ground tilesprite and an
// animated player that should collide with the ground.
// NOTE(review): the code mixes `this.game` and the global `game` — both
// reference the same Phaser.Game instance created below.
var main = {
// Loads the background, ground, and the player's sprite atlas.
preload: function () {
this.game.load.image('ground', 'images/ground.jpg');
this.game.load.image('bg', 'images/gabg.jpg');
this.game.load.atlasJSONHash('mover', 'images/sheet.png', 'images/sprites.json');
},
create: function () {
this.game.physics.startSystem(Phaser.Physics.ARCADE);
cursor = this.game.input.keyboard.createCursorKeys();
tile1 = game.add.tileSprite(0, 10, 1365, 582, 'bg');
tile2 = game.add.tileSprite(0, 590, 1366, 91, 'ground');
player = this.game.add.sprite(50, 550, 'mover');
player.frame = 0;
player.anchor.setTo(0.5, 0.5);
// Builds frame indices 0..10 for the 'move' animation; `i` and `anim`
// are module-level vars declared after this object (var hoisting).
while (i < 11) {
anim.push(i)
i++;
}
player.animations.add('move', anim, 20, true);
this.game.physics.arcade.enable(player);
this.game.physics.arcade.enable(tile2);
// Ground should not be pushed by the player nor fall under gravity.
// NOTE(review): the answer below suggests this may be hitting the
// Phaser 2.3.0 tilesprite-collide bug rather than a flag problem.
tile2.body.immovable = true;
tile2.body.allowGravity = false;
player.body.gravity.y = 800;
},
update: function () {
this.game.physics.arcade.collide(tile2, player);
// Scroll the background slower than the ground for parallax.
tile1.tilePosition.x += -1;
tile2.tilePosition.x += -4;
this.move();
},
// Jump on up-arrow; otherwise keep the walking animation running.
move: function () {
if (cursor.up.isDown) {
player.body.velocity.y = -200;
player.animations.stop('move');
}
else {
player.animations.play('move');
}
},
};
var game = new Phaser.Game(1366, 768, Phaser.CANVAS, 'gamediv');
// NOTE(review): at top level in non-strict script code `this` is the global
// object, so `this.game` resolves to the `game` global declared above.
this.game.state.add('main', main);
this.game.state.start('main');
// NOTE(review): these vars are referenced by main.create() above; `var`
// hoisting makes that legal, but declaring them before use would be clearer.
var anim = [];
var cursor, i=0;
var tile1;
var tile2;
var player;
Which version of Phaser are you using? Could it be related to this bug in 2.3.0?
http://www.html5gamedevs.com/topic/13856-problem-with-collide-method-from-physics-arcade-in-different-versions-of-phaser/?p=79032