How to play sound on a Raspberry Pi after startup, without login - node.js

I need to play sound on the Raspberry Pi with a node.js script. Everything is fine when I start the script from the command line myself. When I run the same script at startup from /etc/rc.local, I can see the running process with "ps aux", but I can't hear any sound. I also tried starting the node script 40 seconds after boot, because I thought there was too little time for some initialization or something, but within that time I could start the script from the command line and hear sound...
I tried both users, root and pi. Both work from the command line (the user that runs the script after autostart is root).
I linked my program to /usr/bin/node, because otherwise the process wasn't able to start on startup.
I forced the Raspberry Pi to use the stereo jack: amixer cset numid=3 1
My node.js code is:
var fs = require("fs");
var lame = require("lame");
var Speaker = require("speaker");
var SerialPort = require("serialport").SerialPort;

var playing = false;
var stream = [];
stream[0] = "sound1.mp3";
stream[1] = "sound2.mp3";
stream[2] = "sound3.mp3";
stream[3] = "sound4.mp3";

var getCurrentStream = function(){
    var i = Math.round( Math.random() * 3 );
    return stream[i];
};

var serialPort = new SerialPort("/dev/ttyACM0", {
    baudrate: 9600
}, false);

serialPort.open(function(){
    console.log("open");
    serialPort.on("data", function(data){
        console.log("data received" + data);
        if(!playing){
            try{
                var currentStream = fs.createReadStream( getCurrentStream() );
                var speaker = new Speaker();
                speaker.on('finish', function(){
                    playing = false;
                });
                currentStream.pipe(new lame.Decoder()).pipe(speaker);
                playing = true;
            }
            catch(e){
                console.log("Error: " + e);
            }
        }
    });
});
For the startup I tried:
as a cron job; after crontab -e I added:
@reboot /opt/node/bin/forever start /var/www/node/residenz/server.js
I also tried the same inside the file /etc/rc.local:
/opt/node/bin/forever start /var/www/node/residenz/server.js
Thanks for any help!

I had the same problem, and this question (and analyzing the answer) gave me hope that it was possible, but for me it was the paths that were the problem - I was using a relative path, but the working directory (and perhaps user?) being executed under cron needed the path to the file to be absolute. BTW, I used cron, Python, and pygame (pygame.mixer.music) and was able to make it work.
My testing program is below (pygame.mixer.Sound did not work, but I believe that was because I was using an MP3 instead of a WAV).
import pygame
import time
import os
import sys

#soundFile = "alarm.mp3"          # BAD: relative path fails under cron
soundFile = "/home/pi/alarm.mp3"  # GOOD: absolute path

channel = None
if len(sys.argv) > 1:
    pygame.mixer.init(44100, -16, 2, 4096)
    if sys.argv[1] == "music":
        print "Testing pygame.mixer"
        pygame.mixer.music.load(soundFile)
        pygame.mixer.music.set_volume(1.0)
        pygame.mixer.music.play()
    elif sys.argv[1] == "sound":
        print "Testing pygame.sound"
        pygame.mixer.init()
        s = pygame.mixer.Sound(soundFile)
        s.set_volume(1.0)
        channel = s.play()
    elif sys.argv[1] == "mpg":
        print "Using mpg321 Player"
        os.system("mpg321 " + soundFile)
    else:
        print "Using OMX Player"
        os.system("omxplayer " + soundFile)
    print "Execution control has returned"
    # Busy-wait until playback finishes so the process does not exit early
    while pygame.mixer.get_busy() or pygame.mixer.music.get_busy() or \
          (channel is not None and channel.get_busy()):
        continue
    pygame.mixer.quit()
else:
    print "Unknown option. Options are omx, mpg, music, or sound"
In cron, I had @reboot python /home/pi/soundtest.py music & and it played the file on boot up.

Just in case anyone else has the same problem, I want to share my final solution. I simply implemented the functionality in Python. For the startup, I put the line that starts the Python script into the file /etc/rc.local.
#!/usr/bin/python
# -*- coding: utf-8 -*-
#
# server.py
import RPi.GPIO as GPIO
import subprocess
import time
import serial
from random import randint

port = serial.Serial("/dev/ttyACM0", baudrate=9600, timeout=0)

# Index 0 is a dummy entry so that randint(1, 4) maps directly to PATH[1..4]
PATH = [4]
PATH.append("/var/www/node/Boom1.mp3")
PATH.append("/var/www/node/Boom2.mp3")
PATH.append("/var/www/node/Boom3.mp3")
PATH.append("/var/www/node/Boom4.mp3")

def main():
    count = 0
    while True:
        line = port.readlines()
        if len(line) != 0 and count < 4:
            try:
                job = subprocess.Popen(["mpg321", returnPath()], stdin=subprocess.PIPE)
                time.sleep(float(line[0]) / 10)
            except:
                print("can't play sound file")

def returnPath():
    x = randint(1, 4)
    return PATH[x]

if __name__ == '__main__':
    main()

Related

Nifi ExecuteScript with Groovy : org.codehaus.groovy.control.MultipleCompilationErrorsException: startup failed:

I am having issues with the Apache NiFi ExecuteScript processor.
Following the ExecuteScript cookbook tutorial
https://community.cloudera.com/t5/Community-Articles/ExecuteScript-Cookbook-part-2/ta-p/249018, I was able to write a Groovy script that writes to the output stream.
I am writing a JSON string to the output stream.
However, on execution in NiFi, I get the error depicted at the following link:
https://imgur.com/jYgH8EY.png
Below is the code:
import groovy.json.JsonBuilder
import org.apache.commons.io.IOUtils
import java.nio.charset.StandardCharsets
import org.apache.nifi.processor.io.StreamCallback

flowFile = session.get()
if(flowFile == null){
    return;
}
def incomingFlowFileName = flowFile.getAttribute('filename')
def pathToIngestionScript = pathtobashscript.value
def command = '''
docker ps | grep 'visallo/dev' | awk '{print $1}'
'''
def containerId = ['bash','-c',command].execute().in.text
if(containerId.replaceAll("\\s","").length() != 0){
    /* "docker exec -i " + container_id + " bash < " + path_to_bash_script */
    "docker exec -i ${containerId} bash < ${pathToIngestionScript}".execute()
}else {
    /*ingest data like a savage*/
}
def result = ["fileId":incomingFlowFileName.tokenize('*')[1],"status":"2"]
flowFile = session.write(flowFile,{outputStream ->
    outputStream.write(new JsonBuilder(result).toPrettyString().getBytes(StandardCharsets.UTF_8))
} as StreamCallBack)
session.transfer(flowFile,REL_SUCCESS)
Doesn't org.apache.nifi.processor.io.StreamCallback exist in the Script execution space?
I am running Nifi 1.9.2
You have "as StreamCallBack", but the interface is StreamCallback (without the capital B). Because the misspelled type cannot be resolved, the script fails to compile, which is exactly the MultipleCompilationErrorsException in the title.

Python3 paho-mqtt speed vs nodejs mqtt

I have conducted some speed tests for MQTT in Python3 and Node.js, using QoS level 0, and have found Node.js to be remarkably faster than the Python3 implementation.
How can this be?
I'm open to using either framework as a bridge on the server side to handle data from multiple clients. However, I'm losing my confidence that I should be using Python3 for anything on the server.
Running code snippets:
Python3:
import paho.mqtt.client as mqtt
import logging
import time
import threading
import json
import sys

class MqttAdaptor(threading.Thread):

    def __init__(self, topic, type=None):
        threading.Thread.__init__(self)
        self.topic = topic
        self.client = None
        self.type = type

    def run(self):
        self.client = mqtt.Client(self.type)
        self.client.on_connect = self.on_connect
        self.client.on_disconnect = self.on_disconnect
        if self.type is not None:
            self.client.connect("localhost", 1883, 60)
            self.client.on_message = self.on_message
            self.client.loop_forever()
        else:
            self.client.connect_async("localhost", 1883, 60)
            self.client.loop_start()

    # The callback for when the client receives a CONNACK response from the server.
    def on_connect(self, client, userdata, flags, rc):
        self.client.subscribe(self.topic)

    def on_disconnect(self, client, userdata, rc):
        if rc != 0:
            print("Unexpected disconnection from local MQTT broker")

    # The callback for when a PUBLISH message is received from the server.
    def on_message(self, client, userdata, msg):
        jsonMsg = ""
        try:
            jsonMsg = json.loads(msg.payload)
            if jsonMsg['rssi'] is not None:
                jsonMsg['rssi'] = round(jsonMsg['rssi'] * 3.3 * 100000) / 10000
        except:
            pass
        print(json.dumps(jsonMsg))

    def publish(self, topic, payload, qos=0, retain=False):
        self.client.publish(topic, payload, qos, retain)

    def close(self):
        if self.client is not None:
            self.client.loop_stop()
            self.client.disconnect()

if __name__ == "__main__":
    topic = '/test/+/input/+'
    subber = MqttAdaptor(topic, 'sub')
    subber.start()

    topic = None
    test = MqttAdaptor(topic)
    test.run()

    print("start")
    while True:
        data = sys.stdin.readline()
        if not len(data):
            print("BREAK")
            break
        msg = data.split('\t')
        topic = msg[0]
        test.publish(topic, msg[1], 0)
    print("done")
    sys.exit(0)
Node.js:
"use strict";
const fs = require('fs');
const readline = require('readline');
const mqtt = require('mqtt');
const mqttClient = mqtt.connect();
mqttClient.on('connect', () => {
console.error('==== MQTT connected ====');
mqttClient.subscribe('/test/+/input/+');
});
mqttClient.on('close', () => {
console.error('==== MQTT closed ====');
});
mqttClient.on('error', (error) => {
console.error('==== MQTT error ' + error + ' ====');
});
mqttClient.on('offline', () => {
console.error('==== MQTT offline ====');
});
mqttClient.on('reconnect', () => {
console.error('==== MQTT reconnect ====');
});
mqttClient.on('message', (topic, message) => {
const topicSegments = topic.split('/');
topicSegments[topicSegments.length - 2] = 'done';
topic = topicSegments.join('/');
try {
//The message might not always be valid JSON
const json = JSON.parse(message);
//If rssi is null/undefined in input, it should be left untouched
if (json.rssi !== undefined && json.rssi !== null) {
//Multiply by 3 and limit the number of digits after comma to four
json.rssi = Math.round(json.rssi * 3.3 * 10000) / 10000;
}
console.log(topic + "\t" + JSON.stringify(json));
} catch (ex) {
console.error('Error: ' + ex.message);
}
});
const rl = readline.createInterface({
input: process.stdin,
terminal: false,
});
rl.on('line', (line) => {
const lineSegments = line.split("\t");
if (lineSegments.length >= 2) {
const topic = lineSegments[0];
const message = lineSegments[1];
mqttClient.publish(topic, message);
}
});
rl.on('error', () => {
console.error('==== STDIN error ====');
process.exit(0);
});
rl.on('pause', () => {
console.error('==== STDIN paused ====');
process.exit(0);
});
rl.on('close', () => {
console.error('==== STDIN closed ====');
process.exit(0);
});
Both scripts are run on the command line, connecting to the same broker.
They are run using a shell pipeline (node):
time cat test-performance.txt | pv -l -L 20k -q | nodejs index.js | pv -l | wc -l
and (python):
time cat test-performance.txt | pv -l -L 20k -q | python3 mqttTestThread.py | pv -l | wc -l
The test file contains around 2 GB of text in this format:
/test/meny/input/test {"sensor":"A1","data1":"176","time":1534512473545}
As shown in the scripts, I count the number of lines during the time they run. For a small test, the Python3 script has a throughput of roughly 3k/sec, while Node has a throughput of roughly 20k/sec.
This is a big difference. Does anyone have an idea why? And/or how to get Python to run with comparable throughput?
There are multiple reasons why Node is faster than Python for this task. The main one: pure Python is slow; only libraries implemented in C, like numpy or pandas, are reasonably fast, and mostly for numeric work at that.
The second reason is, as Nhosko mentioned in a comment, that Node is async by default and therefore faster at I/O-bound tasks.
A potential third reason is that MQTT here carries JSON data. JSON (JavaScript Object Notation) maps natively onto JavaScript objects, so parsing and re-serializing it is cheap in Node.
I wouldn't recommend Python for this task. Python is great for machine learning and data science; for server and I/O-bound tasks, you may consider Node or Go.

Python multiprocessing within node.js - Prints on sub process not working

I have a Node.js application that runs a client interface exposing an action that triggers machine-learning tasks. Since Python is a better choice for implementing machine-learning code, I've implemented a Python application that runs those tasks on demand.
Now I need to integrate both applications. It has been decided that we need to use a single (AWS) instance for both.
One way to do such an integration is the python-shell node module; there, the communication between Python and Node is done over stdin and stdout.
On the Node side I have something like this:
'use strict';

const express = require('express');
const PythonShell = require('python-shell');

var app = express();

app.listen(8000, function () {
    console.log('Example app listening on port 8000!');
});

var options = {
    mode: 'text',
    pythonPath: '../pythonapplication/env/Scripts/python.exe',
    scriptPath: '../pythonapplication/',
    pythonOptions: ['-u'], // Unbuffered
};

var pyshell = new PythonShell('start.py', options);

pyshell.on('message', function (message) {
    console.log(message);
});

app.get('/task', function (req, res) {
    pyshell.send('extract-job');
});

app.get('/terminate', function (req, res) {
    pyshell.send('terminate');
    pyshell.end(function (err, code, signal) {
        console.log(err)
        console.log(code)
        console.log(signal);
    });
});
On the Python side, I have a main script which loads some stuff and then calls a server script that runs forever, reading lines with sys.stdin.readline() and executing the corresponding task.
start.py is:
if __name__ == '__main__':
    # data = json.loads(sys.argv[1])
    from multiprocessing import Manager, Pool
    import logging
    import provider, server

    # Get logging setup objects
    debug_queue, debug_listener = provider.shared_logging(logging.DEBUG, 'python-server-debug.log')
    info_queue, info_listener = provider.shared_logging(logging.INFO, 'python-server.log')
    logger = logging.getLogger(__name__)

    # Start logger listener
    debug_listener.start()
    info_listener.start()

    logger.info('Initializing pool of workers...')
    pool = Pool(initializer=provider.worker, initargs=[info_queue, debug_queue])

    logger.info('Initializing server...')
    try:
        server.run(pool)
    except (SystemError, KeyboardInterrupt) as e:
        logger.info('Execution terminated without errors.')
    except Exception as e:
        logger.error('Error on main process:', exc_info=True)
    finally:
        pool.close()
        pool.join()
        debug_listener.stop()
        info_listener.stop()
    print('Done.')
Both info_queue and debug_queue are multiprocessing.Queue instances used to handle multiprocessing logging. If I run the Python application standalone, everything works fine, even when using the pool of workers (logs get properly logged, prints get properly printed...).
But if I run it through python-shell, only the prints and logs from my main process get printed and logged correctly... Every message (print or log) from my pool of workers is held back until I terminate the Python script.
In other words, every message is held until the finally block after server.run() executes...
Does anyone have any insights into this issue? Has anyone heard of the python-bridge module? Is it a better solution? Can you suggest a better approach for this integration that does not use two separate servers?
Here I post my real provider script, and a quick mock I did of the server script (the real one has too much stuff):
mock server.py:
import json
import logging
import multiprocessing
import sys
import time
from json.decoder import JSONDecodeError
from threading import Thread

def task(some_args):
    logger = logging.getLogger(__name__)
    results = 'results of machine learn task goes here, as a string'
    logger.info('log whatever im doing')
    # Some machine-learn task...
    logger.info('Returning results.')
    return results

def answer_node(message):
    print(message)
    # sys.stdout.write(message)
    # sys.stdout.flush()

def run(pool, recrutai, job_pool, candidate_queue):
    logger = logging.getLogger(__name__)
    workers = []
    logger.info('Server is ready and waiting for commands')
    while True:
        # Read input stream
        command = sys.stdin.readline()
        command = command.split('\n')[0]
        logger.debug('Received command: %s', command)
        if command == 'extract-job':
            logger.info(
                'Creating task.',
            )
            # TODO: Check data attributes
            p = pool.apply_async(
                func=task,
                args=('args',),  # note: must be a tuple; ('args') is just a string
                callback=answer_node
            )
            # What to do with workers array?!
            workers.append(p)
        elif command == 'other-commands':
            pass
            # Other task here
        elif command == 'terminate':
            raise SystemError
        else:
            logger.warn(
                'Received an invalid command %s.',
                command
            )
my provider.py:
import logging
import os
from logging.handlers import QueueHandler, QueueListener
from multiprocessing import Queue

def shared_logging(level, file_name):
    # Create main logging file handler
    handler = logging.FileHandler(file_name)
    handler.setLevel(level)

    # Create logging format
    formatter = logging.Formatter('%(asctime)s - %(name)s - %(levelname)s - %(message)s')
    handler.setFormatter(formatter)

    # Create queue shared between all processes to centralize logging features
    logger_queue = Queue()  # multiprocessing.Queue

    # Create logger queue listener to send records from logger_queue to handler
    logger_listener = QueueListener(logger_queue, handler)
    return logger_queue, logger_listener

def process_logging(info_queue, debug_queue, logger_name=None):
    # Create logging queue handlers
    debug_queue_handler = QueueHandler(debug_queue)
    debug_queue_handler.setLevel(logging.DEBUG)
    info_queue_handler = QueueHandler(info_queue)
    info_queue_handler.setLevel(logging.INFO)

    # Setup level of process logger
    logger = logging.getLogger()
    if logger_name:
        logger = logging.getLogger(logger_name)
    logger.setLevel(logging.DEBUG)

    # Add handlers to the logger
    logger.addHandler(debug_queue_handler)
    logger.addHandler(info_queue_handler)

def worker(info_queue, debug_queue):
    # Setup worker process logging
    process_logging(info_queue, debug_queue)
    logging.debug('Process %s initialized.', os.getpid())

How to output live webcam feed to browser using nodejs and HTML

When I run node start.js and look at the output on localhost:3000, I see the webcam window. But I want the webcam feed to show up in the browser page, not in a native webcam window.
start.js
Below is the JS which acts as the server:
// import express JS module into app
// and creates its variable.
var express = require('express');
var app = express();

// Creates a server which runs on port 3000 and
// can be accessed through localhost:3000
app.listen(3000, function() {
    console.log('server running on port 3000');
});

// Function callName() is executed whenever
// url is of the form localhost:3000/name
app.get('/name', callName);

function callName(req, res) {
    // Use child_process.spawn method from
    // child_process module and assign it
    // to variable spawn
    var spawn = require("child_process").spawn;

    // Parameters passed in spawn -
    // 1. type_of_script
    // 2. list containing Path of the script
    //    and arguments for the script
    var process = spawn('python', ["/home/saswat/Desktop/Fabric demo/FR+Nodejs/camera.py"]);

    // Takes stdout data from script which executed
    // with arguments and send this data to res object
    process.stdout.on('data', function(data) {
        res.send(data.toString());
    });
}
Below is the Python code for enabling the webcam:
camera.py
import cv2

def show_webcam(mirror=False):
    cam = cv2.VideoCapture(0)
    while True:
        ret_val, img = cam.read()
        if mirror:
            img = cv2.flip(img, 1)
        cv2.imshow('my webcam', img)
        if cv2.waitKey(1) == 27:
            break  # esc to quit
    cv2.destroyAllWindows()

def main():
    show_webcam(mirror=True)

if __name__ == '__main__':
    main()

groovy executing shell commands on remote server

I have an issue with executing shell commands on a remote server.
I'm trying various solutions, and I have one that works, but it is not ideal in terms of maintenance: I use a batch file that launches putty, which connects to the remote server and sends the command.
i.e. in Groovy:
def batchFile = "C:\\Validation\\Tests_Auto\\Scripts\\remote_process\\setOldDate.bat"
Runtime.runtime.exec(batchFile)
and in my batch file:
c:
cd C:\Validation\Tests_Auto\Scripts\remote_process\
putty.exe -ssh root@xx.xx.xx.xx -pw **** -m "C:\Validation\Tests_Auto\Scripts\remote_process\setOldDate.txt"
setOldDate.txt contains the command date -s @1522018800
This works. However, I'd like to launch it in a cleaner way, either avoiding the text file for the command or, better, avoiding putty altogether.
I tried several other ways to do the same thing, but they don't work. I think I'm not far off, but I need a little help.
I tried to launch a direct command via ssh:
Runtime.getRuntime().exec('"c:\\Program Files\\OpenSSH\\bin\\ssh.exe" root:****@xx.xx.xx.xx date -s @1522018800')
I'd be grateful if anyone could help.
Thanks!
@Grab(group='com.jcraft', module='jsch', version='0.1.54')
def ant = new AntBuilder()
ant.sshexec( host:"somehost", username:"dude", password:"yo", command:"touch somefile" )
For other sshexec and scp task parameters, see the docs:
https://ant.apache.org/manual/Tasks/sshexec.html
https://ant.apache.org/manual/Tasks/scp.html
For SoapUI:
this method uses Apache Ant + jsch-0.1.54.jar.
The only way I know for SoapUI:
download the following libraries and put them into the soapui\bin\endorsed directory (create the endorsed directory):
https://central.maven.org/maven2/org/apache/ant/ant/1.9.11/ant-1.9.11.jar
https://central.maven.org/maven2/org/apache/ant/ant-launcher/1.9.11/ant-launcher-1.9.11.jar
https://central.maven.org/maven2/com/jcraft/jsch/0.1.54/jsch-0.1.54.jar
edit soapui\bin\soapui.bat and add the following line where the other JAVA_OPTS are defined:
set JAVA_OPTS=%JAVA_OPTS% -Djava.endorsed.dirs="%SOAPUI_HOME%endorsed"
That's because the Ant libs must be loaded before Groovy.
Then the code above should work in SoapUI (except @Grab).
Alternatively, you can download only jsch-XXX.jar into the existing soapui\bin\ext directory and use the jsch library directly from Groovy;
see examples: http://www.jcraft.com/jsch/examples/
or search for Groovy jsch examples.
Finally, compiling my various research and struggling to fit my environment constraints (Groovy in SoapUI), I ended up with the following solution, which works for me:
download jsch-0.1.54.jar and put it in C:\Program Files\SmartBear\ReadyAPI-2.3.0\bin\ext
use the following Groovy script:
import java.util.Properties
import com.jcraft.jsch.ChannelExec
import com.jcraft.jsch.JSch
import com.jcraft.jsch.Session

def ip = context.expand( '${#Project#projectEndpoint}' )

Session session  // declared outside try so the finally block can reach it
try
{
    JSch jsch = new JSch();
    session = jsch.getSession("root", "$ip", 22);
    session.setPassword("****");

    // Avoid asking for key confirmation
    Properties prop = new Properties();
    prop.put("StrictHostKeyChecking", "no");
    session.setConfig(prop);
    session.connect();

    // SSH Channel
    ChannelExec channelssh = (ChannelExec)session.openChannel("exec");

    // Execute command
    //channelssh.setCommand("date -s @1520018000"); // change date
    channelssh.setCommand("ntpdate -u pool.ntp.org"); // restore date
    channelssh.connect();
    channelssh.disconnect();
}
catch (Exception e)
{
    log.info "exception : " + e
    System.out.println(e.getMessage());
}
finally
{
    session.disconnect();
}
UPGRADE
Here is a generalization I made as my needs evolved. The following script, still using jsch, allows sending any command.
It performs proper host checking, eliminating the hazards of disabling host checking.
User and password are passed as parameters.
import java.util.Properties
import com.jcraft.jsch.ChannelExec
import com.jcraft.jsch.JSch
import com.jcraft.jsch.Session
import java.util.regex.Pattern

def ip = context.expand( '${get endpoint#endpoint}' )
ip = ip.replaceFirst("http[s]?://", "")
def user = context.expand( '${#Project#ssh_user}' )
def password = context.expand( '${#Project#ssh_password}' )
def command = context.expand( '${#TestCase#command}' )
def timeout = context.expand( '${#TestCase#timeout_ms}' )
if (timeout == "")
    timeout = 1000 // default timeout 1s
else
    timeout = timeout.toInteger()
log.info "command = " + command

Session session
try
{
    JSch jsch = new JSch();
    session = jsch.getSession(user, ip, 22);
    session.setPassword(password);
    //log.info "user : $user"
    //log.info "set password : $password"
    //log.info System.getProperty("user.home") + "/.ssh/known_hosts"
    jsch.setKnownHosts(System.getProperty("user.home") + "/.ssh/known_hosts");
    session.connect();
    //log.info "session connect"

    // SSH Channel
    ChannelExec channelssh = (ChannelExec)session.openChannel("exec");

    // Execute command
    channelssh.setCommand(command);
    InputStream commandOutput = channelssh.getInputStream();
    channelssh.connect();

    int readByte = commandOutput.read();
    outputBuffer = [];
    // timeout to avoid an infinite loop
    while((readByte != -1) && (timeout > 0))
    {
        outputBuffer.add(readByte)
        readByte = commandOutput.read();
        timeout = timeout - 1
    }

    // process output
    outputBuffer = outputBuffer as byte[]
    // convert byte array into string
    output = new String(outputBuffer, "UTF-8")
    sleep(3000)
    //log.info "disconnect"
    channelssh.disconnect();

    testRunner.testCase.setPropertyValue("cmd_output", output)
}
catch (Exception e)
{
    msg = "exception : " + e
    log.error msg
    testRunner.fail(msg)
}
finally
{
    session.disconnect();
}
