I am plotting real-time audio data from the audio jack and trying to keep the entire signal in one plot (from time 0 up to the current moment). To do this I append the incoming audio to a list ('merged' in my case) and plot the updated list again and again, but as the data grows (i.e. the number of elements in 'merged' increases) the plotting becomes slower and slower. Any suggestions to make it faster, keeping in mind that the final plot needs to include all the data points from start to end?
Please find my code below:
import pyaudio
import itertools
import numpy as np
import time
import matplotlib.pyplot as plt
from scipy.signal import butter, lfilter
import matplotlib.animation as animation

RATE = 44100
CHUNK = int(RATE/2)  # RATE / number of updates per second

fig = plt.figure()
ax = fig.add_subplot(1, 1, 1)
ax.set_ylim([-1, 1])
line, = ax.plot([], [], '-k', label='red')
ax.legend()

frames = []

# define callback (2)
def callback(in_data, frame_count, time_info, status):
    # convert data to array
    data = np.fromstring(in_data, dtype=np.float32)
    frames.append(data)
    return (in_data, pyaudio.paContinue)

if __name__ == "__main__":
    # instantiate PyAudio (1)
    p = pyaudio.PyAudio()
    # open stream using callback (3)
    stream = p.open(format=pyaudio.paFloat32,
                    channels=1,
                    rate=RATE,
                    input=True,
                    frames_per_buffer=CHUNK,
                    stream_callback=callback)
    # start the stream (4)
    stream.start_stream()

    tt = 0
    xar = []

    while stream.is_active():
        if frames:
            t1 = time.time()

            def animate(i):
                # data appended to frames (global) is pulled out every time one plotting cycle is over
                data_out = frames.pop()
                xar.append(data_out.tolist())
                merged = list(itertools.chain.from_iterable(xar))  # merging of audio data
                line.set_ydata(merged)
                line.set_xdata(range(len(merged)))
                ax.relim()
                ax.autoscale_view()
                data_filter = []
                data_out = []
                print((time.time() - t1) % 60)

            ani = animation.FuncAnimation(fig, animate, interval=1000)
            plt.show()

    # close stream and connection
    stream.close()
    p.terminate()
    # wait for stream to finish (5)
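For what it's worth, a common way to keep such a growing plot responsive is to store the samples in a single NumPy array and update the line's data directly, rather than rebuilding a Python list with itertools.chain and calling relim/autoscale on every frame. The following is only a sketch, assuming the same frames list filled by the callback above:

import numpy as np
import matplotlib.pyplot as plt
import matplotlib.animation as animation

# Illustration only: `frames` is assumed to be filled by the PyAudio callback shown above.
frames = []

fig, ax = plt.subplots()
ax.set_ylim(-1, 1)
line, = ax.plot([], [], '-k')

merged = np.empty(0, dtype=np.float32)  # all samples received so far

def animate(i):
    global merged
    # drain every chunk that arrived since the last draw
    while frames:
        merged = np.concatenate((merged, frames.pop(0)))
    line.set_data(np.arange(len(merged)), merged)
    ax.set_xlim(0, max(len(merged), 1))  # cheaper than relim()/autoscale_view()
    return line,

ani = animation.FuncAnimation(fig, animate, interval=1000)
plt.show()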
I am having an issue when I run the code below. The goal is to develop an app that achieves real-time sound acquisition. I have set the CHUNK (frame) size to 320 with a 16 kHz sampling rate, hence a frame duration of 0.02 s. The issue is that when I record, the result (the content of the variable "many") contains glitches or noise. When I double the CHUNK, the problem disappears. The value 0.02 depends on the nature of the problem I am trying to solve; it is required to be 0.02. Do you have any suggestions?
import pyaudio
import struct
import numpy as np
import matplotlib.pyplot as plt
import time
import IPython.display as ipd

CHUNK = int(1*320)
FORMAT = pyaudio.paFloat32
CHANNELS = 1
RATE = 16000

p = pyaudio.PyAudio()

chosen_device_index = 1
for x in range(0, p.get_device_count()):
    info = p.get_device_info_by_index(x)
    # print(p.get_device_info_by_index(x))
    if info["name"] == "pulse":
        chosen_device_index = info["index"]

print("Chosen index: ", chosen_device_index)

stream = p.open(format=FORMAT,
                channels=CHANNELS,
                rate=RATE,
                input_device_index=chosen_device_index,
                input=True,
                output=False,
                frames_per_buffer=CHUNK)

plt.ion()
%matplotlib qt

fig, ax = plt.subplots()
x = np.arange(0, CHUNK)
data = stream.read(CHUNK)
print(len(data))
data_ = struct.unpack(str(CHUNK) + 'f', data)
line, = ax.plot(x, data_)
ax.set_ylim([-1, 1])

many = []
while True:
    data = struct.unpack(str(CHUNK) + 'f', stream.read(CHUNK))
    line.set_ydata(data)
    fig.canvas.draw()
    fig.canvas.flush_events()
    many = np.concatenate((many, data), axis=None)

ipd.Audio(many, rate=16000)
From the conversation between you and fdcpp, it seems true that this piece of code

line.set_ydata(data)
fig.canvas.draw()
fig.canvas.flush_events()
many = np.concatenate((many, data), axis=None)

takes more than 0.02 s to run. That's why, when the next CHUNK of data arrives, your code is not yet ready to receive it, which causes an input overflow.
There are different ways to work around this, but I agree with fdcpp that the best way to solve the problem is to think about your end goal.
For example, you can separate the receiving of audio data from the processing of the data (i.e. your line/fig code): one process just receives and stores the audio data, while the other takes the stored data and draws it.
But please keep in mind that as long as the drawing part takes more than 0.02 s, you cannot achieve the "real-time" display you wanted.
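As a rough illustration of that separation (a sketch only, using PyAudio's callback mode and a queue; audio_q and the overall structure are my assumptions, not code from the original post), the audio thread only stores chunks while the main loop drains the queue and draws whenever it has time:

import queue
import time
import numpy as np
import pyaudio
import matplotlib.pyplot as plt

RATE = 16000
CHUNK = 320
audio_q = queue.Queue()  # buffer between acquisition and drawing

def callback(in_data, frame_count, time_info, status):
    # runs in PyAudio's thread: only copy the data, never draw here
    audio_q.put(np.frombuffer(in_data, dtype=np.float32))
    return (None, pyaudio.paContinue)

p = pyaudio.PyAudio()
stream = p.open(format=pyaudio.paFloat32, channels=1, rate=RATE,
                input=True, frames_per_buffer=CHUNK,
                stream_callback=callback)
stream.start_stream()

plt.ion()
fig, ax = plt.subplots()
line, = ax.plot(np.zeros(CHUNK))
ax.set_ylim(-1, 1)

many = []
try:
    while stream.is_active():
        latest = None
        while not audio_q.empty():
            latest = audio_q.get()
            many.append(latest)          # keep the full recording (list of chunks)
        if latest is not None:
            line.set_ydata(latest)       # drawing may lag; recording does not
            fig.canvas.draw_idle()
            fig.canvas.flush_events()
        else:
            time.sleep(0.005)            # avoid busy-waiting when no new data
finally:
    stream.stop_stream()
    stream.close()
    p.terminate()

The recording itself is then driven entirely by the callback, so even if drawing takes longer than 0.02 s, no input chunks are lost.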
I use Miniconda Jupyter Notebook with Python and I'm trying to implement audio filtering. I got this error and I really don't know how to fix it.
Here I import the libraries I need, along with the path of the file:
import wave as we
import numpy as np
import matplotlib.pyplot as plt
dir = r'/home/pc/Downloads/Bubble audios'
Here is the function that should plot the graph:
def read_wav(wavfile, plots=True, normal=False):
    f = wavfile
    params = f.getparams()
    # print(params)
    nchannels, sampwidth, framerate, nframes = params[:4]
    strData = f.readframes(nframes)  # read frames, string format
    waveData = np.frombuffer(strData, dtype=np.int16)  # convert the string to int
    # wave amplitude normalization
    if normal == True:
        waveData = waveData*1.0/(max(abs(waveData)))
    # plot the waveform
    if plots == True:
        time = np.arange(0, nframes, dtype=np.int16) * (1.0 / framerate)
        plt.figure(dpi=100)
        plt.plot(time, waveData)
        plt.xlabel("Time")
        plt.ylabel("Amplitude")
        plt.title("Single channel wavedata")
        plt.show()
    return (Wave, time)

def fft_wav(waveData, plots=True):
    f_array = np.fft.fft(waveData)  # Fourier transform, the result is a complex array
    f_abs = f_array
    axis_f = np.linspace(0, 250, np.int(len(f_array)/2))  # map to 250
    # axis_f = np.linspace(0, 250, np.int(len(f_array)))  # map to 250
    if plots == True:
        plt.figure(dpi=100)
        plt.plot(axis_f, np.abs(f_abs[0:len(axis_f)]))
        # plt.plot(axis_f, np.abs(f_abs))
        plt.xlabel("Frequency")
        plt.ylabel("Amplitude spectrum")
        plt.title("Tile map")
        plt.show()
    return f_abs
And here I call the function with the file that I want to be read and plotted.
f = we.open(dir+r'/Ars1_Aufnahme.wav', 'rb')
Wave, time = read_wav(f)
The error that I got:
ValueError: x and y must have same first dimension, but have shapes (2140699,) and (4281398,)
I tried to use np.reshape but it didn't work or I might have used it wrong. So, any advice?
It seems that your time axis is 1/2 of the size of your wave data. Maybe your nframes is too short. If you do nframes = 2*nframes, what is the error?
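If it helps to narrow it down, a factor of two often comes from a stereo (two-channel) file, where readframes returns nframes interleaved sample pairs. This is only an assumption, but it can be checked with a small sketch; the helper name check_channels is hypothetical:

import wave as we
import numpy as np

def check_channels(path):
    # Compare the sample count with nframes to see whether the file is stereo.
    with we.open(path, 'rb') as f:
        nchannels, sampwidth, framerate, nframes = f.getparams()[:4]
        data = np.frombuffer(f.readframes(nframes), dtype=np.int16)
    print("channels:", nchannels, "samples:", len(data), "frames:", nframes)
    if nchannels == 2:
        data = data.reshape(-1, 2)[:, 0]       # keep one channel only
    time = np.arange(len(data)) / framerate    # now len(time) == len(data)
    return data, time

Building the time axis from len(data) rather than nframes guarantees the two arrays passed to plt.plot have the same length.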
I want to visualise values from a pressure sensing mat (32x32 pressure points) in real time as a heatmap with MatPlotLib animation.
The mat outputs 1025 bytes (1024 values + an 'end byte' which is always 255). I print these out from inside the animate function, but it only works if I comment out plt.imshow(np_ints).
With plt.imshow, the MatPlotLib window pops up and even reads the values... I can see it in the heatmap when I start the program while pressing down on the sensor, but when I release it, it seems to slowly work through all the readings in the serial buffer instead of being real-time. I'm not sure if it's because I'm not handling the serial port properly or something to do with how FuncAnimation works. Can someone point me in the right direction please?
import numpy as np
import serial
import matplotlib.pyplot as plt
import matplotlib.animation as animation

np.set_printoptions(threshold=1024, linewidth=1500)

fig = plt.figure()
ax = fig.add_subplot(1, 1, 1)

def animate(i):
    # np_ints = np.random.random((200, 200))  # FOR TESTING ONLY
    if ser.inWaiting:
        ser_bytes = bytearray(ser.read_until(b'\xFF'))  # should read 1025 bytes (1024 values + end byte)
        if len(ser_bytes) != 1025: return  # prevent error from an 'incomplete' serial reading
        ser_ints = [int(x) for x in ser_bytes]
        np_ints = np.array(ser_ints[:-1])  # drop the end byte
        np_ints = np_ints.reshape(32, 32)
        print(len(ser_ints))
        print(np.matrix(np_ints))
        plt.imshow(np_ints)  # THIS BREAKS IT

if __name__ == '__main__':
    ser = serial.Serial('/dev/tty.usbmodem14101', 11520)
    ser.flushInput()
    ani = animation.FuncAnimation(fig, animate, interval=10)
    plt.show()
The code below animates random numbers using blitting. The trick is not to call plt.imshow on every frame but to update the artist's data instead: calling plt.imshow repeatedly creates a new image on the current axes each time, and the slowdown is caused by the many artists that accumulate in the figure.
import numpy as np
import matplotlib.pyplot as plt
import matplotlib.animation as animation

np.set_printoptions(threshold=1024, linewidth=1500)

fig = plt.figure()
ax = fig.add_subplot(1, 1, 1)

# create dummy data
h = ax.imshow(np.random.rand(32, 32))

def animate(i):
    # np_ints = np.random.random((200, 200))  # FOR TESTING ONLY
    # put here code for reading data
    np_ints = np.random.rand(32, 32)  # not ints here, but the principle stays the same
    # update the existing artist instead of creating a new image
    h.set_data(np_ints)
    # return the updated artist for blitting
    return h,

if __name__ == '__main__':
    ani = animation.FuncAnimation(fig, animate, interval=10, blit=True)
    plt.show()
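The serial read would go where the placeholder comment sits in animate. To avoid the effect described in the question, where the plot slowly works through a backlog of buffered readings, one option is to drain the buffer and only draw the newest complete frame. This is only a sketch assuming pyserial; read_latest_frame is a hypothetical helper, not part of the answer above:

import numpy as np
import serial

def read_latest_frame(ser):
    # Return the newest complete 1025-byte frame, discarding any backlog.
    latest = None
    while ser.in_waiting >= 1025:                  # drain everything already buffered
        chunk = ser.read_until(b'\xFF')
        if len(chunk) == 1025:
            latest = np.frombuffer(chunk[:-1], dtype=np.uint8).reshape(32, 32)
    return latest                                  # None if no full frame has arrived yet

Inside animate this would become: frame = read_latest_frame(ser); if frame is not None: h.set_data(frame).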
I have 4D NIfTI images with dimensions [x, y, slices, frames]: the first two are the spatial resolution, the third is the slice number, and the last one is the frame number. I am trying to plot all the slices of a specific frame in one figure and update it frame by frame using for loops, instead of doing all the indexing manually as before. The problem is that my images do not update from frame to frame (except the last one at the bottom), as you can see in the attached photo. How can I solve this issue?
#==================================
import nibabel as nib
import numpy as np
import matplotlib.pyplot as plt
#==================================
# load image (4D) [X,Y,Z_slice,time]
nii_img = nib.load(path)
nii_data = nii_img.get_fdata()
#===================================================
fig, ax = plt.subplots(4, 3, constrained_layout=True)
fig.canvas.set_window_title('4D Nifti Image')
fig.suptitle('4D_Nifti 10 slices 30 time Frames', fontsize=16)
#-------------------------------------------------------------------------------
mng = plt.get_current_fig_manager()
mng.full_screen_toggle()

slice_counter = 0
for i in range(30):
    for j in range(3):
        for k in range(3):
            if slice_counter < 9:
                ax[j, k].cla()
                ax[j, k].imshow(nii_data[:, :, slice_counter, i], cmap='gray', interpolation=None)
                ax[j, k].set_title("frame {}".format(i))
                ax[j, k].axis('off')
                slice_counter += 1
            else:
                #---------------------------------
                ax[3, 0].axis('off')
                ax[3, 2].axis('off')
                #---------------------------------
                ax[3, 1].cla()
                ax[3, 1].imshow(nii_data[:, :, 9, i], cmap='gray', interpolation=None)
                ax[3, 1].set_title("frame {}".format(i))
                ax[3, 1].axis('off')
                #---------------------------------
                # Note that using time.sleep does *not* work here!
                #---------------------------------
                plt.pause(.05)
plt.close('all')
At the moment it is not quite clear to me how your output should look, because the second column in the image has more entries than the others.
Please clarify this in your question, and also update your code, which as posted does not run due to inconsistent variable names and messed-up indentation.
In the meanwhile, here is a first shot where the goal is to print all your slices along the x-axis and each frame along the y-axis.
I adapted the code so that it prints the first four frames of the first three slices.
#==================================
import nibabel as nib
import numpy as np
import matplotlib.pyplot as plt
#==================================
# load image (4D) [X,Y,Z_slice,time]
nii_img = nib.load(path)
nii_data = nii_img.get_fdata()
#===================================================
number_of_slices = 3
number_of_frames = 4

fig, ax = plt.subplots(number_of_frames, number_of_slices, constrained_layout=True)
fig.canvas.set_window_title('4D Nifti Image')
fig.suptitle('4D_Nifti 10 slices 30 time Frames', fontsize=16)
#-------------------------------------------------------------------------------
mng = plt.get_current_fig_manager()
mng.full_screen_toggle()

for slice in range(number_of_slices):
    for frame in range(number_of_frames):
        ax[frame, slice].imshow(nii_data[:, :, slice, frame], cmap='gray', interpolation=None)
        ax[frame, slice].set_title("layer {} / frame {}".format(slice, frame))
        ax[frame, slice].axis('off')
plt.show()
The sample output for a black image looks like this:
sample output
Update - 05.04.2020
Given the information from the discussion in the comments, here is the updated version:
#==================================
import nibabel as nib
import numpy as np
import matplotlib.pyplot as plt
from math import ceil
#==================================
# Load image (4D) [X,Y,Z_slice,time]
nii_img = nib.load(path)
nii_data = nii_img.get_fdata()
#===================================================
number_of_slices = nii_data.shape[2]
number_of_frames = nii_data.shape[3]

# Define subplot layout
aspect_ratio = 16./9
number_of_colums = int(number_of_slices / aspect_ratio)
if (number_of_slices % number_of_colums > 0):
    number_of_colums += 1
number_of_rows = ceil(number_of_slices / number_of_colums)

# Setup figure
fig, axs = plt.subplots(number_of_rows, number_of_colums, constrained_layout=True)
fig.canvas.set_window_title('4D Nifti Image')
fig.suptitle('4D_Nifti {} slices {} time Frames'.format(number_of_slices, number_of_frames), fontsize=16)
#-------------------------------------------------------------------------------
mng = plt.get_current_fig_manager()
mng.full_screen_toggle()

for frame in range(number_of_frames):
    for slice, ax in enumerate(axs.flat):
        # For every slice print the image, otherwise show empty space.
        if slice < number_of_slices:
            ax.imshow(nii_data[:, :, slice, frame], cmap='gray', interpolation=None)
            ax.set_title("layer {} / frame {}".format(slice, frame))
            ax.axis('off')
        else:
            ax.axis('off')
    plt.pause(0.05)
plt.close('all')
The output will look like: second sample output
I have a series to plot on the y-axis.
y = [3,4,5,1,4,7,4,7,1,9]
However, I want to plot it against the current time, second by second. I've done it like this:
import time

def xtime():
    t = time.strftime("%H%M%S")
    t = int(t)
    xtime = [t]
    while xtime:
        t = time.strftime("%H%M%S")
        t = int(t)
        xtime.extend([t])
        time.sleep(1)
I'm having a problem when I try to plot each of the numbers in y, one per second. Please correct my code here:
import time
import matplotlib.pyplot as plt
import matplotlib.animation as animation

fig = plt.figure()

def animate(i):
    x = xtime()
    y = [3,4,5,1,4,7,4,7,1,9]
    plt.plot(x, y)

ani = animation.FuncAnimation(fig, animate, interval=1000)
plt.show()
The xtime function referred to here is the one in the first code block above.
Thanks!
import matplotlib.pyplot as plt
import matplotlib.animation as animation
import numpy as np

# Y data
ydata = [3,4,5,1,4,7,4,7,1,9]
# how many points
N = len(ydata)
# make x data
xdata = np.arange(N)

def animate(i):
    # update the data in our Line2D artist
    # note that when run this will look at the global namespace for
    # an object called `ln` which we will define later
    ln.set_data(xdata[:i], ydata[:i])
    # return the updated artist for the blitting
    return ln,

# make our figure and axes
fig, ax = plt.subplots()
# make the artist we will be using. Note this was used in `animate`
ln, = ax.plot([], [], animated=True)
# set the axes limits
ax.set_xlim(0, N)
ax.set_ylim(0, 10)

# run the animation. Keeping a ref to the animation object is important
# as if it gets garbage collected it takes your timer and callbacks with it
ani = animation.FuncAnimation(fig, animate, frames=N, interval=1000, blit=True)
plt.show()