AttributeError: 'tuple' object has no attribute 'read' from music21 .show() (python-3.x)

I have been trying to show a score converted from a .krn file:
import music21 as m
import os

test_data = "D:/Programming/DATA - SCIENCE/deep learning/music generation/data/test"

def load_krn_files(data_path):
    # go through all the files under data_path
    songs = []
    for path, subdirs, files in os.walk(data_path):
        for file in files:
            if file[-3:] == "krn":
                song = m.converter.parse(os.path.join(path, file))
                songs.append(song)
    return songs

def preprocessing(data_path):
    pass

# 1) load the kern files and parse them
if __name__ == "__main__":
    songs = load_krn_files(test_data)
    print(f"loaded {len(songs)} songs.")
    song = songs[0]
    song.show()
but the show() method raises the following error:
loaded 12 songs.
Traceback (most recent call last):
File "d:/Programming/DATA - SCIENCE/deep learning/music generation/scripts/preprocess.py", line 25, in
song.show()
File "C:\Users\ae504\AppData\Local\Programs\Python\Python38\lib\site-packages\music21\stream\base.py", line 334, in show
return super().show(fmt=fmt, app=app, **keywords)
File "C:\Users\ae504\AppData\Local\Programs\Python\Python38\lib\site-packages\music21\base.py", line 2788, in show
return formatWriter.show(self,
File "C:\Users\ae504\AppData\Local\Programs\Python\Python38\lib\site-packages\music21\converter\subConverters.py", line 1114, in show
self.launch(returnedFilePath, fmt=fmt, app=app)
File "C:\Users\ae504\AppData\Local\Programs\Python\Python38\lib\site-packages\music21\converter\subConverters.py", line 197, in launch
subprocess.run(cmd, check=False, shell=shell)
File "C:\Users\ae504\AppData\Local\Programs\Python\Python38\lib\site-packages\run_init_.py", line 145, in new
process = cls.create_process(command, stdin, cwd=cwd, env=env, shell=shell)
File "C:\Users\ae504\AppData\Local\Programs\Python\Python38\lib\site-packages\run_init_.py", line 121, in create_process
shlex.split(command),
File "C:\Users\ae504\AppData\Local\Programs\Python\Python38\lib\shlex.py", line 311, in split
return list(lex)
File "C:\Users\ae504\AppData\Local\Programs\Python\Python38\lib\shlex.py", line 300, in next
token = self.get_token()
File "C:\Users\ae504\AppData\Local\Programs\Python\Python38\lib\shlex.py", line 109, in get_token
raw = self.read_token()
File "C:\Users\ae504\AppData\Local\Programs\Python\Python38\lib\shlex.py", line 140, in read_token
nextchar = self.instream.read(1)
AttributeError: 'tuple' object has no attribute 'read'
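For what it's worth, the frames in site-packages\run\__init__.py suggest a third-party package named run is intercepting subprocess.run and choking on the command tuple; if you have such a package installed, pip uninstall run may be worth trying. Failing that, here is a hedged sketch of two workarounds (the MuseScore path is an assumption; adjust it to your install), reusing song from the script above:

import music21 as m

# Workaround 1: skip the external viewer entirely.
song.show('text')                            # print the parsed score structure
song.write('musicxml', fp='song.musicxml')   # or write a file and open it manually

# Workaround 2: tell music21 explicitly which notation program to launch.
us = m.environment.UserSettings()
us['musicxmlPath'] = r'C:\Program Files\MuseScore 3\bin\MuseScore3.exe'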

Related

AttributeError: 'list' object has no attribute 'decode'. I'm getting this error while loading a CSV file into an array. How do I solve it?

import csv
import requests
from bs4 import BeautifulSoup
import wget

with open('__memes_magic_thumbnails.csv', newline='') as csvfile:
    data = list(csv.reader(csvfile))
print(data)

k = 0
for link in data:
    print(k)
    wget.download(link, "vid/logo.jpg")
    k += 1
    print("success")
For this code I'm getting the following error:
Traceback (most recent call last):
File "i:\Meme Channel\channel1\automated_youtube_channel-master\automated_youtube_channel-master\scraper.py", line 12, in <module>
wget.download(link , "vid/logo.jpg")
File "C:\Users\Sambhaji Karbhari\AppData\Local\Programs\Python\Python310\lib\site-packages\wget.py", line 505, in download
prefix = detect_filename(url, out)
File "C:\Users\Sambhaji Karbhari\AppData\Local\Programs\Python\Python310\lib\site-packages\wget.py", line 484, in detect_filename
names["url"] = filename_from_url(url) or ''
File "C:\Users\Sambhaji Karbhari\AppData\Local\Programs\Python\Python310\lib\site-packages\wget.py", line 230, in filename_from_url
fname = os.path.basename(urlparse.urlparse(url).path)
File "C:\Users\Sambhaji Karbhari\AppData\Local\Programs\Python\Python310\lib\urllib\parse.py", line 392, in urlparse
url, scheme, _coerce_result = _coerce_args(url, scheme)
File "C:\Users\Sambhaji Karbhari\AppData\Local\Programs\Python\Python310\lib\urllib\parse.py", line 128, in _coerce_args return _decode_args(args) + (_encode_result,)
File "C:\Users\Sambhaji Karbhari\AppData\Local\Programs\Python\Python310\lib\urllib\parse.py", line 112, in _decode_args return tuple(x.decode(encoding, errors) if x else '' for x in args)
File "C:\Users\Sambhaji Karbhari\AppData\Local\Programs\Python\Python310\lib\urllib\parse.py", line 112, in <genexpr>
return tuple(x.decode(encoding, errors) if x else '' for x in args)
AttributeError: 'list' object has no attribute 'decode'
Here I am downloading an image from a link taken from an array that was populated from a CSV file.
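A likely cause, offered as a hedged sketch rather than a confirmed fix: csv.reader yields each row as a list of column strings, so link here is a list, and wget.download passes that list down to urllib.parse, which expects a string. Indexing into the row should resolve it (column 0 is an assumption; use whichever column holds the URL):

import csv
import wget

with open('__memes_magic_thumbnails.csv', newline='') as csvfile:
    rows = list(csv.reader(csvfile))

for k, row in enumerate(rows):
    url = row[0]  # assumption: the URL is in the first column
    print(k, url)
    wget.download(url, "vid/logo.jpg")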

Is it possible to iterate through Tensor in graph mode?

I am trying to integrate Aleju's imgaug into the TFOD (TensorFlow Object Detection) API. I noticed that you cannot iterate through Tensors in graph mode. I looked for a solution and tried many suggestions, but none of them worked for my case. Do you know of any workaround?
import imgaug.augmenters as iaa
from imgaug.augmentables.bbs import BoundingBox, BoundingBoxesOnImage
from tensorflow.python.framework.ops import EagerTensor
import tensorflow.compat.v1 as tf
import numpy as np

augseq = iaa.Sequential([
    # augmentation options
], random_order=True)

@tf.function
def augment(image, boxes):
    image_np = image.numpy().astype(np.uint8) if type(image) == EagerTensor else image
    boxes_np = boxes.numpy() if type(boxes) == EagerTensor else boxes
    width, height, _ = image_np.shape
    bbs = []
    for i in range(len(boxes_np)):
        box = boxes_np[i]
        ymin, xmin, ymax, xmax = box.numpy()
        bbs.append(BoundingBox(
            x1=xmin * width, y1=ymin * height,
            x2=xmax * width, y2=ymax * height))
    bbs = BoundingBoxesOnImage(bbs, shape=image_np.shape)
    image_aug, bbs_aug = augseq(image=image_np, bounding_boxes=bbs)  # float np.ndarray
    bbs_aug = bbs_aug.remove_out_of_image().clip_out_of_image()
    boxes_aug = []
    for bb in bbs_aug:
        boxes_aug.append([bb.y1 / height, bb.x1 / width, bb.y2 / height, bb.x2 / width])
    boxes_aug = np.array(boxes_aug)
    return image_aug, boxes_aug
Stack Trace:
Traceback (most recent call last):
File "/content/models/research/object_detection/model_main_tf2.py", line 115, in <module>
tf.compat.v1.app.run()
File "/usr/local/lib/python3.7/dist-packages/tensorflow/python/platform/app.py", line 40, in run
_run(main=main, argv=argv, flags_parser=_parse_flags_tolerate_undef)
File "/usr/local/lib/python3.7/dist-packages/absl/app.py", line 303, in run
_run_main(main, args)
File "/usr/local/lib/python3.7/dist-packages/absl/app.py", line 251, in _run_main
sys.exit(main(argv))
File "/content/models/research/object_detection/model_main_tf2.py", line 112, in main
record_summaries=FLAGS.record_summaries)
File "/usr/local/lib/python3.7/dist-packages/object_detection/model_lib_v2.py", line 558, in train_loop
train_dataset_fn)
File "/usr/local/lib/python3.7/dist-packages/tensorflow/python/util/deprecation.py", line 348, in new_func
return func(*args, **kwargs)
File "/usr/local/lib/python3.7/dist-packages/tensorflow/python/distribute/distribute_lib.py", line 1199, in experimental_distribute_datasets_from_function
return self.distribute_datasets_from_function(dataset_fn, options)
File "/usr/local/lib/python3.7/dist-packages/tensorflow/python/distribute/distribute_lib.py", line 1191, in distribute_datasets_from_function
dataset_fn, options)
File "/usr/local/lib/python3.7/dist-packages/tensorflow/python/distribute/tpu_strategy.py", line 979, in _distribute_datasets_from_function
options=options)
File "/usr/local/lib/python3.7/dist-packages/tensorflow/python/distribute/input_lib.py", line 181, in get_distributed_datasets_from_function
build=build,
File "/usr/local/lib/python3.7/dist-packages/tensorflow/python/distribute/input_lib.py", line 1618, in __init__
self.build()
File "/usr/local/lib/python3.7/dist-packages/tensorflow/python/distribute/input_lib.py", line 1639, in build
self._input_contexts, self._input_workers, self._dataset_fn))
File "/usr/local/lib/python3.7/dist-packages/tensorflow/python/distribute/input_lib.py", line 2350, in _create_datasets_from_function_with_input_context
dataset = dataset_fn(ctx)
File "/usr/local/lib/python3.7/dist-packages/object_detection/model_lib_v2.py", line 553, in train_dataset_fn
input_context=input_context)
File "/usr/local/lib/python3.7/dist-packages/object_detection/inputs.py", line 906, in train_input
reduce_to_frame_fn=reduce_to_frame_fn)
File "/usr/local/lib/python3.7/dist-packages/object_detection/builders/dataset_builder.py", line 258, in build
batch_size, input_reader_config)
File "/usr/local/lib/python3.7/dist-packages/object_detection/builders/dataset_builder.py", line 237, in dataset_map_fn
fn_to_map, num_parallel_calls=num_parallel_calls)
File "/usr/local/lib/python3.7/dist-packages/tensorflow/python/util/deprecation.py", line 348, in new_func
return func(*args, **kwargs)
File "/usr/local/lib/python3.7/dist-packages/tensorflow/python/data/ops/dataset_ops.py", line 3886, in map_with_legacy_function
use_legacy_function=True))
File "/usr/local/lib/python3.7/dist-packages/tensorflow/python/data/ops/dataset_ops.py", line 5505, in __init__
use_legacy_function=use_legacy_function)
File "/usr/local/lib/python3.7/dist-packages/tensorflow/python/data/ops/dataset_ops.py", line 4540, in __init__
self._function.add_to_graph(ops.get_default_graph())
File "/usr/local/lib/python3.7/dist-packages/tensorflow/python/framework/function.py", line 544, in add_to_graph
self._create_definition_if_needed()
File "/usr/local/lib/python3.7/dist-packages/tensorflow/python/framework/function.py", line 380, in _create_definition_if_needed
self._create_definition_if_needed_impl()
File "/usr/local/lib/python3.7/dist-packages/tensorflow/python/framework/function.py", line 407, in _create_definition_if_needed_impl
capture_resource_var_by_value=self._capture_resource_var_by_value)
File "/usr/local/lib/python3.7/dist-packages/tensorflow/python/framework/function.py", line 970, in func_graph_from_py_func
outputs = func(*func_graph.inputs)
File "/usr/local/lib/python3.7/dist-packages/tensorflow/python/data/ops/dataset_ops.py", line 4458, in wrapped_fn
ret = wrapper_helper(*args)
File "/usr/local/lib/python3.7/dist-packages/tensorflow/python/data/ops/dataset_ops.py", line 4440, in wrapper_helper
ret = autograph.tf_convert(self._func, ag_ctx)(*nested_args)
File "/usr/local/lib/python3.7/dist-packages/tensorflow/python/autograph/impl/api.py", line 699, in wrapper
raise e.ag_error_metadata.to_exception(e)
AttributeError: in user code:
File "/usr/local/lib/python3.7/dist-packages/object_detection/inputs.py", line 886, in transform_and_pad_input_data_fn *
tensor_dict = pad_input_data_to_static_shapes(
File "/usr/local/lib/python3.7/dist-packages/object_detection/inputs.py", line 272, in transform_input_data *
out_tensor_dict = data_augmentation_fn(out_tensor_dict)
File "/usr/local/lib/python3.7/dist-packages/object_detection/inputs.py", line 623, in augment_input_data *
tensor_dict = preprocessor.preprocess(
File "/usr/local/lib/python3.7/dist-packages/object_detection/core/preprocessor.py", line 4812, in preprocess *
results = func(*args, **params)
File "/usr/local/lib/python3.7/dist-packages/object_detection/core/preprocessor.py", line 4422, in _adjust_imgaug *
adjusted_image, adjusted_boxes = tf.cast(imgaug_utils.augment(image,boxes), tf.float32)
File "/usr/local/lib/python3.7/dist-packages/object_detection/core/imgaug_utils.py", line 24, in augment *
ymin, xmin, ymax, xmax = box.numpy()
AttributeError: 'Tensor' object has no attribute 'numpy'
Here is what I tried that did not work:
Enabling eager execution (it is the default in TF 2.x).
Decorating/not decorating the function with @tf.function.
Creating a TF session and trying eval() or run(), which raised:
InvalidArgumentError: You must feed a value for placeholder tensor 'while/Placeholder' with dtype int32
Trying on both TPU and CPU.
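One workaround that is commonly suggested for this situation (an assumption on my part, not something the original post confirms): wrap the NumPy-based augment function in tf.py_function, which executes its target eagerly even while the surrounding pipeline is traced into a graph. The inner box.numpy() would then become plain unpacking (ymin, xmin, ymax, xmax = box), since boxes_np is already a NumPy array, and the @tf.function decorator on augment would be dropped. A minimal sketch, with the output dtypes as assumptions:

import tensorflow as tf

def tf_augment(image, boxes):
    # tf.py_function runs `augment` (defined above) eagerly, so .numpy()
    # and plain Python loops work inside it even during graph tracing.
    image_aug, boxes_aug = tf.py_function(
        func=augment,
        inp=[image, boxes],
        Tout=[tf.uint8, tf.float32],  # dtype assumptions
    )
    # Static shape information is lost across py_function; restore what is known.
    image_aug.set_shape([None, None, 3])
    boxes_aug.set_shape([None, 4])
    return image_aug, boxes_aug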

Tornado simplejson.errors.JSONDecodeError: Expecting value: line 1 column 1 (char 0)

I have a Tornado API for a Keras model. Is it possible to check concurrency using Python's multiprocessing module? I tried the code below, but it throws an error.
from multiprocessing import Pool
import requests, json

url = 'http://localhost:8888/index/predict'
payload = {"colA": "some1", "colB": "some2"}  # ... remaining columns elided
pl = json.dumps(payload)

def callAPI(x):
    session = requests.Session()
    r = session.post(url, json=json.loads(pl))
    response = r.json()
    return response

if __name__ == '__main__':
    Pool(processes=15).map(callAPI, range(5))
Error
multiprocessing.pool.RemoteTraceback:
"""
Traceback (most recent call last):
File "C:\Users\HZ\Anaconda3\lib\multiprocessing\pool.py", line 121, in worker
result = (True, func(*args, **kwds))
File "C:\Users\HZ\Anaconda3\lib\multiprocessing\pool.py", line 44, in mapstar
return list(map(*args))
File "C:\Models\CallThreadsCheck.py", line 40, in callAPI
response = r.json()
File "C:\Users\HZ\Anaconda3\lib\site-packages\requests\models.py", line 897, in json
return complexjson.loads(self.text, **kwargs)
File "C:\Users\HZ\Anaconda3\lib\site-packages\simplejson\__init__.py", line 525, in loads
return _default_decoder.decode(s)
File "C:\Users\HZ\Anaconda3\lib\site-packages\simplejson\decoder.py", line 370, in decode
obj, end = self.raw_decode(s)
File "C:\Users\HZ\Anaconda3\lib\site-packages\simplejson\decoder.py", line 400, in raw_decode
return self.scan_once(s, idx=_w(s, idx).end())
simplejson.errors.JSONDecodeError: Expecting value: line 1 column 1 (char 0)
"""
The above exception was the direct cause of the following exception:
Traceback (most recent call last):
File "CallThreadsCheck.py", line 49, in <module>
p.map(callAPI, range(calls))
File "C:\Users\HZ\Anaconda3\lib\multiprocessing\pool.py", line 268, in map
return self._map_async(func, iterable, mapstar, chunksize).get()
File "C:\Users\HZ\Anaconda3\lib\multiprocessing\pool.py", line 657, in get
raise self._value
simplejson.errors.JSONDecodeError: Expecting value: line 1 column 1 (char 0)
May I know what this error is about, please?
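"Expecting value: line 1 column 1 (char 0)" means the response body did not start with JSON at all; under concurrent load that is typically an empty body or an HTML error page from the server rather than a client-side parsing bug. A hedged diagnostic sketch (reusing url and pl from above) that inspects the raw response before parsing:

import json
import requests

def callAPI(x):
    session = requests.Session()
    r = session.post(url, json=json.loads(pl))
    # Look at what actually came back before assuming it is JSON: a 4xx/5xx
    # error page or an empty body also raises JSONDecodeError.
    print(r.status_code, r.headers.get('Content-Type'), r.text[:200])
    if r.ok and 'json' in r.headers.get('Content-Type', ''):
        return r.json()
    return None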

Upload Excel file to Python

I'm simply trying to load an Excel .xlsx file with the Python pandas package so I can tokenize the text. I've tried for hours but nothing works. Any help would be great:
import pandas as pd

excel_file = open(r'''C:\Users\farid-PC\Desktop\Tester.xlsx''', errors='ignore')
movies = pd.read_excel(excel_file)
movies.head()
Errors:
Traceback (most recent call last):
File "C:/Users/farid-PC/PycharmProjects/fake_news/fault.py", line 4, in <module>
movies = pd.read_excel(excel_file)
File "C:\Users\farid-PC\PycharmProjects\fake_news\venv\lib\site-packages\pandas\util\_decorators.py", line 178, in wrapper
return func(*args, **kwargs)
File "C:\Users\farid-PC\PycharmProjects\fake_news\venv\lib\site-packages\pandas\util\_decorators.py", line 178, in wrapper
return func(*args, **kwargs)
File "C:\Users\farid-PC\PycharmProjects\fake_news\venv\lib\site-packages\pandas\io\excel.py", line 307, in read_excel
io = ExcelFile(io, engine=engine)
File "C:\Users\farid-PC\PycharmProjects\fake_news\venv\lib\site-packages\pandas\io\excel.py", line 392, in __init__
self.book = xlrd.open_workbook(file_contents=data)
File "C:\Users\farid-PC\PycharmProjects\fake_news\venv\lib\site-packages\xlrd\__init__.py", line 162, in open_workbook
ragged_rows=ragged_rows,
File "C:\Users\farid-PC\PycharmProjects\fake_news\venv\lib\site-packages\xlrd\book.py", line 91, in open_workbook_xls
biff_version = bk.getbof(XL_WORKBOOK_GLOBALS)
File "C:\Users\farid-PC\PycharmProjects\fake_news\venv\lib\site-packages\xlrd\book.py", line 1267, in getbof
opcode = self.get2bytes()
File "C:\Users\farid-PC\PycharmProjects\fake_news\venv\lib\site-packages\xlrd\book.py", line 672, in get2bytes
return (BYTES_ORD(hi) << 8) | BYTES_ORD(lo)
TypeError: unsupported operand type(s) for <<: 'str' and 'int'
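A likely fix, offered as a sketch rather than a confirmed answer: let pandas open the file itself. Opening the workbook with open(..., errors='ignore') in text mode hands xlrd decoded str data where it expects raw bytes, which is what produces the "unsupported operand type(s) for <<: 'str' and 'int'" TypeError.

import pandas as pd

# Pass the path directly instead of a text-mode file handle.
movies = pd.read_excel(r'C:\Users\farid-PC\Desktop\Tester.xlsx')
print(movies.head())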

Receiving Type Error: 0 while updating pandas df using Data Nitro

I am updating a pandas DataFrame.
The script looks up a product. If the product is already in the DataFrame, it just updates its columns with the accumulated new values.
If the product is not there, it creates a new set of rows to insert the values of the product.
Code
for m in range(0, len(product_sales_price)):
    if exact_match(str(sales_record[n-1]), str(product_sales_price[m])) == True:
        total_product_daily_sales = counter * product_sales_price[m+1]
        '''
        print(total_product_daily_sales)
        '''
        total_product_daily_net_profit = total_product_daily_sales * .1
        print(counter)
        print(product_sales_price[m+1])
        print(total_product_daily_sales)
        print(total_product_daily_net_profit)
        print(m)
        print(product_sales_price[m])
        if (product_revenue_and_net_profit_df.ix[:, 0] == product_sales_price[m]).any() == True:
            product_revenue_and_net_profit_df.ix[:, :][
                product_revenue_and_net_profit_df.ix[:, 0] == product_sales_price[m]
            ] = [
                product_revenue_and_net_profit_df.ix[:, 0][
                    product_revenue_and_net_profit_df.ix[:, 0] == product_sales_price[m]
                ],
                product_revenue_and_net_profit_df.ix[:, 1][
                    product_revenue_and_net_profit_df.ix[:, 0] == product_sales_price[m]
                ] + counter,
                product_revenue_and_net_profit_df.ix[:, 2][
                    product_revenue_and_net_profit_df.ix[:, 0] == product_sales_price[m]
                ] + total_product_daily_sales,
                product_revenue_and_net_profit_df.ix[:, 3][
                    product_revenue_and_net_profit_df.ix[:, 0] == product_sales_price[m]
                ] + total_product_daily_net_profit,
            ]
        else:
            product_revenue_and_net_profit_df.ix[product_revenue_and_net_profit_df.shape[0] + 1, :] = [
                product_sales_price[m],
                counter,
                total_product_daily_sales,
                total_product_daily_net_profit,
            ]
Run Time
sale_frequency time (in seconds):
1
423.44
423.44
42.344
0
Bushwacker Dodge Pocket Style Fender Flare Set of 4
Traceback (most recent call last):
File "32\scriptStarter.py", line 120, in <module>
File "C:\Python Projects\Amazon-Sales\amazon_analysis.py", line 162, in <module>
print (timeit.timeit(fn + "()", "from __main__ import "+fn, number=1))
File "C:\Users\onthego\Anaconda3\lib\timeit.py", line 219, in timeit
return Timer(stmt, setup, timer).timeit(number)
File "C:\Users\onthego\Anaconda3\lib\timeit.py", line 184, in timeit
timing = self.inner(it, self.timer)
File "<timeit-src>", line 6, in inner
File "C:\Python Projects\Amazon-Sales\amazon_analysis.py", line 91, in sale_frequency
m])]+total_product_daily_net_profit]
File "C:\Users\onthego\Anaconda3\lib\site-packages\pandas\core\frame.py", line 2122, in __setitem__
self._setitem_array(key, value)
File "C:\Users\onthego\Anaconda3\lib\site-packages\pandas\core\frame.py", line 2142, in _setitem_array
self.ix._setitem_with_indexer(indexer, value)
File "C:\Users\onthego\Anaconda3\lib\site-packages\pandas\core\indexing.py", line 448, in _setitem_with_indexer
elif np.array(value).ndim == 2:
File "C:\Users\onthego\Anaconda3\lib\site-packages\pandas\core\series.py", line 521, in __getitem__
result = self.index.get_value(self, key)
File "C:\Users\onthego\Anaconda3\lib\site-packages\pandas\core\index.py", line 1595, in get_value
return self._engine.get_value(s, k)
File "pandas\index.pyx", line 100, in pandas.index.IndexEngine.get_value (pandas\index.c:3113)
File "pandas\index.pyx", line 108, in pandas.index.IndexEngine.get_value (pandas\index.c:2844)
File "pandas\index.pyx", line 154, in pandas.index.IndexEngine.get_loc (pandas\index.c:3704)
File "pandas\hashtable.pyx", line 375, in pandas.hashtable.Int64HashTable.get_item (pandas\hashtable.c:7224)
File "pandas\hashtable.pyx", line 381, in pandas.hashtable.Int64HashTable.get_item (pandas\hashtable.c:7162)
KeyError: 0
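For reference, a hedged sketch of the same upsert written with boolean-mask .loc indexing instead of chained .ix assignment (.ix is long deprecated; the column names are hypothetical stand-ins for the positional .ix[:, 0..3] access):

import pandas as pd

df = pd.DataFrame(columns=['product', 'count', 'sales', 'net_profit'])

def upsert_product(df, product, count, sales, net_profit):
    """Accumulate onto an existing product row, or append a new one."""
    mask = df['product'] == product
    if mask.any():
        # one aligned, in-place update instead of rebuilding the whole row
        df.loc[mask, ['count', 'sales', 'net_profit']] += [count, sales, net_profit]
    else:
        df.loc[len(df)] = [product, count, sales, net_profit]
    return df

df = upsert_product(df, 'Bushwacker Dodge Pocket Style Fender Flare Set of 4',
                    1, 423.44, 42.344)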
