Tornado OAuth2 Error in get_argument() (Facebook change) - python-3.x

I have a problem that I can't work out. My code worked on localhost, but now that I have a proper domain set up I'm getting some strange problems trying to log in with Facebook. I have since moved to Python 3.6.
I have the following Tornado setup code:
handlers = [
    (r"/facebookAuth", FBAuth),
    # other handlers...
]

# Settings dict for Application
settings = {
    # static handler
    # Set specific HTTP 404 errors to the Error404 class
    "default_handler_class": Error404,
    "cookie_secret": "xxx",
    "facebook_redirect_uri": "https://www.example.com/facebookAuth",
    "facebook_secret": "xxx",
    "facebook_app_id": "xxx",
}
class FBAuth(BaseHandler, tornado.auth.FacebookGraphMixin):
    async def get(self):
        if self.get_argument("code", False):
            print("not code")
            user = await self.get_authenticated_user(
                redirect_uri=self.settings["facebook_redirect_uri"],
                client_id=self.settings["facebook_app_id"],
                client_secret=self.settings["facebook_secret"],
                code=self.get_argument("code"))
            print("******")
            print(user)
            firstName = user["first_name"]
            lastName = user["last_name"]
            # set cookie and start-up code
        else:
            print("code")
            await self.authorize_redirect(
                redirect_uri=self.settings["facebook_redirect_uri"],
                client_id=self.settings["facebook_app_id"],
                scope=["email", "public_profile"])
I can't work out the behaviour here. The output shows:
code
not code
and then it crashes with the following traceback:
Traceback (most recent call last):
File "/usr/local/lib/python3.6/site-packages/tornado-4.5.dev1-py3.6-linux-x86_64.egg/tornado/web.py", line 1474, in _execute
result = yield result
File "/usr/local/lib/python3.6/site-packages/tornado-4.5.dev1-py3.6-linux-x86_64.egg/tornado/gen.py", line 1045, in run
value = future.result()
File "/usr/local/lib/python3.6/site-packages/tornado-4.5.dev1-py3.6-linux-x86_64.egg/tornado/concurrent.py", line 237, in result
raise_exc_info(self._exc_info)
File "<string>", line 3, in raise_exc_info
File "/usr/local/lib/python3.6/site-packages/tornado-4.5.dev1-py3.6-linux-x86_64.egg/tornado/gen.py", line 1051, in run
yielded = self.gen.throw(*exc_info)
File "<string>", line 6, in _wrap_awaitable
File "/home/cs/charliesays/authHandlers.py", line 13, in get
code=self.get_argument("code"))
File "<string>", line 3, in __await__
File "/usr/local/lib/python3.6/site-packages/tornado-4.5.dev1-py3.6-linux-x86_64.egg/tornado/gen.py", line 1045, in run
value = future.result()
File "/usr/local/lib/python3.6/site-packages/tornado-4.5.dev1-py3.6-linux-x86_64.egg/tornado/concurrent.py", line 237, in result
raise_exc_info(self._exc_info)
File "<string>", line 3, in raise_exc_info
File "/usr/local/lib/python3.6/site-packages/tornado-4.5.dev1-py3.6-linux-x86_64.egg/tornado/stack_context.py", line 314, in wrapped
ret = fn(*args, **kwargs)
File "/usr/local/lib/python3.6/site-packages/tornado-4.5.dev1-py3.6-linux-x86_64.egg/tornado/auth.py", line 983, in _on_access_token
"access_token": args["access_token"][-1],
KeyError: 'access_token'
It seems there is a problem with code=self.get_argument("code") in the call to get_authenticated_user().
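The KeyError: 'access_token' is raised inside Tornado's _on_access_token, which means the token-exchange response from Facebook did not contain the field Tornado expected. Common causes are a redirect_uri that doesn't exactly match the one used in authorize_redirect and registered with the app, an authorization code that has already been consumed (for example by a duplicate request), or Facebook returning a response format this Tornado version doesn't parse. Below is a minimal diagnostic sketch, assuming the same BaseHandler and settings as above (FBAuthDebug is a hypothetical handler name, not part of Tornado), that calls the token endpoint directly and logs the raw response so you can see Facebook's actual error:

import urllib.parse

from tornado.httpclient import AsyncHTTPClient

class FBAuthDebug(BaseHandler, tornado.auth.FacebookGraphMixin):
    async def get(self):
        code = self.get_argument("code", None)
        if code is None:
            await self.authorize_redirect(
                redirect_uri=self.settings["facebook_redirect_uri"],
                client_id=self.settings["facebook_app_id"],
                scope=["email", "public_profile"])
            return
        # Exchange the code for a token by hand and print whatever comes back;
        # a redirect_uri mismatch or a reused code shows up here as an explicit
        # Facebook error payload instead of an access_token.
        url = "https://graph.facebook.com/oauth/access_token?" + urllib.parse.urlencode({
            "redirect_uri": self.settings["facebook_redirect_uri"],
            "client_id": self.settings["facebook_app_id"],
            "client_secret": self.settings["facebook_secret"],
            "code": code,
        })
        response = await AsyncHTTPClient().fetch(url, raise_error=False)
        print(response.code, response.body)
        self.finish("check the server log")

Mapping a temporary route such as r"/facebookAuthDebug" to this handler lets you compare what Facebook actually returns with what get_authenticated_user() expects.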

Related

Jupyter notebook kernel error when added python flask app as a service using pywin32 on windows

I have created a Flask app that can run a Jupyter notebook server.
When the app is opened from the UI, a new subprocess is created with subprocess.Popen() to start a Jupyter notebook server.
I created the Windows service for the Flask web app using win32serviceutil.
There are three scenarios.
Scenario 1: Jupyter notebooks work properly when the Flask app is run directly with python.exe.
Scenario 2: When the Flask app service is executed in debug mode, it works fine.
Command:
python service.py debug
Scenario 3: When the Flask app is executed as a Windows service, the Jupyter notebook kernel dies.
Error:
Traceback (most recent call last):
File "d:\v307\lib\site-packages\tornado\web.py", line 1704, in _execute
result = await result
File "C:\Program Files\Python39\lib\asyncio\tasks.py", line 328, in __wakeup
future.result()
File "d:\v307\lib\site-packages\tornado\gen.py", line 769, in run
yielded = self.gen.throw(*exc_info) # type: ignore
File "d:\v307\lib\site-packages\notebook\services\sessions\handlers.py", line 74, in post
model = yield maybe_future(
File "d:\v307\lib\site-packages\tornado\gen.py", line 762, in run
value = future.result()
File "d:\v307\lib\site-packages\tornado\gen.py", line 769, in run
yielded = self.gen.throw(*exc_info) # type: ignore
File "d:\v307\lib\site-packages\notebook\services\sessions\sessionmanager.py", line 98, in create_session
kernel_id = yield self.start_kernel_for_session(session_id, path, name, type, kernel_name)
File "d:\v307\lib\site-packages\tornado\gen.py", line 762, in run
value = future.result()
File "d:\v307\lib\site-packages\tornado\gen.py", line 769, in run
yielded = self.gen.throw(*exc_info) # type: ignore
File "d:\v307\lib\site-packages\notebook\services\sessions\sessionmanager.py", line 110, in start_kernel_for_session
kernel_id = yield maybe_future(
File "d:\v307\lib\site-packages\tornado\gen.py", line 762, in run
value = future.result()
File "C:\Program Files\Python39\lib\asyncio\futures.py", line 201, in result
raise self._exception
File "C:\Program Files\Python39\lib\asyncio\tasks.py", line 256, in __step
result = coro.send(None)
File "d:\v307\lib\site-packages\notebook\services\kernels\kernelmanager.py", line 176, in start_kernel
kernel_id = await maybe_future(self.pinned_superclass.start_kernel(self, **kwargs))
File "d:\v307\lib\site-packages\jupyter_client\utils.py", line 30, in wrapped
raise e
File "d:\v307\lib\site-packages\jupyter_client\utils.py", line 27, in wrapped
return loop.run_until_complete(future)
File "d:\v307\lib\site-packages\nest_asyncio.py", line 89, in run_until_complete
return f.result()
File "C:\Program Files\Python39\lib\asyncio\futures.py", line 201, in result
raise self._exception
File "C:\Program Files\Python39\lib\asyncio\tasks.py", line 256, in __step
result = coro.send(None)
File "d:\v307\lib\site-packages\jupyter_client\multikernelmanager.py", line 212, in _async_start_kernel
starter = ensure_async(km.start_kernel(**kwargs))
File "d:\v307\lib\site-packages\jupyter_client\utils.py", line 30, in wrapped
raise e
File "d:\v307\lib\site-packages\jupyter_client\utils.py", line 27, in wrapped
return loop.run_until_complete(future)
File "d:\v307\lib\site-packages\nest_asyncio.py", line 89, in run_until_complete
return f.result()
File "C:\Program Files\Python39\lib\asyncio\futures.py", line 201, in result
raise self._exception
File "C:\Program Files\Python39\lib\asyncio\tasks.py", line 256, in __step
result = coro.send(None)
File "d:\v307\lib\site-packages\jupyter_client\manager.py", line 83, in wrapper
raise e
File "d:\v307\lib\site-packages\jupyter_client\manager.py", line 75, in wrapper
out = await method(self, *args, **kwargs)
File "d:\v307\lib\site-packages\jupyter_client\manager.py", line 383, in _async_start_kernel
kernel_cmd, kw = await ensure_async(self.pre_start_kernel(**kw))
File "d:\v307\lib\site-packages\jupyter_client\utils.py", line 30, in wrapped
raise e
File "d:\v307\lib\site-packages\jupyter_client\utils.py", line 27, in wrapped
return loop.run_until_complete(future)
File "d:\v307\lib\site-packages\nest_asyncio.py", line 89, in run_until_complete
return f.result()
File "C:\Program Files\Python39\lib\asyncio\futures.py", line 201, in result
raise self._exception
File "C:\Program Files\Python39\lib\asyncio\tasks.py", line 256, in __step
result = coro.send(None)
File "d:\v307\lib\site-packages\jupyter_client\manager.py", line 349, in _async_pre_start_kernel
kw = await self.provisioner.pre_launch(**kw)
File "d:\v307\lib\site-packages\jupyter_client\provisioning\local_provisioner.py", line 190, in pre_launch
km.write_connection_file()
File "d:\v307\lib\site-packages\jupyter_client\connect.py", line 503, in write_connection_file
self.connection_file, cfg = write_connection_file(
File "d:\v307\lib\site-packages\jupyter_client\connect.py", line 164, in write_connection_file
with secure_write(fname) as f:
File "C:\Program Files\Python39\lib\contextlib.py", line 117, in __enter__
return next(self.gen)
File "d:\v307\lib\site-packages\jupyter_core\paths.py", line 893, in secure_write
win32_restrict_file_to_user(fname)
File "d:\v307\lib\site-packages\jupyter_core\paths.py", line 442, in win32_restrict_file_to_user
user, _domain, _type = win32security.LookupAccountName(
pywintypes.error: (1789, 'LookupAccountName', 'The trust relationship between this workstation and the primary domain failed.')
I used waitress as the web server and win32serviceutil for creating the service for the Flask app.
Python version: 3.9.
I have tried using NSSM and WinSW to add the Flask app as a service, but hit the same error.
I also added the creationflags=CREATE_NEW_CONSOLE argument when creating the subprocess with Popen, but no progress.
Expectation:
A way to solve the issue when the Flask web app runs as a service.
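The traceback ends in jupyter_core's secure_write(), which on Windows calls win32security.LookupAccountName for the account the notebook process runs under; under a service account on a machine with domain-trust problems that lookup fails with error 1789 before the kernel's connection file can be written. Two things commonly help: run the service under a concrete local user account instead of the default service account, or relax the connection-file permission check. A hedged sketch of the latter, assuming your installed jupyter_core version honours the JUPYTER_ALLOW_INSECURE_WRITES environment variable (start_notebook_server is a hypothetical helper standing in for the Popen call in the question):

import os
import subprocess

def start_notebook_server(notebook_dir):
    env = os.environ.copy()
    # Ask jupyter_core to skip the Windows ACL restriction that triggers
    # LookupAccountName; the connection file is then written with default
    # permissions, which is a security trade-off.
    env["JUPYTER_ALLOW_INSECURE_WRITES"] = "1"
    return subprocess.Popen(
        ["jupyter", "notebook", "--no-browser",
         "--notebook-dir={}".format(notebook_dir)],
        env=env,
        creationflags=subprocess.CREATE_NEW_CONSOLE,  # as already tried above
    )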

Error when using joblib in python with undetected chromedriver

When I use (self.links is a list of strings)
Parallel(n_jobs=2)(delayed(self.buybysize)(link) for link in self.links)
with this function
def buybysize(self, link):
    browser = self.browser()
    # rest of the method is commented out

def browser(self):
    options = uc.ChromeOptions()
    options.user_data_dir = self.user_data_dir
    options.add_argument(self.add_argument)
    driver = uc.Chrome(options=options)
    return driver
I get the error
joblib.externals.loky.process_executor._RemoteTraceback:
Traceback (most recent call last):
File "/home/Me/PycharmProjects/zalando_buy/venv/lib/python3.8/site-packages/joblib/externals/loky/process_executor.py", line 436, in _process_worker
r = call_item()
File "/home/Me/PycharmProjects/zalando_buy/venv/lib/python3.8/site-packages/joblib/externals/loky/process_executor.py", line 288, in __call__
return self.fn(*self.args, **self.kwargs)
File "/home/Me/PycharmProjects/zalando_buy/venv/lib/python3.8/site-packages/joblib/_parallel_backends.py", line 595, in __call__
return self.func(*args, **kwargs)
File "/home/Me/PycharmProjects/zalando_buy/venv/lib/python3.8/site-packages/joblib/parallel.py", line 262, in __call__
return [func(*args, **kwargs)
File "/home/Me/PycharmProjects/zalando_buy/venv/lib/python3.8/site-packages/joblib/parallel.py", line 262, in <listcomp>
return [func(*args, **kwargs)
File "/home/Me/PycharmProjects/zalando_buy/Zalando.py", line 91, in buybysize
browser = self.browser()
File "/home/Me/PycharmProjects/zalando_buy/Zalando.py", line 38, in browser
driver = uc.Chrome(options=options)
File "/home/Me/PycharmProjects/zalando_buy/venv/lib/python3.8/site-packages/undetected_chromedriver/__init__.py", line 388, in __init__
self.browser_pid = start_detached(
File "/home/Me/PycharmProjects/zalando_buy/venv/lib/python3.8/site-packages/undetected_chromedriver/dprocess.py", line 30, in start_detached
multiprocessing.Process(
File "/usr/lib/python3.8/multiprocessing/process.py", line 121, in start
self._popen = self._Popen(self)
File "/usr/lib/python3.8/multiprocessing/context.py", line 224, in _Popen
return _default_context.get_context().Process._Popen(process_obj)
File "/home/Me/PycharmProjects/zalando_buy/venv/lib/python3.8/site-packages/joblib/externals/loky/backend/process.py", line 39, in _Popen
return Popen(process_obj)
File "/home/Me/PycharmProjects/zalando_buy/venv/lib/python3.8/site-packages/joblib/externals/loky/backend/popen_loky_posix.py", line 52, in __init__
self._launch(process_obj)
File "/home/Me/PycharmProjects/zalando_buy/venv/lib/python3.8/site-packages/joblib/externals/loky/backend/popen_loky_posix.py", line 157, in _launch
pid = fork_exec(cmd_python, self._fds, env=process_obj.env)
AttributeError: 'Process' object has no attribute 'env'
The above exception was the direct cause of the following exception:
Traceback (most recent call last):
File "/home/Me/PycharmProjects/zalando_buy/Start.py", line 4, in <module>
class Start:
File "/home/Me/PycharmProjects/zalando_buy/Start.py", line 7, in Start
zalando.startshopping()
File "/home/Me/PycharmProjects/zalando_buy/Zalando.py", line 42, in startshopping
self.openlinks()
File "/home/Me/PycharmProjects/zalando_buy/Zalando.py", line 50, in openlinks
Parallel(n_jobs=2)(delayed(self.buybysize)(link) for link in self.links)
File "/home/Me/PycharmProjects/zalando_buy/venv/lib/python3.8/site-packages/joblib/parallel.py", line 1056, in __call__
self.retrieve()
File "/home/Me/PycharmProjects/zalando_buy/venv/lib/python3.8/site-packages/joblib/parallel.py", line 935, in retrieve
self._output.extend(job.get(timeout=self.timeout))
File "/home/Me/PycharmProjects/zalando_buy/venv/lib/python3.8/site-packages/joblib/_parallel_backends.py", line 542, in wrap_future_result
return future.result(timeout=timeout)
File "/usr/lib/python3.8/concurrent/futures/_base.py", line 444, in result
return self.__get_result()
File "/usr/lib/python3.8/concurrent/futures/_base.py", line 389, in __get_result
raise self._exception
AttributeError: 'Process' object has no attribute 'env'
Process finished with exit code 1
To me it looks like there is some instability because undetected_chromedriver may already use multiprocessing itself, but isn't there any way I can open multiple browsers with UC and process each iteration in parallel?
Edit: I debugged, and the error appears when this line executes:
driver = uc.Chrome(options=options)
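The AttributeError is raised inside a loky worker process: undetected_chromedriver's start_detached() spawns its own multiprocessing.Process, and under joblib's loky backend the patched Popen expects an env attribute that the plain Process object doesn't have. A hedged workaround sketch, keeping joblib but switching the parallel loop to the threading backend so uc.Chrome() is launched from ordinary threads in the main process (launching and driving a browser is mostly I/O-bound, so threads are usually adequate here); the method and attribute names are taken from the question:

from joblib import Parallel, delayed

def openlinks(self):
    # prefer="threads" avoids nesting undetected_chromedriver's own
    # multiprocessing call inside a loky worker process.
    Parallel(n_jobs=2, prefer="threads")(
        delayed(self.buybysize)(link) for link in self.links
    )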

how to prevent ThreadedWebsocketManager (binance python api) from asyncio.exceptions.TimeoutError?

This error happens every 10 minutes in my code. I just do not know how to keep the ThreadedWebsocketManager alive all the time; the documentation does not explain it clearly.
Could someone tell me how to fix it?
I have the same issue here. It would be of great help if someone could provide a solution to this problem.
Traceback (most recent call last):
File "test.py", line 351, in <module>
loop.run_until_complete(main())
File "C:\Program Files\WindowsApps\PythonSoftwareFoundation.Python.3.8_3.8.2800.0_x64__qbz5n2kfra8p0\lib\asyncio\base_events.py", line 616, in run_until_complete
return future.result()
File "test.py", line 257, in main
await asyncio.gather(
File "test.py", line 208, in calculate
second_pair_usdt = await client.get_symbol_ticker(symbol=second_token+FIAT_USD)
File "C:\Users\Thoma\AppData\Local\Packages\PythonSoftwareFoundation.Python.3.8_qbz5n2kfra8p0\LocalCache\local-packages\Python38\site-packages\binance\client.py", line 6836, in get_symbol_ticker
return await self._get('ticker/price', data=params, version=self.PRIVATE_API_VERSION)
File "C:\Users\Thoma\AppData\Local\Packages\PythonSoftwareFoundation.Python.3.8_qbz5n2kfra8p0\LocalCache\local-packages\Python38\site-packages\binance\client.py", line 6551, in _get
return await self._request_api('get', path, signed, version, **kwargs)
File "C:\Users\Thoma\AppData\Local\Packages\PythonSoftwareFoundation.Python.3.8_qbz5n2kfra8p0\LocalCache\local-packages\Python38\site-packages\binance\client.py", line 6514, in _request_api
return await self._request(method, uri, signed, **kwargs)
File "C:\Users\Thoma\AppData\Local\Packages\PythonSoftwareFoundation.Python.3.8_qbz5n2kfra8p0\LocalCache\local-packages\Python38\site-packages\binance\client.py", line 6495, in _request
async with getattr(self.session, method)(uri, **kwargs) as response:
File "C:\Users\Thoma\AppData\Local\Packages\PythonSoftwareFoundation.Python.3.8_qbz5n2kfra8p0\LocalCache\local-packages\Python38\site-packages\aiohttp\client.py", line 1117, in __aenter__
self._resp = await self._coro
File "C:\Users\Thoma\AppData\Local\Packages\PythonSoftwareFoundation.Python.3.8_qbz5n2kfra8p0\LocalCache\local-packages\Python38\site-packages\aiohttp\client.py", line 619, in _request
break
File "C:\Users\Thoma\AppData\Local\Packages\PythonSoftwareFoundation.Python.3.8_qbz5n2kfra8p0\LocalCache\local-packages\Python38\site-packages\aiohttp\helpers.py", line 656, in __exit__
raise asyncio.TimeoutError from None
asyncio.exceptions.TimeoutError
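The traceback shows the timeout coming from the aiohttp session behind AsyncClient.get_symbol_ticker(), i.e. a REST call made alongside the websocket manager, and a single timeout kills the whole asyncio.gather(). A hedged mitigation sketch that retries the REST call instead of letting the exception propagate; the retry policy (3 attempts, 2-second pause) is an assumption, not python-binance behaviour:

import asyncio

async def with_retry(coro_factory, attempts=3, delay=2.0):
    for attempt in range(1, attempts + 1):
        try:
            return await coro_factory()
        except asyncio.TimeoutError:
            if attempt == attempts:
                raise  # give up after the final attempt
            await asyncio.sleep(delay)

# Usage inside the calculate() coroutine from the traceback:
# second_pair_usdt = await with_retry(
#     lambda: client.get_symbol_ticker(symbol=second_token + FIAT_USD))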

Celery configuration in Django, connecting tasks to the view

I've recently configured Celery to run some dummy tasks and ran the workers through Terminal on my Mac. It all seems to run as expected; it took a while, since the literature out there advises different configuration scenarios, but I got there in the end. The next step is to trigger the tasks via my view in Django. I'm using celery 1.2.26.post2.
My project structure:
/MyApp
    celery_tasks.py
    celeryconfig.py
    __init__.py
I've been following several tutorials and a few videos, which were very helpful for getting an overall view of Celery.
My scripts are:
celery_tasks.py
from celery import Celery
from celery.task import task

app = Celery()  # Initialise the app
app.config_from_object('celeryconfig')  # Tell the Celery instance to use the celeryconfig module

suf = lambda n: "%d%s" % (n, {1: "st", 2: "nd", 3: "rd"}.get(n if n < 20 else n % 10, "th"))

@task
def fav_doctor():
    """Reads the doctor.txt file and prints out the fav doctor, then adds a new
    number to the file"""
    with open('doctor.txt', 'r+') as f:
        for line in f:
            nums = line.rstrip().split()
            print('The {} doctor is my favorite'.format(suf(int(nums[0]))))
            for num in nums[1:]:
                print('Wait! The {} doctor is my favorite'.format(suf(int(num))))
            last_num = int(nums[-1])
            new_last_num = last_num + 1
            f.write(str(new_last_num) + ' ')

@task
def reverse(string):
    return string[::-1]

@task
def add(x, y):
    return x + y
celeryconfig.py
from datetime import timedelta
## List of modules to import when celery starts.
CELERY_IMPORTS = ('celery_tasks',)
## Message Broker (RabbitMQ) settings.
BROKER_URL = 'amqp://'
BROKER_PORT = 5672
#BROKER_TRANSPORT = 'sqlalchemy'
#BROKER_HOST = 'sqlite:///tasks.db'
#BROKER_VHOST = '/'
#BROKER_USER = 'guest'
#BROKER_PASSWORD = 'guest'
## Result store settings.
CELERY_RESULT_BACKEND = 'rpc://'
#CELERY_RESULT_DBURI = 'sqlite:///mydatabase.db'
## Worker settings
#CELERYD_CONCURRENCY = 1
#CELERYD_TASK_TIME_LIMIT = 20
#CELERYD_LOG_FILE = 'celeryd.log'
#CELERYD_LOG_LEVEL = 'INFO'
## Misc
CELERY_IGNORE_RESULT = False
CELERY_TASK_SERIALIZER = 'json'
CELERY_RESULT_SERIALIZER = 'json'
CELERY_ACCEPT_CONTENT=['json']
CELERY_TIMEZONE = 'Europe/Berlin'
CELERY_ENABLE_UTC = True
CELERYBEAT_SCHEDULE = {
    'doctor-every-10-seconds': {
        'task': 'celery_tasks.fav_doctor',
        'schedule': timedelta(seconds=3),
    },
}
__init__.py
from .celery_tasks import app as celery_app # Ensures app is always imported when Django starts so that shared_task will use this app.
__all__ = ['celery_app']
In settings.py
INSTALLED_APPS = [
    ...
    'djcelery',
]
In my views folder, I have a specific view module, admin_scripts.py
from MyApp.celery_tasks import fav_doctor, reverse, send_email, add

@login_required
def admin_script_dashboard(request):
    if request.method == 'POST':
        form = Admin_Script(request.POST)
        if form.is_valid():
            backup_script_select = form.cleaned_data['backup_script_select']
            dummy_script_select = form.cleaned_data['dummy_script_select']
            print("backup_script_select: {0}".format(backup_script_select))
            print("dummy_script_select: {0}".format(dummy_script_select))
            if backup_script_select:
                print("Backup script executing. Please wait...")
                dbackup_script_dir = str(Path.home()) + '/Software/MyOtherApp/cli-tools/dbbackup_DRAFT.py'
                subprocess.call(" python {} ".format(dbackup_script_dir), shell=True)
                async_result = reverse.delay('Using Celery')
                print("async_result: {0}".format(async_result))
                result = reverse.AsyncResult(async_result.id)
                print("result: {0}".format(result))
                print("Something occurred...")
            if dummy_script_select:
                print("Dummy script executing. Please wait...")
                dummy_script_dir = str(Path.home()) + '/Software/MyOtherApp/cli-tools/dummy.py'
                subprocess.call(" python {} ".format(dummy_script_dir), shell=True)
                async_result = add.delay(2, 5)
                print("async_result: {0}".format(async_result))
                result = add.AsyncResult(async_result.id)
                print("result: {0}".format(result))
                print("Something occurred...")
    return render(request, 'MyApp/admin_scripts_db.html')
The problem occurs at the line in my admin_scripts.py file where async_result = add.delay(2, 5) is called. Below is the traceback:
[12/Jul/2018 09:23:19] ERROR [django.request:135] Internal Server Error: /MyProject/adminscripts/
Traceback (most recent call last):
File "/Users/MyMBP/anaconda3/lib/python3.6/site-packages/celery/local.py", line 309, in _get_current_object
return object.__getattribute__(self, '__thing')
AttributeError: 'PromiseProxy' object has no attribute '__thing'
During handling of the above exception, another exception occurred:
Traceback (most recent call last):
File "/Users/MyMBP/anaconda3/lib/python3.6/site-packages/kombu/utils/__init__.py", line 323, in __get__
return obj.__dict__[self.__name__]
KeyError: 'conf'
During handling of the above exception, another exception occurred:
Traceback (most recent call last):
File "/Users/MyMBP/anaconda3/lib/python3.6/site-packages/celery/loaders/base.py", line 158, in _smart_import
return imp(path)
File "/Users/MyMBP/anaconda3/lib/python3.6/site-packages/celery/loaders/base.py", line 112, in import_from_cwd
package=package,
File "/Users/MyMBP/anaconda3/lib/python3.6/site-packages/celery/utils/imports.py", line 101, in import_from_cwd
return imp(module, package=package)
File "/Users/MyMBP/anaconda3/lib/python3.6/site-packages/celery/loaders/base.py", line 106, in import_module
return importlib.import_module(module, package=package)
File "/Users/MyMBP/anaconda3/lib/python3.6/importlib/__init__.py", line 126, in import_module
return _bootstrap._gcd_import(name[level:], package, level)
File "<frozen importlib._bootstrap>", line 978, in _gcd_import
File "<frozen importlib._bootstrap>", line 961, in _find_and_load
File "<frozen importlib._bootstrap>", line 948, in _find_and_load_unlocked
ModuleNotFoundError: No module named 'celeryconfig'
During handling of the above exception, another exception occurred:
Traceback (most recent call last):
File "/Users/MyMBP/anaconda3/lib/python3.6/site-packages/django/core/handlers/exception.py", line 41, in inner
response = get_response(request)
File "/Users/MyMBP/anaconda3/lib/python3.6/site-packages/django/core/handlers/base.py", line 187, in _get_response
response = self.process_exception_by_middleware(e, request)
File "/Users/MyMBP/anaconda3/lib/python3.6/site-packages/django/core/handlers/base.py", line 185, in _get_response
response = wrapped_callback(request, *callback_args, **callback_kwargs)
File "/Users/MyMBP/anaconda3/lib/python3.6/site-packages/django/contrib/auth/decorators.py", line 23, in _wrapped_view
return view_func(request, *args, **kwargs)
File "/Users/MyMBP/Software/MyProject/MyProjectsite/MyProject/views/admin_scripts.py", line 44, in admin_script_dashboard
async_result = add.delay(2, 5)
File "/Users/MyMBP/anaconda3/lib/python3.6/site-packages/celery/local.py", line 143, in __getattr__
return getattr(self._get_current_object(), name)
File "/Users/MyMBP/anaconda3/lib/python3.6/site-packages/celery/local.py", line 311, in _get_current_object
return self.__evaluate__()
File "/Users/MyMBP/anaconda3/lib/python3.6/site-packages/celery/local.py", line 341, in __evaluate__
thing = Proxy._get_current_object(self)
File "/Users/MyMBP/anaconda3/lib/python3.6/site-packages/celery/local.py", line 101, in _get_current_object
return loc(*self.__args, **self.__kwargs)
File "/Users/MyMBP/anaconda3/lib/python3.6/site-packages/celery/app/base.py", line 270, in _task_from_fun
'__wrapped__': fun}, **options))()
File "/Users/MyMBP/anaconda3/lib/python3.6/site-packages/celery/app/task.py", line 201, in __new__
instance.bind(app)
File "/Users/MyMBP/anaconda3/lib/python3.6/site-packages/celery/app/task.py", line 365, in bind
conf = app.conf
File "/Users/MyMBP/anaconda3/lib/python3.6/site-packages/kombu/utils/__init__.py", line 325, in __get__
value = obj.__dict__[self.__name__] = self.__get(obj)
File "/Users/MyMBP/anaconda3/lib/python3.6/site-packages/celery/app/base.py", line 638, in conf
return self._get_config()
File "/Users/MyMBP/anaconda3/lib/python3.6/site-packages/celery/app/base.py", line 454, in _get_config
self.loader.config_from_object(self._config_source)
File "/Users/MyMBP/anaconda3/lib/python3.6/site-packages/celery/loaders/base.py", line 140, in config_from_object
obj = self._smart_import(obj, imp=self.import_from_cwd)
File "/Users/MyMBP/anaconda3/lib/python3.6/site-packages/celery/loaders/base.py", line 161, in _smart_import
return symbol_by_name(path, imp=imp)
File "/Users/MyMBP/anaconda3/lib/python3.6/site-packages/kombu/utils/__init__.py", line 96, in symbol_by_name
module = imp(module_name, package=package, **kwargs)
File "/Users/MyMBP/anaconda3/lib/python3.6/site-packages/celery/loaders/base.py", line 112, in import_from_cwd
package=package,
File "/Users/MyMBP/anaconda3/lib/python3.6/site-packages/celery/utils/imports.py", line 101, in import_from_cwd
return imp(module, package=package)
File "/Users/MyMBP/anaconda3/lib/python3.6/site-packages/celery/loaders/base.py", line 106, in import_module
return importlib.import_module(module, package=package)
File "/Users/MyMBP/anaconda3/lib/python3.6/importlib/__init__.py", line 126, in import_module
return _bootstrap._gcd_import(name[level:], package, level)
File "<frozen importlib._bootstrap>", line 978, in _gcd_import
File "<frozen importlib._bootstrap>", line 961, in _find_and_load
File "<frozen importlib._bootstrap>", line 948, in _find_and_load_unlocked
ModuleNotFoundError: No module named 'celeryconfig'
Numerous errors get thrown and the traceback is very large, about 9000 lines in total; this is just a snippet. I'm new to Celery and task queueing in general, so perhaps some of the experts out there can pick out some very obvious mistakes in my code.
As I said, the Celery configuration itself works: when I trigger the tasks from Terminal, they do what they are supposed to do. I'm building this up piece by piece, and this next step is to trigger the tasks from my Django view instead of from Terminal. Once I have figured that out, the ultimate aim is to track the progress of a task and report the output to the user in a separate window (.js, AJAX, etc.) showing, for example, the line output you see in the console.
I read that the tasks.py file (in my case celery_tasks.py) needs to live in a Django app that's registered in settings.py. Is this true?
This is not a full answer, but it may partly help others who hit a similar issue:
In celery_tasks.py there is the following line:
app.config_from_object('celeryconfig')
When I start the workers through Terminal, this works. When I go through my view, the error message above appears. Changing the line as follows makes it work via the view:
app.config_from_object('MyApp.celeryconfig')
I still need to figure out why there is this discrepancy and how to resolve it, so that it doesn't matter whether the tasks are called from my view or from Terminal.
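The discrepancy is most likely an import-path issue: Celery's loader falls back to importing the config module relative to the current working directory, so a worker started from inside MyApp/ can resolve the bare name celeryconfig, while Django runs with the project root on sys.path, where only MyApp.celeryconfig is importable. A minimal sketch of celery_tasks.py under that assumption, which always uses the dotted package path and loads Django settings first ('MyProject.settings' is a placeholder for the real settings module):

import os

from celery import Celery

# Make sure Django settings are importable before the app is configured.
os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'MyProject.settings')

app = Celery('MyApp')

# Full dotted path, so the import resolves identically whether the process was
# started by Django (from the project root) or as a worker from the command line.
app.config_from_object('MyApp.celeryconfig')

If the worker is then started from the project root as well (for example celery -A MyApp.celery_tasks worker --loglevel=info with a reasonably recent Celery), both entry points see the same module layout and the "No module named 'celeryconfig'" error should not reappear.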

python-telegram-bot doesn't work on Python 3.x

I'm trying to make a program that uses python-telegram-bot, and I need to use Python 3 for that. But when I try to launch it under Python 3, I get an error I don't quite understand; the same happens with the built-in examples. Could somebody explain what it means? The precise output of the program follows.
2016-06-29 06:17:44,260 - telegram.ext.updater - ERROR - unhandled exception
Traceback (most recent call last):
File "/usr/local/lib/python3.4/dist-packages/telegram/ext/updater.py", line 105, in _thread_wrapper
target(*args, **kwargs)
File "/usr/local/lib/python3.4/dist-packages/telegram/ext/updater.py", line 216, in _start_polling
self._bootstrap(bootstrap_retries, clean=clean, webhook_url='')
File "/usr/local/lib/python3.4/dist-packages/telegram/ext/updater.py", line 320, in _bootstrap
self.bot.setWebhook(webhook_url=webhook_url, certificate=cert)
File "/usr/local/lib/python3.4/dist-packages/telegram/bot.py", line 121, in decorator
result = func(self, *args, **kwargs)
File "/usr/local/lib/python3.4/dist-packages/telegram/bot.py", line 1263, in setWebhook
result = request.post(url, data, timeout=kwargs.get('timeout'))
File "/usr/local/lib/python3.4/dist-packages/telegram/utils/request.py", line 174, in post
headers={'Content-Type': 'application/json'})
File "/usr/local/lib/python3.4/dist-packages/telegram/utils/request.py", line 100, in _request_wrapper
resp = _get_con_pool().request(*args, **kwargs)
File "/usr/lib/python3/dist-packages/urllib3/request.py", line 72, in request
**urlopen_kw)
File "/usr/lib/python3/dist-packages/urllib3/request.py", line 135, in request_encode_body
**urlopen_kw)
TypeError: urlopen() got multiple values for keyword argument 'body'
Exception in thread updater:
Traceback (most recent call last):
File "/usr/lib/python3.4/threading.py", line 920, in _bootstrap_inner
self.run()
File "/usr/lib/python3.4/threading.py", line 868, in run
self._target(*self._args, **self._kwargs)
File "/usr/local/lib/python3.4/dist-packages/telegram/ext/updater.py", line 105, in _thread_wrapper
target(*args, **kwargs)
File "/usr/local/lib/python3.4/dist-packages/telegram/ext/updater.py", line 216, in _start_polling
self._bootstrap(bootstrap_retries, clean=clean, webhook_url='')
File "/usr/local/lib/python3.4/dist-packages/telegram/ext/updater.py", line 320, in _bootstrap
self.bot.setWebhook(webhook_url=webhook_url, certificate=cert)
File "/usr/local/lib/python3.4/dist-packages/telegram/bot.py", line 121, in decorator
result = func(self, *args, **kwargs)
File "/usr/local/lib/python3.4/dist-packages/telegram/bot.py", line 1263, in setWebhook
result = request.post(url, data, timeout=kwargs.get('timeout'))
File "/usr/local/lib/python3.4/dist-packages/telegram/utils/request.py", line 174, in post
headers={'Content-Type': 'application/json'})
File "/usr/local/lib/python3.4/dist-packages/telegram/utils/request.py", line 100, in _request_wrapper
resp = _get_con_pool().request(*args, **kwargs)
File "/usr/lib/python3/dist-packages/urllib3/request.py", line 72, in request
**urlopen_kw)
File "/usr/lib/python3/dist-packages/urllib3/request.py", line 135, in request_encode_body
**urlopen_kw)
TypeError: urlopen() got multiple values for keyword argument 'body'
2016-06-29 06:17:45,248 - telegram.ext.dispatcher - CRITICAL - stopping due to exception in another thread
For this to work, install urllib3 >= 1.10; the traceback shows the outdated system copy in /usr/lib/python3/dist-packages being used, which is what triggers the urlopen() TypeError.
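A quick check, from Python, of which urllib3 the interpreter is actually importing and whether it is new enough; if the path points into /usr/lib/python3/dist-packages and the version is below 1.10, upgrade it (for example with python3 -m pip install --upgrade "urllib3>=1.10"):

import urllib3

# Prints the installed version and the file it was imported from, so you can
# see whether the old system copy is shadowing a newer one.
print(urllib3.__version__, urllib3.__file__)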
