I am trying to deploy my own Django app on Google Cloud.
I'm following this documentation by Google Cloud to deploy the app.
I have changed my app's settings.py to match the settings.py of the sample app provided by Google, and I think this issue is caused by a mistake in that settings file, or maybe something else. I have not been able to solve the error after many attempts.
Please help, thank you.
Settings.py file
from pathlib import Path
import os
import io
from urllib.parse import urlparse
import environ
from google.cloud import secretmanager
# Build paths inside the project like this: BASE_DIR / 'subdir'.
BASE_DIR = Path(__file__).resolve().parent.parent
# [START gaestd_py_django_secret_config]
env = environ.Env(DEBUG=(bool, False))
env_file = os.path.join(BASE_DIR, ".env")
if os.path.isfile(env_file):
    # Use a local secret file, if provided
    env.read_env(env_file)
# [START_EXCLUDE]
elif os.getenv("TRAMPOLINE_CI", None):
    # Create local settings if running with CI, for unit testing
    placeholder = (
        f"SECRET_KEY=a\n"
        f"DATABASE_URL=sqlite://{os.path.join(BASE_DIR, 'db.sqlite3')}"
    )
    env.read_env(io.StringIO(placeholder))
# [END_EXCLUDE]
elif os.environ.get("GOOGLE_CLOUD_PROJECT", None):
    # Pull secrets from Secret Manager
    project_id = os.environ.get("GOOGLE_CLOUD_PROJECT")
    client = secretmanager.SecretManagerServiceClient()
    settings_name = os.environ.get("SETTINGS_NAME", "django_settings")
    name = f"projects/{project_id}/secrets/{settings_name}/versions/latest"
    payload = client.access_secret_version(name=name).payload.data.decode("UTF-8")
    env.read_env(io.StringIO(payload))
else:
    raise Exception("No local .env or GOOGLE_CLOUD_PROJECT detected. No secrets found.")
# [END gaestd_py_django_secret_config]
SECRET_KEY = env("SECRET_KEY")
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = env("DEBUG")
# [START gaestd_py_django_csrf]
# SECURITY WARNING: It's recommended that you use this when
# running in production. The URL will be known once you first deploy
# to App Engine. This code takes the URL and converts it to both these settings formats.
APPENGINE_URL = env("APPENGINE_URL", default=None)
if APPENGINE_URL:
    # Ensure a scheme is present in the URL before it's processed.
    if not urlparse(APPENGINE_URL).scheme:
        APPENGINE_URL = f"https://{APPENGINE_URL}"
    ALLOWED_HOSTS = [urlparse(APPENGINE_URL).netloc]
    CSRF_TRUSTED_ORIGINS = [APPENGINE_URL]
    SECURE_SSL_REDIRECT = True
else:
    ALLOWED_HOSTS = ["*"]
# [END gaestd_py_django_csrf]
# Application definition
INSTALLED_APPS = [
    'portpolio.apps.PortpolioConfig',
    'CRblogs.apps.CrblogsConfig',
    'django.contrib.admin',
    'django.contrib.auth',
    'django.contrib.contenttypes',
    'django.contrib.sessions',
    'django.contrib.messages',
    'django.contrib.staticfiles',
    'django_social_share',
]
MIDDLEWARE = [
    'django.middleware.security.SecurityMiddleware',
    'django.contrib.sessions.middleware.SessionMiddleware',
    'django.middleware.common.CommonMiddleware',
    'django.middleware.csrf.CsrfViewMiddleware',
    'django.contrib.auth.middleware.AuthenticationMiddleware',
    'django.contrib.messages.middleware.MessageMiddleware',
    'django.middleware.clickjacking.XFrameOptionsMiddleware',
]
ROOT_URLCONF = 'crrathod.urls'
TEMPLATES = [
    {
        'BACKEND': 'django.template.backends.django.DjangoTemplates',
        'DIRS': [],
        'APP_DIRS': True,
        'OPTIONS': {
            'context_processors': [
                'django.template.context_processors.debug',
                'django.template.context_processors.request',
                'django.contrib.auth.context_processors.auth',
                'django.contrib.messages.context_processors.messages',
            ],
        },
    },
]
WSGI_APPLICATION = 'crrathod.wsgi.application'
# Database
# [START db_setup]
# [START gaestd_py_django_database_config]
# Use django-environ to parse the connection string
DATABASES = {"default": env.db()}
# If the flag has been set, configure to use the proxy
if os.getenv("USE_CLOUD_SQL_AUTH_PROXY", None):
    DATABASES["default"]["HOST"] = "127.0.0.1"
    DATABASES["default"]["PORT"] = 8088
# [END gaestd_py_django_database_config]
# [END db_setup]
# Use an in-memory sqlite3 database when testing in CI systems
# TODO(glasnt) CHECK IF THIS IS REQUIRED because we're setting a val above
if os.getenv("TRAMPOLINE_CI", None):
    DATABASES = {
        "default": {
            "ENGINE": "django.db.backends.sqlite3",
            "NAME": os.path.join(BASE_DIR, "db.sqlite3"),
        }
    }
# Password validation
# https://docs.djangoproject.com/en/4.0/ref/settings/#auth-password-validators
AUTH_PASSWORD_VALIDATORS = [
    {
        'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator',
    },
    {
        'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator',
    },
    {
        'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator',
    },
    {
        'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator',
    },
]
# Internationalization
# https://docs.djangoproject.com/en/4.0/topics/i18n/
LANGUAGE_CODE = 'en-us'
TIME_ZONE = 'UTC'
USE_I18N = True
USE_TZ = True
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/4.0/howto/static-files/
# STATICFILES_DIRS =[
# os.path.join(BASE_DIR, 'static')
# ]
# STATIC_ROOT = os.path.join(BASE_DIR,'assets')
STATIC_URL = 'static/'
MEDIA_URL = '/media/'
if DEBUG:
    STATICFILES_DIRS = [os.path.join(BASE_DIR, 'static')]
else:
    STATIC_ROOT = os.path.join(BASE_DIR, 'static')
MEDIA_ROOT = os.path.join(BASE_DIR,'media')
# Default primary key field type
# https://docs.djangoproject.com/en/4.0/ref/settings/#default-auto-field
DEFAULT_AUTO_FIELD = 'django.db.models.BigAutoField'
Log file
Traceback (most recent call last):
File "C:\Program Files (x86)\Google\Cloud SDK\Portfolio-blog\env\lib\site-packages\google\api_core\grpc_helpers.py", line 72, in error_remapped_callable
return callable_(*args, **kwargs)
File "C:\Program Files (x86)\Google\Cloud SDK\Portfolio-blog\env\lib\site-packages\grpc\_channel.py", line 946, in __call__
return _end_unary_response_blocking(state, call, False, None)
File "C:\Program Files (x86)\Google\Cloud SDK\Portfolio-blog\env\lib\site-packages\grpc\_channel.py", line 849, in _end_unary_response_blocking
raise _InactiveRpcError(state)
grpc._channel._InactiveRpcError: <_InactiveRpcError of RPC that terminated with:
status = StatusCode.PERMISSION_DENIED
details = "Permission denied on resource project my-portfolio-361305."
debug_error_string = "{"created":"#1662731782.139000000","description":"Error received from peer ipv6:[2404:6800:4009:825::200a]:443","file":"src/core/lib/surface/call.cc","file_line":967,"grpc_message":"Permission denied on resource project my-portfolio-361305.","grpc_status":7}"
>
The above exception was the direct cause of the following exception:
Traceback (most recent call last):
File "manage.py", line 22, in <module>
main()
File "manage.py", line 18, in main
execute_from_command_line(sys.argv)
File "C:\Program Files (x86)\Google\Cloud SDK\Portfolio-blog\env\lib\site-packages\django\core\management\__init__.py", line 446, in execute_from_command_line
utility.execute()
File "C:\Program Files (x86)\Google\Cloud SDK\Portfolio-blog\env\lib\site-packages\django\core\management\__init__.py", line 386, in execute
settings.INSTALLED_APPS
File "C:\Program Files (x86)\Google\Cloud SDK\Portfolio-blog\env\lib\site-packages\django\conf\__init__.py", line 87, in __getattr__
self._setup(name)
File "C:\Program Files (x86)\Google\Cloud SDK\Portfolio-blog\env\lib\site-packages\django\conf\__init__.py", line 74, in _setup
self._wrapped = Settings(settings_module)
File "C:\Program Files (x86)\Google\Cloud SDK\Portfolio-blog\env\lib\site-packages\django\conf\__init__.py", line 183, in __init__
mod = importlib.import_module(self.SETTINGS_MODULE)
File "C:\Users\LONAR\AppData\Local\Programs\Python\Python38\lib\importlib\__init__.py", line 127, in import_module
return _bootstrap._gcd_import(name[level:], package, level)
File "<frozen importlib._bootstrap>", line 1014, in _gcd_import
File "<frozen importlib._bootstrap>", line 991, in _find_and_load
File "<frozen importlib._bootstrap>", line 975, in _find_and_load_unlocked
File "<frozen importlib._bootstrap>", line 671, in _load_unlocked
File "<frozen importlib._bootstrap_external>", line 848, in exec_module
File "<frozen importlib._bootstrap>", line 219, in _call_with_frames_removed
File "C:\Program Files (x86)\Google\Cloud SDK\Portfolio-blog\crrathod\settings.py", line 50, in <module>
payload = client.access_secret_version(name=name).payload.data.decode("UTF-8")
File "C:\Program Files (x86)\Google\Cloud SDK\Portfolio-blog\env\lib\site-packages\google\cloud\secretmanager_v1\services\secret_manager_service\client.py", line 1440, in access_secret_version
response = rpc(
File "C:\Program Files (x86)\Google\Cloud SDK\Portfolio-blog\env\lib\site-packages\google\api_core\gapic_v1\method.py", line 154, in __call__
return wrapped_func(*args, **kwargs)
File "C:\Program Files (x86)\Google\Cloud SDK\Portfolio-blog\env\lib\site-packages\google\api_core\retry.py", line 283, in retry_wrapped_func
return retry_target(
File "C:\Program Files (x86)\Google\Cloud SDK\Portfolio-blog\env\lib\site-packages\google\api_core\retry.py", line 190, in retry_target
return target()
File "C:\Program Files (x86)\Google\Cloud SDK\Portfolio-blog\env\lib\site-packages\google\api_core\grpc_helpers.py", line 74, in error_remapped_callable
raise exceptions.from_grpc_error(exc) from exc
google.api_core.exceptions.PermissionDenied: 403 Permission denied on resource project my-portfolio-361305. [links {
description: "Google developer console API key"
url: "https://console.developers.google.com/project/ my-portfolio-361305/apiui/credential"
}
, reason: "CONSUMER_INVALID"
domain: "googleapis.com"
metadata {
key: "service"
value: "secretmanager.googleapis.com"
}
metadata {
key: "consumer"
value: "projects/ my-portfolio-361305"
}
]
I just shifted from one problem to another. I made some changes here: I removed the space after = in the command set GOOGLE_CLOUD_PROJECT=my-portfolio-361305 (previously I was using set GOOGLE_CLOUD_PROJECT= my-portfolio-361305). I also removed the space after = in the Secret Manager file, as I read in some solutions to my next problem: Error in deploying Django app on Google Cloud using App Engine, "django.core.exceptions.ImproperlyConfigured: Set the SECRET_KEY environment variable".
I solved that problem as well by running set SECRET_KEY=my_secret_key_value.
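For anyone hitting the same PERMISSION_DENIED / CONSUMER_INVALID error: the error string above literally shows projects/ my-portfolio-361305 with a leading space, so a quick sanity check is to print exactly what the process sees. A minimal sketch (variable names taken from the settings above):

```
import os

# Print the raw values so a stray space from a command like
# "set GOOGLE_CLOUD_PROJECT= my-portfolio-361305" becomes visible immediately.
for var in ("GOOGLE_CLOUD_PROJECT", "SETTINGS_NAME", "SECRET_KEY"):
    value = os.environ.get(var)
    print(f"{var}={value!r}")  # repr() exposes leading/trailing whitespace
    if value and value != value.strip():
        print(f"  -> {var} contains surrounding whitespace")
```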
But now I have a new problem: after running python manage.py makemigrations I get
RuntimeWarning: Got an error checking a consistent migration history performed for database connection 'default': connection to server at "127.0.0.1", port 5432 failed: FATAL: password authentication failed for user "CRRathod"
warnings.warn(
No changes detected
It seems as if I have entered the wrong password, but I have checked that as well.
Thank You!
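Regarding the "password authentication failed for user "CRRathod"" warning above: that message means the connection reached Postgres and the credentials parsed out of DATABASE_URL were rejected, so it is worth confirming what django-environ actually extracts from that secret. A minimal sketch with a placeholder URL (the postgres://USER:PASSWORD@//cloudsql/... format is assumed from the GCP Django sample, not taken from the real secret):

```
import io
import environ

# Placeholder connection string; substitute the DATABASE_URL stored in the
# django_settings secret or in the local .env file.
payload = (
    "DATABASE_URL=postgres://CRRathod:example-password"
    "@//cloudsql/my-portfolio-361305:REGION:INSTANCE/DBNAME\n"
)

env = environ.Env()
env.read_env(io.StringIO(payload))

db = env.db()
# Show everything except the password; HOST/PORT are later overridden to
# 127.0.0.1 when USE_CLOUD_SQL_AUTH_PROXY is set, as in settings.py above.
print({k: v for k, v in db.items() if k != "PASSWORD"})
```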
Describe the bug
Python raises an error during the initialization of the medusa container.
Environment:
```
apiVersion: v1
kind: Secret
metadata:
  name: medusa-bucket-key
type: Opaque
stringData:
  medusa_s3_credentials: |-
    [default]
    aws_access_key_id = xxxxxx
    aws_secret_access_key = xxxxxxxx
```
medusa-operator version:
0.12.2
Helm charts version info
apiVersion: v2
name: k8ssandra
type: application
version: 1.6.0-SNAPSHOT
dependencies:
  - name: cass-operator
    version: 0.35.2
  - name: reaper-operator
    version: 0.32.3
  - name: medusa-operator
    version: 0.32.0
  - name: k8ssandra-common
    version: 0.28.4
Kubernetes version information:
v1.23.1
Kubernetes cluster kind:
EKS
Operator logs:
MEDUSA_MODE = GRPC
sleeping for 0 sec
Starting Medusa gRPC service
INFO:root:Init service
[2022-05-10 12:56:28,368] INFO: Init service
DEBUG:root:Loading storage_provider: s3
[2022-05-10 12:56:28,368] DEBUG: Loading storage_provider: s3
DEBUG:urllib3.connectionpool:Starting new HTTP connection (1): 169.254.169.254:80
[2022-05-10 12:56:28,371] DEBUG: Starting new HTTP connection (1): 169.254.169.254:80
DEBUG:urllib3.connectionpool:http://169.254.169.254:80 "PUT /latest/api/token HTTP/1.1" 200 56
[2022-05-10 12:56:28,373] DEBUG: http://169.254.169.254:80 "PUT /latest/api/token HTTP/1.1" 200 56
DEBUG:root:Reading AWS credentials from /etc/medusa-secrets/medusa_s3_credentials
[2022-05-10 12:56:28,373] DEBUG: Reading AWS credentials from /etc/medusa-secrets/medusa_s3_credentials
Traceback (most recent call last):
  File "/usr/lib/python3.6/runpy.py", line 193, in _run_module_as_main
    "__main__", mod_spec)
  File "/usr/lib/python3.6/runpy.py", line 85, in _run_code
    exec(code, run_globals)
  File "/home/cassandra/medusa/service/grpc/server.py", line 297, in <module>
    server.serve()
  File "/home/cassandra/medusa/service/grpc/server.py", line 60, in serve
    medusa_pb2_grpc.add_MedusaServicer_to_server(MedusaService(config), self.grpc_server)
  File "/home/cassandra/medusa/service/grpc/server.py", line 99, in __init__
    self.storage = Storage(config=self.config.storage)
  File "/home/cassandra/medusa/storage/__init__.py", line 72, in __init__
    self.storage_driver = self._connect_storage()
  File "/home/cassandra/medusa/storage/__init__.py", line 92, in _connect_storage
    s3_storage = S3Storage(self._config)
  File "/home/cassandra/medusa/storage/s3_storage.py", line 40, in __init__
    super().__init__(config)
  File "/home/cassandra/medusa/storage/abstract_storage.py", line 39, in __init__
    self.driver = self.connect_storage()
  File "/home/cassandra/medusa/storage/s3_storage.py", line 78, in connect_storage
    profile = aws_config[aws_profile]
  File "/usr/lib/python3.6/configparser.py", line 959, in __getitem__
    raise KeyError(key)
KeyError: 'default'
What could be the problem?
thanks
Cristian
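For reference, the final KeyError comes from configparser: medusa reads /etc/medusa-secrets/medusa_s3_credentials and then looks up the "default" profile, and ConfigParser.read() silently ignores a file it cannot open, so a missing, empty, or differently named file only surfaces as KeyError: 'default'. A minimal reproduction of that failure mode (path taken from the log above):

```
import configparser

aws_config = configparser.ConfigParser()
# read() returns the list of files it actually parsed and silently skips
# anything it cannot open, so a missing or unreadable mount gives [].
parsed = aws_config.read("/etc/medusa-secrets/medusa_s3_credentials")
print("files parsed:", parsed)
print("sections found:", aws_config.sections())

# This is the lookup medusa performs; it raises KeyError: 'default' whenever
# the [default] section is absent (file missing, empty, or wrong key name).
profile = aws_config["default"]
```

So the thing to verify is that the Secret's stringData key really ends up as a file named medusa_s3_credentials at that path, and that its content starts with [default].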
I am using a YAML configuration file to create log handlers with logging.handlers.RotatingFileHandler; in the configuration file I have set maxBytes to 10 MB.
Please see below for reference:
logging:
  version: 1
  formatters:
    simple:
      format: "%(asctime)s - %(lineno)d - %(name)s - %(levelname)s - %(funcName)s - %(message)s"
  handlers:
    console:
      class: logging.StreamHandler
      level: DEBUG
      formatter: simple
      stream: ext://sys.stdout
    info_file_handler:
      class: logging.handlers.RotatingFileHandler
      level: DEBUG
      formatter: simple
      filename: info.log
      maxBytes: 10485760 # 10MB
      backupCount: 20
      encoding: utf8
    error_file_handler:
      class: logging.handlers.RotatingFileHandler
      level: ERROR
      formatter: simple
      filename: errors.log
      maxBytes: 10485760 # 10MB
      backupCount: 20
      encoding: utf8
  root:
    level: DEBUG
    handlers: [console, info_file_handler, error_file_handler]
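For context, a file with a top-level logging: key like the one above is usually applied with yaml.safe_load plus logging.config.dictConfig; a minimal sketch, assuming the config lives in a file called logging.yaml:

```
import logging
import logging.config

import yaml  # PyYAML

# "logging.yaml" is an assumed file name for the configuration shown above.
with open("logging.yaml") as fh:
    config = yaml.safe_load(fh)

# The formatters/handlers/root sections sit under the top-level "logging" key.
logging.config.dictConfig(config["logging"])
logging.getLogger(__name__).info("logging configured")
```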
Now, once a log file (e.g. info.log) is generated and reaches 10 MB, the handler creates another file, for example info1.log, and appends new logs to it.
But I would like to delete the old log history once the file reaches 10 MB and write the logs again to the same file.
I do not want a new file to be created for appending new logs.
I'm trying to create logs using the following logging config.
But in the handler 'info_file_handler', mode: 'w' does not overwrite the file when the class is logging.handlers.RotatingFileHandler. If I replace the class with logging.FileHandler, the output log file does get overwritten. Does logging.handlers.RotatingFileHandler need some extra code to be added here?
# https://gist.github.com/kingspp/9451566a5555fb022215ca2b7b802f19
version: 1
disable_existing_loggers: true
formatters:
  standard:
    # format: "%(asctime)s - %(name)s - %(levelname)s - %(message)s"
    format: "%(levelname)s: %(message)s"
  error:
    format: "%(levelname)s <PID %(process)d:%(processName)s> %(name)s.%(funcName)s(): %(message)s"
handlers:
  console:
    class: logging.StreamHandler
    level: DEBUG
    formatter: standard
    stream: ext://sys.stdout
  info_file_handler:
    class: logging.handlers.RotatingFileHandler
    level: INFO
    formatter: standard
    filename: info.log
    mode: 'w'
    maxBytes: 10485760 # 10MB
    backupCount: 20
    encoding: utf8
root:
  level: NOTSET
  handlers: [console]
  propagate: no
loggers:
  my_module:
    level: INFO
    handlers: [info_file_handler]
    propagate: no
I have the same problem: how do you make the file rotate each time logging starts, while keeping the specified file-size rotation?
From logging.handlers:
    If maxBytes is zero, rollover never occurs.
    """
    # If rotation/rollover is wanted, it doesn't make sense to use another
    # mode. If for example 'w' were specified, then if there were multiple
    # runs of the calling application, the logs from previous runs would be
    # lost if the 'w' is respected, because the log file would be truncated
    # on each run.
    if maxBytes > 0:
        mode = 'a'
IMO, a mode other than 'a' seems useless here, so the 'mode' option could arguably be removed (?). If you really do want truncate-in-place behaviour, see the sketch below.
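If the goal really is "truncate the same file at maxBytes instead of producing info.log.1", one option is a small subclass whose rollover reopens the file in write mode; a rough sketch, not part of the standard library:

```
import logging.handlers


class TruncatingFileHandler(logging.handlers.RotatingFileHandler):
    """Rotate by truncating the current file instead of keeping <name>.1 backups."""

    def doRollover(self):
        if self.stream:
            self.stream.close()
            self.stream = None
        # Reopen the same file truncated, then return to append mode so normal
        # writes keep extending the fresh file until maxBytes is reached again.
        self.mode = "w"
        self.stream = self._open()
        self.mode = "a"
```

In the YAML config this would be referenced by its dotted import path, e.g. class: myapp.logutils.TruncatingFileHandler (a hypothetical module path), with backupCount left out since no backup files are kept.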
I am trying to create events for Google Calendar but am getting a FileNotFoundError for 'client_secret.json'.
from __future__ import print_function
from apiclient.discovery import build
from httplib2 import Http
from oauth2client import file, client, tools

try:
    import argparse
    flags = argparse.ArgumentParser(parents=[tools.argparser]).parse_args()
except ImportError:
    flags = None

SCOPES = 'https://www.googleapis.com/auth/calendar'
store = file.Storage('storage.json')
creds = store.get()
if not creds or creds.invalid:
    flow = client.flow_from_clientsecrets('client_secret.json', SCOPES)
    creds = tools.run_flow(flow, store, flags) \
        if flags else tools.run(flow, store)
CAL = build('calendar', 'v3', http=creds.authorize(Http()))

GMT_OFF = '-07:00'  # PDT/MST/GMT-7
EVENT = {
    'summary': 'Dinner with friends',
    'start': {'dateTime': '2015-09-15T19:00:00%s' % GMT_OFF},
    'end': {'dateTime': '2015-09-15T22:00:00%s' % GMT_OFF},
    'attendees': [
        {'email': 'friend1@example.com'},
        {'email': 'friend2@example.com'},
    ],
}

e = CAL.events().insert(calendarId='primary',
                        sendNotifications=True, body=EVENT).execute()
print('''*** %r event added:
    Start: %s
    End:   %s''' % (e['summary'].encode('utf-8'),
                    e['start']['dateTime'], e['end']['dateTime']))
Here is the error:
Warning (from warnings module):
File "C:\Users\bakat\AppData\Local\Programs\Python\Python35-32\lib\site-packages\oauth2client_helpers.py", line 255
warnings.warn(_MISSING_FILE_MESSAGE.format(filename))
UserWarning: Cannot access storage.json: No such file or directory
Traceback (most recent call last):
File "C:\Users\bakat\AppData\Local\Programs\Python\Python35-32\lib\site-packages\oauth2client\clientsecrets.py", line 121, in _loadfile
with open(filename, 'r') as fp:
FileNotFoundError: [Errno 2] No such file or directory: 'client_secret.json'
During handling of the above exception, another exception occurred:
Traceback (most recent call last):
File "C:/Users/bakat/AppData/Local/Programs/Python/Python35-32/Diet Buddy/Google_Calendar.py", line 16, in
flow = client.flow_from_clientsecrets('client_secret.json', SCOPES)
File "C:\Users\bakat\AppData\Local\Programs\Python\Python35-32\lib\site-packages\oauth2client_helpers.py", line 133, in positional_wrapper
return wrapped(*args, **kwargs)
File "C:\Users\bakat\AppData\Local\Programs\Python\Python35-32\lib\site-packages\oauth2client\client.py", line 2125, in flow_from_clientsecrets
cache=cache)
File "C:\Users\bakat\AppData\Local\Programs\Python\Python35-32\lib\site-packages\oauth2client\clientsecrets.py", line 165, in loadfile
return _loadfile(filename)
File "C:\Users\bakat\AppData\Local\Programs\Python\Python35-32\lib\site-packages\oauth2client\clientsecrets.py", line 125, in _loadfile
exc.strerror, exc.errno)
oauth2client.clientsecrets.InvalidClientSecretsError: ('Error opening file', 'client_secret.json', 'No such file or directory', 2)
I found the problem - you must go to https://developers.google.com/gmail/api/quickstart/python and follow the instructions to download your client_secrets.json.
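If the file has been downloaded but the error persists, another common cause is running the script from a different working directory than the one containing client_secret.json; a small sketch of resolving the paths relative to the script itself (only the path handling differs from the code above):

```
import os
from oauth2client import file, client

SCOPES = 'https://www.googleapis.com/auth/calendar'
HERE = os.path.dirname(os.path.abspath(__file__))

# Build absolute paths next to this script so storage.json and client_secret.json
# are found no matter which directory the script is launched from.
store = file.Storage(os.path.join(HERE, 'storage.json'))
creds = store.get()
if not creds or creds.invalid:
    flow = client.flow_from_clientsecrets(
        os.path.join(HERE, 'client_secret.json'), SCOPES)
```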