chore(celery): Cleanup config and async query specifications (#25314)

John Bodley, 2023-10-31 10:17:51 -07:00, committed by GitHub
parent 2177a50c7a
commit 1e37f0b417
10 changed files with 62 additions and 88 deletions


@@ -104,30 +104,27 @@ REDIS_HOST = "superset_cache"
 REDIS_PORT = "6379"
 class CeleryConfig:
-    broker_url = 'redis://%s:%s/0' % (REDIS_HOST, REDIS_PORT)
-    imports = ('superset.sql_lab', "superset.tasks", "superset.tasks.thumbnails", )
-    result_backend = 'redis://%s:%s/0' % (REDIS_HOST, REDIS_PORT)
+    broker_url = f"redis://{REDIS_HOST}:{REDIS_PORT}/0"
+    imports = (
+        "superset.sql_lab",
+        "superset.tasks.scheduler",
+    )
+    result_backend = f"redis://{REDIS_HOST}:{REDIS_PORT}/0"
     worker_prefetch_multiplier = 10
     task_acks_late = True
     task_annotations = {
-        'sql_lab.get_sql_results': {
-            'rate_limit': '100/s',
-        },
-        'email_reports.send': {
-            'rate_limit': '1/s',
-            'time_limit': 600,
-            'soft_time_limit': 600,
-            'ignore_result': True,
+        "sql_lab.get_sql_results": {
+            "rate_limit": "100/s",
         },
     }
     beat_schedule = {
-        'reports.scheduler': {
-            'task': 'reports.scheduler',
-            'schedule': crontab(minute='*', hour='*'),
+        "reports.scheduler": {
+            "task": "reports.scheduler",
+            "schedule": crontab(minute="*", hour="*"),
         },
-        'reports.prune_log': {
-            'task': 'reports.prune_log',
-            'schedule': crontab(minute=0, hour=0),
+        "reports.prune_log": {
+            "task": "reports.prune_log",
+            "schedule": crontab(minute=0, hour=0),
         },
     }
 CELERY_CONFIG = CeleryConfig
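
For reference, this is how the docker config's `CeleryConfig` reads once the change above is applied — a self-contained sketch assembled from the added lines (the Redis host/port values mirror the placeholders already used in this file):

```python
from celery.schedules import crontab

REDIS_HOST = "superset_cache"  # placeholder, matching the values in this file
REDIS_PORT = "6379"


class CeleryConfig:
    # f-strings replace the old %-style formatting of the broker/result URLs
    broker_url = f"redis://{REDIS_HOST}:{REDIS_PORT}/0"
    # "superset.tasks" / "superset.tasks.thumbnails" give way to the scheduler module
    imports = (
        "superset.sql_lab",
        "superset.tasks.scheduler",
    )
    result_backend = f"redis://{REDIS_HOST}:{REDIS_PORT}/0"
    worker_prefetch_multiplier = 10
    task_acks_late = True
    # only the SQL Lab rate limit remains; the email_reports annotations are gone
    task_annotations = {
        "sql_lab.get_sql_results": {"rate_limit": "100/s"},
    }
    beat_schedule = {
        "reports.scheduler": {
            "task": "reports.scheduler",
            "schedule": crontab(minute="*", hour="*"),
        },
        "reports.prune_log": {
            "task": "reports.prune_log",
            "schedule": crontab(minute=0, hour=0),
        },
    }


CELERY_CONFIG = CeleryConfig
```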


@@ -23,30 +23,17 @@ and web server processes should have the same configuration.
 ```python
 class CeleryConfig(object):
-    broker_url = 'redis://localhost:6379/0'
+    broker_url = "redis://localhost:6379/0"
     imports = (
-        'superset.sql_lab',
-        'superset.tasks',
+        "superset.sql_lab",
+        "superset.tasks.scheduler",
     )
-    result_backend = 'redis://localhost:6379/0'
-    worker_log_level = 'DEBUG'
+    result_backend = "redis://localhost:6379/0"
     worker_prefetch_multiplier = 10
     task_acks_late = True
     task_annotations = {
-        'sql_lab.get_sql_results': {
-            'rate_limit': '100/s',
-        },
-        'email_reports.send': {
-            'rate_limit': '1/s',
-            'time_limit': 120,
-            'soft_time_limit': 150,
-            'ignore_result': True,
-        },
-    }
-    beat_schedule = {
-        'email_reports.schedule_hourly': {
-            'task': 'email_reports.schedule_hourly',
-            'schedule': crontab(minute=1, hour='*'),
+        "sql_lab.get_sql_results": {
+            "rate_limit": "100/s",
         },
     }


@@ -114,7 +114,10 @@ from s3cache.s3cache import S3Cache
 class CeleryConfig(object):
     broker_url = "redis://localhost:6379/0"
-    imports = ("superset.sql_lab", "superset.tasks", "superset.tasks.thumbnails")
+    imports = (
+        "superset.sql_lab",
+        "superset.tasks.thumbnails",
+    )
     result_backend = "redis://localhost:6379/0"
     worker_prefetch_multiplier = 10
     task_acks_late = True


@@ -367,37 +367,35 @@ configOverrides:
   celery_conf: |
     from celery.schedules import crontab
-    class CeleryConfig(object):
+    class CeleryConfig:
       broker_url = f"redis://{env('REDIS_HOST')}:{env('REDIS_PORT')}/0"
-      imports = ('superset.sql_lab', "superset.tasks", "superset.tasks.thumbnails", )
+      imports = (
+        "superset.sql_lab",
+        "superset.tasks.cache",
+        "superset.tasks.scheduler",
+      )
       result_backend = f"redis://{env('REDIS_HOST')}:{env('REDIS_PORT')}/0"
       task_annotations = {
-        'sql_lab.get_sql_results': {
-          'rate_limit': '100/s',
-        },
-        'email_reports.send': {
-          'rate_limit': '1/s',
-          'time_limit': 600,
-          'soft_time_limit': 600,
-          'ignore_result': True,
+        "sql_lab.get_sql_results": {
+          "rate_limit": "100/s",
         },
       }
       beat_schedule = {
-        'reports.scheduler': {
-          'task': 'reports.scheduler',
-          'schedule': crontab(minute='*', hour='*'),
+        "reports.scheduler": {
+          "task": "reports.scheduler",
+          "schedule": crontab(minute="*", hour="*"),
         },
-        'reports.prune_log': {
-          'task': 'reports.prune_log',
+        "reports.prune_log": {
+          "task": "reports.prune_log",
           'schedule': crontab(minute=0, hour=0),
         },
         'cache-warmup-hourly': {
-          'task': 'cache-warmup',
-          'schedule': crontab(minute='*/30', hour='*'),
-          'kwargs': {
-            'strategy_name': 'top_n_dashboards',
-            'top_n': 10,
-            'since': '7 days ago',
+          "task": "cache-warmup",
+          "schedule": crontab(minute="*/30", hour="*"),
+          "kwargs": {
+            "strategy_name": "top_n_dashboards",
+            "top_n": 10,
+            "since": "7 days ago",
           },
         }
       }
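
A note on the `cache-warmup-hourly` entry retained above: in Celery, the `kwargs` of a `beat_schedule` entry are passed to the named task each time the schedule fires. A generic, stand-alone sketch of that mechanism (the task body here is illustrative, not Superset's actual implementation):

```python
from celery import Celery
from celery.schedules import crontab

app = Celery("example", broker="redis://localhost:6379/0")  # placeholder broker URL


@app.task(name="cache-warmup")  # illustrative task, not Superset's real cache-warmup
def cache_warmup(strategy_name: str, top_n: int, since: str) -> str:
    return f"warming top {top_n} dashboards via {strategy_name}, since {since}"


app.conf.beat_schedule = {
    "cache-warmup-hourly": {
        "task": "cache-warmup",
        "schedule": crontab(minute="*/30", hour="*"),
        # beat hands these keyword arguments to the task on every tick
        "kwargs": {
            "strategy_name": "top_n_dashboards",
            "top_n": 10,
            "since": "7 days ago",
        },
    },
}
```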


@@ -29,7 +29,7 @@ maintainers:
 - name: craig-rueda
   email: craig@craigrueda.com
   url: https://github.com/craig-rueda
-version: 0.10.13
+version: 0.10.14
 dependencies:
 - name: postgresql
   version: 12.1.6


@@ -23,7 +23,7 @@ NOTE: This file is generated by helm-docs: https://github.com/norwoodj/helm-docs
 # superset
-![Version: 0.10.13](https://img.shields.io/badge/Version-0.10.13-informational?style=flat-square)
+![Version: 0.10.14](https://img.shields.io/badge/Version-0.10.14-informational?style=flat-square)
 Apache Superset is a modern, enterprise-ready business intelligence web application


@@ -84,15 +84,14 @@ SQLALCHEMY_DATABASE_URI = f"postgresql+psycopg2://{env('DB_USER')}:{env('DB_PASS
 SQLALCHEMY_TRACK_MODIFICATIONS = True
 SECRET_KEY = env('SECRET_KEY', 'thisISaSECRET_1234')
-class CeleryConfig(object):
-  CELERY_IMPORTS = ('superset.sql_lab', )
-  CELERY_ANNOTATIONS = {'tasks.add': {'rate_limit': '10/s'}}
+class CeleryConfig:
+  imports = ("superset.sql_lab", )
 {{- if .Values.supersetNode.connections.redis_password }}
-  BROKER_URL = f"redis://:{env('REDIS_PASSWORD')}@{env('REDIS_HOST')}:{env('REDIS_PORT')}/0"
-  CELERY_RESULT_BACKEND = f"redis://:{env('REDIS_PASSWORD')}@{env('REDIS_HOST')}:{env('REDIS_PORT')}/0"
+  broker_url = f"redis://:{env('REDIS_PASSWORD')}@{env('REDIS_HOST')}:{env('REDIS_PORT')}/0"
+  result_backend = f"redis://:{env('REDIS_PASSWORD')}@{env('REDIS_HOST')}:{env('REDIS_PORT')}/0"
 {{- else }}
-  BROKER_URL = f"redis://{env('REDIS_HOST')}:{env('REDIS_PORT')}/0"
-  CELERY_RESULT_BACKEND = f"redis://{env('REDIS_HOST')}:{env('REDIS_PORT')}/0"
+  broker_url = f"redis://{env('REDIS_HOST')}:{env('REDIS_PORT')}/0"
+  result_backend = f"redis://{env('REDIS_HOST')}:{env('REDIS_PORT')}/0"
 {{- end }}
 CELERY_CONFIG = CeleryConfig
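
The switch from `BROKER_URL`/`CELERY_IMPORTS`/`CELERY_RESULT_BACKEND`/`CELERY_ANNOTATIONS` to `broker_url`/`imports`/`result_backend` follows Celery's lower-case setting names; the upper-case spellings are the legacy style. A minimal sketch, assuming a local Redis URL as a placeholder, of how such a class is consumed by a Celery app:

```python
from celery import Celery


class CeleryConfig:
    # new-style, lower-case Celery setting names
    broker_url = "redis://localhost:6379/0"      # placeholder URL
    imports = ("superset.sql_lab",)
    result_backend = "redis://localhost:6379/0"  # placeholder URL


app = Celery("superset")
app.config_from_object(CeleryConfig)

# the values resolve under their lower-case names on the app's config
print(app.conf.broker_url)      # redis://localhost:6379/0
print(app.conf.result_backend)  # redis://localhost:6379/0
```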


@@ -940,24 +940,16 @@ CELERY_BEAT_SCHEDULER_EXPIRES = timedelta(weeks=1)
 class CeleryConfig:  # pylint: disable=too-few-public-methods
     broker_url = "sqla+sqlite:///celerydb.sqlite"
-    imports = ("superset.sql_lab",)
+    imports = ("superset.sql_lab", "superset.tasks.scheduler")
     result_backend = "db+sqlite:///celery_results.sqlite"
     worker_prefetch_multiplier = 1
     task_acks_late = False
     task_annotations = {
-        "sql_lab.get_sql_results": {"rate_limit": "100/s"},
-        "email_reports.send": {
-            "rate_limit": "1/s",
-            "time_limit": int(timedelta(seconds=120).total_seconds()),
-            "soft_time_limit": int(timedelta(seconds=150).total_seconds()),
-            "ignore_result": True,
+        "sql_lab.get_sql_results": {
+            "rate_limit": "100/s",
         },
     }
     beat_schedule = {
-        "email_reports.schedule_hourly": {
-            "task": "email_reports.schedule_hourly",
-            "schedule": crontab(minute=1, hour="*"),
-        },
         "reports.scheduler": {
             "task": "reports.scheduler",
             "schedule": crontab(minute="*", hour="*"),
@@ -1558,7 +1550,7 @@ GLOBAL_ASYNC_QUERIES_JWT_COOKIE_SAMESITE: None | (
 ) = None
 GLOBAL_ASYNC_QUERIES_JWT_COOKIE_DOMAIN = None
 GLOBAL_ASYNC_QUERIES_JWT_SECRET = "test-secret-change-me"
-GLOBAL_ASYNC_QUERIES_TRANSPORT = "polling"
+GLOBAL_ASYNC_QUERIES_TRANSPORT: Literal["polling", "ws"] = "polling"
 GLOBAL_ASYNC_QUERIES_POLLING_DELAY = int(
     timedelta(milliseconds=500).total_seconds() * 1000
 )
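
Annotating `GLOBAL_ASYNC_QUERIES_TRANSPORT` with `Literal["polling", "ws"]` lets a type checker reject any transport other than the two supported values. A small stand-alone illustration (everything apart from the setting name itself is illustrative):

```python
from typing import Literal, get_args

GLOBAL_ASYNC_QUERIES_TRANSPORT: Literal["polling", "ws"] = "polling"

# a type checker (mypy, pyright) would flag an assignment such as "http";
# the allowed values are also introspectable at runtime:
allowed = get_args(Literal["polling", "ws"])
assert GLOBAL_ASYNC_QUERIES_TRANSPORT in allowed  # ("polling", "ws")
```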


@@ -133,11 +133,10 @@ ALERT_REPORTS_QUERY_EXECUTION_MAX_TRIES = 3
 class CeleryConfig:
-    BROKER_URL = f"redis://{REDIS_HOST}:{REDIS_PORT}/{REDIS_CELERY_DB}"
-    CELERY_IMPORTS = ("superset.sql_lab",)
-    CELERY_RESULT_BACKEND = f"redis://{REDIS_HOST}:{REDIS_PORT}/{REDIS_RESULTS_DB}"
-    CELERY_ANNOTATIONS = {"sql_lab.add": {"rate_limit": "10/s"}}
-    CONCURRENCY = 1
+    broker_url = f"redis://{REDIS_HOST}:{REDIS_PORT}/{REDIS_CELERY_DB}"
+    imports = ("superset.sql_lab",)
+    result_backend = f"redis://{REDIS_HOST}:{REDIS_PORT}/{REDIS_RESULTS_DB}"
+    concurrency = 1
 CELERY_CONFIG = CeleryConfig


@@ -62,10 +62,9 @@ REDIS_RESULTS_DB = os.environ.get("REDIS_RESULTS_DB", 3)
 class CeleryConfig:
-    BROKER_URL = f"redis://{REDIS_HOST}:{REDIS_PORT}/{REDIS_CELERY_DB}"
-    CELERY_IMPORTS = ("superset.sql_lab", "superset.tasks.thumbnails")
-    CELERY_ANNOTATIONS = {"sql_lab.add": {"rate_limit": "10/s"}}
-    CONCURRENCY = 1
+    broker_url = f"redis://{REDIS_HOST}:{REDIS_PORT}/{REDIS_CELERY_DB}"
+    imports = ("superset.sql_lab", "superset.tasks.thumbnails")
+    concurrency = 1
 CELERY_CONFIG = CeleryConfig