Skip to content

Instantly share code, notes, and snippets.

View supreme-core's full-sized avatar
:octocat:

I am me supreme-core

:octocat:
View GitHub Profile
import logging
from celery.app.defaults import DEFAULT_TASK_LOG_FMT, DEFAULT_PROCESS_LOG_FMT
class CeleryTaskFilter(logging.Filter):
    """Logging filter that passes only records emitted from Celery worker processes.

    Celery worker processes carry a ``processName`` containing 'Worker'
    (e.g. 'PoolWorker-1'); records from other processes are dropped.
    """

    def filter(self, record):
        # Keep the record only when the emitting process looks like a
        # Celery worker. `in` is the idiomatic membership test, replacing
        # the original `record.processName.find('Worker') != -1`.
        # NOTE(review): assumes record.processName is always a str —
        # confirm no handler emits records with processName unset.
        return 'Worker' in record.processName
@supreme-core
supreme-core / awesome_task.py
Created March 6, 2022 22:54 — forked from winhamwr/awesome_task.py
Celery base task that adds some niceties for longish-running or singleton jobs.
"""
Celery base task aimed at longish-running jobs that return a result.
``AwesomeResultTask`` adds thundering herd avoidance, result caching, progress
reporting, error fallback and JSON encoding of results.
"""
from __future__ import division
import logging
import simplejson
@supreme-core
supreme-core / schedulers.py
Created March 6, 2022 22:53 — forked from brolewis/schedulers.py
A distributed job scheduler for Celery using ZooKeeper (via kazoo) to manage the locking.
'''Zookeeper-based Scheduler'''
## Standard Library
import cPickle # Store dictionary in ZooKeeper
import datetime # Time delta
import socket # Hostname
## Third Party
import celery # Current app
import celery.beat # Scheduler
import celery.utils.log # Get logger
import kazoo.client # ZooKeeper Client Library
@supreme-core
supreme-core / celery_tasks_error_handling.py
Created March 6, 2022 22:53 — forked from darklow/celery_tasks_error_handling.py
Celery tasks error handling example
from celery import Task
from celery.task import task
from my_app.models import FailedTask
from django.db import models
@task(base=LogErrorsTask)
def some_task():
    """Example Celery task whose failures are handled by ``LogErrorsTask``.

    Fix: the original read ``def some task():`` — a space in the
    identifier, which is a SyntaxError; the function name is ``some_task``.

    NOTE(review): ``result`` is not defined in this snippet — presumably
    a placeholder for the task's real computation; confirm against the
    full gist before use.
    """
    return result
class LogErrorsTask(Task):
@supreme-core
supreme-core / celery_task_monitor.py
Created March 6, 2022 22:53 — forked from linar-jether/celery_task_monitor.py
Celery task monitor, logs task state to MongoDB
import pickle
import threading
from Queue import Queue
import time
from bson import InvalidDocument
from celery.utils.log import get_task_logger
logger = get_task_logger(__name__)
@supreme-core
supreme-core / dask_celery_scheduler.py
Created March 6, 2022 22:51 — forked from linar-jether/dask_celery_scheduler.py
A dask distributed scheduler based on Celery tasks - Allows reusing an existing celery cluster for ad-hoc computation
from __future__ import absolute_import, division, print_function
import multiprocessing
import pickle
from multiprocessing.pool import ThreadPool
from celery import shared_task
from dask.local import get_async # TODO: get better get
from dask.context import _globals
from dask.optimize import fuse, cull
@supreme-core
supreme-core / track_celery.py
Created March 6, 2022 22:46 — forked from hanneshapke/track_celery.py
Decorator for Celery functions to measure the execution time and memory usage
import time
from memory_profiler import memory_usage
import logging
celery_logger = logging.getLogger('celery')
def track_celery(method):
"""
@supreme-core
supreme-core / celeryconfig.py
Created March 6, 2022 22:45 — forked from kenshinx/celeryconfig.py
Celery config file in our project. Hope can be reference to some other guys
import os
import sys
from kombu import Exchange, Queue
from datetime import timedelta
from config import cfg
import log
@supreme-core
supreme-core / celerylinks.txt
Last active March 7, 2022 05:58
celery links
# This work is licensed under the GNU Public License (GPL).
# To view a copy of this license, visit http://www.gnu.org/copyleft/gpl.html
# For more information visit this blog post http://mpcabd.igeex.biz/python-celery-asynchronous-task-decorator/
# Written by Abd Allah Diab (mpcabd)
# Email: mpcabd ^at^ gmail ^dot^ com
# Website: http://mpcabd.igeex.biz
from django.utils.decorators import available_attrs