diff --git a/.env b/.env new file mode 100644 index 0000000..8a7c673 --- /dev/null +++ b/.env @@ -0,0 +1,51 @@ +# ============================================================ +# .env โ€” Environment variables for docker-compose +# Used by all services: core, celeryworker, flower, db, broker +# ============================================================ + +# ------------------------------------------------------------ +# Python / General +# ------------------------------------------------------------ +PYTHONPATH=. +LOGS_LEVEL=DEBUG +ENVIRONMENT=local + +# ------------------------------------------------------------ +# Django +# ------------------------------------------------------------ +DJANGO_SECRET_KEY=s8s6s5t2nu00 +DJANGO_DEBUG=True +DJANGO_ADMIN_URL=admin/ +DJANGO_ALLOWED_HOSTS=* +DJANGO_SETTINGS_MODULE=core.settings.local + +# ------------------------------------------------------------ +# PostgreSQL Database +# ------------------------------------------------------------ +DB_ENGINE=django.db.backends.postgresql_psycopg2 +DB_NAME=postgres +DB_USER=postgres +DB_PASSWORD=qwerty123 +DB_HOST=db +DB_PORT=5432 + +# PostgreSQL image environment variables (used by postgres container) +POSTGRES_DB=postgres +POSTGRES_USER=postgres +POSTGRES_PASSWORD=qwerty123 + +# ------------------------------------------------------------ +# Celery +# ------------------------------------------------------------ +CELERY_RESULT_BACKEND=django-db +CELERY_BROKER_URL=amqp://admin:admin@broker:5672/vhost +CELERY_WORKER_CONCURRENCY=1 +CELERY_WORKER_PREFETCH_MULTIPLIER=1 +CELERY_WORKER_MAX_TASKS_PER_CHILD=10 + +# ------------------------------------------------------------ +# RabbitMQ Broker +# ------------------------------------------------------------ +RABBITMQ_DEFAULT_USER=admin +RABBITMQ_DEFAULT_PASS=admin +RABBITMQ_DEFAULT_VHOST=vhost diff --git a/Dockerfile b/Dockerfile index cb77737..0594d8a 100644 --- a/Dockerfile +++ b/Dockerfile @@ -1,44 +1,100 @@ -FROM python:3.9.13 +# 
============================================================ +# Dockerfile for the Django/Celery core service +# Supports: local, development, production environments +# Usage: +# Local: docker build --build-arg ENVIRONMENT=local -t core . +# Production: docker build --build-arg ENVIRONMENT=production -t core . +# ============================================================ -ARG ENVIRONMENT=default +# Base image: Python 3.11 slim (LTS-aligned, amd64 compatible) +FROM python:3.11-slim -ENV PYTHONUNBUFFERED 1 \ - PIP_NO_CACHE_DIR=off \ - PIP_DISABLE_PIP_VERSION_CHECK=1 +# ------------------------------------------------------------ +# Build argument to switch between environments +# Values: local | development | production (default: local) +# ------------------------------------------------------------ +ARG ENVIRONMENT=local +# ------------------------------------------------------------ +# Environment variables for Python and pip behaviour +# ------------------------------------------------------------ +ENV PYTHONUNBUFFERED=1 \ + PYTHONDONTWRITEBYTECODE=1 \ + PIP_NO_CACHE_DIR=off \ + PIP_DISABLE_PIP_VERSION_CHECK=1 \ + ENVIRONMENT=${ENVIRONMENT} +# ------------------------------------------------------------ +# Set working directory +# ------------------------------------------------------------ WORKDIR /app -COPY requirements/*.txt /tmp/requirements/ - -RUN set -x \ - && buildDeps=" \ - build-essential \ - " \ - && runDeps=" \ - git \ - " \ - && localDeps=" \ - telnet \ - " \ - && apt-get update \ - && apt-get install -y --no-install-recommends $buildDeps \ - && apt-get install -y --no-install-recommends $runDeps \ - && if [ $ENVIRONMENT = local ] || [ $ENVIRONMENT = development ]; then \ - apt-get install -y --no-install-recommends $localDeps \ - # Install python dev dependencies - && pip install -r /tmp/requirements/local.txt; \ - else \ - # Install python production dependencies - pip install -r /tmp/requirements/production.txt; \ - # other environment to local 
remove the build dependencies - apt-get remove -y $buildDeps; \ - fi \ - && apt-get clean \ - && rm -rf /var/lib/apt/lists/* \ - # clean tmp dir - && rm -rf /tmp/* +# ------------------------------------------------------------ +# Install system dependencies +# - build-essential : needed to compile psycopg2 C extensions +# - libpq-dev : PostgreSQL client headers for psycopg2 +# - git : required by some pip packages +# - telnet : useful for local debugging only +# NOTE: build deps stay in the image; use a multi-stage build to drop them +# ------------------------------------------------------------ +RUN apt-get update && apt-get install -y --no-install-recommends \ + build-essential \ + libpq-dev \ + git \ + $(if [ "$ENVIRONMENT" = "local" ] || [ "$ENVIRONMENT" = "development" ]; then echo "telnet"; fi) \ + && apt-get clean \ + && rm -rf /var/lib/apt/lists/* +# ------------------------------------------------------------ +# Copy only requirements files first (layer cache optimisation) +# ------------------------------------------------------------ +COPY requirements/ /tmp/requirements/ + +# ------------------------------------------------------------ +# Install Python dependencies based on the target environment +# - local/development : requirements/local.txt (includes debug toolbar) +# - production : requirements/production.txt (includes gunicorn) +# ------------------------------------------------------------ +RUN if [ "$ENVIRONMENT" = "local" ] || [ "$ENVIRONMENT" = "development" ]; then \ + pip install -r /tmp/requirements/local.txt; \ + else \ + pip install -r /tmp/requirements/production.txt; \ + fi \ + # Clean up temporary requirements files + && rm -rf /tmp/requirements + +# ------------------------------------------------------------ +# Copy the full application source code into the container +# ------------------------------------------------------------ COPY . . 
-CMD [ "python","manage.py","runserver","0.0.0.0:8000" ] +# ------------------------------------------------------------ +# Mark the entrypoint script (copied above via COPY . .) executable +# The entrypoint handles: +# - Waiting for PostgreSQL to be ready +# - Running makemigrations and migrate +# - Loading initial data (if needed) +# ------------------------------------------------------------ +RUN chmod +x /app/docker-entrypoint.sh + +# ------------------------------------------------------------ +# Expose the application port +# ------------------------------------------------------------ +EXPOSE 8000 + +# ------------------------------------------------------------ +# Startup command: +# - local/development : Django dev server (hot-reload) +# - production : Gunicorn WSGI server (multi-worker) +# The entrypoint script runs first to ensure DB readiness +# ------------------------------------------------------------ +CMD if [ "$ENVIRONMENT" = "local" ] || [ "$ENVIRONMENT" = "development" ]; then \ + /app/docker-entrypoint.sh; \ + else \ + /app/docker-entrypoint.sh && \ + gunicorn core.wsgi:application \ + --bind 0.0.0.0:8000 \ + --workers 2 \ + --timeout 120 \ + --log-level info; \ + fi diff --git a/Dockerfile.celeryworker b/Dockerfile.celeryworker new file mode 100644 index 0000000..cb9a0ad --- /dev/null +++ b/Dockerfile.celeryworker @@ -0,0 +1,95 @@ +# ============================================================ +# Dockerfile for the celeryworker service +# Python 3.11 + Django 4.2 + Celery 5.3.6 +# Supports local and production environments via ARG ENVIRONMENT +# ============================================================ + +FROM python:3.11-slim + +# ------------------------------------------------------------ +# Build argument to switch between local and production setups +# Usage: docker build --build-arg ENVIRONMENT=production ... 
+# ------------------------------------------------------------ +ARG ENVIRONMENT=local + +# ------------------------------------------------------------ +# Environment variables: +# - PYTHONUNBUFFERED: ensures stdout/stderr are flushed immediately (important for logs) +# - PIP_NO_CACHE_DIR: reduces image size by not caching pip downloads +# - PIP_DISABLE_PIP_VERSION_CHECK: suppresses pip version warnings +# - DJANGO_SETTINGS_MODULE: default settings module (overridable at runtime) +# ------------------------------------------------------------ +ENV PYTHONUNBUFFERED=1 \ + PIP_NO_CACHE_DIR=off \ + PIP_DISABLE_PIP_VERSION_CHECK=1 \ + DJANGO_SETTINGS_MODULE=core.settings.local + +# ------------------------------------------------------------ +# Set the working directory inside the container +# ------------------------------------------------------------ +WORKDIR /app + +# ------------------------------------------------------------ +# Install system-level dependencies: +# - build-essential: required to compile Python C extensions (e.g. psycopg2) +# - libpq-dev: PostgreSQL client library headers (required by psycopg2) +# - git: required by some pip packages that reference git repos +# - telnet: useful for local debugging of broker/db connectivity +# ------------------------------------------------------------ +RUN apt-get update \ + && apt-get install -y --no-install-recommends \ + build-essential \ + libpq-dev \ + git \ + telnet \ + && apt-get clean \ + && rm -rf /var/lib/apt/lists/* + +# ------------------------------------------------------------ +# Copy only the requirements files first to leverage Docker layer caching. +# Dependencies are re-installed only when requirements files change. 
+# ------------------------------------------------------------ +COPY requirements/ /tmp/requirements/ + +# ------------------------------------------------------------ +# Install Python dependencies based on the target environment: +# - local/development: installs local.txt (includes debug toolbar, etc.) +# - production (or any other): installs production.txt (includes gunicorn) +# After installation, clean up the temporary requirements directory. +# ------------------------------------------------------------ +RUN if [ "$ENVIRONMENT" = "local" ] || [ "$ENVIRONMENT" = "development" ]; then \ + echo "Installing LOCAL dependencies..." \ + && pip install -r /tmp/requirements/local.txt; \ + else \ + echo "Installing PRODUCTION dependencies..." \ + && pip install -r /tmp/requirements/production.txt; \ + fi \ + && rm -rf /tmp/requirements/ + +# ------------------------------------------------------------ +# Copy the full application source code into the container +# ------------------------------------------------------------ +COPY . . + +# ------------------------------------------------------------ +# Expose no HTTP port โ€” the celery worker does not serve HTTP traffic. +# It connects outbound to the broker (RabbitMQ) and database (PostgreSQL). +# ------------------------------------------------------------ + +# ------------------------------------------------------------ +# Default startup command: launch the Celery worker +# - -A core.celery : Celery app defined in core/celery.py +# - worker : run as a worker process +# - -l INFO : log level INFO +# - --concurrency 1 : number of concurrent worker processes +# - --max-tasks-per-child 1: recycle worker after N tasks (prevents memory leaks) +# - --prefetch-multiplier 1: fetch one task at a time per worker +# - -n celery@%h : unique worker name using hostname +# Override CMD at runtime for production tuning (e.g. higher concurrency). 
+# ------------------------------------------------------------ +CMD ["celery", "-A", "core.celery", "worker", \ + "-l", "INFO", \ + "--concurrency", "1", \ + "--max-tasks-per-child", "1", \ + "--prefetch-multiplier", "1", \ + "-n", "celery@%h"] diff --git a/Dockerfile.flower b/Dockerfile.flower new file mode 100644 index 0000000..44ea4fa --- /dev/null +++ b/Dockerfile.flower @@ -0,0 +1,84 @@ +# ============================================================ +# Dockerfile for the Flower service +# Flower is a Celery monitoring web UI (port 5555) +# Shares the same Django/Celery codebase as core & celeryworker +# ============================================================ + +# ------------------------------------------------------------ +# Base image: Python 3.11 slim (same version as core & celeryworker) +# ------------------------------------------------------------ +FROM python:3.11-slim + +# ------------------------------------------------------------ +# Build argument to switch between local and production envs +# Usage: +# Local: docker build --build-arg ENVIRONMENT=local ... +# Production: docker build --build-arg ENVIRONMENT=production ... 
+# ------------------------------------------------------------ +ARG ENVIRONMENT=local + +# ------------------------------------------------------------ +# Environment variables for Python runtime behaviour +# ------------------------------------------------------------ +ENV PYTHONUNBUFFERED=1 \ + PYTHONDONTWRITEBYTECODE=1 \ + PIP_NO_CACHE_DIR=off \ + PIP_DISABLE_PIP_VERSION_CHECK=1 \ + ENVIRONMENT=${ENVIRONMENT} + +# ------------------------------------------------------------ +# Set working directory +# ------------------------------------------------------------ +WORKDIR /app + +# ------------------------------------------------------------ +# Install OS-level dependencies +# build-essential : needed to compile psycopg2 C extensions +# git : required by some pip packages +# libpq-dev : PostgreSQL client headers for psycopg2 +# ------------------------------------------------------------ +RUN apt-get update && apt-get install -y --no-install-recommends \ + build-essential \ + git \ + libpq-dev \ + && rm -rf /var/lib/apt/lists/* + +# ------------------------------------------------------------ +# Copy only requirements files first (layer-cache friendly) +# ------------------------------------------------------------ +COPY requirements/ /tmp/requirements/ + +# ------------------------------------------------------------ +# Install Python dependencies based on ENVIRONMENT arg +# local -> requirements/local.txt (includes debug tools) +# production -> requirements/production.txt (includes gunicorn) +# default -> requirements/local.txt +# ------------------------------------------------------------ +RUN if [ "$ENVIRONMENT" = "production" ]; then \ + echo "Installing production dependencies..." && \ + pip install -r /tmp/requirements/production.txt; \ + else \ + echo "Installing local/development dependencies..." 
&& \ + pip install -r /tmp/requirements/local.txt; \ + fi \ + # Clean up temporary requirements files + && rm -rf /tmp/requirements + +# ------------------------------------------------------------ +# Copy the full application source code into the container +# ------------------------------------------------------------ +COPY . . + +# ------------------------------------------------------------ +# Expose Flower's default web UI port +# ------------------------------------------------------------ +EXPOSE 5555 + +# ------------------------------------------------------------ +# Default startup command: launch Flower monitoring UI +# -A core.celery : Celery app reference +# --port=5555 : bind to port 5555 +# --address=0.0.0.0 : accept connections from any interface +# Override CMD at runtime for different configurations +# ------------------------------------------------------------ +CMD ["celery", "-A", "core.celery", "flower", "--port=5555", "--address=0.0.0.0"] diff --git a/README.md b/README.md index 723c903..bc97979 100644 --- a/README.md +++ b/README.md @@ -1,535 +1,310 @@ -# example-django-celery +# Django Example Project -Complete Django + Celery example with RabbitMQ broker for asynchronous task processing. +A production-ready Django application with Celery task queue, Flower monitoring, PostgreSQL database, and RabbitMQ message broker. -## Getting Started +## ๐Ÿš€ Quick Start ### Prerequisites -- Docker and Docker Compose installed -- Make utility -### Running the Project +- [Docker](https://docs.docker.com/get-docker/) (v20.10+) +- [Docker Compose](https://docs.docker.com/compose/install/) (v2.0+) -1. Copy environment file: - ```bash - cp .env.local .env - ``` - -2. 
Start all services (Django, PostgreSQL, RabbitMQ, Celery Worker, Flower): - ```bash - make run - ``` - This will build images on first run and start: - - **Django app**: `http://localhost:8000` - - **PostgreSQL**: Database on port `5432` - - **RabbitMQ**: Broker on port `5672` (Management UI: `http://localhost:15672`) - - **Celery Worker**: Background task processor - - **Flower**: Task monitoring dashboard at `http://localhost:5555` - -3. Create superuser to access Django admin: - ```bash - make createsuperuser - ``` - -4. Access the application: - - **Django Admin**: http://localhost:8000/admin/ - - **Flower Dashboard**: http://localhost:5555 +### Run the Project -### Available Make Commands +```bash +# Clone the repository +git clone +cd django-example -Run `make help` to see all available commands: -- `make run` - Start all services -- `make stop` - Stop all containers -- `make bash` - Access bash inside core container -- `make shell` - Access Django shell -- `make migrate` - Run database migrations -- `make makemigrations` - Create new migrations -- `make reset-db` - Reset database (โš ๏ธ deletes all data) -- `make rebuild-core` - Rebuild core container from scratch -- `make rebuild-all` - Rebuild everything +# Start all services +docker compose up -d -## Project Structure +# Wait for services to initialize (about 30-45 seconds) +docker compose ps +# View logs +docker compose logs -f ``` -โ”œโ”€โ”€ apps/ -โ”‚ โ”œโ”€โ”€ client/ # Client app -โ”‚ โ”‚ โ”œโ”€โ”€ tasks.py # Celery tasks definitions -โ”‚ โ”‚ โ”œโ”€โ”€ admin.py # Admin actions that trigger tasks -โ”‚ โ”‚ โ””โ”€โ”€ views.py # Views (can trigger tasks) -โ”‚ โ”œโ”€โ”€ post/ # Post app -โ”‚ โ””โ”€โ”€ user/ # User app -โ”œโ”€โ”€ core/ -โ”‚ โ”œโ”€โ”€ celery.py # Celery configuration -โ”‚ โ””โ”€โ”€ settings/ # Django settings -โ”œโ”€โ”€ docker-compose.yml # All services orchestration -โ”œโ”€โ”€ Makefile # Helper commands -โ””โ”€โ”€ .env.local # Environment variables template -``` - ---- -## How to Install 
& Configure Celery (Step by Step) +### Access the Services -If you want to add Celery to your own Django project, follow these steps: +| Service | URL | Description | +|---------|-----|-------------| +| Django App | http://localhost:8000 | Main application | +| Django Admin | http://localhost:8000/admin/ | Admin panel (credentials: admin/admin) | +| Flower (Celery Monitor) | http://localhost:5555 | Task queue monitoring | +| RabbitMQ Management | http://localhost:15672 | Message broker UI (guest/guest) | -### Step 1: Install Dependencies +## ๐Ÿ“ Project Structure -Add to your `requirements/_base.txt` or `requirements.txt`: - -```txt -celery==5.3.6 # Core Celery library -django-celery-results==2.5.1 # Optional: Store results in DB -flower==2.0.1 # Optional: Monitoring dashboard ``` - -**Install:** -```bash -pip install -r requirements.txt +. +โ”œโ”€โ”€ core/ # Main Django application +โ”‚ โ”œโ”€โ”€ settings/ # Environment-specific settings +โ”‚ โ”‚ โ”œโ”€โ”€ base.py # Base configuration +โ”‚ โ”‚ โ”œโ”€โ”€ local.py # Local development settings +โ”‚ โ”‚ โ””โ”€โ”€ production.py # Production settings +โ”‚ โ”œโ”€โ”€ celery.py # Celery configuration +โ”‚ โ”œโ”€โ”€ urls.py # URL routing +โ”‚ โ””โ”€โ”€ wsgi.py # WSGI entry point +โ”œโ”€โ”€ apps/ # Django apps +โ”‚ โ””โ”€โ”€ users/ # User management +โ”œโ”€โ”€ requirements/ # Python dependencies +โ”‚ โ”œโ”€โ”€ base.txt # Core dependencies +โ”‚ โ”œโ”€โ”€ local.txt # Development extras +โ”‚ โ””โ”€โ”€ production.txt # Production extras +โ”œโ”€โ”€ docker-entrypoint.sh # Container startup script +โ”œโ”€โ”€ Dockerfile # Main application image +โ”œโ”€โ”€ Dockerfile.flower # Flower monitoring image +โ”œโ”€โ”€ docker-compose.yml # Service orchestration +โ”œโ”€โ”€ .env # Environment variables +โ””โ”€โ”€ README.md # This file ``` -### Step 2: Configure Django Settings - -Add Celery configuration to `core/settings/base.py`: - -```python -# Celery Configuration -# 
https://docs.celeryproject.org/en/stable/django/first-steps-with-django.html - -CELERY_BROKER_URL = env.str("CELERY_BROKER_URL") # RabbitMQ or Redis URL -CELERY_RESULT_BACKEND = env.str("CELERY_RESULT_BACKEND") # Optional: "django-db" to store results -CELERY_WORKER_PREFETCH_MULTIPLIER = env.int("CELERY_WORKER_PREFETCH_MULTIPLIER", default=1) -CELERY_RESULT_EXTENDED = env.bool("CELERY_RESULT_EXTENDED", default=False) # Optional: Extended result info -``` - -**Important notes:** -- `CELERY_RESULT_BACKEND` and `CELERY_RESULT_EXTENDED` are **optional** - only needed if you want to store task results in the database -- If using `django-celery-results`, add `"django_celery_results"` to `INSTALLED_APPS` - -Reference: [`core/settings/base.py:138-143`](https://github.com/sleakops/example-django-celery/blob/main/core/settings/base.py#L138) - -### Step 3: Create Celery App - -Create `core/celery.py`: +## ๐Ÿ”ง Services Overview -```python -import os -from celery import Celery -from django.conf import settings +| Service | Image | Port | Purpose | +|---------|-------|------|---------| +| **core** | django-example/core | 8000 | Django web application | +| **celeryworker** | django-example/core | - | Background task worker | +| **flower** | django-example/flower | 5555 | Celery monitoring UI | +| **db** | postgres:14 | 5432 | PostgreSQL database | +| **broker** | rabbitmq:3-management | 5672, 15672 | RabbitMQ message broker | -# Set default Django settings module -os.environ.setdefault("DJANGO_SETTINGS_MODULE", "core.settings.local") +## ๐Ÿ› ๏ธ Development -# Create Celery app -app = Celery("core") +### Run Migrations -# Load config from Django settings with CELERY_ namespace -app.config_from_object(settings, namespace="CELERY") - -# Auto-discover tasks.py in all Django apps -app.autodiscover_tasks() +```bash +docker compose exec core python manage.py migrate ``` -Reference: [`core/celery.py`](https://github.com/sleakops/example-django-celery/blob/main/core/celery.py) - -### 
Step 4: Initialize Celery on Django Startup +### Create Superuser -Modify `core/__init__.py` to import Celery app: - -```python -# This will make sure the app is always imported when -# Django starts so that shared_task will use this app. -from .celery import app as celery_app - -__all__ = ('celery_app',) +```bash +docker compose exec core python manage.py createsuperuser ``` -### Step 5: Define Tasks +### Run Tests -Create `tasks.py` in any Django app (e.g., `apps/client/tasks.py`): - -```python -from celery import shared_task - -@shared_task -def my_async_task(param): - # Your task logic here - print(f"Processing: {param}") - return "Task completed" +```bash +docker compose exec core python manage.py test ``` -**Magic:** Celery automatically discovers any `tasks.py` file in your Django apps! +### Execute Management Commands -Reference: [`apps/client/tasks.py`](https://github.com/sleakops/example-django-celery/blob/main/apps/client/tasks.py) +```bash +# Any Django management command +docker compose exec core python manage.py -### Step 6: Set Environment Variables +# Examples +docker compose exec core python manage.py shell +docker compose exec core python manage.py collectstatic +docker compose exec core python manage.py makemigrations +``` -Add to your `.env` file: +### View Logs ```bash -# Celery Configuration -CELERY_BROKER_URL=amqp://admin:admin@broker:5672/vhost # RabbitMQ -# OR -# CELERY_BROKER_URL=redis://localhost:6379/0 # Redis - -CELERY_RESULT_BACKEND=django-db # Optional: Store results in database +# All services +docker compose logs -f + +# Specific service +docker compose logs -f core +docker compose logs -f celeryworker +docker compose logs -f flower +docker compose logs -f db +docker compose logs -f broker ``` -### Step 7: Run Celery Worker +### Restart Services ```bash -# Development -celery -A core.celery worker -l INFO +# Restart all +docker compose restart -# Production (with more options) -celery -A core.celery worker -l INFO --concurrency 4 
--max-tasks-per-child 100 +# Restart specific service +docker compose restart core +docker compose restart celeryworker ``` -**In Docker (see `docker-compose.yml:53-70`):** -```yaml -celeryworker: - image: django-example/core - command: celery -A core.celery worker -l INFO --concurrency 1 - depends_on: - - db - - broker -``` +## ๐Ÿ” Environment Variables -### Step 8: Trigger Tasks +Key environment variables (defined in `.env`): -From anywhere in your Django code: +| Variable | Default | Description | +|----------|---------|-------------| +| `DJANGO_SECRET_KEY` | auto-generated | Django secret key | +| `DJANGO_DEBUG` | True | Debug mode (False in production) | +| `DJANGO_SETTINGS_MODULE` | core.settings.local | Settings module | +| `DB_NAME` | postgres | Database name | +| `DB_USER` | postgres | Database user | +| `DB_PASSWORD` | qwerty123 | Database password | +| `DB_HOST` | db | Database host | +| `DB_PORT` | 5432 | Database port | +| `CELERY_BROKER_URL` | amqp://admin:admin@broker:5672/vhost | RabbitMQ connection | +| `CELERY_RESULT_BACKEND` | django-db | Task result storage | -```python -from apps.client.tasks import my_async_task +## ๐Ÿ—๏ธ Building Images -# Asynchronous execution (non-blocking) -task = my_async_task.delay("parameter") -print(f"Task ID: {task.id}") +### Build All Images -# With custom options -task = my_async_task.apply_async( - args=["parameter"], - countdown=10, # Execute after 10 seconds -) +```bash +docker compose build ``` ---- - -## Celery Architecture +### Build Specific Image -This project demonstrates the complete Celery workflow for asynchronous task processing: +```bash +# Main application +docker build --build-arg ENVIRONMENT=local -t django-example/core:latest . +# Flower monitoring +docker build --build-arg ENVIRONMENT=local -f Dockerfile.flower -t django-example/flower:latest . ``` -Django View/Admin โ†’ Celery Task (.delay) โ†’ RabbitMQ Broker โ†’ Celery Worker โ†’ Result Backend (DB) -``` - -### Components - -#### 1. 
**Broker (RabbitMQ)** -Message broker that queues tasks between Django and workers. - -- **Configuration** (`.env.local`): - ```bash - CELERY_BROKER_URL=amqp://admin:admin@broker:5672/vhost - ``` -- **Docker service** (`docker-compose.yml:20-30`): RabbitMQ with management plugin -- **Ports**: - - `5672` - AMQP protocol - - `15672` - Management UI (http://localhost:15672, user: `admin`, pass: `admin`) - -#### 2. **Celery App** (`core/celery.py`) -Main Celery application configuration. -```python -app = Celery("core") -app.config_from_object(settings, namespace="CELERY") -app.autodiscover_tasks() # Finds tasks.py in all Django apps -``` +### Build for Production -- Auto-discovers tasks in `tasks.py` from all registered Django apps -- Loaded on Django startup via `core/__init__.py:3` -- Namespace `CELERY_` means all config uses this prefix in `.env` - -#### 3. **Tasks** (`apps/client/tasks.py`) -Asynchronous task definitions using `@shared_task` decorator. - -**Example task** (`apps/client/tasks.py:7-17`): -```python -@shared_task -def collect_post_by_client(client_id): - client = Client.objects.get(id=client_id) - print(f'Request: {client.username}') - - for i in range(1, 200): - Post.objects.create( - client=client, - source="twitter", - description=f"Lorem {i}" - ) +```bash +docker build --build-arg ENVIRONMENT=production -t django-example/core:prod . ``` -This task creates 200 posts asynchronously, preventing request timeout. - -#### 4. **Result Backend** -Stores task results and status in database. +## ๐Ÿงน Cleanup -- **Configuration** (`.env.local:23`): - ```bash - CELERY_RESULT_BACKEND="django-db" - ``` -- **Package**: `django-celery-results` (installed in `requirements.txt:6`) -- **Purpose**: Track task status (PENDING, STARTED, SUCCESS, FAILURE) and retrieve results - -#### 5. **Celery Worker** (`docker-compose.yml:53-70`) -Background process that executes tasks from the queue. 
- -```yaml -celeryworker: - command: celery -A core.celery worker -l INFO --concurrency 1 -``` +```bash +# Stop all services +docker compose down -- Runs in separate Docker container -- Shares same codebase as Django app -- Configuration: - - `--concurrency 1`: Process 1 task at a time - - `--max-tasks-per-child 1`: Restart worker after each task (prevents memory leaks) - - `--prefetch-multiplier 1`: Don't prefetch tasks +# Stop and remove volumes (โš ๏ธ deletes database data) +docker compose down -v -#### 6. **Flower** (`docker-compose.yml:72-87`) -Real-time monitoring dashboard for Celery. +# Remove all images +docker compose down --rmi all -```yaml -flower: - command: celery -A core.celery flower - ports: - - "5555:5555" +# Clean up unused Docker resources +docker system prune -f ``` -- **Access**: http://localhost:5555 -- **Features**: Monitor tasks, workers, queue status, task history, and execution times - ---- - -## How to Trigger Tasks +## ๐Ÿ› Troubleshooting -### Option 1: From Django Admin (Already Implemented) +### Services not starting -The project includes a Django admin action in `apps/client/admin.py:10-19`: +```bash +# Check service status +docker compose ps -```python -@admin.action(description="Run collect post task") -def collect_post(modeladmin, request, queryset): - for c in queryset.all(): - collect_post_by_client.delay(c.id) # Sends task to broker - messages.add_message( - request, - messages.INFO, - f"Run collect post task for client {c.username}", - ) -``` +# View detailed logs +docker compose logs -**Usage Steps:** -1. Go to http://localhost:8000/admin/client/client/ -2. Select one or more clients from the list -3. Choose **"Run collect post task"** from the actions dropdown -4. Click "Go" -5. The task is sent to RabbitMQ and processed by Celery worker -6. Monitor progress in Flower: http://localhost:5555 - -### Option 2: From a Django View (Code Example) - -You can trigger tasks from any Django view. 
Here's a complete example: - -```python -# apps/client/views.py -from django.http import JsonResponse -from celery.result import AsyncResult -from .tasks import collect_post_by_client - -def trigger_collect_posts(request, client_id): - """Trigger async task to collect posts for a client""" - task = collect_post_by_client.delay(client_id) - return JsonResponse({ - 'task_id': task.id, - 'status': 'Task sent to broker', - 'client_id': client_id, - 'monitor_url': f'http://localhost:5555/task/{task.id}' - }) - -def check_task_status(request, task_id): - """Check the status of a running task""" - task = AsyncResult(task_id) - - response_data = { - 'task_id': task_id, - 'status': task.status, # PENDING, STARTED, SUCCESS, FAILURE, RETRY - 'ready': task.ready(), - } - - if task.ready(): - if task.successful(): - response_data['result'] = task.result - else: - response_data['error'] = str(task.info) - - return JsonResponse(response_data) +# Check for port conflicts +docker ps ``` -**Add to `core/urls.py`:** - -```python -from django.urls import path -from apps.client import views +### Database connection issues -urlpatterns = [ - # ... 
existing patterns - path('api/collect-posts//', views.trigger_collect_posts, name='trigger_collect_posts'), - path('api/task-status//', views.check_task_status, name='check_task_status'), -] -``` - -**Usage:** ```bash -# Trigger task -curl http://localhost:8000/api/collect-posts/1/ - -# Response: -# { -# "task_id": "a7f3e0b2-5c8d-4e9f-b1a2-3d4e5f6a7b8c", -# "status": "Task sent to broker", -# "client_id": 1, -# "monitor_url": "http://localhost:5555/task/a7f3e0b2-5c8d-4e9f-b1a2-3d4e5f6a7b8c" -# } - -# Check task status -curl http://localhost:8000/api/task-status/a7f3e0b2-5c8d-4e9f-b1a2-3d4e5f6a7b8c/ - -# Response: -# { -# "task_id": "a7f3e0b2-5c8d-4e9f-b1a2-3d4e5f6a7b8c", -# "status": "SUCCESS", -# "ready": true -# } -``` +# Ensure database is healthy +docker compose ps db -### Option 3: From Django Shell +# Check database logs +docker compose logs db -```bash -make shell +# Restart database +docker compose restart db ``` -```python -from apps.client.tasks import collect_post_by_client +### Celery worker not processing tasks -# Trigger task -task = collect_post_by_client.delay(1) -print(f"Task ID: {task.id}") +```bash +# Check worker status +docker compose logs celeryworker -# Check status -print(f"Status: {task.status}") +# Restart worker +docker compose restart celeryworker -# Wait for result (blocking) -result = task.get(timeout=60) +# Check Flower for task status +open http://localhost:5555 ``` ---- +### Port already in use -## Complete Task Flow Example +If you see "port is already allocated" errors, modify the port mappings in `docker-compose.yml`: -Here's what happens when you trigger a task: +```yaml +ports: + - "8001:8000" # Use 8001 instead of 8000 +``` -1. **User Action**: Admin selects a client and clicks "Run collect post task" -2. **Django**: Calls `collect_post_by_client.delay(client_id)` (non-blocking) -3. **Celery**: Serializes task (function name + arguments) and sends to RabbitMQ -4. **RabbitMQ**: Stores task message in queue -5. 
**Celery Worker**: Picks up task from queue and executes `collect_post_by_client()` -6. **Task Execution**: Creates 200 posts in PostgreSQL database -7. **Result Storage**: Saves task result/status in `django_celery_results` table -8. **Monitoring**: View real-time progress in Flower dashboard +## ๐Ÿ“ฆ Dependencies -**Timing**: -- Django response: ~50ms (immediately returns after queuing) -- Task execution: ~5-10 seconds (running in background) -- Database operations: 200 inserts +### Python Packages ---- +Core dependencies are managed through `requirements/`: -## Environment Configuration +- **base.txt**: Django, Celery, psycopg2, gunicorn +- **local.txt**: Debug toolbar, development tools +- **production.txt**: Production optimizations -Key Celery settings in `.env.local`: +### Add New Dependencies ```bash -# Celery Configuration -CELERY_RESULT_BACKEND="django-db" # Store results in PostgreSQL -CELERY_BROKER_URL=amqp://admin:admin@broker:5672/vhost # RabbitMQ connection -CELERY_WORKER_CONCURRENCY=1 # Number of worker processes -CELERY_WORKER_PREFETCH_MULTIPLIER=1 # Tasks to prefetch per worker -CELERY_WORKER_MAX_TASKS_PER_CHILD=10 # Restart worker after N tasks +# Add to appropriate requirements file +echo "package-name==version" >> requirements/base.txt + +# Rebuild images +docker compose build ``` ---- +## ๐Ÿš€ Production Deployment -## Monitoring & Debugging +### Environment Setup -### Flower Dashboard -Access http://localhost:5555 to: -- View active workers -- Monitor task queue -- Check task execution history -- See task success/failure rates -- Inspect individual task details +1. Update `.env` with production values: + ``` + ENVIRONMENT=production + DJANGO_DEBUG=False + DJANGO_SECRET_KEY= + ``` -### RabbitMQ Management UI -Access http://localhost:15672 (user: `admin`, pass: `admin`) to: -- View message queues -- Check connection status -- Monitor message rates -- Inspect queue bindings +2. 
Build production images: + ```bash + docker build --build-arg ENVIRONMENT=production -t django-example/core:prod . + ``` -### Check Celery Worker Logs -```bash -docker logs django-example-celeryworker -f -``` +3. Run with production settings: + ```bash + docker compose -f docker-compose.yml up -d + ``` -### Check Django Logs -```bash -docker logs django-example-core -f -``` +### AWS Managed Services ---- +This project can leverage AWS managed services for production: -## Troubleshooting +| Component | AWS Service | +|-----------|-------------| +| PostgreSQL | Amazon RDS | +| RabbitMQ | Amazon MQ | +| Cache (if added) | Amazon ElastiCache | +| File Storage | Amazon S3 | -**Workers not processing tasks?** -```bash -# Check worker status -docker ps | grep celeryworker +## ๐Ÿ“š Additional Resources -# Restart worker -docker restart django-example-celeryworker -``` +- [Django Documentation](https://docs.djangoproject.com/) +- [Celery Documentation](https://docs.celeryq.dev/) +- [Flower Documentation](https://flower.readthedocs.io/) +- [Docker Documentation](https://docs.docker.com/) -**RabbitMQ connection issues?** -```bash -# Check broker is running -docker ps | grep broker +## ๐Ÿ“ License -# Check environment variable -docker exec django-example-core env | grep CELERY_BROKER_URL -``` +[Your License Here] -**Tasks stuck in PENDING?** -- Verify worker is running: `docker ps | grep celeryworker` -- Check worker logs: `docker logs django-example-celeryworker` -- Ensure broker URL is correct in `.env` +## ๐Ÿค Contributing -**Database errors in tasks?** -- Check PostgreSQL is running: `docker ps | grep db` -- Verify database migrations: `make migrate` +1. Fork the repository +2. Create a feature branch +3. Make your changes +4. Submit a pull request --- -## Health Check - -- **Port**: 8000 -- **Path**: `/healthcheck/` (if implemented) -- **Expected Status**: 200 +**Note**: This project is configured for local development by default. 
Always review and update security settings before deploying to production. diff --git a/build_celeryworker.sh b/build_celeryworker.sh new file mode 100644 index 0000000..999cf0b --- /dev/null +++ b/build_celeryworker.sh @@ -0,0 +1,8 @@ +#!/bin/sh +docker build \ + --load \ + -f /app/Dockerfile.celeryworker \ + --build-arg ENVIRONMENT=local \ + -t celeryworker:latest \ + --progress plain \ + /app diff --git a/docker-compose.yml b/docker-compose.yml index 8822484..8f9713a 100644 --- a/docker-compose.yml +++ b/docker-compose.yml @@ -1,92 +1,159 @@ -version: "3.3" +# ============================================================ +# docker-compose.yml โ€” Local development environment +# Services: db (PostgreSQL), broker (RabbitMQ), core (Django), +# celeryworker (Celery Worker), flower (Celery Monitor) +# All environment variables are loaded from .env file +# NOTE: celeryworker and flower reuse the django-example/core:latest +# image (same codebase/deps) to avoid redundant builds. +# ============================================================ + services: + + # ------------------------------------------------------------ + # PostgreSQL 14 โ€” Relational database + # Stores Django models and Celery task results (django-db backend) + # ------------------------------------------------------------ db: image: postgres:14.1 - env_file: .env container_name: django-example-db + env_file: + - .env + # PostgreSQL requires these specific env vars for initialisation environment: - - POSTGRES_USER=${DB_USER} - - POSTGRES_PASSWORD=${DB_PASSWORD} - - POSTGRES_DB=${DB_NAME} + - POSTGRES_USER=${POSTGRES_USER} + - POSTGRES_PASSWORD=${POSTGRES_PASSWORD} + - POSTGRES_DB=${POSTGRES_DB} ports: - - ${DB_PORT}:${DB_PORT} + # Host port 5433 avoids conflicts with any local PostgreSQL instance + - "5433:5432" volumes: - - db:/var/lib/postgresql/data - - # cache: - # image: redis:7.0.0-alpine - # restart: always + # Named volume for persistent database storage + - db_data:/var/lib/postgresql/data + 
healthcheck: + test: ["CMD-SHELL", "pg_isready -U ${POSTGRES_USER} -d ${POSTGRES_DB}"] + interval: 10s + timeout: 5s + retries: 5 + start_period: 30s + # ------------------------------------------------------------ + # RabbitMQ 3 โ€” Message broker for Celery tasks + # Management UI available at http://localhost:15673 + # ------------------------------------------------------------ broker: - image: rabbitmq:3-management # AWS MQ, SQS - restart: always + image: rabbitmq:3-management container_name: django-example-broker + restart: always + env_file: + - .env environment: - - RABBITMQ_DEFAULT_USER=admin - - RABBITMQ_DEFAULT_PASS=admin - - RABBITMQ_DEFAULT_VHOST=vhost + - RABBITMQ_DEFAULT_USER=${RABBITMQ_DEFAULT_USER} + - RABBITMQ_DEFAULT_PASS=${RABBITMQ_DEFAULT_PASS} + - RABBITMQ_DEFAULT_VHOST=${RABBITMQ_DEFAULT_VHOST} ports: - - 5672:5672 - - 15672:15672 + # AMQP protocol port (used by Celery workers) + - "5672:5672" + # RabbitMQ management web UI + - "15673:15672" + healthcheck: + test: ["CMD", "rabbitmq-diagnostics", "-q", "ping"] + interval: 15s + timeout: 10s + retries: 5 + start_period: 40s + # ------------------------------------------------------------ + # Core โ€” Django web application (development server) + # Uses pre-built django-example/core:latest image + # Runs migrations via docker-entrypoint.sh, then dev server + # ------------------------------------------------------------ core: - build: - context: . 
- dockerfile: Dockerfile - args: - ENVIRONMENT: local - image: django-example/core - command: ./docker-entrypoint.sh + image: django-example/core:latest container_name: django-example-core - env_file: .env + env_file: + - .env stdin_open: true tty: true - depends_on: - - db - - broker - # - cache - volumes: - - .:/app ports: + # Django dev server โ€” host port 8000 - "8000:8000" + depends_on: + db: + condition: service_healthy + broker: + condition: service_healthy + # Entrypoint waits for DB, runs migrations, then starts dev server + command: ./docker-entrypoint.sh + healthcheck: + # Use Python urllib to check Django admin page (curl not available in slim image) + test: ["CMD-SHELL", "python -c \"import urllib.request; urllib.request.urlopen('http://localhost:8000/admin/')\" || exit 1"] + interval: 30s + timeout: 10s + retries: 5 + start_period: 60s + # ------------------------------------------------------------ + # Celery Worker โ€” Processes async tasks from RabbitMQ queue + # Reuses django-example/core:latest (same codebase and deps) + # No HTTP port exposed โ€” communicates via broker only + # ------------------------------------------------------------ celeryworker: - build: - context: . 
- dockerfile: Dockerfile - args: - ENVIRONMENT: local - image: django-example/core + image: django-example/core:latest container_name: django-example-celeryworker - platform: linux/x86_64 - volumes: - - .:/app:z + platform: linux/amd64 env_file: - .env depends_on: - - db - - broker - # - cache - command: celery -A core.celery worker -l INFO --concurrency 1 --max-tasks-per-child 1 --prefetch-multiplier 1 -n celery@%h + db: + condition: service_healthy + broker: + condition: service_healthy + # Launch Celery worker with conservative concurrency settings + command: > + celery -A core.celery worker + -l INFO + --concurrency 1 + --max-tasks-per-child 1 + --prefetch-multiplier 1 + -n celery@%h + healthcheck: + # Ping the Celery worker to verify it is alive and connected to broker + test: ["CMD-SHELL", "celery -A core.celery inspect ping -d celery@$$HOSTNAME || exit 1"] + interval: 30s + timeout: 15s + retries: 3 + start_period: 60s + # ------------------------------------------------------------ + # Flower โ€” Celery task monitoring web UI + # Reuses django-example/core:latest (flower is installed in deps) + # Web UI available at http://localhost:5555 + # ------------------------------------------------------------ flower: - build: - context: . 
- dockerfile: Dockerfile - args: - ENVIRONMENT: local - image: django-example/core + image: django-example/core:latest container_name: django-example-flower - platform: linux/x86_64 - volumes: - - .:/app:z + platform: linux/amd64 env_file: - .env ports: + # Flower monitoring UI โ€” host port 5555 - "5555:5555" - command: celery -A core.celery flower - + depends_on: + broker: + condition: service_healthy + # Launch Flower bound to all interfaces + command: celery -A core.celery flower --port=5555 --address=0.0.0.0 + healthcheck: + # Use Python urllib to check Flower web UI (curl not available in slim image) + test: ["CMD-SHELL", "python -c \"import urllib.request; urllib.request.urlopen('http://localhost:5555/')\" || exit 1"] + interval: 30s + timeout: 10s + retries: 3 + start_period: 45s +# ============================================================ +# Named volumes for persistent data +# ============================================================ volumes: - db: {} - # cache: {} + # PostgreSQL data persistence across container restarts + db_data: