From e9d7f24aaad4b50297769afb0893b6d4cfc7769b Mon Sep 17 00:00:00 2001 From: Aleksandr Meshchriakov Date: Wed, 21 Jan 2026 12:07:35 +0100 Subject: [PATCH] first commit --- .env.example | 31 + .env.test | 25 + .gitea/workflows/ci-cd.yml | 247 +++++++ .gitignore | 41 ++ .pre-commit-config.yaml | 56 ++ .python-version | 1 + .qoder/rules/main.md | 422 +++++++++++ CHANGELOG.md | 91 +++ Makefile | 149 ++++ README.md | 316 +++++++++ check_tests.py | 60 ++ deploy/apache/project.conf | 80 +++ deploy/monitoring/prometheus.yml | 36 + deploy/scripts/deploy.sh | 127 ++++ deploy/systemd/celery-beat.service | 15 + deploy/systemd/celery-worker.service | 16 + deploy/systemd/gunicorn.service | 27 + docker-compose.yml | 136 ++++ docker/Dockerfile.celery | 36 + docker/Dockerfile.web | 41 ++ docker/postgres/init.sql | 18 + pyproject.toml | 472 ++++++++++++ requirements-dev.txt | 435 ++++++++++++ requirements.txt | 295 ++++++++ run_tests.py | 268 +++++++ scripts/check-migrations.sh | 16 + scripts/setup-precommit.sh | 41 ++ src/apps/core/__init__.py | 1 + src/apps/core/apps.py | 7 + src/apps/core/cache.py | 254 +++++++ src/apps/core/exception_handler.py | 152 ++++ src/apps/core/exceptions.py | 169 +++++ src/apps/core/filters.py | 121 ++++ src/apps/core/logging.py | 287 ++++++++ src/apps/core/management/__init__.py | 0 src/apps/core/management/commands/__init__.py | 0 src/apps/core/management/commands/base.py | 252 +++++++ src/apps/core/middleware.py | 131 ++++ .../core/migrations/0001_background_job.py | 52 ++ src/apps/core/migrations/__init__.py | 0 src/apps/core/mixins.py | 391 ++++++++++ src/apps/core/models.py | 237 +++++++ src/apps/core/openapi.py | 374 ++++++++++ src/apps/core/pagination.py | 202 ++++++ src/apps/core/permissions.py | 128 ++++ src/apps/core/response.py | 160 +++++ src/apps/core/serializers.py | 47 ++ src/apps/core/services.py | 671 ++++++++++++++++++ src/apps/core/signals.py | 295 ++++++++ src/apps/core/tasks.py | 269 +++++++ src/apps/core/urls.py | 14 + src/apps/core/views.py | 249 +++++++ src/apps/core/viewsets.py | 468 ++++++++++++ src/apps/user/__init__.py | 0 src/apps/user/apps.py | 10 + src/apps/user/migrations/0001_initial.py | 71 ++ src/apps/user/migrations/__init__.py | 0 src/apps/user/models.py | 109 +++ src/apps/user/serializers.py | 155 ++++ src/apps/user/services.py | 240 +++++++ src/apps/user/signals.py | 25 + src/apps/user/urls.py | 24 + src/apps/user/views.py | 290 ++++++++ src/config/__init__.py | 3 + src/config/api_v1_urls.py | 20 + src/config/asgi.py | 16 + src/config/celery.py | 42 ++ src/config/settings/__init__.py | 9 + src/config/settings/base.py | 271 +++++++ src/config/settings/development.py | 46 ++ src/config/settings/production.py | 110 +++ src/config/settings/test.py | 105 +++ src/config/urls.py | 44 ++ src/config/wsgi.py | 16 + src/manage.py | 22 + tests/README.md | 368 ++++++++++ tests/__init__.py | 5 + tests/apps/__init__.py | 5 + tests/apps/core/__init__.py | 0 tests/apps/core/test_background_jobs.py | 236 ++++++ tests/apps/core/test_bulk_operations.py | 186 +++++ tests/apps/core/test_cache.py | 193 +++++ tests/apps/core/test_exceptions.py | 152 ++++ tests/apps/core/test_filters.py | 128 ++++ tests/apps/core/test_logging.py | 163 +++++ tests/apps/core/test_management_commands.py | 92 +++ tests/apps/core/test_middleware.py | 34 + tests/apps/core/test_mixins.py | 110 +++ tests/apps/core/test_openapi.py | 133 ++++ tests/apps/core/test_permissions.py | 252 +++++++ tests/apps/core/test_response.py | 138 ++++ tests/apps/core/test_services.py | 103 +++ 
tests/apps/core/test_signals.py | 204 ++++++ tests/apps/core/test_tasks.py | 82 +++ tests/apps/core/test_views.py | 101 +++ tests/apps/core/test_viewsets.py | 86 +++ tests/apps/user/__init__.py | 10 + tests/apps/user/factories.py | 93 +++ tests/apps/user/test_models.py | 132 ++++ tests/apps/user/test_serializers.py | 291 ++++++++ tests/apps/user/test_services.py | 224 ++++++ tests/apps/user/test_views.py | 312 ++++++++ 102 files changed, 13890 insertions(+) create mode 100644 .env.example create mode 100644 .env.test create mode 100644 .gitea/workflows/ci-cd.yml create mode 100644 .gitignore create mode 100644 .pre-commit-config.yaml create mode 100644 .python-version create mode 100644 .qoder/rules/main.md create mode 100644 CHANGELOG.md create mode 100644 Makefile create mode 100644 README.md create mode 100644 check_tests.py create mode 100644 deploy/apache/project.conf create mode 100644 deploy/monitoring/prometheus.yml create mode 100644 deploy/scripts/deploy.sh create mode 100644 deploy/systemd/celery-beat.service create mode 100644 deploy/systemd/celery-worker.service create mode 100644 deploy/systemd/gunicorn.service create mode 100644 docker-compose.yml create mode 100644 docker/Dockerfile.celery create mode 100644 docker/Dockerfile.web create mode 100644 docker/postgres/init.sql create mode 100644 pyproject.toml create mode 100644 requirements-dev.txt create mode 100644 requirements.txt create mode 100644 run_tests.py create mode 100755 scripts/check-migrations.sh create mode 100644 scripts/setup-precommit.sh create mode 100644 src/apps/core/__init__.py create mode 100644 src/apps/core/apps.py create mode 100644 src/apps/core/cache.py create mode 100644 src/apps/core/exception_handler.py create mode 100644 src/apps/core/exceptions.py create mode 100644 src/apps/core/filters.py create mode 100644 src/apps/core/logging.py create mode 100644 src/apps/core/management/__init__.py create mode 100644 src/apps/core/management/commands/__init__.py create mode 100644 src/apps/core/management/commands/base.py create mode 100644 src/apps/core/middleware.py create mode 100644 src/apps/core/migrations/0001_background_job.py create mode 100644 src/apps/core/migrations/__init__.py create mode 100644 src/apps/core/mixins.py create mode 100644 src/apps/core/models.py create mode 100644 src/apps/core/openapi.py create mode 100644 src/apps/core/pagination.py create mode 100644 src/apps/core/permissions.py create mode 100644 src/apps/core/response.py create mode 100644 src/apps/core/serializers.py create mode 100644 src/apps/core/services.py create mode 100644 src/apps/core/signals.py create mode 100644 src/apps/core/tasks.py create mode 100644 src/apps/core/urls.py create mode 100644 src/apps/core/views.py create mode 100644 src/apps/core/viewsets.py create mode 100644 src/apps/user/__init__.py create mode 100644 src/apps/user/apps.py create mode 100644 src/apps/user/migrations/0001_initial.py create mode 100644 src/apps/user/migrations/__init__.py create mode 100644 src/apps/user/models.py create mode 100644 src/apps/user/serializers.py create mode 100644 src/apps/user/services.py create mode 100644 src/apps/user/signals.py create mode 100644 src/apps/user/urls.py create mode 100644 src/apps/user/views.py create mode 100644 src/config/__init__.py create mode 100644 src/config/api_v1_urls.py create mode 100644 src/config/asgi.py create mode 100644 src/config/celery.py create mode 100644 src/config/settings/__init__.py create mode 100644 src/config/settings/base.py create mode 100644 
src/config/settings/development.py create mode 100644 src/config/settings/production.py create mode 100644 src/config/settings/test.py create mode 100644 src/config/urls.py create mode 100644 src/config/wsgi.py create mode 100644 src/manage.py create mode 100644 tests/README.md create mode 100644 tests/__init__.py create mode 100644 tests/apps/__init__.py create mode 100644 tests/apps/core/__init__.py create mode 100644 tests/apps/core/test_background_jobs.py create mode 100644 tests/apps/core/test_bulk_operations.py create mode 100644 tests/apps/core/test_cache.py create mode 100644 tests/apps/core/test_exceptions.py create mode 100644 tests/apps/core/test_filters.py create mode 100644 tests/apps/core/test_logging.py create mode 100644 tests/apps/core/test_management_commands.py create mode 100644 tests/apps/core/test_middleware.py create mode 100644 tests/apps/core/test_mixins.py create mode 100644 tests/apps/core/test_openapi.py create mode 100644 tests/apps/core/test_permissions.py create mode 100644 tests/apps/core/test_response.py create mode 100644 tests/apps/core/test_services.py create mode 100644 tests/apps/core/test_signals.py create mode 100644 tests/apps/core/test_tasks.py create mode 100644 tests/apps/core/test_views.py create mode 100644 tests/apps/core/test_viewsets.py create mode 100644 tests/apps/user/__init__.py create mode 100644 tests/apps/user/factories.py create mode 100644 tests/apps/user/test_models.py create mode 100644 tests/apps/user/test_serializers.py create mode 100644 tests/apps/user/test_services.py create mode 100644 tests/apps/user/test_views.py diff --git a/.env.example b/.env.example new file mode 100644 index 0000000..f3fe2f2 --- /dev/null +++ b/.env.example @@ -0,0 +1,31 @@ +# Файл окружения для разработки +# Скопируйте этот файл в .env и измените значения по необходимости + +# Django Settings +DEBUG=True +SECRET_KEY=django-insecure-development-key-change-in-production +ALLOWED_HOSTS=localhost,127.0.0.1,0.0.0.0 + +# Database Settings +POSTGRES_DB=project_dev +POSTGRES_USER=postgres +POSTGRES_PASSWORD=postgres +POSTGRES_HOST=localhost +POSTGRES_PORT=5432 + +# Redis Settings +REDIS_URL=redis://localhost:6379/0 +REDIS_CACHE_URL=redis://localhost:6379/1 + +# Celery Settings +CELERY_BROKER_URL=redis://localhost:6379/0 +CELERY_RESULT_BACKEND=redis://localhost:6379/0 + +# CORS Settings +CORS_ALLOWED_ORIGINS=http://localhost:3000,http://127.0.0.1:3000 + +# Logging +LOG_LEVEL=INFO + +# Scrapy Settings +SCRAPY_LOG_LEVEL=INFO \ No newline at end of file diff --git a/.env.test b/.env.test new file mode 100644 index 0000000..8d1be55 --- /dev/null +++ b/.env.test @@ -0,0 +1,25 @@ +# Test environment for user app +DEBUG=True +SECRET_KEY=test-secret-key-for-development +ALLOWED_HOSTS=localhost,127.0.0.1 + +# Database Settings - using existing tenant_db container +POSTGRES_DB=project_dev +POSTGRES_USER=postgres +POSTGRES_PASSWORD=postgres +POSTGRES_HOST=localhost +POSTGRES_PORT=8432 # social_db container port + +# Redis Settings +REDIS_URL=redis://localhost:6379/0 +REDIS_CACHE_URL=redis://localhost:6379/1 + +# Celery Settings +CELERY_BROKER_URL=redis://localhost:6379/0 +CELERY_RESULT_BACKEND=redis://localhost:6379/0 + +# CORS Settings +CORS_ALLOWED_ORIGINS=http://localhost:3000,http://127.0.0.1:3000 + +# Logging +LOG_LEVEL=INFO \ No newline at end of file diff --git a/.gitea/workflows/ci-cd.yml b/.gitea/workflows/ci-cd.yml new file mode 100644 index 0000000..971d6f7 --- /dev/null +++ b/.gitea/workflows/ci-cd.yml @@ -0,0 +1,247 @@ +name: CI/CD Pipeline + +on: + push: + 
branches: [ main, develop ] + pull_request: + branches: [ main, develop ] + +jobs: + lint: + name: Code Quality Checks + runs-on: ubuntu-latest + + steps: + - name: Checkout code + uses: actions/checkout@v4 + + - name: Set up Python + uses: actions/setup-python@v4 + with: + python-version: '3.11' + + - name: Install uv + run: | + curl -LsSf https://astral.sh/uv/install.sh | sh + echo "$HOME/.local/bin" >> $GITHUB_PATH + + - name: Create virtual environment + run: uv venv + + - name: Activate virtual environment and install dependencies + run: | + source .venv/bin/activate + uv sync --dev + + - name: Run Ruff linting + run: | + source .venv/bin/activate + ruff check . + + - name: Run Ruff formatting check + run: | + source .venv/bin/activate + ruff format . --check + + test: + name: Run Tests + runs-on: ubuntu-latest + services: + postgres: + image: postgres:15.10 + env: + POSTGRES_DB: test_db + POSTGRES_USER: postgres + POSTGRES_PASSWORD: postgres + options: >- + --health-cmd pg_isready + --health-interval 10s + --health-timeout 5s + --health-retries 5 + ports: + - 5432:5432 + + redis: + image: redis:7-alpine + options: >- + --health-cmd "redis-cli ping" + --health-interval 10s + --health-timeout 5s + --health-retries 5 + ports: + - 6379:6379 + + steps: + - name: Checkout code + uses: actions/checkout@v4 + + - name: Set up Python + uses: actions/setup-python@v4 + with: + python-version: '3.11' + + - name: Install uv + run: | + curl -LsSf https://astral.sh/uv/install.sh | sh + echo "$HOME/.local/bin" >> $GITHUB_PATH + + - name: Create virtual environment + run: uv venv + + - name: Activate virtual environment and install dependencies + run: | + source .venv/bin/activate + uv sync --dev + + - name: Wait for services to be ready + run: | + # Wait for PostgreSQL + until pg_isready -h localhost -p 5432 -U postgres; do + echo "Waiting for PostgreSQL..." + sleep 2 + done + + # Wait for Redis + until redis-cli -h localhost -p 6379 ping; do + echo "Waiting for Redis..." + sleep 2 + done + + - name: Run Django tests + run: | + source .venv/bin/activate + cd src + python manage.py test --verbosity=2 + env: + DJANGO_SETTINGS_MODULE: config.settings.development + DATABASE_URL: postgres://postgres:postgres@localhost:5432/test_db + REDIS_URL: redis://localhost:6379/0 + CELERY_BROKER_URL: redis://localhost:6379/0 + SECRET_KEY: test-secret-key-for-ci + + build: + name: Build Docker Images + runs-on: ubuntu-latest + needs: [lint, test] + + steps: + - name: Checkout code + uses: actions/checkout@v4 + + - name: Set up Docker Buildx + uses: docker/setup-buildx-action@v3 + + - name: Extract metadata for web image + id: meta-web + uses: docker/metadata-action@v5 + with: + images: | + ${{ github.repository_owner }}/mostovik-web + tags: | + type=ref,event=branch + type=ref,event=pr + type=sha,prefix={{branch}}- + + - name: Extract metadata for celery image + id: meta-celery + uses: docker/metadata-action@v5 + with: + images: | + ${{ github.repository_owner }}/mostovik-celery + tags: | + type=ref,event=branch + type=ref,event=pr + type=sha,prefix={{branch}}- + + - name: Build web image + uses: docker/build-push-action@v5 + with: + context: . + file: ./docker/Dockerfile.web + push: false + load: true + tags: ${{ steps.meta-web.outputs.tags }} + labels: ${{ steps.meta-web.outputs.labels }} + cache-from: type=gha + cache-to: type=gha,mode=max + + - name: Build celery image + uses: docker/build-push-action@v5 + with: + context: . 
+ file: ./docker/Dockerfile.celery + push: false + load: true + tags: ${{ steps.meta-celery.outputs.tags }} + labels: ${{ steps.meta-celery.outputs.labels }} + cache-from: type=gha + cache-to: type=gha,mode=max + + push: + name: Push to Gitea Registry + runs-on: ubuntu-latest + needs: [build] + if: github.ref == 'refs/heads/main' || github.ref == 'refs/heads/develop' + + steps: + - name: Checkout code + uses: actions/checkout@v4 + + - name: Login to Gitea Container Registry + uses: docker/login-action@v3 + with: + registry: ${{ vars.GITEA_REGISTRY_URL }} + username: ${{ secrets.GITEA_USERNAME }} + password: ${{ secrets.GITEA_TOKEN }} + + - name: Set up Docker Buildx + uses: docker/setup-buildx-action@v3 + + - name: Extract metadata for web image + id: meta-web + uses: docker/metadata-action@v5 + with: + images: | + ${{ vars.GITEA_REGISTRY_URL }}/${{ github.repository_owner }}/mostovik-web + tags: | + type=ref,event=branch + type=sha,prefix={{branch}}- + type=raw,value=latest,enable={{is_default_branch}} + + - name: Extract metadata for celery image + id: meta-celery + uses: docker/metadata-action@v5 + with: + images: | + ${{ vars.GITEA_REGISTRY_URL }}/${{ github.repository_owner }}/mostovik-celery + tags: | + type=ref,event=branch + type=sha,prefix={{branch}}- + type=raw,value=latest,enable={{is_default_branch}} + + - name: Build and push web image + uses: docker/build-push-action@v5 + with: + context: . + file: ./docker/Dockerfile.web + push: true + tags: ${{ steps.meta-web.outputs.tags }} + labels: ${{ steps.meta-web.outputs.labels }} + cache-from: type=gha + cache-to: type=gha,mode=max + + - name: Build and push celery image + uses: docker/build-push-action@v5 + with: + context: . + file: ./docker/Dockerfile.celery + push: true + tags: ${{ steps.meta-celery.outputs.tags }} + labels: ${{ steps.meta-celery.outputs.labels }} + cache-from: type=gha + cache-to: type=gha,mode=max + + - name: Image digest + run: | + echo "Web image digest: ${{ steps.docker_build_web.outputs.digest }}" + echo "Celery image digest: ${{ steps.docker_build_celery.outputs.digest }}" diff --git a/.gitignore b/.gitignore new file mode 100644 index 0000000..a5bd201 --- /dev/null +++ b/.gitignore @@ -0,0 +1,41 @@ +*.pyc +*.pyo +*.pyc +__pycache__/ +.pytest_cache/ +.coverage +htmlcov/ +*.egg-info/ +dist/ +build/ +*.log +.env +.venv +venv/ +uv.lock +.env.local +.env.*.local + +# Django +*.sqlite3 +media/ +staticfiles/ +logs/ + +# IDE +.vscode/ +.idea/ +*.swp +*.swo +*~ + +# OS +.DS_Store +Thumbs.db + +# Docker +.dockerignore + +# Backup files +*.bak +*.backup \ No newline at end of file diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml new file mode 100644 index 0000000..9594179 --- /dev/null +++ b/.pre-commit-config.yaml @@ -0,0 +1,56 @@ +repos: + - repo: https://github.com/astral-sh/ruff-pre-commit + rev: v0.1.14 + hooks: + - id: ruff + name: ruff lint (src only) + files: ^src/.*\.py$ + args: [--fix, --exit-non-zero-on-fix] + exclude: | + (?x)^( + src/.*/migrations/.*| + src/.*/__pycache__/.* + )$ + + - id: ruff-format + name: ruff format (src only) + files: ^src/.*\.py$ + args: [] + exclude: | + (?x)^( + src/.*/migrations/.*| + src/.*/__pycache__/.* + )$ + + - repo: https://github.com/pre-commit/pre-commit-hooks + rev: v4.5.0 + hooks: + - id: trailing-whitespace + name: check trailing whitespace (src only) + files: ^src/.*\.(py|txt|md|yaml|yml)$ + + - id: end-of-file-fixer + name: fix end of file (src only) + files: ^src/.*\.(py|txt|md|yaml|yml)$ + + - id: check-yaml + name: check yaml syntax (src only) + 
files: ^src/.*\.ya?ml$ + + - id: check-added-large-files + name: check large files + args: ["--maxkb=500"] + + - repo: local + hooks: + - id: django-check-migrations + name: django check migrations + entry: ./scripts/check-migrations.sh + language: script + files: ^src/.*\.py$ + pass_filenames: false + exclude: | + (?x)^( + src/.*/migrations/.*| + src/.*/__pycache__/.* + )$ diff --git a/.python-version b/.python-version new file mode 100644 index 0000000..c00391b --- /dev/null +++ b/.python-version @@ -0,0 +1 @@ +3.11.14 \ No newline at end of file diff --git a/.qoder/rules/main.md b/.qoder/rules/main.md new file mode 100644 index 0000000..de8d1c1 --- /dev/null +++ b/.qoder/rules/main.md @@ -0,0 +1,422 @@ +--- +trigger: always_on +--- + +## 0) Язык и стиль общения (СТРОГО) +- ИИ-агент **ВСЕГДА отвечает на русском языке** +- Английский допускается ТОЛЬКО для: + - имён библиотек + - имён классов, функций, переменных + - CLI-команд +- Тон: инженерный, практичный, без воды и маркетинга + +--- + +## 1) Базовые принципы (НЕ ОБСУЖДАЮТСЯ) +Проект разрабатывается строго по принципам: + +- **SOLID** +- **KISS** +- **DRY** + +Правила приоритета: +- красиво vs просто → **простота** +- умно vs поддерживаемо → **поддерживаемость** +- магия vs явность → **явность** + +--- + +## 2) Контекст проекта +- ОС: **Astra Linux 1.8** +- Python: **3.11.2** +- Django: **3.x (указано 3.14)** +- Django REST Framework (DRF) +- Celery +- PostgreSQL **15.10** +- Apache **2.4.57** +- mod_wsgi **4.9.4** + +### Инструменты разработки +- **uv** +- **виртуальная среда** +- **pre-commit** +- **Gitea Actions (CI)** +- Тесты: `django test` +- Линтинг: `ruff` +- **ЗАПРЕЩЕНО** создавать тестовые скрипты для демонстрации, все должно быть исправлено в рамках проекта + +--- + +## 3) Окружение и команды (СТРОГО) +Все команды: +- выполняются **только внутри виртуальной среды** +- используют **uv** +- считаются выполняемыми из корня проекта + +### ❌ Запрещено +- `pip`, `python -m pip` +- `poetry`, `pipenv`, `pipx` +- системные команды вне venv + +### ✅ Разрешено +- `uv venv` +- `source .venv/bin/activate` +- `uv add / uv remove / uv sync` +- `uv run ` + +Пример: +```bash +uv run python manage.py test +``` + +--- + +## 4) Архитектура и слои ответственности (КРИТИЧНО) + +### 4.1 View (DRF) +View отвечает ТОЛЬКО за: +- приём HTTP-запроса +- проверку прав доступа +- работу с serializer +- вызов сервисного слоя + +❌ Запрещено: +- бизнес-логика +- сложные условия +- транзакции +- сложная работа с ORM + +--- + +### 4.2 Serializer +Serializer отвечает за: +- валидацию данных +- преобразование вход/выход + +Допускается: +- field-level validation +- object-level validation + +❌ Запрещено: +- бизнес-правила +- side-effects +- сложная логика в `save()` + +--- + +### 4.3 Сервисный слой (Business Logic) +- **ВСЯ бизнес-логика живёт здесь** +- Сервисы: + - не зависят от HTTP + - легко тестируются + - управляют транзакциями +- Сервис определяет *что* делать, а не *как* отдать ответ + +Рекомендуемый паттерн: +```python +class EntityService: + @classmethod + def do_something(cls, *, data): + ... 
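+
+# Ниже дополнительный набросок для иллюстрации (имена классов, полей и URL условные,
+# в проекте их может не быть): сервис владеет транзакцией и бизнес-правилом,
+# а тонкий view лишь проверяет права, принимает запрос и вызывает сервис (см. 4.1 и 4.3).
+from django.db import transaction
+from rest_framework import status
+from rest_framework.permissions import IsAdminUser
+from rest_framework.response import Response
+from rest_framework.views import APIView
+
+
+class UserDeactivationService:
+    @classmethod
+    def deactivate(cls, *, user_id: int):
+        from django.contrib.auth import get_user_model
+
+        user_model = get_user_model()
+        with transaction.atomic():
+            # select_for_update() защищает от гонок (см. раздел 6)
+            user = user_model.objects.select_for_update().get(pk=user_id)
+            user.is_active = False
+            user.save(update_fields=["is_active"])
+            return user
+
+
+class UserDeactivateView(APIView):
+    """Тонкий view: никакой бизнес-логики, только права и вызов сервиса."""
+
+    permission_classes = [IsAdminUser]
+
+    def post(self, request, user_id: int):
+        UserDeactivationService.deactivate(user_id=user_id)
+        return Response(status=status.HTTP_204_NO_CONTENT)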
+``` + +--- + +### 4.4 Модели (ORM) +Модели должны быть: +- простыми +- декларативными + +Допускается: +- `__str__` +- простые computed properties +- минимальные helper-методы + +❌ Запрещено: +- бизнес-логика +- workflow +- сигналы как логика +- условия, зависящие от сценариев + +👉 **Любые исключения — ТОЛЬКО после обсуждения в чате.** + +--- + +## 5) Celery +- Task = **thin wrapper** +- Task вызывает сервис, а не содержит логику +- Таски: + - идемпотентны + - логируют начало и завершение +- Ретраи: + - только для временных ошибок + - с backoff + +--- + +## 6) База данных и миграции +- Любое изменение моделей → миграции обязательны +- Миграции: + - детерминированные + - без ручной магии без причины + +Проверка перед коммитом: +```bash +uv run python manage.py makemigrations --check --dry-run +``` + +PostgreSQL: +- транзакции использовать осознанно +- `select_for_update()` при гонках +- Raw SQL — только с объяснением + +--- + +## 7) Тестирование +- Любая бизнес-логика → тесты +- В первую очередь тестируется сервисный слой +- API — happy path + edge cases + +Запуск: +```bash +uv run python manage.py test +``` + +--- + +## 8) pre-commit (обязателен) +- Любой код обязан проходить pre-commit +- Агент обязан учитывать проверки форматирования и линтинга + +```bash +pre-commit run --all-files +``` + +--- + +## 9) CI (Gitea Actions) +- Используется **Gitea Actions** +- ❌ GitHub Actions запрещены +- Любые изменения: + - не должны ломать CI +- Если меняются: + - зависимости + - команды тестов + - миграции + → агент обязан указать необходимость правок workflow + +--- + +## 10) Apache + mod_wsgi +- Используется **ТОЛЬКО WSGI** +- ASGI запрещён без отдельного обсуждения +- Любые изменения в `wsgi.py`, путях, статике: + - сопровождаются пояснением + - требуют перезапуска Apache + +```bash +systemctl restart apache2 +``` + +Учитывать ограничения и права Astra Linux. + +--- + +## 11) Работа с репозиторием +- Минимальный diff — приоритет +- ❌ Не коммитить: + - `.venv` + - артефакты + - дампы БД +- Массовый рефакторинг — только по явному запросу + +--- + +## 12) Anti-patterns (ЗАПРЕЩЕНО) +- Fat Models +- God Views +- Бизнес-логика в Serializers +- Сигналы как workflow +- Магия в `save()` +- Прямые импорты моделей между apps +- Сложная логика в queryset как бизнес-правило + +--- + +## 13) Формат ответа ИИ-агента (ОБЯЗАТЕЛЬНЫЙ) +Каждый ответ должен содержать: + +1. **Что меняем** +2. **Файлы / патч** +3. **Команды (через uv)** +4. **Проверки (tests / pre-commit / CI)** +5. **Риски / замечания** + +--- + +## 14) Исключения +- ИИ-агент **НЕ внедряет исключения сам** +- Агент: + - описывает стандартное решение + - объясняет, почему оно не подходит + - запрашивает разрешение в чате + +## 15) Структура проекта и миксины (ОБЯЗАТЕЛЬНО) + +### 15.0 Правило Core-First (КРИТИЧНО) +**ПЕРЕД созданием любого нового компонента** агент ОБЯЗАН проверить модуль `apps.core`: + +``` +src/apps/core/ +├── mixins.py # Model mixins (TimestampMixin, SoftDeleteMixin, etc.) +├── services.py # BaseService, BackgroundJobService +├── views.py # Health checks, BackgroundJob API +├── viewsets.py # BaseViewSet, ReadOnlyViewSet +├── exceptions.py # APIError, NotFoundError, ValidationError +├── permissions.py # IsOwner, IsAdminOrReadOnly, etc. 
+├── pagination.py    # CursorPagination
+├── filters.py       # BaseFilterSet
+├── cache.py         # cache_result, invalidate_cache
+├── tasks.py         # BaseTask для Celery
+├── logging.py       # StructuredLogger
+├── middleware.py    # RequestIDMiddleware
+├── signals.py       # SignalDispatcher
+├── response.py      # APIResponse wrapper
+├── openapi.py       # api_docs decorator
+└── management/commands/base.py  # BaseAppCommand
+```
+
+**Порядок действий:**
+1. Проверить `apps.core` на наличие нужного базового класса/миксина
+2. Наследоваться от существующего, а не создавать с нуля
+3. Если нужного нет — обсудить добавление в core
+
+❌ **ЗАПРЕЩЕНО:** создавать дублирующую функциональность в app-модулях
+
+---
+
+### 15.1 Model Mixins
+При создании моделей **ОБЯЗАТЕЛЬНО** использовать миксины из `apps.core.mixins`:
+
+| Миксин | Когда использовать | Поля |
+|--------|-------------------|------|
+| `TimestampMixin` | **ВСЕГДА** для любой модели | `created_at`, `updated_at` |
+| `UUIDPrimaryKeyMixin` | Когда нужен UUID вместо int ID | `id` (UUID) |
+| `SoftDeleteMixin` | Когда нельзя физически удалять | `is_deleted`, `deleted_at` |
+| `AuditMixin` | Когда нужно знать, кто создал/изменил | `created_by`, `updated_by` |
+| `OrderableMixin` | Для сортируемых списков | `order` |
+| `StatusMixin` | Для моделей со статусами | `status` |
+| `SlugMixin` | Для URL-friendly идентификаторов | `slug` |
+
+**Пример правильного использования:**
+```python
+from django.db import models
+
+from apps.core.mixins import TimestampMixin, SoftDeleteMixin, AuditMixin
+
+class Document(TimestampMixin, SoftDeleteMixin, AuditMixin, models.Model):
+    """Документ с историей и мягким удалением."""
+    title = models.CharField(max_length=200)
+
+    class Meta:
+        ordering = ['-created_at']
+```
+
+**Порядок наследования миксинов:**
+1. `UUIDPrimaryKeyMixin` (если нужен)
+2. `TimestampMixin`
+3. `SoftDeleteMixin` (если нужен)
+4. `AuditMixin` (если нужен)
+5. `OrderableMixin` / `StatusMixin` / `SlugMixin`
+6.
`models.Model` (последним) + +--- + +### 15.2 Management Commands +Все management commands наследуются от `BaseAppCommand`: + +```python +from apps.core.management.commands.base import BaseAppCommand + +class Command(BaseAppCommand): + help = 'Описание команды' + use_transaction = True # Обернуть в транзакцию + + def add_arguments(self, parser): + super().add_arguments(parser) # Добавляет --dry-run, --silent + parser.add_argument('--my-arg', type=str) + + def execute_command(self, *args, **options): + items = MyModel.objects.all() + + for item in self.progress_iter(items, desc="Обработка"): + if not self.dry_run: + self.process(item) + + return "Обработано успешно" +``` + +**Возможности BaseAppCommand:** +- `--dry-run` — тестовый запуск без изменений +- `--silent` — минимальный вывод +- `self.progress_iter()` — прогресс-бар +- `self.timed_operation()` — измерение времени +- `self.confirm()` — подтверждение +- `self.log_info/success/warning/error()` — логирование + +--- + +### 15.3 Background Jobs (Celery) +Для отслеживания статуса фоновых задач использовать `BackgroundJob`: + +```python +# В сервисе при запуске задачи +from apps.core.services import BackgroundJobService + +job = BackgroundJobService.create_job( + task_id=task.id, + task_name="apps.myapp.tasks.process_data", + user_id=request.user.id, +) + +# В Celery таске +from apps.core.models import BackgroundJob + +@shared_task(bind=True) +def my_task(self, data): + job = BackgroundJob.objects.get(task_id=self.request.id) + job.mark_started() + + for i, item in enumerate(items): + process(item) + job.update_progress(i * 100 // len(items), "Обработка...") + + job.complete(result={"processed": len(items)}) +``` + +**API эндпоинты:** +- `GET /api/v1/jobs/` — список задач пользователя +- `GET /api/v1/jobs/{task_id}/` — статус конкретной задачи + +--- + +### 15.4 Factories (тестирование) +Все фабрики используют `factory_boy` + `faker`: + +```python +import factory +from faker import Faker + +fake = Faker("ru_RU") + +class MyModelFactory(factory.django.DjangoModelFactory): + class Meta: + model = MyModel + + name = factory.LazyAttribute(lambda _: fake.word()) + email = factory.LazyAttribute(lambda _: fake.unique.email()) +``` + +**Правила:** +- Никакого хардкода в тестах (`"test@example.com"` → `fake.email()`) +- Использовать `fake.unique.*` для уникальных полей +- Локаль: `Faker("ru_RU")` для русских данных + diff --git a/CHANGELOG.md b/CHANGELOG.md new file mode 100644 index 0000000..5717a2b --- /dev/null +++ b/CHANGELOG.md @@ -0,0 +1,91 @@ +# Changelog + +Все значимые изменения в проекте документируются в этом файле. + +Формат основан на [Keep a Changelog](https://keepachangelog.com/ru/1.0.0/). 
+ +--- + +## [0.2.0] - 2026-01-21 + +### Добавлено + +#### Core Module (`apps.core`) +- **Model Mixins** (`mixins.py`): + - `TimestampMixin` — автоматические `created_at`, `updated_at` + - `UUIDPrimaryKeyMixin` — UUID вместо auto-increment ID + - `SoftDeleteMixin` — мягкое удаление с `is_deleted`, `deleted_at` + - `AuditMixin` — отслеживание `created_by`, `updated_by` + - `OrderableMixin` — поле `order` для сортировки + - `StatusMixin` — статусы draft/active/inactive/archived + - `SlugMixin` — URL-friendly идентификаторы + +- **Base Services** (`services.py`): + - `BaseService` — базовый CRUD сервис + - `BaseReadOnlyService` — только чтение + - `TransactionMixin` — управление транзакциями + - `BulkOperationsMixin` — массовые операции (bulk_create_chunked, bulk_update_or_create, bulk_delete, bulk_update_fields) + - `QueryOptimizerMixin` — декларативная оптимизация запросов (select_related, prefetch_related, only, defer) + - `BackgroundJobService` — управление фоновыми задачами + +- **Base ViewSets** (`viewsets.py`): + - `BaseViewSet` — базовый CRUD ViewSet с пагинацией, фильтрацией, логированием + - `ReadOnlyViewSet` — только чтение + - `OwnerViewSet` — фильтрация по владельцу + - `BulkMixin` — массовые операции через API (bulk_create, bulk_update, bulk_delete) + +- **Background Job Tracking** (`models.py`): + - Модель `BackgroundJob` для отслеживания Celery задач + - API endpoints: `GET /api/v1/jobs/`, `GET /api/v1/jobs/{task_id}/` + - Статусы: pending, started, success, failure, revoked, retry + - Прогресс выполнения и результаты + +- **Management Commands** (`management/commands/base.py`): + - `BaseAppCommand` — базовый класс для команд + - Поддержка `--dry-run`, `--silent` + - Прогресс-бар, измерение времени, логирование + +- **Прочее**: + - `exceptions.py` — кастомные исключения API + - `permissions.py` — базовые permissions (IsOwner, IsAdminOrReadOnly и др.) 
+ - `pagination.py` — CursorPagination + - `filters.py` — BaseFilterSet + - `cache.py` — декораторы кэширования + - `tasks.py` — BaseTask для Celery + - `logging.py` — структурированное логирование + - `middleware.py` — RequestIDMiddleware + - `signals.py` — SignalDispatcher + - `response.py` — унифицированные API ответы + - `openapi.py` — декоратор для документации + +#### Тестирование +- Переход на `factory_boy` + `faker` (вместо model_bakery + uuid) +- Фабрики: `UserFactory`, `ProfileFactory` +- 297 тестов + +#### Конфигурация +- API versioning: `/api/v1/` +- Отдельный `test.py` для настроек тестов +- Обновлён `pyproject.toml` +- Правила разработки в `.qoder/rules/main.md` + +### Изменено +- Структура тестов перенесена в `/tests/apps/` +- Удалён `conftest.py` (pytest не используется) +- Обновлены URLs с namespace + +### Удалено +- `ruff.toml` (конфигурация перенесена в pyproject.toml) +- `CI_CD_SUMMARY.md` +- `custom_test_runner.py` + +--- + +## [0.1.0] - 2026-01-20 + +### Добавлено +- Начальная структура проекта +- Приложение `user` с JWT аутентификацией +- Базовые модели User и Profile +- Docker конфигурация +- Pre-commit hooks diff --git a/Makefile b/Makefile new file mode 100644 index 0000000..20281b2 --- /dev/null +++ b/Makefile @@ -0,0 +1,149 @@ +# Makefile для удобной работы с проектом + +.PHONY: help install dev-up dev-down test lint format migrate createsuperuser shell + +help: + @echo "Доступные команды:" + @echo "" + @echo "🔧 Установка и настройка:" + @echo " make install - Установка зависимостей" + @echo "" + @echo "🐳 Docker управление:" + @echo " make dev-up - Запуск разработческого окружения (Docker)" + @echo " make dev-down - Остановка разработческого окружения" + @echo " make logs - Просмотр логов (Docker)" + @echo "" + @echo "🧪 Тестирование:" + @echo " make test - Запуск тестов (по умолчанию все)" + + @echo " Примеры:" + @echo " make test # Все тесты" + @echo " make test TARGET=user # Только user app" + @echo " make test TARGET=test_models # Только модели" + @echo "" + @echo "🔍 Качество кода:" + @echo " make lint - Проверка кода линтерами" + @echo " make format - Форматирование кода" + @echo " make type-check - Проверка типов" + @echo " make security-check - Проверка безопасности" + @echo " make pre-commit - Запуск pre-commit hooks" + @echo "" + @echo "🗄️ База данных:" + @echo " make migrate - Выполнение миграций Django" + @echo " make createsuperuser - Создание суперпользователя" + @echo "" + @echo "🛠️ Утилиты:" + @echo " make shell - Запуск Django shell" + @echo " make setup-dev - Настройка окружения разработки" + @echo " make clean - Очистка временных файлов" + +install: + uv pip install -r requirements.txt + uv pip install -r requirements-dev.txt + +dev-up: + docker-compose up -d + @echo "Сервисы запущены. Приложение доступно по адресу: http://localhost:8000" + +dev-down: + docker-compose down + +# Универсальная команда для тестирования с поддержкой аргументов +# Использование: +# make test # Все тесты +# make test TARGET=user # Тесты user app +# make test TARGET=models # Тесты моделей +# make test TARGET=views # Тесты представлений +# make test TARGET=serializers # Тесты сериализаторов +# make test TARGET=services # Тесты сервисов +test: + @if [ "$(TARGET)" ]; then \ + echo "🧪 Запуск тестов: $(TARGET)"; \ + python run_tests_simple.py $(TARGET); \ + else \ + echo "🧪 Запуск всех тестов..."; \ + python run_tests_simple.py; \ + fi + +lint: + @echo "🔍 Проверка кода линтерами..." 
+ ruff check src/ + black --check src/ + isort --check-only src/ + @echo "✅ Линтинг завершен" + +format: + @echo "🎨 Форматирование кода..." + black src/ + isort src/ + ruff check --fix src/ + @echo "✅ Форматирование завершено" + +migrate: + @echo "🗄️ Выполнение миграций..." + cd src && python manage.py makemigrations + cd src && python manage.py migrate + +createsuperuser: + @echo "👤 Создание суперпользователя..." + cd src && python manage.py createsuperuser + +shell: + @echo "🐚 Запуск Django shell..." + cd src && python manage.py shell + +logs: + @echo "📋 Просмотр логов..." + docker-compose logs -f + +# Дополнительные команды для тестирования +test-cov: + @echo "🧪 Запуск тестов с покрытием..." + python run_tests_simple.py --coverage + +test-fast: + @echo "🚀 Быстрые тесты (без медленных)..." + python run_tests_simple.py --fast + +test-parallel: + @echo "⚡ Параллельный запуск тестов..." + python run_tests_simple.py --parallel=auto + +test-failfast: + @echo "❌ Тесты с остановкой при первой ошибке..." + python run_tests_simple.py --failfast + +# Дополнительные команды для качества кода +type-check: + @echo "🔍 Проверка типов с mypy..." + mypy src/ + +security-check: + @echo "🔒 Проверка безопасности..." + bandit -r src/ -f json -o bandit-report.json || bandit -r src/ + +pre-commit: + @echo "🔧 Запуск pre-commit hooks..." + pre-commit run --all-files + +# Установка и настройка +setup-dev: + @echo "⚙️ Настройка окружения разработки..." + pre-commit install + @echo "✅ Окружение настроено" + +clean: + @echo "🧹 Очистка временных файлов..." + find . -type f -name "*.pyc" -delete + find . -type d -name "__pycache__" -delete + rm -rf *.log + rm -rf htmlcov/ + rm -rf .coverage + rm -rf coverage.xml + rm -rf bandit-report.json + rm -rf .pytest_cache/ + rm -rf .mypy_cache/ + rm -rf tests/__pycache__/ + rm -rf tests/apps/__pycache__/ + rm -rf tests/apps/user/__pycache__/ + @echo "✅ Очистка завершена" diff --git a/README.md b/README.md new file mode 100644 index 0000000..0e00569 --- /dev/null +++ b/README.md @@ -0,0 +1,316 @@ +# State-Corp / Отчётность Организаций + +Backend для системы Отчётность Организаций (State-Corp). + +## Технологический стек + +- **Python**: 3.11.2 +- **Django**: 3.2.25 +- **Django REST Framework**: 3.14.0 +- **PostgreSQL**: 15.10 +- **Redis**: 7.x +- **Celery**: 5.3.6 +- **Scrapy**: 2.11.2 +- **Gunicorn**: 21.2.0 +- **Apache**: 2.4.57 + +## Структура проекта + +``` +state-corp-backend/ +├── src/ # Исходный код Django +│ ├── config/ # Конфигурация Django +│ │ ├── settings/ # Настройки (base, dev, prod, test) +│ │ ├── celery.py # Конфигурация Celery +│ │ └── urls.py # URL маршруты +│ ├── apps/ # Django приложения +│ │ └── user/ # Приложение пользователей +│ └── manage.py # Управление Django +├── tests/ # Тесты (в корне проекта) +│ ├── apps/user/ # Тесты для user app +│ ├── conftest.py # Конфигурация pytest +│ └── README.md # Документация по тестам +├── docker/ # Docker конфигурации +├── deploy/ # Файлы развертывания +├── pyproject.toml # Конфигурация проекта и инструментов +├── Makefile # Команды для разработки +└── docker-compose.yml # Docker Compose для разработки +``` + +## Быстрый старт (локальная разработка) + +### 1. 
Установка зависимостей + +```bash +# Установка uv (если не установлен) +curl -LsSf https://astral.sh/uv/install.sh | sh +source $HOME/.cargo/env + +# Создание виртуального окружения с uv +uv venv .venv +source .venv/bin/activate + +# Установка зависимостей через uv +uv pip install -e ".[dev]" + +# Или через Makefile +make install + +# Настройка окружения разработки (pre-commit hooks) +make setup-dev +``` + +### 2. Настройка окружения + +```bash +# Копирование файла окружения +cp .env.example .env + +# Редактирование .env файла по необходимости +nano .env +``` + +### 3. Запуск с Docker Compose (рекомендуется) + +```bash +# Запуск всех сервисов +docker-compose up -d + +# Проверка состояния контейнеров +docker-compose ps + +# Просмотр логов +docker-compose logs -f web +``` + +### 4. Ручная настройка (без Docker) + +#### Запуск баз данных: +```bash +# PostgreSQL +sudo systemctl start postgresql + +# Redis +sudo systemctl start redis +``` + +#### Миграции и запуск: +```bash +cd src + +# Миграции +python manage.py makemigrations +python manage.py migrate + +# Создание суперпользователя +python manage.py createsuperuser + +# Запуск разработческого сервера +python manage.py runserver + +# Запуск Celery worker (в отдельном терминале) +celery -A config worker --loglevel=info + +# Запуск Celery beat (в отдельном терминале) +celery -A config beat --loglevel=info +``` + +## API Endpoints + +Основной префикс: `/api/` + +### Data Processor +- `GET/POST /api/data-sources/` - Источники данных +- `GET/POST /api/data-pipelines/` - ETL пайплайны +- `GET /api/extracted-data/` - Извлеченные данные +- `GET /api/processing-logs/` - Логи обработки + +### Web Scraping +- `GET/POST /api/scraping-jobs/` - Задачи скрапинга +- `GET /api/scraped-items/` - Скрапленные данные +- `GET/POST /api/spider-configurations/` - Конфигурации пауков +- `GET/POST /api/proxy-servers/` - Прокси сервера + +### Аутентификация +- `POST /api/api-token-auth/` - Получение API токена + +## Развертывание на сервере Astra Linux + +### Автоматическое развертывание + +```bash +# Сделать скрипт исполняемым +chmod +x deploy/scripts/deploy.sh + +# Запуск скрипта развертывания +sudo ./deploy/scripts/deploy.sh +``` + +### Ручное развертывание + +1. **Установка системных зависимостей:** +```bash +sudo apt-get update +sudo apt-get install python3.11 python3.11-venv postgresql-15 redis-server nginx +``` + +2. **Настройка проекта:** +```bash +# Клонирование репозитория +git clone ваш_репозиторий.git /var/www/project +cd /var/www/project + +# Создание виртуального окружения +python3.11 -m venv venv +source venv/bin/activate +pip install -r requirements.txt + +# Настройка базы данных +sudo -u postgres psql -c "CREATE DATABASE project_prod;" +sudo -u postgres psql -c "CREATE USER project_user WITH PASSWORD 'secure_password';" +sudo -u postgres psql -c "GRANT ALL PRIVILEGES ON DATABASE project_prod TO project_user;" +``` + +3. **Конфигурация systemd:** +```bash +sudo cp deploy/systemd/*.service /etc/systemd/system/ +sudo systemctl daemon-reload +sudo systemctl enable gunicorn celery-worker celery-beat +``` + +4. 
**Настройка Apache:** +```bash +sudo cp deploy/apache/project.conf /etc/apache2/sites-available/project.conf +sudo a2ensite project.conf +sudo a2enmod ssl rewrite headers expires +sudo a2dissite 000-default +sudo systemctl restart apache2 +``` + +## Мониторинг и логирование + +### Логи приложения +```bash +# Логи Django +tail -f logs/django.log + +# Логи Celery +tail -f logs/celery.log + +# Системные логи +journalctl -u gunicorn -f +journalctl -u celery-worker -f +``` + +### Мониторинг Celery +```bash +# Запуск Flower (в отдельном терминале) +celery -A config flower + +# Доступ через браузер: http://localhost:5555 +``` + +## Разработка + +### Запуск тестов +```bash +# Запуск всех тестов +make test + +# Запуск с покрытием +make test-cov + +# Запуск только быстрых тестов +make test-fast + +# Запуск тестов конкретного модуля +make test TARGET=user + +# Линтинг и форматирование +make lint +make format + +# Проверка типов +make type-check + +# Проверка безопасности +make security-check +``` + +### Создание миграций +```bash +# Через Makefile +make migrate + +# Или напрямую +cd src +python manage.py makemigrations +python manage.py migrate + +# Создание суперпользователя +make createsuperuser +``` + +### Работа с задачами Celery +```python +# В коде Python +from apps.data_processor.tasks import process_extracted_data +from apps.scraping.tasks import run_scraping_job + +# Запуск асинхронно +result = process_extracted_data.delay() +print(result.id) # ID задачи +``` + +## Конфигурация инструментов + +Все конфигурации инструментов разработки централизованы в файле `pyproject.toml`: + +- **pytest**: настройки тестирования +- **coverage**: отчеты о покрытии кода +- **ruff**: линтинг и форматирование +- **black**: форматирование кода +- **isort**: сортировка импортов +- **mypy**: проверка типов +- **bandit**: проверка безопасности + +### Полезные команды Make + +```bash +# Качество кода +make lint # Проверка линтерами +make format # Форматирование кода +make type-check # Проверка типов +make security-check # Проверка безопасности +make pre-commit # Запуск всех pre-commit hooks + +# Тестирование +make test # Все тесты +make test-cov # Тесты с покрытием +make test-fast # Только быстрые тесты + +# Разработка +make shell # Django shell +make migrate # Миграции +make clean # Очистка временных файлов +``` + +## Безопасность + +- Все секретные ключи хранятся в переменных окружения +- Используется HTTPS в продакшене +- Настроены заголовки безопасности в Apache +- Регулярное обновление зависимостей + +## Поддержка + +Для вопросов и поддержки обращайтесь к документации Django и используемым библиотекам: + +- [Django Documentation](https://docs.djangoproject.com/) +- [Celery Documentation](https://docs.celeryproject.org/) +- [Scrapy Documentation](https://docs.scrapy.org/) +- [Django REST Framework](https://www.django-rest-framework.org/) + +## Лицензия + +MIT License \ No newline at end of file diff --git a/check_tests.py b/check_tests.py new file mode 100644 index 0000000..73b7139 --- /dev/null +++ b/check_tests.py @@ -0,0 +1,60 @@ +#!/usr/bin/env python +"""Проверка тестовой среды""" + +import os +import sys + +import django + +# Настройка Django +sys.path.append(os.path.join(os.getcwd(), "src")) +os.environ.setdefault("DJANGO_SETTINGS_MODULE", "config.settings.development") +django.setup() + +print("✅ Django настроен успешно!") + +# Проверка импортов +try: + from apps.user.tests.test_views import * + + print("✅ test_views импортирован успешно!") +except Exception as e: + print(f"❌ Ошибка импорта test_views: 
{e}") + +try: + from apps.user.tests.test_models import * + + print("✅ test_models импортирован успешно!") +except Exception as e: + print(f"❌ Ошибка импорта test_models: {e}") + +try: + from apps.user.tests.test_serializers import * + + print("✅ test_serializers импортирован успешно!") +except Exception as e: + print(f"❌ Ошибка импорта test_serializers: {e}") + +try: + from apps.user.tests.test_services import * + + print("✅ test_services импортирован успешно!") +except Exception as e: + print(f"❌ Ошибка импорта test_services: {e}") + +try: + from apps.user.tests.factories import ProfileFactory, UserFactory + + print("✅ factories импортированы успешно!") + + # Тест создания объектов + user = UserFactory.create_user() + print(f"✅ Создан пользователь: {user.username}") + + profile = ProfileFactory.create_profile() + print(f"✅ Создан профиль: {profile.full_name}") + +except Exception as e: + print(f"❌ Ошибка работы с фабриками: {e}") + +print("\n🏁 Проверка завершена!") diff --git a/deploy/apache/project.conf b/deploy/apache/project.conf new file mode 100644 index 0000000..73d7703 --- /dev/null +++ b/deploy/apache/project.conf @@ -0,0 +1,80 @@ +# Конфигурация Apache 2.4.57 для Django приложения +# Разместить в /etc/apache2/sites-available/project.conf + + + ServerName your-domain.com + ServerAlias www.your-domain.com + + # Редирект на HTTPS + RewriteEngine On + RewriteCond %{HTTPS} off + RewriteRule ^(.*)$ https://%{HTTP_HOST}%{REQUEST_URI} [R=301,L] + + + + ServerName your-domain.com + ServerAlias www.your-domain.com + + # SSL конфигурация + SSLEngine on + SSLCertificateFile /etc/ssl/certs/your-cert.pem + SSLCertificateKeyFile /etc/ssl/private/your-key.pem + SSLCertificateChainFile /etc/ssl/certs/your-chain.pem + + # SSL настройки безопасности + SSLProtocol all -SSLv3 -TLSv1 -TLSv1.1 + SSLCipherSuite ECDHE-ECDSA-AES256-GCM-SHA384:ECDHE-RSA-AES256-GCM-SHA384:ECDHE-ECDSA-CHACHA20-POLY1305:ECDHE-RSA-CHACHA20-POLY1305:ECDHE-ECDSA-AES128-GCM-SHA256:ECDHE-RSA-AES128-GCM-SHA256 + SSLHonorCipherOrder off + SSLSessionTickets off + + # Основные настройки + DocumentRoot /var/www/project + + # WSGI конфигурация + WSGIDaemonProcess project python-path=/var/www/project/src python-home=/var/www/project/venv + WSGIProcessGroup project + WSGIScriptAlias / /var/www/project/src/config/wsgi.py + WSGIApplicationGroup %{GLOBAL} + + # Права доступа к WSGI файлу + + Require all granted + + + # Статические файлы + Alias /static/ /var/www/project/staticfiles/ + + Require all granted + ExpiresActive On + ExpiresDefault "access plus 1 year" + Header append Cache-Control "public" + + + # Медиа файлы + Alias /media/ /var/www/project/media/ + + Require all granted + ExpiresActive On + ExpiresDefault "access plus 1 year" + Header append Cache-Control "public" + + + # Логи + ErrorLog ${APACHE_LOG_DIR}/project_error.log + CustomLog ${APACHE_LOG_DIR}/project_access.log combined + + # Заголовки безопасности + Header always set X-Frame-Options "SAMEORIGIN" + Header always set X-Content-Type-Options "nosniff" + Header always set X-XSS-Protection "1; mode=block" + Header always set Referrer-Policy "no-referrer-when-downgrade" + + # Ограничение размера загрузки + LimitRequestBody 104857600 + + # Health check endpoint + + SetHandler none + Require all granted + + \ No newline at end of file diff --git a/deploy/monitoring/prometheus.yml b/deploy/monitoring/prometheus.yml new file mode 100644 index 0000000..502c7da --- /dev/null +++ b/deploy/monitoring/prometheus.yml @@ -0,0 +1,36 @@ +# Конфигурация мониторинга Prometheus для Django 
приложения + +global: + scrape_interval: 15s + evaluation_interval: 15s + +rule_files: + - "alert.rules" + +scrape_configs: + - job_name: 'django-app' + static_configs: + - targets: ['localhost:8000'] + metrics_path: '/metrics' + scrape_interval: 30s + + - job_name: 'celery-exporter' + static_configs: + - targets: ['localhost:9542'] + scrape_interval: 30s + + - job_name: 'postgresql' + static_configs: + - targets: ['localhost:9187'] + scrape_interval: 30s + + - job_name: 'redis' + static_configs: + - targets: ['localhost:9121'] + scrape_interval: 30s + +alerting: + alertmanagers: + - static_configs: + - targets: + - alertmanager:9093 \ No newline at end of file diff --git a/deploy/scripts/deploy.sh b/deploy/scripts/deploy.sh new file mode 100644 index 0000000..9403f3f --- /dev/null +++ b/deploy/scripts/deploy.sh @@ -0,0 +1,127 @@ +#!/bin/bash +# Скрипт развертывания проекта на сервере Astra Linux + +set -e # Прекращать выполнение при ошибках + +PROJECT_NAME="project" +PROJECT_PATH="/var/www/${PROJECT_NAME}" +REPO_URL="ваш_репозиторий.git" +BRANCH="main" + +echo "=== Начало развертывания проекта ===" + +# Обновление системы +echo "Обновление системы..." +apt-get update && apt-get upgrade -y + +# Установка uv +echo "Установка uv package manager..." +curl -LsSf https://astral.sh/uv/install.sh | sh +source $HOME/.cargo/env || true + +# Установка необходимых пакетов +echo "Установка системных зависимостей..." +apt-get install -y \ + python3.11 \ + python3.11-venv \ + python3.11-dev \ + postgresql-15 \ + postgresql-client-15 \ + redis-server \ + apache2 \ + libapache2-mod-wsgi-py3 \ + git \ + build-essential \ + libpq-dev \ + libffi-dev \ + libxml2-dev \ + libxslt1-dev \ + zlib1g-dev + +# Создание пользователя для проекта +echo "Создание пользователя проекта..." +if ! id "www-data" &>/dev/null; then + useradd -r -s /bin/false www-data +fi + +# Создание директорий проекта +echo "Создание структуры директорий..." +mkdir -p ${PROJECT_PATH}/{src,logs,media,staticfiles,venv} +chown -R www-data:www-data ${PROJECT_PATH} + +# Клонирование репозитория +echo "Клонирование репозитория..." +cd ${PROJECT_PATH} +if [ -d ".git" ]; then + git pull origin ${BRANCH} +else + git clone ${REPO_URL} . + git checkout ${BRANCH} +fi + +# Создание виртуального окружения с uv +echo "Создание виртуального окружения с uv..." +uv venv ${PROJECT_PATH}/venv +source ${PROJECT_PATH}/venv/bin/activate + +# Установка зависимостей через uv +echo "Установка Python зависимостей через uv..." +uv pip install --upgrade pip +uv pip install -r requirements.txt +uv pip install -r requirements-dev.txt + +# Настройка переменных окружения +echo "Настройка переменных окружения..." +cp .env.example .env +# Здесь можно автоматически заполнить .env файл или запросить ввод + +# Настройка базы данных +echo "Настройка базы данных..." +sudo -u postgres psql -c "CREATE DATABASE ${PROJECT_NAME}_prod;" || true +sudo -u postgres psql -c "CREATE USER ${PROJECT_NAME}_user WITH PASSWORD '${PROJECT_NAME}_password';" || true +sudo -u postgres psql -c "GRANT ALL PRIVILEGES ON DATABASE ${PROJECT_NAME}_prod TO ${PROJECT_NAME}_user;" || true + +# Выполнение миграций Django +echo "Выполнение миграций..." +cd ${PROJECT_PATH}/src +python manage.py makemigrations +python manage.py migrate +python manage.py collectstatic --noinput + +# Создание суперпользователя (опционально) +echo "Создание суперпользователя..." 
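+# Примечание: команда ниже идемпотентна, суперпользователь создаётся только
+# если пользователя 'admin' ещё нет; пароль 'adminpass' приведён для примера
+# и должен быть заменён перед реальным развертыванием.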
+echo "from django.contrib.auth import get_user_model; User = get_user_model(); User.objects.create_superuser('admin', 'admin@example.com', 'adminpass') if not User.objects.filter(username='admin').exists() else None" | python manage.py shell + +# Настройка systemd сервисов +echo "Настройка systemd сервисов..." +cp ../deploy/systemd/*.service /etc/systemd/system/ +systemctl daemon-reload + +# Настройка Apache +echo "Настройка Apache..." +cp ../deploy/apache/project.conf /etc/apache2/sites-available/${PROJECT_NAME}.conf +a2ensite ${PROJECT_NAME}.conf +a2enmod ssl rewrite headers expires +a2dissite 000-default + +# Настройка прав доступа +echo "Настройка прав доступа..." +chown -R www-data:www-data ${PROJECT_PATH} +chmod -R 755 ${PROJECT_PATH} + +# Запуск сервисов +echo "Запуск сервисов..." +systemctl enable gunicorn.service +systemctl enable celery-worker.service +systemctl enable celery-beat.service +systemctl enable apache2 + +systemctl start gunicorn.service +systemctl start celery-worker.service +systemctl start celery-beat.service +systemctl restart apache2 + +echo "=== Развертывание завершено успешно ===" +echo "Проект доступен по адресу: https://ваш-ip-адрес" +echo "Админка Django: https://ваш-ip-адрес/admin/" +echo "API документация: https://ваш-ip-адрес/api/" \ No newline at end of file diff --git a/deploy/systemd/celery-beat.service b/deploy/systemd/celery-beat.service new file mode 100644 index 0000000..f54f84b --- /dev/null +++ b/deploy/systemd/celery-beat.service @@ -0,0 +1,15 @@ +[Unit] +Description=Celery Beat for Django project +After=network.target redis.service postgresql.service + +[Service] +Type=simple +User=www-data +Group=www-data +EnvironmentFile=/var/www/project/.env +WorkingDirectory=/var/www/project/src +ExecStart=/var/www/project/venv/bin/celery -A config beat --loglevel=INFO --scheduler django_celery_beat.schedulers:DatabaseScheduler +Restart=always + +[Install] +WantedBy=multi-user.target \ No newline at end of file diff --git a/deploy/systemd/celery-worker.service b/deploy/systemd/celery-worker.service new file mode 100644 index 0000000..a7cabb0 --- /dev/null +++ b/deploy/systemd/celery-worker.service @@ -0,0 +1,16 @@ +[Unit] +Description=Celery Worker for Django project +After=network.target redis.service postgresql.service + +[Service] +Type=forking +User=www-data +Group=www-data +EnvironmentFile=/var/www/project/.env +WorkingDirectory=/var/www/project/src +ExecStart=/var/www/project/venv/bin/celery -A config worker --loglevel=INFO --pidfile=/run/celery/worker.pid +ExecReload=/bin/kill -HUP $MAINPID +PIDFile=/run/celery/worker.pid + +[Install] +WantedBy=multi-user.target \ No newline at end of file diff --git a/deploy/systemd/gunicorn.service b/deploy/systemd/gunicorn.service new file mode 100644 index 0000000..20b059f --- /dev/null +++ b/deploy/systemd/gunicorn.service @@ -0,0 +1,27 @@ +[Unit] +Description=Gunicorn daemon for Django project +After=network.target + +[Service] +Type=notify +User=www-data +Group=www-data +RuntimeDirectory=gunicorn +WorkingDirectory=/var/www/project/src +ExecStart=/var/www/project/venv/bin/gunicorn config.wsgi:application \ + --bind unix:/run/gunicorn.sock \ + --workers 3 \ + --worker-class gevent \ + --worker-connections 1000 \ + --timeout 30 \ + --keep-alive 2 \ + --max-requests 1000 \ + --max-requests-jitter 100 \ + --preload +ExecReload=/bin/kill -s HUP $MAINPID +KillMode=mixed +TimeoutStopSec=5 +PrivateTmp=true + +[Install] +WantedBy=multi-user.target \ No newline at end of file diff --git a/docker-compose.yml 
b/docker-compose.yml new file mode 100644 index 0000000..329db80 --- /dev/null +++ b/docker-compose.yml @@ -0,0 +1,136 @@ +version: '3.8' + +services: + db: + image: postgres:15.10 + container_name: state_corp_db + restart: unless-stopped + environment: + POSTGRES_DB: ${POSTGRES_DB:-project_dev} + POSTGRES_USER: ${POSTGRES_USER:-postgres} + POSTGRES_PASSWORD: ${POSTGRES_PASSWORD:-postgres} + volumes: + - postgres_data:/var/lib/postgresql/data + - ./docker/postgres/init.sql:/docker-entrypoint-initdb.d/init.sql + ports: + - "5432:5432" + networks: + - state_corp_network + healthcheck: + test: ["CMD-SHELL", "pg_isready -U postgres"] + interval: 30s + timeout: 10s + retries: 3 + + redis: + image: redis:7-alpine + container_name: state_corp_redis + restart: unless-stopped + ports: + - "6379:6379" + volumes: + - redis_data:/data + networks: + - state_corp_network + healthcheck: + test: ["CMD", "redis-cli", "ping"] + interval: 30s + timeout: 10s + retries: 3 + + web: + build: + context: . + dockerfile: docker/Dockerfile.web + container_name: state_corp_web + restart: unless-stopped + depends_on: + db: + condition: service_healthy + redis: + condition: service_healthy + environment: + - DEBUG=${DEBUG:-True} + - SECRET_KEY=${SECRET_KEY:-django-insecure-development-key} + - POSTGRES_HOST=db + - POSTGRES_PORT=5432 + - POSTGRES_DB=${POSTGRES_DB:-project_dev} + - POSTGRES_USER=${POSTGRES_USER:-postgres} + - POSTGRES_PASSWORD=${POSTGRES_PASSWORD:-postgres} + - REDIS_URL=redis://redis:6379/0 + - CELERY_BROKER_URL=redis://redis:6379/0 + volumes: + - ./src:/app/src + - ./logs:/app/logs + - ./media:/app/media + - ./staticfiles:/app/staticfiles + ports: + - "8000:8000" + networks: + - state_corp_network + command: > + sh -c "python src/manage.py migrate && + python src/manage.py collectstatic --noinput && + gunicorn config.wsgi:application --bind 0.0.0.0:8000 --workers 3" + + celery_worker: + build: + context: . + dockerfile: docker/Dockerfile.celery + container_name: state_corp_celery_worker + restart: unless-stopped + depends_on: + db: + condition: service_healthy + redis: + condition: service_healthy + environment: + - DEBUG=${DEBUG:-True} + - POSTGRES_HOST=db + - POSTGRES_PORT=5432 + - POSTGRES_DB=${POSTGRES_DB:-project_dev} + - POSTGRES_USER=${POSTGRES_USER:-postgres} + - POSTGRES_PASSWORD=${POSTGRES_PASSWORD:-postgres} + - REDIS_URL=redis://redis:6379/0 + - CELERY_BROKER_URL=redis://redis:6379/0 + volumes: + - ./src:/app/src + - ./logs:/app/logs + networks: + - state_corp_network + command: celery -A config worker --loglevel=info + + celery_beat: + build: + context: . 
+ dockerfile: docker/Dockerfile.celery + container_name: state_corp_celery_beat + restart: unless-stopped + depends_on: + db: + condition: service_healthy + redis: + condition: service_healthy + environment: + - DEBUG=${DEBUG:-True} + - POSTGRES_HOST=db + - POSTGRES_PORT=5432 + - POSTGRES_DB=${POSTGRES_DB:-project_dev} + - POSTGRES_USER=${POSTGRES_USER:-postgres} + - POSTGRES_PASSWORD=${POSTGRES_PASSWORD:-postgres} + - REDIS_URL=redis://redis:6379/0 + - CELERY_BROKER_URL=redis://redis:6379/0 + volumes: + - ./src:/app/src + - ./logs:/app/logs + networks: + - state_corp_network + command: celery -A config beat --loglevel=info --scheduler django_celery_beat.schedulers:DatabaseScheduler + +volumes: + postgres_data: + redis_data: + +networks: + state_corp_network: + driver: bridge \ No newline at end of file diff --git a/docker/Dockerfile.celery b/docker/Dockerfile.celery new file mode 100644 index 0000000..9b5a0bd --- /dev/null +++ b/docker/Dockerfile.celery @@ -0,0 +1,36 @@ +FROM python:3.11.2-slim + +# Установка системных зависимостей +RUN apt-get update \ + && apt-get install -y --no-install-recommends \ + gcc \ + libpq-dev \ + libffi-dev \ + libxml2-dev \ + libxslt1-dev \ + zlib1g-dev \ + && rm -rf /var/lib/apt/lists/* + +# Создание рабочей директории +WORKDIR /app + +# Копирование файлов зависимостей +COPY requirements.txt . +COPY requirements-dev.txt . + +# Установка Python зависимостей +RUN pip install --no-cache-dir -r requirements.txt +RUN pip install --no-cache-dir -r requirements-dev.txt + +# Копирование исходного кода +COPY src/ ./src/ + +# Создание необходимых директорий +RUN mkdir -p logs + +# Создание пользователя для запуска приложения +RUN groupadd -r appgroup && useradd -r -g appgroup appuser +RUN chown -R appuser:appgroup /app +USER appuser + +# Команда по умолчанию будет передаваться из docker-compose \ No newline at end of file diff --git a/docker/Dockerfile.web b/docker/Dockerfile.web new file mode 100644 index 0000000..203b11b --- /dev/null +++ b/docker/Dockerfile.web @@ -0,0 +1,41 @@ +FROM python:3.11.2-slim + +# Установка системных зависимостей +RUN apt-get update \ + && apt-get install -y --no-install-recommends \ + gcc \ + postgresql-client \ + libpq-dev \ + libffi-dev \ + libxml2-dev \ + libxslt1-dev \ + zlib1g-dev \ + && rm -rf /var/lib/apt/lists/* + +# Создание рабочей директории +WORKDIR /app + +# Копирование файлов зависимостей +COPY requirements.txt . +COPY requirements-dev.txt . 
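+
+# Note (illustrative, not prescriptive): a production-only image would normally skip
+# requirements-dev.txt and collapse the install into a single layer, e.g.
+#   RUN pip install --no-cache-dir -r requirements.txt
+# Installing the dev requirements below keeps this image usable for local development
+# at the cost of a larger image.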
+ +# Установка Python зависимостей +RUN pip install --no-cache-dir -r requirements.txt +RUN pip install --no-cache-dir -r requirements-dev.txt + +# Копирование исходного кода +COPY src/ ./src/ + +# Создание необходимых директорий +RUN mkdir -p logs staticfiles media + +# Создание пользователя для запуска приложения +RUN groupadd -r appgroup && useradd -r -g appgroup appuser +RUN chown -R appuser:appgroup /app +USER appuser + +# Открытие порта +EXPOSE 8000 + +# Команда по умолчанию +CMD ["gunicorn", "config.wsgi:application", "--bind", "0.0.0.0:8000", "--workers", "3"] \ No newline at end of file diff --git a/docker/postgres/init.sql b/docker/postgres/init.sql new file mode 100644 index 0000000..48e8a44 --- /dev/null +++ b/docker/postgres/init.sql @@ -0,0 +1,18 @@ +-- Инициализационный SQL файл для PostgreSQL +-- Создает необходимые расширения и базовые настройки + +-- Создание расширений +CREATE EXTENSION IF NOT EXISTS "uuid-ossp"; +CREATE EXTENSION IF NOT EXISTS "pg_trgm"; + +-- Создание пользователей (если нужно) +-- CREATE USER project_user WITH PASSWORD 'project_password'; +-- GRANT ALL PRIVILEGES ON DATABASE project_dev TO project_user; + +-- Настройки для производительности +ALTER SYSTEM SET shared_buffers = '256MB'; +ALTER SYSTEM SET effective_cache_size = '1GB'; +ALTER SYSTEM SET maintenance_work_mem = '64MB'; +ALTER SYSTEM SET checkpoint_completion_target = 0.9; +ALTER SYSTEM SET wal_buffers = '16MB'; +ALTER SYSTEM SET default_statistics_target = 100; \ No newline at end of file diff --git a/pyproject.toml b/pyproject.toml new file mode 100644 index 0000000..54aeb36 --- /dev/null +++ b/pyproject.toml @@ -0,0 +1,472 @@ +[project] +name = "state-corp-backend" +version = "0.1.0" +description = "Backend для системы Отчётность Организаций (State-Corp)" +authors = [ + {name = "Your Name", email = "your.email@example.com"}, +] +requires-python = ">=3.11" +dependencies = [ + # Django Framework + "Django==3.2.25", + "djangorestframework==3.14.0", + # Database + "psycopg2-binary==2.9.9", + # Async tasks + "celery==5.3.6", + "redis==5.0.3", + "django-celery-beat==2.6.0", + "django-celery-results==2.5.1", + # Caching + "django-redis==5.4.0", + # Data processing + "pandas==2.0.3", + "numpy==1.24.4", + "requests==2.31.0", + "beautifulsoup4==4.12.3", + # Web scraping + "scrapy==2.11.2", + "selenium==4.17.2", + # Validation and serialization + "django-filter==23.5", + "django-cors-headers==4.3.1", + # Logging and monitoring + "python-json-logger==2.0.7", + # Utilities + "python-dotenv==1.0.1", + "python-dateutil==2.8.2", + "pytz==2024.1", + # Security + "cryptography==42.0.5", + "djangorestframework-simplejwt>=5.3.1", + "drf-yasg>=1.21.10", + "pillow>=12.1.0", + "python-decouple>=3.8", + "coreapi>=2.3.3", + "django-rest-swagger>=2.2.0", + "model-bakery>=1.17.0", + "faker>=40.1.2", + "factory-boy>=3.3.0", +] + +[project.optional-dependencies] +dev = [ + # WSGI server + "gunicorn==21.2.0", + "gevent==23.9.1", + + # Development + "django-extensions==3.2.3", + "werkzeug==3.0.1", + "django-debug-toolbar==4.2.0", + + # Testing + "pytest==7.4.4", + "pytest-django==4.7.0", + "pytest-cov==4.1.0", + "factory-boy==3.3.0", + "coverage==7.4.0", + + # Linters and formatters + "flake8==6.1.0", + "black==23.12.1", + "isort==5.13.2", + "ruff==0.1.14", + + # Documentation + "sphinx==7.2.6", + "sphinx-rtd-theme==2.0.0", + + # Monitoring + "flower==2.0.1", + + # CLI tools + "click==8.1.7", + "typer==0.9.0", + + # Additional tools + "watchdog==3.0.0", + + # Pre-commit hooks + "pre-commit==3.6.0", +] + +[build-system] 
+requires = ["setuptools>=45", "wheel"] +build-backend = "setuptools.build_meta" + +[tool.setuptools] +packages = ["src"] + +# ================================================================================== +# PYTEST CONFIGURATION +# ================================================================================== +[tool.pytest.ini_options] +DJANGO_SETTINGS_MODULE = "config.settings.test" +python_paths = ["src"] +testpaths = ["tests"] +addopts = [ + "--verbose", + "--tb=short", + "--reuse-db", + "--nomigrations", + "--strict-markers", + "--strict-config", + "--color=yes", +] + +markers = [ + "slow: marks tests as slow (deselect with '-m \"not slow\"')", + "integration: marks tests as integration tests", + "unit: marks tests as unit tests", + "models: marks tests for models", + "views: marks tests for views", + "serializers: marks tests for serializers", + "services: marks tests for services", + "factories: marks tests for factories", +] + +filterwarnings = [ + "ignore::django.utils.deprecation.RemovedInDjango40Warning", + "ignore::django.utils.deprecation.RemovedInDjango41Warning", + "ignore::DeprecationWarning", + "ignore::PendingDeprecationWarning", +] + +norecursedirs = [ + ".git", + ".venv", + "__pycache__", + "*.egg-info", + ".pytest_cache", + "node_modules", + "migrations", +] + +# ================================================================================== +# COVERAGE CONFIGURATION +# ================================================================================== +[tool.coverage.run] +source = ["src"] +omit = [ + "*/migrations/*", + "*/tests/*", + "*/venv/*", + "*/virtualenv/*", + "*/site-packages/*", + "manage.py", + "*/settings/*", + "*/config/wsgi.py", + "*/config/asgi.py", + "*/__pycache__/*", +] +branch = true +relative_files = true + +[tool.coverage.report] +exclude_lines = [ + "pragma: no cover", + "def __repr__", + "if self.debug:", + "if settings.DEBUG", + "raise AssertionError", + "raise NotImplementedError", + "if 0:", + "if __name__ == .__main__.:", + "class .*\\bProtocol\\):", + "@(abc\\.)?abstractmethod", +] +show_missing = true +skip_covered = false +precision = 2 + +[tool.coverage.html] +directory = "htmlcov" + +[tool.coverage.xml] +output = "coverage.xml" + +# ================================================================================== +# RUFF CONFIGURATION (Linting and Code Quality) +# ================================================================================== +[tool.ruff] +line-length = 88 +target-version = "py311" + +exclude = [ + ".bzr", + ".direnv", + ".eggs", + ".git", + ".git-rewrite", + ".hg", + ".mypy_cache", + ".nox", + ".pants.d", + ".pytype", + ".ruff_cache", + ".svn", + ".tox", + ".venv", + "__pypackages__", + "_build", + "buck-out", + "build", + "dist", + "node_modules", + "venv", + "*/migrations/*", + "*/__pycache__/*", +] + +[tool.ruff.lint] +# Enable pycodestyle (`E`) and Pyflakes (`F`) codes by default. 
+select = [ + "E", # pycodestyle errors + "W", # pycodestyle warnings + "F", # pyflakes + "I", # isort + "C", # mccabe + "B", # flake8-bugbear + "Q", # flake8-quotes + "DJ", # flake8-django + "UP", # pyupgrade + "S", # bandit security + "T20", # flake8-print + "SIM", # flake8-simplify +] + +extend-ignore = [ + "E501", # line too long, handled by formatter + "DJ01", # Missing docstring (too strict for Django) + "DJ001", # null=True on string fields (architectural decision) + "F403", # star imports (common in Django settings) + "F405", # name may be undefined from star imports (Django settings) + "E402", # module level import not at top (Django settings) + "S101", # Use of assert (common in tests) + "T201", # print statements (useful for debugging) +] + +# Allow autofix for all enabled rules (when `--fix`) is provided. +fixable = ["ALL"] +unfixable = [] + +# Allow unused variables when underscore-prefixed. +dummy-variable-rgx = "^(_+|(_+[a-zA-Z0-9_]*[a-zA-Z0-9]+?))$" + +[tool.ruff.lint.mccabe] +# Unlike Flake8, default to a complexity level of 10. +max-complexity = 10 + +[tool.ruff.lint.per-file-ignores] +# Ignore `E402` (import violations) in all `__init__.py` files +"__init__.py" = ["E402"] +# Ignore star imports and related errors in settings +"src/config/settings/*" = ["F403", "F405", "E402"] +# Ignore star imports in test runner files +"check_tests.py" = ["F403"] +"run_tests.py" = ["F403"] +"run_tests_simple.py" = ["F403"] +# Ignore complexity issues in tests +"tests/*" = ["C901", "S101"] +"**/test_*" = ["C901", "S101"] +"**/tests.py" = ["C901", "S101"] +# Ignore security warnings in test factories +"tests/**/factories.py" = ["S311"] +"**/factories.py" = ["S311"] + +[tool.ruff.format] +# Like Black, use double quotes for strings. +quote-style = "double" + +# Like Black, indent with spaces, rather than tabs. +indent-style = "space" + +# Like Black, respect magic trailing commas. +skip-magic-trailing-comma = false + +# Like Black, automatically detect the appropriate line ending. 
+line-ending = "auto" + +# ================================================================================== +# BLACK CONFIGURATION (Code Formatting) +# ================================================================================== +[tool.black] +line-length = 88 +target-version = ['py311'] +include = '\.pyi?$' +extend-exclude = ''' +/( + # directories + \.eggs + | \.git + | \.hg + | \.mypy_cache + | \.tox + | \.venv + | _build + | buck-out + | build + | dist + | migrations +)/ +''' + +# ================================================================================== +# ISORT CONFIGURATION (Import Sorting) +# ================================================================================== +[tool.isort] +profile = "black" +multi_line_output = 3 +line_length = 88 +include_trailing_comma = true +force_grid_wrap = 0 +use_parentheses = true +ensure_newline_before_comments = true +src_paths = ["src"] +skip = ["migrations"] +known_django = ["django"] +known_third_party = [ + "celery", + "redis", + "requests", + "pandas", + "numpy", + "scrapy", + "selenium", + "beautifulsoup4", + "rest_framework", + "django_filters", + "corsheaders", + "drf_yasg", + "model_bakery", + "factory", + "pytest", +] +sections = [ + "FUTURE", + "STDLIB", + "DJANGO", + "THIRDPARTY", + "FIRSTPARTY", + "LOCALFOLDER", +] + +# ================================================================================== +# MYPY CONFIGURATION (Type Checking) +# ================================================================================== +[tool.mypy] +python_version = "3.11" +check_untyped_defs = true +ignore_missing_imports = true +warn_unused_ignores = true +warn_redundant_casts = true +warn_unused_configs = true +warn_return_any = true +warn_unreachable = true +strict_optional = true +no_implicit_reexport = true +show_error_codes = true +plugins = ["mypy_django_plugin.main"] + +[[tool.mypy.overrides]] +module = "*.migrations.*" +ignore_errors = true + +[[tool.mypy.overrides]] +module = "tests.*" +disallow_untyped_defs = false + +[tool.django-stubs] +django_settings_module = "config.settings.development" + +# ================================================================================== +# BANDIT CONFIGURATION (Security) +# ================================================================================== +[tool.bandit] +exclude_dirs = ["tests", "migrations"] +tests = ["B201", "B301"] +skips = ["B101", "B601"] + +# ================================================================================== +# PYLINT CONFIGURATION +# ================================================================================== +[tool.pylint.messages_control] +disable = [ + "C0114", # missing-module-docstring + "C0115", # missing-class-docstring + "C0116", # missing-function-docstring + "R0903", # too-few-public-methods (Django models) + "R0901", # too-many-ancestors (Django views) + "W0613", # unused-argument (Django views) + "C0103", # invalid-name (Django field names) +] + +[tool.pylint.format] +max-line-length = 88 + +[tool.pylint.design] +max-args = 10 +max-locals = 25 +max-returns = 10 +max-branches = 20 + +# ================================================================================== +# DEPENDENCY GROUPS (Alternative to optional-dependencies) +# ================================================================================== +[dependency-groups] +dev = [ + "gunicorn==21.2.0", + "gevent==23.9.1", + "django-extensions==3.2.3", + "werkzeug==3.0.1", + "django-debug-toolbar==4.2.0", + "pytest==7.4.4", + "pytest-django==4.7.0", + 
"pytest-cov==4.1.0", + "factory-boy==3.3.0", + "coverage==7.4.0", + "flake8==6.1.0", + "black==23.12.1", + "isort==5.13.2", + "ruff==0.1.14", + "sphinx==7.2.6", + "sphinx-rtd-theme==2.0.0", + "flower==2.0.1", + "click==8.1.7", + "typer==0.9.0", + "watchdog==3.0.0", + "pre-commit==3.6.0", + "mypy==1.8.0", + "django-stubs==4.2.7", + "types-requests==2.31.0.20240125", + "bandit==1.7.5", +] + +test = [ + "pytest==7.4.4", + "pytest-django==4.7.0", + "pytest-cov==4.1.0", + "pytest-xdist==3.5.0", + "pytest-mock==3.12.0", + "factory-boy==3.3.0", + "model-bakery>=1.17.0", + "coverage==7.4.0", +] + +docs = [ + "sphinx==7.2.6", + "sphinx-rtd-theme==2.0.0", + "sphinx-autodoc-typehints==1.25.2", + "myst-parser==2.0.0", +] + +lint = [ + "ruff==0.1.14", + "black==23.12.1", + "isort==5.13.2", + "mypy==1.8.0", + "django-stubs==4.2.7", + "bandit==1.7.5", + "pre-commit==3.6.0", +] diff --git a/requirements-dev.txt b/requirements-dev.txt new file mode 100644 index 0000000..0659631 --- /dev/null +++ b/requirements-dev.txt @@ -0,0 +1,435 @@ +# This file was autogenerated by uv via the following command: +# uv pip compile pyproject.toml --extra dev --output-file=requirements-dev.txt +alabaster==0.7.16 + # via sphinx +amqp==5.3.1 + # via kombu +asgiref==3.11.0 + # via + # django + # django-cors-headers +attrs==25.4.0 + # via + # outcome + # service-identity + # trio + # twisted +automat==25.4.16 + # via twisted +babel==2.17.0 + # via sphinx +beautifulsoup4==4.12.3 + # via mostovik-backend (pyproject.toml) +billiard==4.2.4 + # via celery +black==23.12.1 + # via mostovik-backend (pyproject.toml) +celery==5.3.6 + # via + # mostovik-backend (pyproject.toml) + # django-celery-beat + # django-celery-results + # flower +certifi==2026.1.4 + # via + # requests + # selenium +cffi==2.0.0 + # via cryptography +cfgv==3.5.0 + # via pre-commit +charset-normalizer==3.4.4 + # via requests +click==8.1.7 + # via + # mostovik-backend (pyproject.toml) + # black + # celery + # click-didyoumean + # click-plugins + # click-repl + # typer +click-didyoumean==0.3.1 + # via celery +click-plugins==1.1.1.2 + # via celery +click-repl==0.3.0 + # via celery +constantly==23.10.4 + # via twisted +coreapi==2.3.3 + # via + # mostovik-backend (pyproject.toml) + # django-rest-swagger + # openapi-codec +coreschema==0.0.4 + # via coreapi +coverage==7.4.0 + # via + # mostovik-backend (pyproject.toml) + # pytest-cov +cron-descriptor==2.0.6 + # via django-celery-beat +cryptography==42.0.5 + # via + # mostovik-backend (pyproject.toml) + # pyopenssl + # scrapy + # service-identity +cssselect==1.3.0 + # via + # parsel + # scrapy +defusedxml==0.7.1 + # via scrapy +distlib==0.4.0 + # via virtualenv +django==3.2.25 + # via + # mostovik-backend (pyproject.toml) + # django-celery-beat + # django-celery-results + # django-cors-headers + # django-debug-toolbar + # django-extensions + # django-filter + # django-redis + # django-timezone-field + # djangorestframework + # djangorestframework-simplejwt + # drf-yasg + # model-bakery +django-celery-beat==2.6.0 + # via mostovik-backend (pyproject.toml) +django-celery-results==2.5.1 + # via mostovik-backend (pyproject.toml) +django-cors-headers==4.3.1 + # via mostovik-backend (pyproject.toml) +django-debug-toolbar==4.2.0 + # via mostovik-backend (pyproject.toml) +django-extensions==3.2.3 + # via mostovik-backend (pyproject.toml) +django-filter==23.5 + # via mostovik-backend (pyproject.toml) +django-redis==5.4.0 + # via mostovik-backend (pyproject.toml) +django-rest-swagger==2.2.0 + # via mostovik-backend (pyproject.toml) 
+django-timezone-field==7.2.1 + # via django-celery-beat +djangorestframework==3.14.0 + # via + # mostovik-backend (pyproject.toml) + # django-rest-swagger + # djangorestframework-simplejwt + # drf-yasg +djangorestframework-simplejwt==5.3.1 + # via mostovik-backend (pyproject.toml) +docutils==0.20.1 + # via + # sphinx + # sphinx-rtd-theme +drf-yasg==1.21.10 + # via mostovik-backend (pyproject.toml) +factory-boy==3.3.0 + # via mostovik-backend (pyproject.toml) +faker==40.1.2 + # via factory-boy +filelock==3.20.3 + # via + # tldextract + # virtualenv +flake8==6.1.0 + # via mostovik-backend (pyproject.toml) +flower==2.0.1 + # via mostovik-backend (pyproject.toml) +gevent==23.9.1 + # via mostovik-backend (pyproject.toml) +greenlet==3.3.0 + # via gevent +gunicorn==21.2.0 + # via mostovik-backend (pyproject.toml) +h11==0.16.0 + # via wsproto +humanize==4.15.0 + # via flower +hyperlink==21.0.0 + # via twisted +identify==2.6.16 + # via pre-commit +idna==3.11 + # via + # hyperlink + # requests + # tldextract + # trio +imagesize==1.4.1 + # via sphinx +incremental==24.11.0 + # via twisted +inflection==0.5.1 + # via drf-yasg +iniconfig==2.3.0 + # via pytest +isort==5.13.2 + # via mostovik-backend (pyproject.toml) +itemadapter==0.13.1 + # via + # itemloaders + # scrapy +itemloaders==1.3.2 + # via scrapy +itypes==1.2.0 + # via coreapi +jinja2==3.1.6 + # via + # coreschema + # sphinx +jmespath==1.0.1 + # via + # itemloaders + # parsel +kombu==5.6.2 + # via celery +lxml==6.0.2 + # via + # parsel + # scrapy +markupsafe==3.0.3 + # via + # jinja2 + # werkzeug +mccabe==0.7.0 + # via flake8 +model-bakery==1.17.0 + # via mostovik-backend (pyproject.toml) +mypy-extensions==1.1.0 + # via black +nodeenv==1.10.0 + # via pre-commit +numpy==1.24.4 + # via + # mostovik-backend (pyproject.toml) + # pandas +openapi-codec==1.3.2 + # via django-rest-swagger +outcome==1.3.0.post0 + # via + # trio + # trio-websocket +packaging==25.0 + # via + # black + # drf-yasg + # gunicorn + # incremental + # kombu + # parsel + # pytest + # scrapy + # sphinx +pandas==2.0.3 + # via mostovik-backend (pyproject.toml) +parsel==1.10.0 + # via + # itemloaders + # scrapy +pathspec==1.0.3 + # via black +pillow==12.1.0 + # via mostovik-backend (pyproject.toml) +platformdirs==4.5.1 + # via + # black + # virtualenv +pluggy==1.6.0 + # via pytest +pre-commit==3.6.0 + # via mostovik-backend (pyproject.toml) +prometheus-client==0.24.1 + # via flower +prompt-toolkit==3.0.52 + # via click-repl +protego==0.5.0 + # via scrapy +psycopg2-binary==2.9.9 + # via mostovik-backend (pyproject.toml) +pyasn1==0.6.2 + # via + # pyasn1-modules + # service-identity +pyasn1-modules==0.4.2 + # via service-identity +pycodestyle==2.11.1 + # via flake8 +pycparser==2.23 + # via cffi +pydispatcher==2.0.7 + # via scrapy +pyflakes==3.1.0 + # via flake8 +pygments==2.19.2 + # via sphinx +pyjwt==2.10.1 + # via djangorestframework-simplejwt +pyopenssl==25.1.0 + # via scrapy +pysocks==1.7.1 + # via urllib3 +pytest==7.4.4 + # via + # mostovik-backend (pyproject.toml) + # pytest-cov + # pytest-django +pytest-cov==4.1.0 + # via mostovik-backend (pyproject.toml) +pytest-django==4.7.0 + # via mostovik-backend (pyproject.toml) +python-crontab==3.3.0 + # via django-celery-beat +python-dateutil==2.8.2 + # via + # mostovik-backend (pyproject.toml) + # celery + # pandas +python-decouple==3.8 + # via mostovik-backend (pyproject.toml) +python-dotenv==1.0.1 + # via mostovik-backend (pyproject.toml) +python-json-logger==2.0.7 + # via mostovik-backend (pyproject.toml) +pytz==2024.1 + # via + # 
mostovik-backend (pyproject.toml) + # django + # djangorestframework + # drf-yasg + # flower + # pandas +pyyaml==6.0.3 + # via + # drf-yasg + # pre-commit +queuelib==1.8.0 + # via scrapy +redis==5.0.3 + # via + # mostovik-backend (pyproject.toml) + # django-redis +requests==2.31.0 + # via + # mostovik-backend (pyproject.toml) + # coreapi + # requests-file + # sphinx + # tldextract +requests-file==3.0.1 + # via tldextract +ruff==0.1.14 + # via mostovik-backend (pyproject.toml) +scrapy==2.11.2 + # via mostovik-backend (pyproject.toml) +selenium==4.17.2 + # via mostovik-backend (pyproject.toml) +service-identity==24.2.0 + # via scrapy +setuptools==80.9.0 + # via scrapy +simplejson==3.20.2 + # via django-rest-swagger +six==1.17.0 + # via python-dateutil +sniffio==1.3.1 + # via trio +snowballstemmer==3.0.1 + # via sphinx +sortedcontainers==2.4.0 + # via trio +soupsieve==2.8.2 + # via beautifulsoup4 +sphinx==7.2.6 + # via + # mostovik-backend (pyproject.toml) + # sphinx-rtd-theme + # sphinxcontrib-jquery +sphinx-rtd-theme==2.0.0 + # via mostovik-backend (pyproject.toml) +sphinxcontrib-applehelp==2.0.0 + # via sphinx +sphinxcontrib-devhelp==2.0.0 + # via sphinx +sphinxcontrib-htmlhelp==2.1.0 + # via sphinx +sphinxcontrib-jquery==4.1 + # via sphinx-rtd-theme +sphinxcontrib-jsmath==1.0.1 + # via sphinx +sphinxcontrib-qthelp==2.0.0 + # via sphinx +sphinxcontrib-serializinghtml==2.0.0 + # via sphinx +sqlparse==0.5.5 + # via + # django + # django-debug-toolbar +tldextract==5.3.1 + # via scrapy +tornado==6.5.4 + # via flower +trio==0.32.0 + # via + # selenium + # trio-websocket +trio-websocket==0.12.2 + # via selenium +twisted==25.5.0 + # via scrapy +typer==0.9.0 + # via mostovik-backend (pyproject.toml) +typing-extensions==4.15.0 + # via + # cron-descriptor + # pyopenssl + # selenium + # twisted + # typer +tzdata==2025.3 + # via + # celery + # django-celery-beat + # kombu + # pandas +uritemplate==4.2.0 + # via + # coreapi + # drf-yasg +urllib3==2.6.3 + # via + # requests + # selenium +vine==5.1.0 + # via + # amqp + # celery + # kombu +virtualenv==20.36.1 + # via pre-commit +w3lib==2.3.1 + # via + # parsel + # scrapy +watchdog==3.0.0 + # via mostovik-backend (pyproject.toml) +wcwidth==0.2.14 + # via prompt-toolkit +werkzeug==3.0.1 + # via mostovik-backend (pyproject.toml) +wsproto==1.3.2 + # via trio-websocket +zope-event==6.1 + # via gevent +zope-interface==8.2 + # via + # gevent + # scrapy + # twisted diff --git a/requirements.txt b/requirements.txt new file mode 100644 index 0000000..070dfe5 --- /dev/null +++ b/requirements.txt @@ -0,0 +1,295 @@ +# This file was autogenerated by uv via the following command: +# uv pip compile pyproject.toml --output-file=requirements.txt +amqp==5.3.1 + # via kombu +asgiref==3.11.0 + # via + # django + # django-cors-headers +attrs==25.4.0 + # via + # outcome + # service-identity + # trio + # twisted +automat==25.4.16 + # via twisted +beautifulsoup4==4.12.3 + # via mostovik-backend (pyproject.toml) +billiard==4.2.4 + # via celery +celery==5.3.6 + # via + # mostovik-backend (pyproject.toml) + # django-celery-beat + # django-celery-results +certifi==2026.1.4 + # via + # requests + # selenium +cffi==2.0.0 + # via cryptography +charset-normalizer==3.4.4 + # via requests +click==8.1.7 + # via + # celery + # click-didyoumean + # click-plugins + # click-repl +click-didyoumean==0.3.1 + # via celery +click-plugins==1.1.1.2 + # via celery +click-repl==0.3.0 + # via celery +constantly==23.10.4 + # via twisted +coreapi==2.3.3 + # via + # mostovik-backend (pyproject.toml) + # 
django-rest-swagger + # openapi-codec +coreschema==0.0.4 + # via coreapi +cron-descriptor==2.0.6 + # via django-celery-beat +cryptography==42.0.5 + # via + # mostovik-backend (pyproject.toml) + # pyopenssl + # scrapy + # service-identity +cssselect==1.3.0 + # via + # parsel + # scrapy +defusedxml==0.7.1 + # via scrapy +django==3.2.25 + # via + # mostovik-backend (pyproject.toml) + # django-celery-beat + # django-celery-results + # django-cors-headers + # django-filter + # django-redis + # django-timezone-field + # djangorestframework + # djangorestframework-simplejwt + # drf-yasg + # model-bakery +django-celery-beat==2.6.0 + # via mostovik-backend (pyproject.toml) +django-celery-results==2.5.1 + # via mostovik-backend (pyproject.toml) +django-cors-headers==4.3.1 + # via mostovik-backend (pyproject.toml) +django-filter==23.5 + # via mostovik-backend (pyproject.toml) +django-redis==5.4.0 + # via mostovik-backend (pyproject.toml) +django-rest-swagger==2.2.0 + # via mostovik-backend (pyproject.toml) +django-timezone-field==7.2.1 + # via django-celery-beat +djangorestframework==3.14.0 + # via + # mostovik-backend (pyproject.toml) + # django-rest-swagger + # djangorestframework-simplejwt + # drf-yasg +djangorestframework-simplejwt==5.3.1 + # via mostovik-backend (pyproject.toml) +drf-yasg==1.21.10 + # via mostovik-backend (pyproject.toml) +filelock==3.20.3 + # via tldextract +h11==0.16.0 + # via wsproto +hyperlink==21.0.0 + # via twisted +idna==3.11 + # via + # hyperlink + # requests + # tldextract + # trio +incremental==24.11.0 + # via twisted +inflection==0.5.1 + # via drf-yasg +itemadapter==0.13.1 + # via + # itemloaders + # scrapy +itemloaders==1.3.2 + # via scrapy +itypes==1.2.0 + # via coreapi +jinja2==3.1.6 + # via coreschema +jmespath==1.0.1 + # via + # itemloaders + # parsel +kombu==5.6.2 + # via celery +lxml==6.0.2 + # via + # parsel + # scrapy +markupsafe==3.0.3 + # via jinja2 +model-bakery==1.17.0 + # via mostovik-backend (pyproject.toml) +numpy==1.24.4 + # via + # mostovik-backend (pyproject.toml) + # pandas +openapi-codec==1.3.2 + # via django-rest-swagger +outcome==1.3.0.post0 + # via + # trio + # trio-websocket +packaging==25.0 + # via + # drf-yasg + # incremental + # kombu + # parsel + # scrapy +pandas==2.0.3 + # via mostovik-backend (pyproject.toml) +parsel==1.10.0 + # via + # itemloaders + # scrapy +pillow==12.1.0 + # via mostovik-backend (pyproject.toml) +prompt-toolkit==3.0.52 + # via click-repl +protego==0.5.0 + # via scrapy +psycopg2-binary==2.9.9 + # via mostovik-backend (pyproject.toml) +pyasn1==0.6.2 + # via + # pyasn1-modules + # service-identity +pyasn1-modules==0.4.2 + # via service-identity +pycparser==2.23 + # via cffi +pydispatcher==2.0.7 + # via scrapy +pyjwt==2.10.1 + # via djangorestframework-simplejwt +pyopenssl==25.1.0 + # via scrapy +pysocks==1.7.1 + # via urllib3 +python-crontab==3.3.0 + # via django-celery-beat +python-dateutil==2.8.2 + # via + # mostovik-backend (pyproject.toml) + # celery + # pandas +python-decouple==3.8 + # via mostovik-backend (pyproject.toml) +python-dotenv==1.0.1 + # via mostovik-backend (pyproject.toml) +python-json-logger==2.0.7 + # via mostovik-backend (pyproject.toml) +pytz==2024.1 + # via + # mostovik-backend (pyproject.toml) + # django + # djangorestframework + # drf-yasg + # pandas +pyyaml==6.0.3 + # via drf-yasg +queuelib==1.8.0 + # via scrapy +redis==5.0.3 + # via + # mostovik-backend (pyproject.toml) + # django-redis +requests==2.31.0 + # via + # mostovik-backend (pyproject.toml) + # coreapi + # requests-file + # tldextract 
+requests-file==3.0.1 + # via tldextract +scrapy==2.11.2 + # via mostovik-backend (pyproject.toml) +selenium==4.17.2 + # via mostovik-backend (pyproject.toml) +service-identity==24.2.0 + # via scrapy +setuptools==80.9.0 + # via scrapy +simplejson==3.20.2 + # via django-rest-swagger +six==1.17.0 + # via python-dateutil +sniffio==1.3.1 + # via trio +sortedcontainers==2.4.0 + # via trio +soupsieve==2.8.2 + # via beautifulsoup4 +sqlparse==0.5.5 + # via django +tldextract==5.3.1 + # via scrapy +trio==0.32.0 + # via + # selenium + # trio-websocket +trio-websocket==0.12.2 + # via selenium +twisted==25.5.0 + # via scrapy +typing-extensions==4.15.0 + # via + # cron-descriptor + # pyopenssl + # selenium + # twisted +tzdata==2025.3 + # via + # celery + # django-celery-beat + # kombu + # pandas +uritemplate==4.2.0 + # via + # coreapi + # drf-yasg +urllib3==2.6.3 + # via + # requests + # selenium +vine==5.1.0 + # via + # amqp + # celery + # kombu +w3lib==2.3.1 + # via + # parsel + # scrapy +wcwidth==0.2.14 + # via prompt-toolkit +wsproto==1.3.2 + # via trio-websocket +zope-interface==8.2 + # via + # scrapy + # twisted diff --git a/run_tests.py b/run_tests.py new file mode 100644 index 0000000..db61014 --- /dev/null +++ b/run_tests.py @@ -0,0 +1,268 @@ +#!/usr/bin/env python +""" +Простой скрипт для запуска тестов, обходящий проблемы с pytest и pdbpp +Использует стандартный Django test runner с улучшенными возможностями +Поддерживает coverage и дополнительные опции +""" + +import os +import sys +from io import StringIO +import argparse + +import django + + +def setup_django(): + """Настройка Django окружения""" + # Монкипатчим проблематичные модули + sys.modules["ipdb"] = type("MockModule", (), {"__getattr__": lambda s, n: None})() + + # Добавляем src в PYTHONPATH + src_path = os.path.join(os.path.dirname(__file__), "src") + if src_path not in sys.path: + sys.path.insert(0, src_path) + + # Устанавливаем настройки Django (принудительно для тестов) + os.environ["DJANGO_SETTINGS_MODULE"] = "config.settings.test" + + # Инициализируем Django + django.setup() + + +def run_tests_with_args(test_args, options): + """Запуск тестов с заданными аргументами""" + from django.conf import settings + from django.test.utils import get_runner + + # Получаем test runner + TestRunner = get_runner(settings) + + # Настройки для test runner + runner_kwargs = { + "verbosity": options.verbose, + "interactive": False, + "keepdb": options.keepdb, + "failfast": options.failfast, + } + + # Добавляем parallel если указано + if options.parallel: + runner_kwargs["parallel"] = options.parallel + + test_runner = TestRunner(**runner_kwargs) + + # Запускаем тесты + failures = test_runner.run_tests(test_args) + return failures + + +def parse_arguments(): + """Парсинг аргументов командной строки""" + parser = argparse.ArgumentParser(description="Запуск Django тестов с дополнительными возможностями") + + parser.add_argument( + "targets", + nargs="*", + help="Цели тестирования (по умолчанию: все тесты)", + default=["tests"] + ) + + parser.add_argument( + "--coverage", "--cov", + action="store_true", + help="Запуск тестов с измерением покрытия кода" + ) + + parser.add_argument( + "--fast", + action="store_true", + help="Запуск только быстрых тестов (исключает медленные)" + ) + + parser.add_argument( + "--failfast", + action="store_true", + help="Остановка при первой ошибке" + ) + + parser.add_argument( + "--verbose", "-v", + action="count", + default=2, + help="Уровень детализации вывода" + ) + + parser.add_argument( + "--keepdb", + 
action="store_true", + help="Сохранить тестовую базу данных" + ) + + parser.add_argument( + "--parallel", + type=int, + metavar="N", + help="Запуск тестов в N параллельных процессах" + ) + + args = parser.parse_args() + + # Преобразуем пути для удобства использования + test_targets = [] + + for target in args.targets: + # Преобразование путей файлов в модули Django + if target.endswith(".py"): + # Убираем расширение .py + target = target[:-3] + + # Заменяем слеши на точки для модульных путей + if "/" in target: + target = target.replace("/", ".") + + # Добавляем префикс tests если его нет + if not target.startswith("tests"): + if target == "user": + # Если просто "user", запускаем все тесты user app + target = "tests.apps.user" + elif target in ["models", "views", "serializers", "services"]: + # Если это простые ключевые слова, добавляем test_ префикс + target = f"tests.apps.user.test_{target}" + elif ( + "test_" in target + or "models" in target + or "views" in target + or "serializers" in target + or "services" in target + ): + # Если это конкретный файл тестов с префиксом или содержит ключевые слова + if not target.startswith("test_"): + target = f"tests.apps.user.test_{target}" + else: + target = f"tests.apps.user.{target}" + else: + # Общий случай + target = f"tests.{target}" + + test_targets.append(target) + + args.targets = test_targets if test_targets else ["tests"] + return args + + +def print_test_info(test_targets, options): + """Вывод информации о запуске тестов""" + print("🧪 Запуск тестов (Django test runner)...") + + if test_targets == ["tests"]: + print("📁 Цель: Все тесты в проекте") + else: + print(f"📁 Цели: {', '.join(test_targets)}") + + print(f"⚙️ Настройки Django: {os.environ.get('DJANGO_SETTINGS_MODULE')}") + print(f"📦 Путь к исходникам: {os.path.join(os.path.dirname(__file__), 'src')}") + + # Дополнительные опции + options_info = [] + if options.coverage: + options_info.append("📊 Измерение покрытия") + if options.fast: + options_info.append("🚀 Только быстрые тесты") + if options.failfast: + options_info.append("❌ Остановка при первой ошибке") + if options.keepdb: + options_info.append("💾 Сохранение тестовой БД") + if options.parallel: + options_info.append(f"⚡ Параллельность: {options.parallel}") + + if options_info: + print("🔧 Опции:", " | ".join(options_info)) + + print("-" * 60) + + +def setup_coverage(): + """Настройка coverage""" + try: + import coverage + cov = coverage.Coverage(config_file="pyproject.toml") + cov.start() + return cov + except ImportError: + print("⚠️ Модуль coverage не установлен. 
Измерение покрытия недоступно.") + return None + + +def finalize_coverage(cov): + """Завершение измерения покрытия""" + if cov: + cov.stop() + cov.save() + + print("\n📊 Отчет о покрытии кода:") + print("-" * 40) + cov.report() + + # Создание HTML отчета + try: + cov.html_report() + print("\n📄 HTML отчет создан в директории: htmlcov/") + except Exception as e: + print(f"⚠️ Не удалось создать HTML отчет: {e}") + + +def main(): + """Основная функция""" + cov = None + try: + # Парсинг аргументов + options = parse_arguments() + + # Настройка coverage если нужно + if options.coverage: + cov = setup_coverage() + + # Настройка Django + setup_django() + + # Настройка фильтрации тестов + if options.fast: + os.environ["PYTEST_CURRENT_TEST_FILTER"] = "not slow" + + # Вывод информации + print_test_info(options.targets, options) + + # Запуск тестов + failures = run_tests_with_args(options.targets, options) + + # Завершение coverage + if cov: + finalize_coverage(cov) + + # Результат + if failures: + print(f"\n❌ Тесты завершились с ошибками: {failures} неудачных тестов") + sys.exit(1) + else: + print(f"\n✅ Все тесты прошли успешно!") + if cov: + print("📊 Отчет о покрытии сохранен") + sys.exit(0) + + except KeyboardInterrupt: + print("\n❌ Тесты прерваны пользователем") + if cov: + cov.stop() + sys.exit(1) + except Exception as e: + print(f"\n❌ Ошибка при запуске тестов: {e}") + if cov: + cov.stop() + import traceback + traceback.print_exc() + sys.exit(1) + + +if __name__ == "__main__": + main() diff --git a/scripts/check-migrations.sh b/scripts/check-migrations.sh new file mode 100755 index 0000000..2e2365e --- /dev/null +++ b/scripts/check-migrations.sh @@ -0,0 +1,16 @@ +#!/bin/bash +# Django migration check script for pre-commit + +cd "$(dirname "$0")/../src" || exit 1 + +export PYTHONPATH=. +export DJANGO_SETTINGS_MODULE=config.settings.development + +if uv run python manage.py makemigrations --check --dry-run; then + echo "✓ Django migrations are up to date" + exit 0 +else + echo "⚠ Warning: Django migrations check failed (may be due to configuration issues)" + echo " This doesn't prevent commits, but you should check migrations manually" + exit 0 # Exit with success to not block commits +fi \ No newline at end of file diff --git a/scripts/setup-precommit.sh b/scripts/setup-precommit.sh new file mode 100644 index 0000000..7bb39f3 --- /dev/null +++ b/scripts/setup-precommit.sh @@ -0,0 +1,41 @@ +#!/bin/bash +# Скрипт установки и настройки pre-commit хуков + +echo "🔧 Настройка pre-commit хуков..." + +# Проверка наличия Git +if ! command -v git &> /dev/null; then + echo "❌ Git не найден. Установите Git и повторите попытку." + exit 1 +fi + +# Создание директории для хуков если её нет +HOOKS_DIR=".git/hooks" +if [ ! 
-d "$HOOKS_DIR" ]; then + mkdir -p "$HOOKS_DIR" + echo "📁 Создана директория для git hooks" +fi + +# Копирование pre-commit хука +if [ -f ".git/hooks/pre-commit" ]; then + echo "🔄 Обновление существующего pre-commit хука" +else + echo "📥 Установка нового pre-commit хука" +fi + +# Делаем хук исполняемым +chmod +x .git/hooks/pre-commit +echo "✅ Pre-commit хук установлен и готов к использованию" + +echo "" +echo "📋 Что проверяет pre-commit хук:" +echo " • Синтаксис Python файлов" +echo " • Стиль кода (flake8)" +echo " • Форматирование (black)" +echo " • Сортировка импортов (isort)" +echo " • Формат YAML файлов" +echo " • Пробелы в конце строк" +echo " • Закрывающие переводы строк" +echo "" +echo "💡 Хук автоматически запускается при каждом коммите" +echo "💡 Для пропуска проверок используйте: git commit --no-verify" \ No newline at end of file diff --git a/src/apps/core/__init__.py b/src/apps/core/__init__.py new file mode 100644 index 0000000..d8ed62a --- /dev/null +++ b/src/apps/core/__init__.py @@ -0,0 +1 @@ +default_app_config = "apps.core.apps.CoreConfig" diff --git a/src/apps/core/apps.py b/src/apps/core/apps.py new file mode 100644 index 0000000..9c3e6ff --- /dev/null +++ b/src/apps/core/apps.py @@ -0,0 +1,7 @@ +from django.apps import AppConfig + + +class CoreConfig(AppConfig): + default_auto_field = "django.db.models.BigAutoField" + name = "apps.core" + verbose_name = "Core" diff --git a/src/apps/core/cache.py b/src/apps/core/cache.py new file mode 100644 index 0000000..dc2cc31 --- /dev/null +++ b/src/apps/core/cache.py @@ -0,0 +1,254 @@ +""" +Декораторы кэширования для сервисного слоя. + +Предоставляет удобные декораторы для кэширования результатов методов. +""" + +import functools +import hashlib +import json +import logging +from collections.abc import Callable +from typing import Any, TypeVar + +from django.core.cache import cache + +logger = logging.getLogger(__name__) + +F = TypeVar("F", bound=Callable[..., Any]) + + +def cache_result( + timeout: int = 300, + key_prefix: str = "", + key_builder: Callable[..., str] | None = None, +) -> Callable[[F], F]: + """ + Декоратор для кэширования результата функции/метода. 
+ + Args: + timeout: Время жизни кэша в секундах (по умолчанию 5 минут) + key_prefix: Префикс для ключа кэша + key_builder: Функция для построения ключа кэша + + Пример использования: + class UserService: + @classmethod + @cache_result(timeout=600, key_prefix="user") + def get_user_stats(cls, user_id: int) -> dict: + # Тяжёлый запрос + return heavy_calculation(user_id) + + Ключ кэша формируется как: + {key_prefix}:{function_name}:{hash_of_args} + """ + + def decorator(func: F) -> F: + @functools.wraps(func) + def wrapper(*args: Any, **kwargs: Any) -> Any: + # Формируем ключ кэша + if key_builder: + cache_key = key_builder(*args, **kwargs) + else: + cache_key = _build_cache_key(func, key_prefix, args, kwargs) + + # Пробуем получить из кэша + cached_value = cache.get(cache_key) + if cached_value is not None: + logger.debug(f"Cache hit: {cache_key}") + return cached_value + + # Вычисляем и кэшируем + logger.debug(f"Cache miss: {cache_key}") + result = func(*args, **kwargs) + cache.set(cache_key, result, timeout) + return result + + # Добавляем метод для инвалидации кэша + wrapper.invalidate = lambda *a, **kw: _invalidate_cache( # type: ignore + func, key_prefix, key_builder, a, kw + ) + return wrapper # type: ignore + + return decorator + + +def cache_method( + timeout: int = 300, + key_prefix: str = "", +) -> Callable[[F], F]: + """ + Декоратор для кэширования результата метода класса. + + Автоматически исключает self/cls из ключа кэша. + + Пример использования: + class ArticleService: + @classmethod + @cache_method(timeout=300, key_prefix="articles") + def get_popular_articles(cls, limit: int = 10) -> list: + return Article.objects.order_by('-views')[:limit] + """ + + def decorator(func: F) -> F: + @functools.wraps(func) + def wrapper(*args: Any, **kwargs: Any) -> Any: + # Пропускаем self/cls (первый аргумент) + cache_args = args[1:] if args else args + + cache_key = _build_cache_key(func, key_prefix, cache_args, kwargs) + + cached_value = cache.get(cache_key) + if cached_value is not None: + logger.debug(f"Cache hit: {cache_key}") + return cached_value + + logger.debug(f"Cache miss: {cache_key}") + result = func(*args, **kwargs) + cache.set(cache_key, result, timeout) + return result + + return wrapper # type: ignore + + return decorator + + +def invalidate_cache(key_pattern: str) -> None: + """ + Инвалидирует кэш по паттерну ключа. + + Args: + key_pattern: Паттерн ключа для удаления + + Пример: + invalidate_cache("user:get_user_stats:*") + """ + # Для простых backend'ов (locmem, db) + # Для Redis можно использовать SCAN + DELETE + try: + cache.delete_pattern(key_pattern) # type: ignore + except AttributeError: + # Fallback для backend'ов без delete_pattern + cache.delete(key_pattern.replace("*", "")) + logger.warning( + f"Cache backend не поддерживает delete_pattern, " + f"удалён только точный ключ: {key_pattern}" + ) + + +def invalidate_prefix(prefix: str) -> None: + """ + Инвалидирует весь кэш с указанным префиксом. 
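+
+    Note: pattern deletion relies on a cache backend that provides delete_pattern()
+    (django-redis does); other backends fall back to deleting only the literal key,
+    see invalidate_cache().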
+ + Args: + prefix: Префикс ключей для удаления + + Пример: + invalidate_prefix("user") # Удалит все ключи user:* + """ + invalidate_cache(f"{prefix}:*") + + +def _build_cache_key( + func: Callable[..., Any], + prefix: str, + args: tuple[Any, ...], + kwargs: dict[str, Any], +) -> str: + """Формирует ключ кэша из функции и аргументов.""" + func_name = f"{func.__module__}.{func.__qualname__}" + + # Сериализуем аргументы + try: + args_str = json.dumps(args, sort_keys=True, default=str) + kwargs_str = json.dumps(kwargs, sort_keys=True, default=str) + except (TypeError, ValueError): + args_str = str(args) + kwargs_str = str(kwargs) + + # Хэшируем для компактности (usedforsecurity=False - не криптографическое использование) + args_hash = hashlib.md5( # noqa: S324 + f"{args_str}:{kwargs_str}".encode(), + usedforsecurity=False, + ).hexdigest()[:12] + + if prefix: + return f"{prefix}:{func_name}:{args_hash}" + return f"{func_name}:{args_hash}" + + +def _invalidate_cache( + func: Callable[..., Any], + prefix: str, + key_builder: Callable[..., str] | None, + args: tuple[Any, ...], + kwargs: dict[str, Any], +) -> None: + """Инвалидирует кэш для конкретного вызова.""" + if key_builder: + cache_key = key_builder(*args, **kwargs) + else: + cache_key = _build_cache_key(func, prefix, args, kwargs) + cache.delete(cache_key) + logger.debug(f"Cache invalidated: {cache_key}") + + +class CacheManager: + """ + Менеджер кэша для групповых операций. + + Пример использования: + cache_manager = CacheManager("articles") + + # Кэшировать + cache_manager.set("popular", data, timeout=300) + + # Получить + data = cache_manager.get("popular") + + # Очистить весь префикс + cache_manager.clear() + """ + + def __init__(self, prefix: str): + self.prefix = prefix + + def _make_key(self, key: str) -> str: + """Формирует полный ключ с префиксом.""" + return f"{self.prefix}:{key}" + + def get(self, key: str, default: Any = None) -> Any: + """Получает значение из кэша.""" + return cache.get(self._make_key(key), default) + + def set(self, key: str, value: Any, timeout: int = 300) -> None: + """Сохраняет значение в кэш.""" + cache.set(self._make_key(key), value, timeout) + + def delete(self, key: str) -> None: + """Удаляет значение из кэша.""" + cache.delete(self._make_key(key)) + + def clear(self) -> None: + """Очищает весь кэш с данным префиксом.""" + invalidate_prefix(self.prefix) + + def get_or_set( + self, + key: str, + default_func: Callable[[], Any], + timeout: int = 300, + ) -> Any: + """ + Получает значение из кэша или вычисляет и сохраняет. + + Args: + key: Ключ кэша + default_func: Функция для вычисления значения + timeout: Время жизни кэша + """ + value = self.get(key) + if value is None: + value = default_func() + self.set(key, value, timeout) + return value diff --git a/src/apps/core/exception_handler.py b/src/apps/core/exception_handler.py new file mode 100644 index 0000000..00cda96 --- /dev/null +++ b/src/apps/core/exception_handler.py @@ -0,0 +1,152 @@ +""" +Custom exception handler for DRF. + +Converts all exceptions to a unified API response format. 
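+
+Wiring sketch (assumes the DRF settings live in the project settings module;
+adjust the dotted path if the layout differs):
+
+    REST_FRAMEWORK = {
+        "EXCEPTION_HANDLER": "apps.core.exception_handler.custom_exception_handler",
+    }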
+""" + +import logging +from typing import Any + +from apps.core.exceptions import BaseAPIException +from apps.core.middleware import get_request_id +from apps.core.response import api_error_response +from django.core.exceptions import PermissionDenied +from django.http import Http404 +from rest_framework import status +from rest_framework.exceptions import APIException +from rest_framework.response import Response +from rest_framework.views import exception_handler as drf_exception_handler + +logger = logging.getLogger(__name__) + + +def custom_exception_handler( + exc: Exception, context: dict[str, Any] +) -> Response | None: + """ + Custom exception handler that wraps all errors in a unified format. + + Response format: + { + "success": false, + "data": null, + "errors": [ + { + "code": "error_code", + "message": "Human readable message", + "details": {...} // optional + } + ], + "meta": { + "request_id": "uuid" + } + } + """ + request_id = get_request_id() + + # Handle our custom exceptions + if isinstance(exc, BaseAPIException): + logger.warning( + f"[{request_id}] API Exception: {exc.code} - {exc.message}", + extra={ + "request_id": request_id, + "error_code": exc.code, + "error_details": exc.details, + }, + ) + return api_error_response( + errors=[exc.to_dict()], + status_code=exc.status_code, + request_id=request_id, + ) + + # Handle Django's Http404 + if isinstance(exc, Http404): + return api_error_response( + errors=[{"code": "not_found", "message": str(exc) or "Resource not found"}], + status_code=status.HTTP_404_NOT_FOUND, + request_id=request_id, + ) + + # Handle Django's PermissionDenied + if isinstance(exc, PermissionDenied): + return api_error_response( + errors=[ + { + "code": "permission_denied", + "message": str(exc) or "Permission denied", + } + ], + status_code=status.HTTP_403_FORBIDDEN, + request_id=request_id, + ) + + # Handle DRF exceptions + if isinstance(exc, APIException): + # Get the default DRF response first + response = drf_exception_handler(exc, context) + if response is not None: + errors = _parse_drf_errors(exc, response.data) + return api_error_response( + errors=errors, + status_code=response.status_code, + request_id=request_id, + ) + + # Unhandled exceptions - log and return 500 + logger.exception( + f"[{request_id}] Unhandled exception", + extra={"request_id": request_id, "exception_type": type(exc).__name__}, + ) + + return api_error_response( + errors=[ + { + "code": "internal_error", + "message": "An unexpected error occurred", + } + ], + status_code=status.HTTP_500_INTERNAL_SERVER_ERROR, + request_id=request_id, + ) + + +def _parse_drf_errors(exc: APIException, data: Any) -> list[dict[str, Any]]: + """Parse DRF exception data into our error format.""" + errors = [] + + # Get error code from exception + code = getattr(exc, "default_code", "error") + + if isinstance(data, dict): + # Validation errors with field details + if "detail" in data: + errors.append( + { + "code": code, + "message": str(data["detail"]), + } + ) + else: + # Field-level validation errors + field_errors = {} + for field, messages in data.items(): + if isinstance(messages, list): + field_errors[field] = [str(m) for m in messages] + else: + field_errors[field] = [str(messages)] + + errors.append( + { + "code": "validation_error", + "message": "Validation failed", + "details": {"fields": field_errors}, + } + ) + elif isinstance(data, list): + for item in data: + errors.append({"code": code, "message": str(item)}) + else: + errors.append({"code": code, "message": str(data)}) + + 
return errors diff --git a/src/apps/core/exceptions.py b/src/apps/core/exceptions.py new file mode 100644 index 0000000..7999f56 --- /dev/null +++ b/src/apps/core/exceptions.py @@ -0,0 +1,169 @@ +""" +Core exceptions for the application. + +Provides a hierarchy of business logic exceptions that are automatically +converted to appropriate API responses by the exception handler. +""" + +from typing import Any + + +class BaseAPIException(Exception): + """ + Base exception for all API-related errors. + + Attributes: + message: Human-readable error message + code: Machine-readable error code (e.g., 'validation_error') + status_code: HTTP status code + details: Additional error details (optional) + """ + + message: str = "An error occurred" + code: str = "error" + status_code: int = 400 + details: dict[str, Any] | None = None + + def __init__( + self, + message: str | None = None, + code: str | None = None, + details: dict[str, Any] | None = None, + ): + self.message = message or self.message + self.code = code or self.code + self.details = details + super().__init__(self.message) + + def to_dict(self) -> dict[str, Any]: + """Convert exception to dictionary for API response.""" + result = { + "code": self.code, + "message": self.message, + } + if self.details: + result["details"] = self.details + return result + + +# ============================================================================ +# Client Errors (4xx) +# ============================================================================ + + +class ValidationError(BaseAPIException): + """Raised when input data fails validation.""" + + message = "Validation error" + code = "validation_error" + status_code = 400 + + +class BadRequestError(BaseAPIException): + """Raised when request is malformed or invalid.""" + + message = "Bad request" + code = "bad_request" + status_code = 400 + + +class AuthenticationError(BaseAPIException): + """Raised when authentication fails.""" + + message = "Authentication failed" + code = "authentication_error" + status_code = 401 + + +class PermissionDeniedError(BaseAPIException): + """Raised when user lacks required permissions.""" + + message = "Permission denied" + code = "permission_denied" + status_code = 403 + + +class NotFoundError(BaseAPIException): + """Raised when requested resource is not found.""" + + message = "Resource not found" + code = "not_found" + status_code = 404 + + +class ConflictError(BaseAPIException): + """Raised when action conflicts with current state.""" + + message = "Conflict with current state" + code = "conflict" + status_code = 409 + + +class RateLimitError(BaseAPIException): + """Raised when rate limit is exceeded.""" + + message = "Rate limit exceeded" + code = "rate_limit_exceeded" + status_code = 429 + + +# ============================================================================ +# Server Errors (5xx) +# ============================================================================ + + +class InternalError(BaseAPIException): + """Raised for unexpected internal errors.""" + + message = "Internal server error" + code = "internal_error" + status_code = 500 + + +class ServiceUnavailableError(BaseAPIException): + """Raised when a dependent service is unavailable.""" + + message = "Service temporarily unavailable" + code = "service_unavailable" + status_code = 503 + + +# ============================================================================ +# Business Logic Errors +# ============================================================================ + + +class 
BusinessLogicError(BaseAPIException): + """ + Base class for business logic errors. + + Use this for domain-specific errors that should return 400/422. + """ + + message = "Business logic error" + code = "business_error" + status_code = 400 + + +class InvalidStateError(BusinessLogicError): + """Raised when entity is in invalid state for requested operation.""" + + message = "Invalid state for this operation" + code = "invalid_state" + status_code = 400 + + +class DuplicateError(BusinessLogicError): + """Raised when attempting to create a duplicate resource.""" + + message = "Resource already exists" + code = "duplicate" + status_code = 409 + + +class QuotaExceededError(BusinessLogicError): + """Raised when a resource quota is exceeded.""" + + message = "Quota exceeded" + code = "quota_exceeded" + status_code = 400 diff --git a/src/apps/core/filters.py b/src/apps/core/filters.py new file mode 100644 index 0000000..aea1434 --- /dev/null +++ b/src/apps/core/filters.py @@ -0,0 +1,121 @@ +""" +Настройка фильтрации для API. + +Предоставляет базовые классы и утилиты для фильтрации данных. +""" + +from typing import Any + +from django.db.models import QuerySet +from django_filters import rest_framework as filters +from rest_framework.filters import OrderingFilter, SearchFilter + + +class BaseFilterSet(filters.FilterSet): + """ + Базовый класс для фильтров с общими полями. + + Автоматически добавляет фильтрацию по датам создания/обновления, + если модель имеет соответствующие поля. + + Пример использования: + class ArticleFilter(BaseFilterSet): + title = filters.CharFilter(lookup_expr='icontains') + + class Meta: + model = Article + fields = ['title', 'status', 'author'] + """ + + created_at_after = filters.DateTimeFilter( + field_name="created_at", + lookup_expr="gte", + label="Создано после", + ) + created_at_before = filters.DateTimeFilter( + field_name="created_at", + lookup_expr="lte", + label="Создано до", + ) + updated_at_after = filters.DateTimeFilter( + field_name="updated_at", + lookup_expr="gte", + label="Обновлено после", + ) + updated_at_before = filters.DateTimeFilter( + field_name="updated_at", + lookup_expr="lte", + label="Обновлено до", + ) + + +class StandardSearchFilter(SearchFilter): + """ + Расширенный фильтр поиска с настраиваемыми параметрами. + + Пример использования: + class MyViewSet(viewsets.ModelViewSet): + filter_backends = [StandardSearchFilter] + search_fields = ['title', 'description', 'author__username'] + """ + + search_param = "search" + search_title = "Поиск" + search_description = "Поиск по текстовым полям" + + +class StandardOrderingFilter(OrderingFilter): + """ + Расширенный фильтр сортировки. + + Пример использования: + class MyViewSet(viewsets.ModelViewSet): + filter_backends = [StandardOrderingFilter] + ordering_fields = ['created_at', 'title', 'price'] + ordering = ['-created_at'] # Сортировка по умолчанию + """ + + ordering_param = "ordering" + ordering_title = "Сортировка" + ordering_description = "Поле для сортировки (префикс '-' для убывания)" + + +def get_filter_backends() -> list[type]: + """ + Возвращает стандартный набор filter backends. + + Используется для быстрой настройки ViewSet: + class MyViewSet(viewsets.ModelViewSet): + filter_backends = get_filter_backends() + """ + return [ + filters.DjangoFilterBackend, + StandardSearchFilter, + StandardOrderingFilter, + ] + + +class FilterMixin: + """ + Миксин для добавления стандартной фильтрации к ViewSet. + + Автоматически настраивает filter_backends и базовые параметры. 
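+
+    Assumes the django-filter package is installed (it is pinned in the project
+    dependencies) and that "django_filters" is listed in INSTALLED_APPS.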
+ + Пример использования: + class MyViewSet(FilterMixin, viewsets.ModelViewSet): + filterset_class = MyFilterSet + search_fields = ['title', 'description'] + ordering_fields = ['created_at', 'title'] + """ + + filter_backends = [ + filters.DjangoFilterBackend, + StandardSearchFilter, + StandardOrderingFilter, + ] + ordering = ["-created_at"] # Сортировка по умолчанию + + def get_queryset(self) -> QuerySet[Any]: + """Возвращает queryset с применёнными фильтрами.""" + queryset = super().get_queryset() # type: ignore + return queryset diff --git a/src/apps/core/logging.py b/src/apps/core/logging.py new file mode 100644 index 0000000..c8c8341 --- /dev/null +++ b/src/apps/core/logging.py @@ -0,0 +1,287 @@ +""" +Настройка структурированного логирования. + +Предоставляет JSON-форматтер и утилиты для production логов. +""" + +import json +import logging +import traceback +from datetime import UTC, datetime +from typing import Any + +from apps.core.middleware import get_request_id + + +class JSONFormatter(logging.Formatter): + """ + Форматтер логов в JSON формате. + + Формирует структурированные логи для удобного парсинга + в системах мониторинга (ELK, Grafana Loki, etc.). + + Пример вывода: + { + "timestamp": "2024-01-15T10:30:45.123456Z", + "level": "INFO", + "logger": "apps.user.services", + "message": "User created", + "request_id": "abc-123", + "user_id": 42, + "extra": {"email": "user@example.com"} + } + """ + + def format(self, record: logging.LogRecord) -> str: + """Форматирует запись лога в JSON.""" + log_data: dict[str, Any] = { + "timestamp": datetime.now(UTC).isoformat(), + "level": record.levelname, + "logger": record.name, + "message": record.getMessage(), + } + + # Добавляем request_id если доступен + request_id = get_request_id() + if request_id: + log_data["request_id"] = request_id + + # Добавляем информацию о месте вызова + log_data["location"] = { + "file": record.filename, + "line": record.lineno, + "function": record.funcName, + } + + # Добавляем extra данные + extra_fields = {} + for key, value in record.__dict__.items(): + if key not in { + "name", + "msg", + "args", + "created", + "filename", + "funcName", + "levelname", + "levelno", + "lineno", + "module", + "msecs", + "pathname", + "process", + "processName", + "relativeCreated", + "stack_info", + "exc_info", + "exc_text", + "thread", + "threadName", + "message", + "taskName", + }: + extra_fields[key] = value + + if extra_fields: + log_data["extra"] = extra_fields + + # Добавляем информацию об исключении + if record.exc_info: + log_data["exception"] = { + "type": record.exc_info[0].__name__ if record.exc_info[0] else None, + "message": str(record.exc_info[1]) if record.exc_info[1] else None, + "traceback": traceback.format_exception(*record.exc_info), + } + + return json.dumps(log_data, ensure_ascii=False, default=str) + + +class ContextLogger: + """ + Логгер с автоматическим добавлением контекста. 
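+
+    Note: the context is stored on the logger instance and is not thread- or
+    request-local, so a shared instance should not be reused across concurrent
+    requests with differing context.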
+ + Пример использования: + logger = ContextLogger(__name__) + logger.set_context(user_id=42, action="login") + logger.info("User logged in") # Автоматически добавит user_id и action + """ + + def __init__(self, name: str): + self._logger = logging.getLogger(name) + self._context: dict[str, Any] = {} + + def set_context(self, **kwargs: Any) -> None: + """Устанавливает контекст для всех последующих логов.""" + self._context.update(kwargs) + + def clear_context(self) -> None: + """Очищает контекст.""" + self._context.clear() + + def _log( + self, + level: int, + message: str, + *args: Any, + exc_info: bool = False, + **kwargs: Any, + ) -> None: + """Логирует сообщение с контекстом.""" + extra = {**self._context, **kwargs.pop("extra", {})} + self._logger.log( + level, message, *args, extra=extra, exc_info=exc_info, **kwargs + ) + + def debug(self, message: str, *args: Any, **kwargs: Any) -> None: + """Логирует DEBUG сообщение.""" + self._log(logging.DEBUG, message, *args, **kwargs) + + def info(self, message: str, *args: Any, **kwargs: Any) -> None: + """Логирует INFO сообщение.""" + self._log(logging.INFO, message, *args, **kwargs) + + def warning(self, message: str, *args: Any, **kwargs: Any) -> None: + """Логирует WARNING сообщение.""" + self._log(logging.WARNING, message, *args, **kwargs) + + def error( + self, message: str, *args: Any, exc_info: bool = True, **kwargs: Any + ) -> None: + """Логирует ERROR сообщение.""" + self._log(logging.ERROR, message, *args, exc_info=exc_info, **kwargs) + + def exception(self, message: str, *args: Any, **kwargs: Any) -> None: + """Логирует исключение.""" + self._log(logging.ERROR, message, *args, exc_info=True, **kwargs) + + +def get_json_logging_config( + log_level: str = "INFO", + log_file: str | None = None, +) -> dict[str, Any]: + """ + Возвращает конфигурацию логирования для production. + + Args: + log_level: Уровень логирования + log_file: Путь к файлу логов (опционально) + + Пример использования в settings.py: + from apps.core.logging import get_json_logging_config + + LOGGING = get_json_logging_config( + log_level="INFO", + log_file="/var/log/app/app.log", + ) + """ + handlers = { + "console": { + "class": "logging.StreamHandler", + "formatter": "json", + }, + } + + root_handlers = ["console"] + + if log_file: + handlers["file"] = { + "class": "logging.handlers.RotatingFileHandler", + "filename": log_file, + "maxBytes": 10 * 1024 * 1024, # 10 MB + "backupCount": 5, + "formatter": "json", + } + root_handlers.append("file") + + return { + "version": 1, + "disable_existing_loggers": False, + "formatters": { + "json": { + "()": "apps.core.logging.JSONFormatter", + }, + "standard": { + "format": "%(asctime)s [%(levelname)s] %(name)s: %(message)s", + }, + }, + "handlers": handlers, + "root": { + "handlers": root_handlers, + "level": log_level, + }, + "loggers": { + "django": { + "handlers": root_handlers, + "level": "WARNING", + "propagate": False, + }, + "django.request": { + "handlers": root_handlers, + "level": "WARNING", + "propagate": False, + }, + "celery": { + "handlers": root_handlers, + "level": "INFO", + "propagate": False, + }, + "apps": { + "handlers": root_handlers, + "level": log_level, + "propagate": False, + }, + }, + } + + +def log_request( + logger: logging.Logger, + request: Any, + response: Any | None = None, + duration_ms: float | None = None, +) -> None: + """ + Логирует HTTP запрос/ответ. 
+ + Пример использования: + from apps.core.logging import log_request + + def my_middleware(get_response): + def middleware(request): + start = time.time() + response = get_response(request) + duration = (time.time() - start) * 1000 + log_request(logger, request, response, duration) + return response + return middleware + """ + extra: dict[str, Any] = { + "method": request.method, + "path": request.path, + "user_id": getattr(request.user, "id", None) + if hasattr(request, "user") + else None, + } + + if response: + extra["status_code"] = response.status_code + + if duration_ms: + extra["duration_ms"] = round(duration_ms, 2) + + request_id = get_request_id() + if request_id: + extra["request_id"] = request_id + + message = f"{request.method} {request.path}" + if response: + message += f" -> {response.status_code}" + if duration_ms: + message += f" ({duration_ms:.0f}ms)" + + if response and response.status_code >= 500: + logger.error(message, extra=extra) + elif response and response.status_code >= 400: + logger.warning(message, extra=extra) + else: + logger.info(message, extra=extra) diff --git a/src/apps/core/management/__init__.py b/src/apps/core/management/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/src/apps/core/management/commands/__init__.py b/src/apps/core/management/commands/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/src/apps/core/management/commands/base.py b/src/apps/core/management/commands/base.py new file mode 100644 index 0000000..ed775a4 --- /dev/null +++ b/src/apps/core/management/commands/base.py @@ -0,0 +1,252 @@ +""" +Базовый класс для management commands. + +Предоставляет: +- Структурированное логирование +- Отображение прогресса +- Обработку ошибок +- Измерение времени выполнения +- Dry-run режим +""" + +import logging +import time +from abc import abstractmethod +from collections.abc import Generator +from contextlib import contextmanager +from typing import Any + +from django.core.management.base import BaseCommand, CommandError +from django.db import transaction + +logger = logging.getLogger(__name__) + + +class BaseAppCommand(BaseCommand): + """ + Базовый класс для management commands проекта. 
+ + Возможности: + - Автоматическое логирование начала и завершения + - Измерение времени выполнения + - Поддержка dry-run режима + - Прогресс-бар для итераций + - Транзакционное выполнение + - Обработка ошибок с правильными кодами выхода + + Использование: + class Command(BaseAppCommand): + help = 'Описание команды' + + def add_arguments(self, parser): + super().add_arguments(parser) # Добавляет --dry-run + parser.add_argument('--my-arg', type=str) + + def execute_command(self, *args, **options): + # Основная логика команды + items = MyModel.objects.all() + + for item in self.progress_iter(items, desc="Обработка"): + self.process_item(item) + + return "Обработано успешно" + """ + + # Переопределяемые атрибуты + requires_migrations_checks = True + requires_system_checks = "__all__" + use_transaction = False # Обернуть в транзакцию + + def add_arguments(self, parser) -> None: + """Добавление базовых аргументов.""" + parser.add_argument( + "--dry-run", + action="store_true", + default=False, + help="Режим тестового запуска без изменений в базе данных", + ) + parser.add_argument( + "--silent", + action="store_true", + default=False, + help="Минимальный вывод (только ошибки)", + ) + + def handle(self, *args: Any, **options: Any) -> str | None: + """Основной обработчик команды.""" + self.dry_run = options.get("dry_run", False) + self.silent = options.get("silent", False) + self.verbosity = options.get("verbosity", 1) + + command_name = self.__class__.__module__.split(".")[-1] + + # Логирование старта + self.log_info(f"Запуск команды: {command_name}") + if self.dry_run: + self.log_warning("Режим dry-run: изменения НЕ будут сохранены") + + start_time = time.time() + + try: + if self.use_transaction: + with transaction.atomic(): + result = self._execute_with_rollback(*args, **options) + else: + result = self.execute_command(*args, **options) + + # Логирование успеха + elapsed = time.time() - start_time + self.log_success(f"Команда завершена за {elapsed:.2f}с") + + return result + + except CommandError: + raise + except Exception as e: + elapsed = time.time() - start_time + self.log_error(f"Ошибка после {elapsed:.2f}с: {e}") + logger.exception("Command failed", extra={"command": command_name}) + raise CommandError(str(e)) from e + + def _execute_with_rollback(self, *args: Any, **options: Any) -> str | None: + """Выполнение с откатом в dry-run режиме.""" + result = self.execute_command(*args, **options) + + if self.dry_run: + # Откатываем транзакцию в dry-run + transaction.set_rollback(True) + self.log_warning("Dry-run: транзакция откачена") + + return result + + @abstractmethod + def execute_command(self, *args: Any, **options: Any) -> str | None: + """ + Основная логика команды. Переопределяется в наследниках. 
+ + Returns: + Строка с результатом или None + """ + raise NotImplementedError("Метод execute_command должен быть реализован") + + # ==================== Методы вывода ==================== + + def log_info(self, message: str) -> None: + """Информационное сообщение.""" + if not self.silent: + self.stdout.write(message) + logger.info(message) + + def log_success(self, message: str) -> None: + """Сообщение об успехе (зелёное).""" + if not self.silent: + self.stdout.write(self.style.SUCCESS(message)) + logger.info(message) + + def log_warning(self, message: str) -> None: + """Предупреждение (жёлтое).""" + if not self.silent: + self.stdout.write(self.style.WARNING(message)) + logger.warning(message) + + def log_error(self, message: str) -> None: + """Ошибка (красное).""" + self.stderr.write(self.style.ERROR(message)) + logger.error(message) + + def log_debug(self, message: str) -> None: + """Отладочное сообщение (только при verbosity >= 2).""" + if self.verbosity >= 2: + self.stdout.write(self.style.HTTP_INFO(message)) + logger.debug(message) + + # ==================== Прогресс ==================== + + def progress_iter( + self, + iterable, + desc: str = "Обработка", + total: int | None = None, + ) -> Generator: + """ + Итератор с отображением прогресса. + + Args: + iterable: Итерируемый объект + desc: Описание операции + total: Общее количество (если известно) + + Yields: + Элементы итератора + + Использование: + for item in self.progress_iter(items, "Обработка записей"): + process(item) + """ + if total is None: + try: + total = len(iterable) + except TypeError: + total = None + + processed = 0 + last_percent = -1 + + for item in iterable: + yield item + processed += 1 + + if total and not self.silent: + percent = int(processed * 100 / total) + if percent != last_percent and percent % 10 == 0: + self.stdout.write(f"{desc}: {percent}% ({processed}/{total})") + last_percent = percent + + if not self.silent: + self.log_info(f"{desc}: завершено ({processed} элементов)") + + @contextmanager + def timed_operation(self, operation_name: str) -> Generator: + """ + Контекстный менеджер для измерения времени операции. + + Использование: + with self.timed_operation("Загрузка данных"): + load_data() + """ + start = time.time() + self.log_debug(f"Начало: {operation_name}") + + try: + yield + finally: + elapsed = time.time() - start + self.log_debug(f"Завершено: {operation_name} ({elapsed:.2f}с)") + + # ==================== Утилиты ==================== + + def confirm(self, message: str) -> bool: + """ + Запрос подтверждения у пользователя. + + Args: + message: Текст вопроса + + Returns: + True если пользователь подтвердил + """ + if self.dry_run: + self.log_warning(f"[Dry-run] Пропуск подтверждения: {message}") + return True + + self.stdout.write(f"\n{message} [y/N]: ", ending="") + response = input().strip().lower() + return response in ("y", "yes", "да", "д") + + def abort(self, message: str) -> None: + """Прерывание команды с сообщением.""" + raise CommandError(message) + + def check_dry_run(self) -> bool: + """Проверка режима dry-run (для условного выполнения).""" + return self.dry_run diff --git a/src/apps/core/middleware.py b/src/apps/core/middleware.py new file mode 100644 index 0000000..8effb6e --- /dev/null +++ b/src/apps/core/middleware.py @@ -0,0 +1,131 @@ +""" +Core middleware components. + +Provides Request ID tracking and other cross-cutting concerns. 
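+
+Example wiring (a sketch; the exact position in the MIDDLEWARE list is an
+assumption, not a project requirement):
+
+    MIDDLEWARE = [
+        "apps.core.middleware.RequestIDMiddleware",  # as early as possible
+        "apps.core.middleware.RequestLoggingMiddleware",
+        # ... the rest of the stack
+    ]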
+""" + +import logging +import threading +import uuid + +from django.utils.deprecation import MiddlewareMixin + +logger = logging.getLogger(__name__) + +# Thread-local storage for request context +_request_context = threading.local() + + +def get_request_id() -> str | None: + """Get current request ID from thread-local storage.""" + return getattr(_request_context, "request_id", None) + + +def get_current_request(): + """Get current request from thread-local storage.""" + return getattr(_request_context, "request", None) + + +class RequestIDMiddleware(MiddlewareMixin): + """ + Middleware that generates or extracts a unique request ID for each request. + + The request ID is: + 1. Extracted from X-Request-ID header if present + 2. Generated as UUID4 if not present + 3. Added to response headers + 4. Available via get_request_id() for logging + + Usage in logging: + from apps.core.middleware import get_request_id + logger.info(f"[{get_request_id()}] Processing request") + """ + + REQUEST_ID_HEADER = "X-Request-ID" + + def process_request(self, request): + """Extract or generate request ID and store in thread-local.""" + request_id = request.headers.get(self.REQUEST_ID_HEADER) + + if not request_id: + request_id = str(uuid.uuid4()) + + request.request_id = request_id + _request_context.request_id = request_id + _request_context.request = request + + return None + + def process_response(self, request, response): + """Add request ID to response headers.""" + request_id = getattr(request, "request_id", None) + if request_id: + response[self.REQUEST_ID_HEADER] = request_id + + # Clean up thread-local storage + _request_context.request_id = None + _request_context.request = None + + return response + + def process_exception(self, request, exception): + """Log exception with request ID.""" + request_id = getattr(request, "request_id", "unknown") + logger.error(f"[{request_id}] Unhandled exception: {exception}") + return None + + +class RequestLoggingMiddleware(MiddlewareMixin): + """ + Middleware for logging request/response details. 
+ + Logs: + - Request method, path, and request ID + - Response status code and timing (if available) + """ + + def process_request(self, request): + """Log incoming request.""" + import time + + request._start_time = time.time() + request_id = getattr(request, "request_id", "N/A") + + logger.info( + f"[{request_id}] {request.method} {request.path} - Started", + extra={ + "request_id": request_id, + "method": request.method, + "path": request.path, + "user": getattr(request.user, "id", None) + if hasattr(request, "user") + else None, + }, + ) + return None + + def process_response(self, request, response): + """Log response details.""" + import time + + duration = None + if hasattr(request, "_start_time"): + duration = time.time() - request._start_time + + request_id = getattr(request, "request_id", "N/A") + + logger.info( + f"[{request_id}] {request.method} {request.path} - " + f"{response.status_code} ({duration:.3f}s)" + if duration + else f"[{request_id}] {request.method} {request.path} - " + f"{response.status_code}", + extra={ + "request_id": request_id, + "method": request.method, + "path": request.path, + "status_code": response.status_code, + "duration": duration, + }, + ) + return response diff --git a/src/apps/core/migrations/0001_background_job.py b/src/apps/core/migrations/0001_background_job.py new file mode 100644 index 0000000..42a6386 --- /dev/null +++ b/src/apps/core/migrations/0001_background_job.py @@ -0,0 +1,52 @@ +# Generated by Django 3.2.25 on 2026-01-21 10:19 + +from django.db import migrations, models +import uuid + + +class Migration(migrations.Migration): + + initial = True + + dependencies = [ + ] + + operations = [ + migrations.CreateModel( + name='BackgroundJob', + fields=[ + ('created_at', models.DateTimeField(auto_now_add=True, db_index=True, help_text='Дата и время создания записи', verbose_name='создано')), + ('updated_at', models.DateTimeField(auto_now=True, help_text='Дата и время последнего обновления', verbose_name='обновлено')), + ('id', models.UUIDField(default=uuid.uuid4, editable=False, primary_key=True, serialize=False)), + ('task_id', models.CharField(db_index=True, help_text='Идентификатор задачи в Celery', max_length=255, unique=True, verbose_name='ID задачи Celery')), + ('task_name', models.CharField(db_index=True, help_text='Полное имя задачи (например, apps.myapp.tasks.process_data)', max_length=255, verbose_name='имя задачи')), + ('status', models.CharField(choices=[('pending', 'Ожидает'), ('started', 'Выполняется'), ('success', 'Успешно'), ('failure', 'Ошибка'), ('revoked', 'Отменена'), ('retry', 'Повтор')], db_index=True, default='pending', max_length=20, verbose_name='статус')), + ('progress', models.PositiveSmallIntegerField(default=0, help_text='Прогресс выполнения в процентах (0-100)', verbose_name='прогресс')), + ('progress_message', models.CharField(blank=True, default='', max_length=500, verbose_name='сообщение о прогрессе')), + ('result', models.JSONField(blank=True, help_text='Результат выполнения задачи (JSON)', null=True, verbose_name='результат')), + ('error', models.TextField(blank=True, default='', help_text='Текст ошибки при неудачном выполнении', verbose_name='ошибка')), + ('traceback', models.TextField(blank=True, default='', help_text='Полный traceback ошибки', verbose_name='traceback')), + ('started_at', models.DateTimeField(blank=True, null=True, verbose_name='время начала')), + ('completed_at', models.DateTimeField(blank=True, null=True, verbose_name='время завершения')), + ('user_id', 
models.PositiveIntegerField(blank=True, db_index=True, help_text='ID пользователя, запустившего задачу', null=True, verbose_name='ID пользователя')), + ('meta', models.JSONField(blank=True, default=dict, help_text='Дополнительные данные задачи', verbose_name='метаданные')), + ], + options={ + 'verbose_name': 'фоновая задача', + 'verbose_name_plural': 'фоновые задачи', + 'ordering': ['-created_at'], + }, + ), + migrations.AddIndex( + model_name='backgroundjob', + index=models.Index(fields=['status', 'created_at'], name='core_backgr_status_e66a68_idx'), + ), + migrations.AddIndex( + model_name='backgroundjob', + index=models.Index(fields=['user_id', 'status'], name='core_backgr_user_id_d81ce2_idx'), + ), + migrations.AddIndex( + model_name='backgroundjob', + index=models.Index(fields=['task_name', 'status'], name='core_backgr_task_na_fb8e14_idx'), + ), + ] diff --git a/src/apps/core/migrations/__init__.py b/src/apps/core/migrations/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/src/apps/core/mixins.py b/src/apps/core/mixins.py new file mode 100644 index 0000000..68bc228 --- /dev/null +++ b/src/apps/core/mixins.py @@ -0,0 +1,391 @@ +""" +Миксины для моделей Django. + +Предоставляют переиспользуемые поля и поведение для моделей: +- TimestampMixin: created_at, updated_at +- UUIDPrimaryKeyMixin: UUID вместо auto-increment ID +- SoftDeleteMixin: мягкое удаление (is_deleted + deleted_at) +- AuditMixin: created_by, updated_by (кто создал/изменил) +- OrderableMixin: поле order для сортировки +""" + +import uuid +from typing import TYPE_CHECKING + +from django.conf import settings +from django.db import models +from django.utils import timezone +from django.utils.translation import gettext_lazy as _ + +if TYPE_CHECKING: + from django.db.models import QuerySet + + +class TimestampMixin(models.Model): + """ + Миксин для автоматических временных меток. + + Поля: + created_at: Дата и время создания (автоматически) + updated_at: Дата и время последнего обновления (автоматически) + + Использование: + class MyModel(TimestampMixin, models.Model): + name = models.CharField(max_length=100) + + class Meta: + ordering = ['-created_at'] + """ + + created_at = models.DateTimeField( + _("создано"), + auto_now_add=True, + db_index=True, + help_text=_("Дата и время создания записи"), + ) + updated_at = models.DateTimeField( + _("обновлено"), + auto_now=True, + help_text=_("Дата и время последнего обновления"), + ) + + class Meta: + abstract = True + + +class UUIDPrimaryKeyMixin(models.Model): + """ + Миксин для использования UUID в качестве первичного ключа. + + Преимущества: + - Глобальная уникальность + - Безопасность (нельзя угадать ID) + - Возможность генерации на клиенте + + Использование: + class MyModel(UUIDPrimaryKeyMixin, models.Model): + name = models.CharField(max_length=100) + + Примечание: + При использовании этого миксина НЕ нужно определять поле id. 
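+
+    Пример URL-маршрута (набросок, имя ItemDetailView условное):
+        path("items/<uuid:pk>/", ItemDetailView.as_view())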
+ """ + + id = models.UUIDField( + primary_key=True, + default=uuid.uuid4, + editable=False, + verbose_name=_("ID"), + ) + + class Meta: + abstract = True + + +class SoftDeleteQuerySet(models.QuerySet): + """QuerySet с поддержкой мягкого удаления.""" + + def delete(self) -> tuple[int, dict[str, int]]: + """Мягкое удаление всех объектов в QuerySet.""" + return self.update(is_deleted=True, deleted_at=timezone.now()), {} + + def hard_delete(self) -> tuple[int, dict[str, int]]: + """Полное удаление из базы данных.""" + return super().delete() + + def alive(self) -> "QuerySet": + """Только активные (не удалённые) записи.""" + return self.filter(is_deleted=False) + + def dead(self) -> "QuerySet": + """Только удалённые записи.""" + return self.filter(is_deleted=True) + + +class SoftDeleteManager(models.Manager): + """ + Менеджер с поддержкой мягкого удаления. + + По умолчанию возвращает только активные записи. + Для доступа ко всем записям используйте `all_objects`. + """ + + def get_queryset(self) -> SoftDeleteQuerySet: + """Возвращает только активные записи.""" + return SoftDeleteQuerySet(self.model, using=self._db).alive() + + def all_with_deleted(self) -> SoftDeleteQuerySet: + """Возвращает все записи, включая удалённые.""" + return SoftDeleteQuerySet(self.model, using=self._db) + + def deleted_only(self) -> SoftDeleteQuerySet: + """Возвращает только удалённые записи.""" + return SoftDeleteQuerySet(self.model, using=self._db).dead() + + +class AllObjectsManager(models.Manager): + """Менеджер для доступа ко всем записям (включая удалённые).""" + + def get_queryset(self) -> SoftDeleteQuerySet: + return SoftDeleteQuerySet(self.model, using=self._db) + + +class SoftDeleteMixin(models.Model): + """ + Миксин для мягкого удаления записей. + + Вместо физического удаления записи помечаются как удалённые. 
+ Это позволяет: + - Восстанавливать удалённые данные + - Вести историю удалений + - Сохранять ссылочную целостность + + Поля: + is_deleted: Флаг удаления + deleted_at: Дата и время удаления + + Менеджеры: + objects: Только активные записи (по умолчанию) + all_objects: Все записи, включая удалённые + + Использование: + class MyModel(SoftDeleteMixin, models.Model): + name = models.CharField(max_length=100) + + # Мягкое удаление + obj.delete() # is_deleted=True, deleted_at=now() + + # Восстановление + obj.restore() + + # Полное удаление + obj.hard_delete() + + # Получить все записи (включая удалённые) + MyModel.all_objects.all() + + # Получить только удалённые + MyModel.all_objects.deleted_only() + """ + + is_deleted = models.BooleanField( + _("удалено"), + default=False, + db_index=True, + help_text=_("Помечена ли запись как удалённая"), + ) + deleted_at = models.DateTimeField( + _("дата удаления"), + null=True, + blank=True, + help_text=_("Дата и время удаления записи"), + ) + + # Менеджеры + objects = SoftDeleteManager() + all_objects = AllObjectsManager() + + class Meta: + abstract = True + + def delete(self, using=None, keep_parents=False) -> tuple[int, dict[str, int]]: + """Мягкое удаление записи.""" + self.is_deleted = True + self.deleted_at = timezone.now() + self.save(update_fields=["is_deleted", "deleted_at"]) + return 1, {self._meta.label: 1} + + def hard_delete(self, using=None, keep_parents=False) -> tuple[int, dict[str, int]]: + """Полное удаление записи из базы данных.""" + return super().delete(using=using, keep_parents=keep_parents) + + def restore(self) -> None: + """Восстановление удалённой записи.""" + self.is_deleted = False + self.deleted_at = None + self.save(update_fields=["is_deleted", "deleted_at"]) + + @property + def is_active(self) -> bool: + """Проверка, активна ли запись (не удалена).""" + return not self.is_deleted + + +class AuditMixin(models.Model): + """ + Миксин для отслеживания автора создания и изменения. + + Поля: + created_by: Пользователь, создавший запись + updated_by: Пользователь, последний изменивший запись + + Использование: + class MyModel(AuditMixin, TimestampMixin, models.Model): + name = models.CharField(max_length=100) + + # В сервисе или view: + obj = MyModel.objects.create(name="Test", created_by=request.user) + obj.updated_by = request.user + obj.save() + + Примечание: + Поля created_by и updated_by нужно заполнять вручную + (в сервисе или через middleware). + """ + + created_by = models.ForeignKey( + settings.AUTH_USER_MODEL, + on_delete=models.SET_NULL, + null=True, + blank=True, + related_name="%(app_label)s_%(class)s_created", + verbose_name=_("создано пользователем"), + help_text=_("Пользователь, создавший запись"), + ) + updated_by = models.ForeignKey( + settings.AUTH_USER_MODEL, + on_delete=models.SET_NULL, + null=True, + blank=True, + related_name="%(app_label)s_%(class)s_updated", + verbose_name=_("обновлено пользователем"), + help_text=_("Пользователь, последний изменивший запись"), + ) + + class Meta: + abstract = True + + +class OrderableMixin(models.Model): + """ + Миксин для упорядочивания записей. 
+ + Поля: + order: Порядковый номер для сортировки + + Использование: + class MenuItem(OrderableMixin, models.Model): + name = models.CharField(max_length=100) + + class Meta: + ordering = ['order'] + + # Перемещение элемента + item.move_up() + item.move_down() + item.move_to(5) + """ + + order = models.PositiveIntegerField( + _("порядок"), + default=0, + db_index=True, + help_text=_("Порядковый номер для сортировки"), + ) + + class Meta: + abstract = True + ordering = ["order"] + + def move_up(self) -> None: + """Переместить элемент вверх (уменьшить order).""" + if self.order > 0: + self.order -= 1 + self.save(update_fields=["order"]) + + def move_down(self) -> None: + """Переместить элемент вниз (увеличить order).""" + self.order += 1 + self.save(update_fields=["order"]) + + def move_to(self, position: int) -> None: + """Переместить элемент на указанную позицию.""" + if position >= 0: + self.order = position + self.save(update_fields=["order"]) + + +class SlugMixin(models.Model): + """ + Миксин для URL-friendly идентификатора. + + Поля: + slug: Уникальный slug для URL + + Использование: + class Article(SlugMixin, models.Model): + title = models.CharField(max_length=200) + + def save(self, *args, **kwargs): + if not self.slug: + self.slug = slugify(self.title) + super().save(*args, **kwargs) + """ + + slug = models.SlugField( + _("slug"), + max_length=255, + unique=True, + db_index=True, + help_text=_("URL-friendly идентификатор"), + ) + + class Meta: + abstract = True + + +class StatusMixin(models.Model): + """ + Миксин для статусов с типичными значениями. + + Использование: + class Order(StatusMixin, models.Model): + total = models.DecimalField(...) + + order = Order.objects.create(total=100) + order.activate() + order.deactivate() + """ + + class Status(models.TextChoices): + DRAFT = "draft", _("Черновик") + ACTIVE = "active", _("Активно") + INACTIVE = "inactive", _("Неактивно") + ARCHIVED = "archived", _("В архиве") + + status = models.CharField( + _("статус"), + max_length=20, + choices=Status.choices, + default=Status.DRAFT, + db_index=True, + ) + + class Meta: + abstract = True + + def activate(self) -> None: + """Активировать запись.""" + self.status = self.Status.ACTIVE + self.save(update_fields=["status"]) + + def deactivate(self) -> None: + """Деактивировать запись.""" + self.status = self.Status.INACTIVE + self.save(update_fields=["status"]) + + def archive(self) -> None: + """Отправить в архив.""" + self.status = self.Status.ARCHIVED + self.save(update_fields=["status"]) + + @property + def is_draft(self) -> bool: + return self.status == self.Status.DRAFT + + @property + def is_active_status(self) -> bool: + return self.status == self.Status.ACTIVE + + @property + def is_archived(self) -> bool: + return self.status == self.Status.ARCHIVED diff --git a/src/apps/core/models.py b/src/apps/core/models.py new file mode 100644 index 0000000..9b4cb7c --- /dev/null +++ b/src/apps/core/models.py @@ -0,0 +1,237 @@ +""" +Background Job Tracking - отслеживание статуса Celery задач. 
+ +Предоставляет: +- Модель для хранения информации о задачах +- Сервис для управления задачами +- API эндпоинты для получения статуса +""" + +import uuid +from typing import Any + +from apps.core.mixins import TimestampMixin +from django.db import models +from django.utils import timezone +from django.utils.translation import gettext_lazy as _ + + +class JobStatus(models.TextChoices): + """Статусы фоновых задач.""" + + PENDING = "pending", _("Ожидает") + STARTED = "started", _("Выполняется") + SUCCESS = "success", _("Успешно") + FAILURE = "failure", _("Ошибка") + REVOKED = "revoked", _("Отменена") + RETRY = "retry", _("Повтор") + + +class BackgroundJob(TimestampMixin, models.Model): + """ + Модель для отслеживания фоновых задач Celery. + + Позволяет: + - Отслеживать статус выполнения + - Хранить результат или ошибку + - Отображать прогресс выполнения + - Связывать задачу с пользователем + + Использование в таске: + @shared_task(bind=True, base=TrackedTask) + def my_task(self, data): + job = BackgroundJob.objects.get(task_id=self.request.id) + job.update_progress(50, "Обработка...") + # ... логика + job.complete(result={"count": 100}) + """ + + id = models.UUIDField( + primary_key=True, + default=uuid.uuid4, + editable=False, + ) + task_id = models.CharField( + _("ID задачи Celery"), + max_length=255, + unique=True, + db_index=True, + help_text=_("Идентификатор задачи в Celery"), + ) + task_name = models.CharField( + _("имя задачи"), + max_length=255, + db_index=True, + help_text=_("Полное имя задачи (например, apps.myapp.tasks.process_data)"), + ) + status = models.CharField( + _("статус"), + max_length=20, + choices=JobStatus.choices, + default=JobStatus.PENDING, + db_index=True, + ) + progress = models.PositiveSmallIntegerField( + _("прогресс"), + default=0, + help_text=_("Прогресс выполнения в процентах (0-100)"), + ) + progress_message = models.CharField( + _("сообщение о прогрессе"), + max_length=500, + blank=True, + default="", + ) + result = models.JSONField( + _("результат"), + null=True, + blank=True, + help_text=_("Результат выполнения задачи (JSON)"), + ) + error = models.TextField( + _("ошибка"), + blank=True, + default="", + help_text=_("Текст ошибки при неудачном выполнении"), + ) + traceback = models.TextField( + _("traceback"), + blank=True, + default="", + help_text=_("Полный traceback ошибки"), + ) + started_at = models.DateTimeField( + _("время начала"), + null=True, + blank=True, + ) + completed_at = models.DateTimeField( + _("время завершения"), + null=True, + blank=True, + ) + # Опционально: связь с пользователем + user_id = models.PositiveIntegerField( + _("ID пользователя"), + null=True, + blank=True, + db_index=True, + help_text=_("ID пользователя, запустившего задачу"), + ) + # Метаданные + meta = models.JSONField( + _("метаданные"), + default=dict, + blank=True, + help_text=_("Дополнительные данные задачи"), + ) + + class Meta: + verbose_name = _("фоновая задача") + verbose_name_plural = _("фоновые задачи") + ordering = ["-created_at"] + indexes = [ + models.Index(fields=["status", "created_at"]), + models.Index(fields=["user_id", "status"]), + models.Index(fields=["task_name", "status"]), + ] + + def __str__(self) -> str: + return f"{self.task_name} ({self.status})" + + # ==================== Методы обновления статуса ==================== + + def mark_started(self) -> None: + """Отметить задачу как начатую.""" + self.status = JobStatus.STARTED + self.started_at = timezone.now() + self.save(update_fields=["status", "started_at", "updated_at"]) + + def 
update_progress(self, progress: int, message: str = "") -> None: + """ + Обновить прогресс выполнения. + + Args: + progress: Процент выполнения (0-100) + message: Описание текущего этапа + """ + self.progress = min(max(progress, 0), 100) + self.progress_message = message + self.save(update_fields=["progress", "progress_message", "updated_at"]) + + def complete(self, result: Any = None) -> None: + """ + Отметить задачу как успешно завершённую. + + Args: + result: Результат выполнения (сериализуемый в JSON) + """ + self.status = JobStatus.SUCCESS + self.progress = 100 + self.result = result + self.completed_at = timezone.now() + self.save( + update_fields=[ + "status", + "progress", + "result", + "completed_at", + "updated_at", + ] + ) + + def fail(self, error: str, traceback_str: str = "") -> None: + """ + Отметить задачу как завершённую с ошибкой. + + Args: + error: Текст ошибки + traceback_str: Полный traceback + """ + self.status = JobStatus.FAILURE + self.error = str(error) + self.traceback = traceback_str + self.completed_at = timezone.now() + self.save( + update_fields=[ + "status", + "error", + "traceback", + "completed_at", + "updated_at", + ] + ) + + def revoke(self) -> None: + """Отметить задачу как отменённую.""" + self.status = JobStatus.REVOKED + self.completed_at = timezone.now() + self.save(update_fields=["status", "completed_at", "updated_at"]) + + def mark_retry(self) -> None: + """Отметить, что задача будет повторена.""" + self.status = JobStatus.RETRY + self.save(update_fields=["status", "updated_at"]) + + # ==================== Свойства ==================== + + @property + def is_finished(self) -> bool: + """Проверка, завершена ли задача.""" + return self.status in ( + JobStatus.SUCCESS, + JobStatus.FAILURE, + JobStatus.REVOKED, + ) + + @property + def is_successful(self) -> bool: + """Проверка успешного завершения.""" + return self.status == JobStatus.SUCCESS + + @property + def duration(self) -> float | None: + """Длительность выполнения в секундах.""" + if self.started_at and self.completed_at: + return (self.completed_at - self.started_at).total_seconds() + return None diff --git a/src/apps/core/openapi.py b/src/apps/core/openapi.py new file mode 100644 index 0000000..59055b1 --- /dev/null +++ b/src/apps/core/openapi.py @@ -0,0 +1,374 @@ +""" +Утилиты для документирования API (OpenAPI/Swagger). + +Предоставляет декораторы и утилиты для улучшения +автоматически генерируемой документации. +""" + +from typing import Any + +from drf_yasg import openapi +from drf_yasg.utils import swagger_auto_schema + + +def api_docs( + *, + summary: str, + description: str | None = None, + request_body: Any = None, + responses: dict[int, Any] | None = None, + tags: list[str] | None = None, + operation_id: str | None = None, + deprecated: bool = False, + security: list[dict[str, list[str]]] | None = None, + manual_parameters: list[openapi.Parameter] | None = None, +): + """ + Декоратор для документирования API эндпоинтов. + + Упрощённая обёртка над swagger_auto_schema с поддержкой + типовых паттернов документирования. 
+ + Args: + summary: Краткое описание эндпоинта (отображается в списке) + description: Подробное описание (отображается при раскрытии) + request_body: Схема тела запроса (serializer или openapi.Schema) + responses: Словарь возможных ответов {status_code: schema} + tags: Теги для группировки в документации + operation_id: Уникальный идентификатор операции + deprecated: Пометить как устаревший + security: Требования безопасности + manual_parameters: Дополнительные параметры запроса + + Пример использования: + class UserView(APIView): + @api_docs( + summary="Получить текущего пользователя", + description="Возвращает данные аутентифицированного пользователя", + responses={ + 200: UserSerializer, + 401: "Не авторизован", + }, + tags=["Пользователи"], + ) + def get(self, request): + ... + """ + # Преобразуем упрощённые responses в формат openapi + formatted_responses = {} + if responses: + for code, schema in responses.items(): + if isinstance(schema, str): + # Простое текстовое описание + formatted_responses[code] = openapi.Response(description=schema) + elif isinstance(schema, type): + # Serializer class + formatted_responses[code] = openapi.Response( + description=_get_status_description(code), + schema=schema, + ) + elif isinstance(schema, openapi.Response): + formatted_responses[code] = schema + else: + formatted_responses[code] = schema + + return swagger_auto_schema( + operation_summary=summary, + operation_description=description, + request_body=request_body, + responses=formatted_responses or None, + tags=tags, + operation_id=operation_id, + deprecated=deprecated, + security=security, + manual_parameters=manual_parameters, + ) + + +def _get_status_description(status_code: int) -> str: + """Возвращает описание HTTP статуса на русском.""" + descriptions = { + 200: "Успешный запрос", + 201: "Ресурс создан", + 204: "Успешно, без содержимого", + 400: "Некорректный запрос", + 401: "Не авторизован", + 403: "Доступ запрещён", + 404: "Ресурс не найден", + 409: "Конфликт", + 422: "Ошибка валидации", + 429: "Слишком много запросов", + 500: "Внутренняя ошибка сервера", + } + return descriptions.get(status_code, f"HTTP {status_code}") + + +# Предопределённые схемы ответов +class CommonResponses: + """ + Общие схемы ответов для документации. + + Пример использования: + @api_docs( + summary="Удалить ресурс", + responses={ + 204: CommonResponses.NO_CONTENT, + 404: CommonResponses.NOT_FOUND, + }, + ) + def delete(self, request, pk): + ... 
+ """ + + SUCCESS = openapi.Response( + description="Успешный запрос", + schema=openapi.Schema( + type=openapi.TYPE_OBJECT, + properties={ + "success": openapi.Schema(type=openapi.TYPE_BOOLEAN, default=True), + "data": openapi.Schema(type=openapi.TYPE_OBJECT), + }, + ), + ) + + CREATED = openapi.Response( + description="Ресурс успешно создан", + schema=openapi.Schema( + type=openapi.TYPE_OBJECT, + properties={ + "success": openapi.Schema(type=openapi.TYPE_BOOLEAN, default=True), + "data": openapi.Schema(type=openapi.TYPE_OBJECT), + }, + ), + ) + + NO_CONTENT = openapi.Response(description="Успешно, без содержимого") + + BAD_REQUEST = openapi.Response( + description="Некорректный запрос", + schema=openapi.Schema( + type=openapi.TYPE_OBJECT, + properties={ + "success": openapi.Schema(type=openapi.TYPE_BOOLEAN, default=False), + "errors": openapi.Schema( + type=openapi.TYPE_ARRAY, + items=openapi.Schema( + type=openapi.TYPE_OBJECT, + properties={ + "code": openapi.Schema(type=openapi.TYPE_STRING), + "message": openapi.Schema(type=openapi.TYPE_STRING), + }, + ), + ), + }, + ), + ) + + UNAUTHORIZED = openapi.Response( + description="Требуется аутентификация", + schema=openapi.Schema( + type=openapi.TYPE_OBJECT, + properties={ + "detail": openapi.Schema( + type=openapi.TYPE_STRING, + default="Учётные данные не предоставлены.", + ), + }, + ), + ) + + FORBIDDEN = openapi.Response( + description="Доступ запрещён", + schema=openapi.Schema( + type=openapi.TYPE_OBJECT, + properties={ + "detail": openapi.Schema( + type=openapi.TYPE_STRING, + default="У вас нет прав для выполнения этого действия.", + ), + }, + ), + ) + + NOT_FOUND = openapi.Response( + description="Ресурс не найден", + schema=openapi.Schema( + type=openapi.TYPE_OBJECT, + properties={ + "success": openapi.Schema(type=openapi.TYPE_BOOLEAN, default=False), + "errors": openapi.Schema( + type=openapi.TYPE_ARRAY, + items=openapi.Schema( + type=openapi.TYPE_OBJECT, + properties={ + "code": openapi.Schema( + type=openapi.TYPE_STRING, default="not_found" + ), + "message": openapi.Schema( + type=openapi.TYPE_STRING, default="Ресурс не найден" + ), + }, + ), + ), + }, + ), + ) + + VALIDATION_ERROR = openapi.Response( + description="Ошибка валидации", + schema=openapi.Schema( + type=openapi.TYPE_OBJECT, + properties={ + "success": openapi.Schema(type=openapi.TYPE_BOOLEAN, default=False), + "errors": openapi.Schema( + type=openapi.TYPE_ARRAY, + items=openapi.Schema( + type=openapi.TYPE_OBJECT, + properties={ + "code": openapi.Schema(type=openapi.TYPE_STRING), + "message": openapi.Schema(type=openapi.TYPE_STRING), + "details": openapi.Schema(type=openapi.TYPE_OBJECT), + }, + ), + ), + }, + ), + ) + + RATE_LIMITED = openapi.Response( + description="Превышен лимит запросов", + schema=openapi.Schema( + type=openapi.TYPE_OBJECT, + properties={ + "detail": openapi.Schema( + type=openapi.TYPE_STRING, + default="Превышен лимит запросов. 
Повторите позже.", + ), + }, + ), + ) + + SERVER_ERROR = openapi.Response( + description="Внутренняя ошибка сервера", + schema=openapi.Schema( + type=openapi.TYPE_OBJECT, + properties={ + "success": openapi.Schema(type=openapi.TYPE_BOOLEAN, default=False), + "errors": openapi.Schema( + type=openapi.TYPE_ARRAY, + items=openapi.Schema( + type=openapi.TYPE_OBJECT, + properties={ + "code": openapi.Schema( + type=openapi.TYPE_STRING, default="internal_error" + ), + "message": openapi.Schema( + type=openapi.TYPE_STRING, + default="Внутренняя ошибка сервера", + ), + }, + ), + ), + }, + ), + ) + + +# Параметры запроса +class CommonParameters: + """ + Общие параметры для документации API. + + Пример использования: + @api_docs( + summary="Список ресурсов", + manual_parameters=[ + CommonParameters.PAGE, + CommonParameters.PAGE_SIZE, + CommonParameters.SEARCH, + ], + ) + def get(self, request): + ... + """ + + PAGE = openapi.Parameter( + name="page", + in_=openapi.IN_QUERY, + type=openapi.TYPE_INTEGER, + description="Номер страницы", + default=1, + ) + + PAGE_SIZE = openapi.Parameter( + name="page_size", + in_=openapi.IN_QUERY, + type=openapi.TYPE_INTEGER, + description="Количество элементов на странице", + default=20, + ) + + SEARCH = openapi.Parameter( + name="search", + in_=openapi.IN_QUERY, + type=openapi.TYPE_STRING, + description="Поисковый запрос", + ) + + ORDERING = openapi.Parameter( + name="ordering", + in_=openapi.IN_QUERY, + type=openapi.TYPE_STRING, + description="Поле сортировки (префикс '-' для убывания)", + ) + + ID = openapi.Parameter( + name="id", + in_=openapi.IN_PATH, + type=openapi.TYPE_INTEGER, + description="ID ресурса", + required=True, + ) + + +def paginated_response(serializer_class: type) -> openapi.Response: + """ + Создаёт схему пагинированного ответа. + + Пример использования: + @api_docs( + summary="Список пользователей", + responses={200: paginated_response(UserSerializer)}, + ) + def get(self, request): + ... + """ + return openapi.Response( + description="Пагинированный список", + schema=openapi.Schema( + type=openapi.TYPE_OBJECT, + properties={ + "success": openapi.Schema(type=openapi.TYPE_BOOLEAN, default=True), + "data": openapi.Schema( + type=openapi.TYPE_ARRAY, + items=openapi.Schema(type=openapi.TYPE_OBJECT), + ), + "meta": openapi.Schema( + type=openapi.TYPE_OBJECT, + properties={ + "pagination": openapi.Schema( + type=openapi.TYPE_OBJECT, + properties={ + "count": openapi.Schema(type=openapi.TYPE_INTEGER), + "page": openapi.Schema(type=openapi.TYPE_INTEGER), + "page_size": openapi.Schema(type=openapi.TYPE_INTEGER), + "total_pages": openapi.Schema( + type=openapi.TYPE_INTEGER + ), + }, + ), + }, + ), + }, + ), + ) diff --git a/src/apps/core/pagination.py b/src/apps/core/pagination.py new file mode 100644 index 0000000..eaa7ec3 --- /dev/null +++ b/src/apps/core/pagination.py @@ -0,0 +1,202 @@ +""" +Custom pagination classes for DRF. + +Provides cursor-based and standard pagination with unified response format. +""" + +from collections import OrderedDict +from typing import Any + +from rest_framework.pagination import CursorPagination, PageNumberPagination +from rest_framework.response import Response + + +class StandardPagination(PageNumberPagination): + """ + Standard page number pagination with unified response format. 
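+
+    To enable (a sketch; whether to set it globally or per view is a project
+    choice):
+
+        REST_FRAMEWORK = {
+            "DEFAULT_PAGINATION_CLASS": "apps.core.pagination.StandardPagination",
+        }
+
+        # or per view:
+        class MyViewSet(viewsets.ModelViewSet):
+            pagination_class = StandardPagination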
+ + Query params: + - page: Page number (default: 1) + - page_size: Items per page (default: 20, max: 100) + + Response format: + { + "success": true, + "data": [...], + "errors": null, + "meta": { + "pagination": { + "page": 1, + "page_size": 20, + "total_count": 100, + "total_pages": 5, + "has_next": true, + "has_previous": false + } + } + } + """ + + page_size = 20 + page_size_query_param = "page_size" + max_page_size = 100 + + def get_paginated_response(self, data: list[Any]) -> Response: + """Return response in unified format.""" + return Response( + OrderedDict( + [ + ("success", True), + ("data", data), + ("errors", None), + ( + "meta", + { + "pagination": { + "page": self.page.number, + "page_size": self.get_page_size(self.request), + "total_count": self.page.paginator.count, + "total_pages": self.page.paginator.num_pages, + "has_next": self.page.has_next(), + "has_previous": self.page.has_previous(), + } + }, + ), + ] + ) + ) + + def get_paginated_response_schema(self, schema: dict) -> dict: + """OpenAPI schema for paginated response.""" + return { + "type": "object", + "properties": { + "success": {"type": "boolean", "example": True}, + "data": schema, + "errors": {"type": "null"}, + "meta": { + "type": "object", + "properties": { + "pagination": { + "type": "object", + "properties": { + "page": {"type": "integer", "example": 1}, + "page_size": {"type": "integer", "example": 20}, + "total_count": {"type": "integer", "example": 100}, + "total_pages": {"type": "integer", "example": 5}, + "has_next": {"type": "boolean", "example": True}, + "has_previous": {"type": "boolean", "example": False}, + }, + }, + }, + }, + }, + } + + +class StandardCursorPagination(CursorPagination): + """ + Cursor-based pagination for large datasets. + + Benefits over offset pagination: + - Consistent results even when data changes + - Better performance on large tables + - No "page drift" issues + + Query params: + - cursor: Opaque cursor string for next/previous page + - page_size: Items per page (default: 20, max: 100) + + Response format: + { + "success": true, + "data": [...], + "errors": null, + "meta": { + "pagination": { + "next_cursor": "...", + "previous_cursor": "...", + "page_size": 20 + } + } + } + """ + + page_size = 20 + page_size_query_param = "page_size" + max_page_size = 100 + ordering = "-created_at" # Default ordering, override per-view + + def get_paginated_response(self, data: list[Any]) -> Response: + """Return response in unified format.""" + return Response( + OrderedDict( + [ + ("success", True), + ("data", data), + ("errors", None), + ( + "meta", + { + "pagination": { + "next_cursor": self.get_next_link(), + "previous_cursor": self.get_previous_link(), + "page_size": self.get_page_size(self.request), + } + }, + ), + ] + ) + ) + + def get_paginated_response_schema(self, schema: dict) -> dict: + """OpenAPI schema for cursor paginated response.""" + return { + "type": "object", + "properties": { + "success": {"type": "boolean", "example": True}, + "data": schema, + "errors": {"type": "null"}, + "meta": { + "type": "object", + "properties": { + "pagination": { + "type": "object", + "properties": { + "next_cursor": { + "type": "string", + "nullable": True, + "example": "cD0yMDIxLTAxLTAxKzAwJTNBMDAl", + }, + "previous_cursor": { + "type": "string", + "nullable": True, + "example": None, + }, + "page_size": {"type": "integer", "example": 20}, + }, + }, + }, + }, + }, + } + + +class LargeResultSetPagination(StandardCursorPagination): + """ + Pagination optimized for very large result 
sets. + + Uses smaller page size and stricter limits. + """ + + page_size = 50 + max_page_size = 200 + + +class SmallResultSetPagination(StandardPagination): + """ + Pagination for small result sets where total count is acceptable. + """ + + page_size = 10 + max_page_size = 50 diff --git a/src/apps/core/permissions.py b/src/apps/core/permissions.py new file mode 100644 index 0000000..daaf397 --- /dev/null +++ b/src/apps/core/permissions.py @@ -0,0 +1,128 @@ +""" +Базовые классы разрешений для API. + +Предоставляет переиспользуемые permission classes для контроля доступа. +""" + +from rest_framework import permissions +from rest_framework.request import Request +from rest_framework.views import APIView + + +class IsOwner(permissions.BasePermission): + """ + Разрешает доступ только владельцу объекта. + + Объект должен иметь атрибут `user` или `owner`, + который сравнивается с текущим пользователем. + + Пример использования: + class MyView(APIView): + permission_classes = [IsAuthenticated, IsOwner] + """ + + owner_field = "user" # Можно переопределить в подклассах + + def has_object_permission(self, request: Request, view: APIView, obj) -> bool: + """Проверяет, является ли пользователь владельцем объекта.""" + owner = getattr(obj, self.owner_field, None) + if owner is None: + owner = getattr(obj, "owner", None) + return owner == request.user + + +class IsOwnerOrReadOnly(permissions.BasePermission): + """ + Разрешает изменение только владельцу, остальным - только чтение. + + SAFE_METHODS (GET, HEAD, OPTIONS) доступны всем аутентифицированным. + Изменяющие методы (POST, PUT, PATCH, DELETE) - только владельцу. + """ + + owner_field = "user" + + def has_object_permission(self, request: Request, view: APIView, obj) -> bool: + """Проверяет права на объект.""" + # Чтение разрешено всем + if request.method in permissions.SAFE_METHODS: + return True + + # Изменение - только владельцу + owner = getattr(obj, self.owner_field, None) + if owner is None: + owner = getattr(obj, "owner", None) + return owner == request.user + + +class IsAdminOrReadOnly(permissions.BasePermission): + """ + Разрешает изменение только админам, остальным - только чтение. + + SAFE_METHODS доступны всем (включая анонимных). + Изменяющие методы - только staff/superuser. + """ + + def has_permission(self, request: Request, view: APIView) -> bool: + """Проверяет права на уровне view.""" + if request.method in permissions.SAFE_METHODS: + return True + return request.user and request.user.is_staff + + +class IsAdmin(permissions.BasePermission): + """ + Разрешает доступ только администраторам (is_staff=True). + """ + + def has_permission(self, request: Request, view: APIView) -> bool: + """Проверяет, является ли пользователь администратором.""" + return request.user and request.user.is_staff + + +class IsSuperuser(permissions.BasePermission): + """ + Разрешает доступ только суперпользователям (is_superuser=True). + """ + + def has_permission(self, request: Request, view: APIView) -> bool: + """Проверяет, является ли пользователь суперпользователем.""" + return request.user and request.user.is_superuser + + +class IsVerified(permissions.BasePermission): + """ + Разрешает доступ только пользователям с подтверждённым email. + + Требует наличие поля `is_verified` у модели пользователя. 
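+
+    Пример использования (набросок, имя ProfileView условное):
+        class ProfileView(APIView):
+            permission_classes = [IsAuthenticated, IsVerified]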
+ """ + + message = "Email не подтверждён" + + def has_permission(self, request: Request, view: APIView) -> bool: + """Проверяет, подтверждён ли email пользователя.""" + return ( + request.user + and request.user.is_authenticated + and getattr(request.user, "is_verified", False) + ) + + +class IsOwnerOrAdmin(permissions.BasePermission): + """ + Разрешает доступ владельцу объекта или администратору. + + Полезно для случаев, когда админ должен иметь доступ + к любым объектам, а пользователь - только к своим. + """ + + owner_field = "user" + + def has_object_permission(self, request: Request, view: APIView, obj) -> bool: + """Проверяет права на объект.""" + if request.user and request.user.is_staff: + return True + + owner = getattr(obj, self.owner_field, None) + if owner is None: + owner = getattr(obj, "owner", None) + return owner == request.user diff --git a/src/apps/core/response.py b/src/apps/core/response.py new file mode 100644 index 0000000..b03a74b --- /dev/null +++ b/src/apps/core/response.py @@ -0,0 +1,160 @@ +""" +Unified API response wrapper. + +Provides consistent response format across all API endpoints: +{ + "success": bool, + "data": {...} | [...] | null, + "errors": [...] | null, + "meta": { + "request_id": "uuid", + "pagination": {...} // optional + } +} +""" + +from typing import Any + +from rest_framework import status +from rest_framework.response import Response + + +def api_response( + data: Any = None, + *, + status_code: int = status.HTTP_200_OK, + request_id: str | None = None, + pagination: dict[str, Any] | None = None, + headers: dict[str, str] | None = None, +) -> Response: + """ + Create a successful API response. + + Args: + data: Response data (dict, list, or None) + status_code: HTTP status code (default 200) + request_id: Request tracking ID + pagination: Pagination metadata + headers: Additional response headers + + Returns: + DRF Response with unified format + """ + meta = {} + if request_id: + meta["request_id"] = request_id + if pagination: + meta["pagination"] = pagination + + response_data = { + "success": True, + "data": data, + "errors": None, + "meta": meta if meta else None, + } + + return Response(response_data, status=status_code, headers=headers) + + +def api_error_response( + errors: list[dict[str, Any]], + *, + status_code: int = status.HTTP_400_BAD_REQUEST, + request_id: str | None = None, + headers: dict[str, str] | None = None, +) -> Response: + """ + Create an error API response. 
+
+    Args:
+        errors: List of error dictionaries, each with 'code' and 'message'
+        status_code: HTTP status code (default 400)
+        request_id: Request tracking ID
+        headers: Additional response headers
+
+    Returns:
+        DRF Response with unified error format
+    """
+    meta = {}
+    if request_id:
+        meta["request_id"] = request_id
+
+    response_data = {
+        "success": False,
+        "data": None,
+        "errors": errors,
+        "meta": meta if meta else None,
+    }
+
+    return Response(response_data, status=status_code, headers=headers)
+
+
+def api_created_response(
+    data: Any = None,
+    *,
+    request_id: str | None = None,
+    headers: dict[str, str] | None = None,
+) -> Response:
+    """Shortcut for 201 Created response."""
+    return api_response(
+        data,
+        status_code=status.HTTP_201_CREATED,
+        request_id=request_id,
+        headers=headers,
+    )
+
+
+def api_no_content_response(
+    *,
+    request_id: str | None = None,
+    headers: dict[str, str] | None = None,
+) -> Response:
+    """Shortcut for 204 No Content response."""
+    meta = {}
+    if request_id:
+        meta["request_id"] = request_id
+
+    return Response(
+        {"success": True, "data": None, "errors": None, "meta": meta if meta else None},
+        status=status.HTTP_204_NO_CONTENT,
+        headers=headers,
+    )
+
+
+def api_paginated_response(
+    data: list[Any],
+    *,
+    page: int,
+    page_size: int,
+    total_count: int,
+    request_id: str | None = None,
+    headers: dict[str, str] | None = None,
+) -> Response:
+    """
+    Create a paginated API response.
+
+    Args:
+        data: List of items for current page
+        page: Current page number
+        page_size: Number of items per page
+        total_count: Total number of items
+        request_id: Request tracking ID
+        headers: Additional response headers
+    """
+    total_pages = (total_count + page_size - 1) // page_size if page_size > 0 else 0
+
+    pagination = {
+        "page": page,
+        "page_size": page_size,
+        "total_count": total_count,
+        "total_pages": total_pages,
+        "has_next": page < total_pages,
+        "has_previous": page > 1,
+    }
+
+    return api_response(
+        data,
+        request_id=request_id,
+        pagination=pagination,
+        headers=headers,
+    )
diff --git a/src/apps/core/serializers.py b/src/apps/core/serializers.py
new file mode 100644
index 0000000..e4a13c9
--- /dev/null
+++ b/src/apps/core/serializers.py
@@ -0,0 +1,47 @@
+"""
+Сериализаторы для apps.core.
+
+Содержит сериализаторы для:
+- BackgroundJob - статус фоновых задач
+"""
+
+from rest_framework import serializers
+
+
+class BackgroundJobSerializer(serializers.Serializer):
+    """
+    Сериализатор для отображения статуса фоновой задачи.
+
+    Используется для API ответов о статусе задач.
+    """
+
+    id = serializers.UUIDField(read_only=True)
+    task_id = serializers.CharField(read_only=True)
+    task_name = serializers.CharField(read_only=True)
+    status = serializers.CharField(read_only=True)
+    progress = serializers.IntegerField(read_only=True)
+    progress_message = serializers.CharField(read_only=True)
+    result = serializers.JSONField(read_only=True)
+    error = serializers.CharField(read_only=True)
+    started_at = serializers.DateTimeField(read_only=True)
+    completed_at = serializers.DateTimeField(read_only=True)
+    created_at = serializers.DateTimeField(read_only=True)
+    duration = serializers.FloatField(read_only=True)
+
+    # Вычисляемые поля
+    is_finished = serializers.BooleanField(read_only=True)
+    is_successful = serializers.BooleanField(read_only=True)
+
+
+class BackgroundJobListSerializer(serializers.Serializer):
+    """
+    Краткий сериализатор для списка задач.
+ """ + + id = serializers.UUIDField(read_only=True) + task_id = serializers.CharField(read_only=True) + task_name = serializers.CharField(read_only=True) + status = serializers.CharField(read_only=True) + progress = serializers.IntegerField(read_only=True) + created_at = serializers.DateTimeField(read_only=True) + is_finished = serializers.BooleanField(read_only=True) diff --git a/src/apps/core/services.py b/src/apps/core/services.py new file mode 100644 index 0000000..48aeedf --- /dev/null +++ b/src/apps/core/services.py @@ -0,0 +1,671 @@ +""" +Base service classes for business logic layer. + +Services encapsulate business logic and are independent of HTTP layer. +They are easily testable and can manage transactions. +""" + +import logging +from typing import Any, Generic, TypeVar + +from apps.core.exceptions import NotFoundError +from django.db import models, transaction +from django.db.models import QuerySet + +logger = logging.getLogger(__name__) + +# Type variable for model +M = TypeVar("M", bound=models.Model) + + +class BaseService(Generic[M]): + """ + Base service class providing common CRUD operations. + + Usage: + class UserService(BaseService[User]): + model = User + + @classmethod + def create_user(cls, *, email: str, password: str) -> User: + # Business logic here + user = cls.model.objects.create_user(email=email, password=password) + return user + """ + + model: type[M] + + @classmethod + def get_queryset(cls) -> QuerySet[M]: + """Get base queryset for the model. Override to add default filters.""" + return cls.model.objects.all() + + @classmethod + def get_by_id(cls, pk: Any) -> M: + """ + Get entity by primary key. + + Raises: + NotFoundError: If entity not found + """ + try: + return cls.get_queryset().get(pk=pk) + except cls.model.DoesNotExist as e: + raise NotFoundError( + message=f"{cls.model.__name__} with id={pk} not found", + code="not_found", + details={"model": cls.model.__name__, "id": pk}, + ) from e + + @classmethod + def get_by_id_or_none(cls, pk: Any) -> M | None: + """Get entity by primary key or None if not found.""" + try: + return cls.get_queryset().get(pk=pk) + except cls.model.DoesNotExist: + return None + + @classmethod + def get_all(cls) -> QuerySet[M]: + """Get all entities.""" + return cls.get_queryset() + + @classmethod + def filter(cls, **kwargs: Any) -> QuerySet[M]: + """Filter entities by given criteria.""" + return cls.get_queryset().filter(**kwargs) + + @classmethod + def exists(cls, **kwargs: Any) -> bool: + """Check if entity with given criteria exists.""" + return cls.get_queryset().filter(**kwargs).exists() + + @classmethod + def count(cls, **kwargs: Any) -> int: + """Count entities matching criteria.""" + if kwargs: + return cls.get_queryset().filter(**kwargs).count() + return cls.get_queryset().count() + + @classmethod + @transaction.atomic + def create(cls, **kwargs: Any) -> M: + """ + Create new entity. + + Override this method to add business logic before/after creation. + """ + return cls.model.objects.create(**kwargs) + + @classmethod + @transaction.atomic + def update(cls, instance: M, **kwargs: Any) -> M: + """ + Update entity fields. + + Override this method to add business logic before/after update. + """ + for field, value in kwargs.items(): + setattr(instance, field, value) + instance.save(update_fields=list(kwargs.keys())) + return instance + + @classmethod + @transaction.atomic + def delete(cls, instance: M) -> None: + """ + Delete entity. + + Override this method to implement soft delete or add business logic. 
+ """ + instance.delete() + + @classmethod + @transaction.atomic + def bulk_create(cls, instances: list[M], **kwargs: Any) -> list[M]: + """Bulk create entities.""" + return cls.model.objects.bulk_create(instances, **kwargs) + + @classmethod + @transaction.atomic + def bulk_update(cls, instances: list[M], fields: list[str], **kwargs: Any) -> int: + """Bulk update entities.""" + return cls.model.objects.bulk_update(instances, fields, **kwargs) + + +class BaseReadOnlyService(Generic[M]): + """ + Read-only service for entities that should not be modified via API. + + Useful for reference data, logs, audit trails, etc. + """ + + model: type[M] + + @classmethod + def get_queryset(cls) -> QuerySet[M]: + """Get base queryset for the model.""" + return cls.model.objects.all() + + @classmethod + def get_by_id(cls, pk: Any) -> M: + """Get entity by primary key.""" + try: + return cls.get_queryset().get(pk=pk) + except cls.model.DoesNotExist as e: + raise NotFoundError( + message=f"{cls.model.__name__} with id={pk} not found", + code="not_found", + ) from e + + @classmethod + def get_all(cls) -> QuerySet[M]: + """Get all entities.""" + return cls.get_queryset() + + @classmethod + def filter(cls, **kwargs: Any) -> QuerySet[M]: + """Filter entities by given criteria.""" + return cls.get_queryset().filter(**kwargs) + + +class TransactionMixin: + """ + Mixin providing transaction helpers for services. + + Usage: + class PaymentService(TransactionMixin, BaseService[Payment]): + @classmethod + def process_payment(cls, order_id: int) -> Payment: + with cls.atomic(): + # Multiple operations in single transaction + ... + """ + + @classmethod + def atomic(cls): + """Get atomic transaction context manager.""" + return transaction.atomic() + + @classmethod + def on_commit(cls, func): + """Register function to be called after transaction commits.""" + transaction.on_commit(func) + + @classmethod + def savepoint(cls): + """Create a savepoint within current transaction.""" + return transaction.savepoint() + + @classmethod + def savepoint_rollback(cls, sid): + """Rollback to a savepoint.""" + transaction.savepoint_rollback(sid) + + @classmethod + def savepoint_commit(cls, sid): + """Commit a savepoint.""" + transaction.savepoint_commit(sid) + + +class BulkOperationsMixin: + """ + Миксин для расширенных массовых операций. + + Дополняет BaseService методами: + - bulk_create_chunked: создание чанками для больших данных + - bulk_update_or_create: upsert операция + - bulk_delete: удаление по списку ID + - bulk_update_fields: обновление полей по фильтру + + Использование: + class ProductService(BulkOperationsMixin, BaseService[Product]): + model = Product + + # Создание 10000 записей чанками по 500 + ProductService.bulk_create_chunked(products, chunk_size=500) + + # Upsert по уникальному полю + ProductService.bulk_update_or_create( + items=data, + unique_fields=['sku'], + update_fields=['price', 'quantity'] + ) + """ + + model: type[models.Model] + + @classmethod + @transaction.atomic + def bulk_create_chunked( + cls, + instances: list, + *, + chunk_size: int = 500, + ignore_conflicts: bool = False, + update_conflicts: bool = False, + update_fields: list[str] | None = None, + unique_fields: list[str] | None = None, + ) -> int: + """ + Массовое создание чанками для больших объёмов. 
+ + Args: + instances: Список объектов для создания + chunk_size: Размер чанка (по умолчанию 500) + ignore_conflicts: Игнорировать конфликты + update_conflicts: Обновлять при конфликтах (upsert) + update_fields: Поля для обновления при конфликте + unique_fields: Уникальные поля для определения конфликта + + Returns: + Количество созданных записей + """ + total_created = 0 + + for i in range(0, len(instances), chunk_size): + chunk = instances[i : i + chunk_size] + kwargs = { + "ignore_conflicts": ignore_conflicts, + } + + # Django 4.1+ поддерживает update_conflicts + if update_conflicts and update_fields and unique_fields: + kwargs["update_conflicts"] = True + kwargs["update_fields"] = update_fields + kwargs["unique_fields"] = unique_fields + + created = cls.model.objects.bulk_create(chunk, **kwargs) + total_created += len(created) + + return total_created + + @classmethod + @transaction.atomic + def bulk_update_or_create( + cls, + items: list[dict], + *, + unique_fields: list[str], + update_fields: list[str], + create_defaults: dict | None = None, + ) -> tuple[int, int]: + """ + Upsert: обновить существующие или создать новые. + + Args: + items: Список словарей с данными + unique_fields: Поля для поиска существующих + update_fields: Поля для обновления + create_defaults: Значения по умолчанию для создания + + Returns: + (created_count, updated_count) + """ + created_count = 0 + updated_count = 0 + defaults = create_defaults or {} + + for item in items: + lookup = {field: item[field] for field in unique_fields} + update_data = { + field: item[field] for field in update_fields if field in item + } + + obj, created = cls.model.objects.update_or_create( + **lookup, + defaults={**update_data, **defaults}, + ) + + if created: + created_count += 1 + else: + updated_count += 1 + + return created_count, updated_count + + @classmethod + @transaction.atomic + def bulk_delete( + cls, + ids: list, + *, + hard_delete: bool = True, + ) -> int: + """ + Массовое удаление по списку ID. + + Args: + ids: Список ID для удаления + hard_delete: Физическое удаление (игнорирует SoftDelete) + + Returns: + Количество удалённых записей + """ + queryset = cls.model.objects.filter(pk__in=ids) + + if hard_delete: + # Для SoftDelete моделей используем all_objects + if hasattr(cls.model, "all_objects"): + queryset = cls.model.all_objects.filter(pk__in=ids) + deleted, _ = queryset.delete() + else: + # Мягкое удаление + from django.utils import timezone + + deleted = queryset.update(is_deleted=True, deleted_at=timezone.now()) + + return deleted + + @classmethod + @transaction.atomic + def bulk_update_fields( + cls, + filters: dict, + updates: dict, + ) -> int: + """ + Массовое обновление полей по фильтру. + + Args: + filters: Фильтры для выборки + updates: Поля и значения для обновления + + Returns: + Количество обновлённых записей + + Пример: + ProductService.bulk_update_fields( + filters={'category': 'electronics'}, + updates={'discount': 10, 'is_featured': True} + ) + """ + return cls.model.objects.filter(**filters).update(**updates) + + +class QueryOptimizerMixin: + """ + Миксин для автоматической оптимизации запросов. + + Декларативный подход к select_related/prefetch_related. 
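+
+    Declaring the related fields once on the service helps avoid N+1 queries
+    without repeating select_related/prefetch_related at every call site.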
+ + Атрибуты: + select_related: Список полей для select_related + prefetch_related: Список полей для prefetch_related + default_only: Поля для only() (ограничение столбцов) + default_defer: Поля для defer() (исключение столбцов) + + Использование: + class OrderService(QueryOptimizerMixin, BaseService[Order]): + model = Order + select_related = ['user', 'shipping_address'] + prefetch_related = ['items', 'items__product'] + default_defer = ['description', 'internal_notes'] + + # Автоматически применяет оптимизации + orders = OrderService.get_optimized_queryset() + """ + + model: type[models.Model] + select_related: list[str] = [] + prefetch_related: list[str] = [] + default_only: list[str] = [] + default_defer: list[str] = [] + + @classmethod + def get_optimized_queryset(cls) -> QuerySet: + """ + Получить оптимизированный queryset. + + Применяет все объявленные оптимизации. + """ + queryset = cls.model.objects.all() + return cls.apply_optimizations(queryset) + + @classmethod + def apply_optimizations( + cls, + queryset: QuerySet, + *, + include_select: bool = True, + include_prefetch: bool = True, + include_only: bool = True, + include_defer: bool = True, + ) -> QuerySet: + """ + Применить оптимизации к queryset. + + Args: + queryset: Исходный queryset + include_select: Применять select_related + include_prefetch: Применять prefetch_related + include_only: Применять only() + include_defer: Применять defer() + """ + if include_select and cls.select_related: + queryset = queryset.select_related(*cls.select_related) + + if include_prefetch and cls.prefetch_related: + queryset = queryset.prefetch_related(*cls.prefetch_related) + + if include_only and cls.default_only: + queryset = queryset.only(*cls.default_only) + + if include_defer and cls.default_defer: + queryset = queryset.defer(*cls.default_defer) + + return queryset + + @classmethod + def get_list_queryset(cls) -> QuerySet: + """ + Queryset для списков (может исключать тяжёлые поля). + """ + return cls.apply_optimizations( + cls.model.objects.all(), + include_only=True, + include_defer=True, + ) + + @classmethod + def get_detail_queryset(cls) -> QuerySet: + """ + Queryset для детального просмотра (все поля). + """ + return cls.apply_optimizations( + cls.model.objects.all(), + include_only=False, + include_defer=False, + ) + + @classmethod + def with_counts(cls, queryset: QuerySet, *count_fields: str) -> QuerySet: + """ + Добавить аннотации Count. + + Args: + queryset: Исходный queryset + count_fields: Поля для подсчёта + + Пример: + # Добавит items_count и reviews_count + qs = ProductService.with_counts(qs, 'items', 'reviews') + """ + from django.db.models import Count + + annotations = {f"{field}_count": Count(field) for field in count_fields} + return queryset.annotate(**annotations) + + @classmethod + def with_exists(cls, queryset: QuerySet, **subqueries: QuerySet) -> QuerySet: + """ + Добавить аннотации Exists. + + Пример: + from apps.reviews.models import Review + qs = ProductService.with_exists( + qs, + has_reviews=Review.objects.filter(product=OuterRef('pk')) + ) + """ + from django.db.models import Exists + + annotations = {name: Exists(subquery) for name, subquery in subqueries.items()} + return queryset.annotate(**annotations) + + +class BackgroundJobService(BaseReadOnlyService): + """ + Сервис для управления фоновыми задачами. 
+ + Использование: + # Создание задачи + job = BackgroundJobService.create_job( + task_id="abc-123", + task_name="apps.myapp.tasks.process_data", + user_id=request.user.id, + ) + + # Получение статуса + job = BackgroundJobService.get_by_task_id("abc-123") + + # Список задач пользователя + jobs = BackgroundJobService.get_user_jobs(user_id=1) + """ + + # Импорт модели внутри методов для избежания circular import + + @classmethod + def get_model(cls): + """Ленивый импорт модели.""" + from apps.core.models import BackgroundJob + + return BackgroundJob + + @classmethod + def get_queryset(cls): + """Get base queryset.""" + return cls.get_model().objects.all() + + @classmethod + def create_job( + cls, + *, + task_id: str, + task_name: str, + user_id: int | None = None, + meta: dict | None = None, + ): + """ + Создать запись о фоновой задаче. + + Args: + task_id: ID задачи Celery + task_name: Имя задачи + user_id: ID пользователя (опционально) + meta: Дополнительные метаданные + + Returns: + BackgroundJob instance + """ + BackgroundJob = cls.get_model() + return BackgroundJob.objects.create( + task_id=task_id, + task_name=task_name, + user_id=user_id, + meta=meta or {}, + ) + + @classmethod + def get_by_task_id(cls, task_id: str): + """ + Получить задачу по ID Celery. + + Raises: + NotFoundError: Если задача не найдена + """ + BackgroundJob = cls.get_model() + try: + return BackgroundJob.objects.get(task_id=task_id) + except BackgroundJob.DoesNotExist as e: + raise NotFoundError( + message=f"Job with task_id={task_id} not found", + code="job_not_found", + ) from e + + @classmethod + def get_by_task_id_or_none(cls, task_id: str): + """Получить задачу по ID или None.""" + BackgroundJob = cls.get_model() + try: + return BackgroundJob.objects.get(task_id=task_id) + except BackgroundJob.DoesNotExist: + return None + + @classmethod + def get_user_jobs( + cls, + user_id: int, + *, + status: str | None = None, + limit: int = 50, + ): + """ + Получить задачи пользователя. + + Args: + user_id: ID пользователя + status: Фильтр по статусу (опционально) + limit: Максимальное количество записей + + Returns: + QuerySet задач + """ + qs = cls.get_queryset().filter(user_id=user_id) + if status: + qs = qs.filter(status=status) + return qs[:limit] + + @classmethod + def get_active_jobs(cls, user_id: int | None = None): + """ + Получить активные (незавершённые) задачи. + + Args: + user_id: Фильтр по пользователю (опционально) + """ + from apps.core.models import JobStatus + + qs = cls.get_queryset().filter( + status__in=[JobStatus.PENDING, JobStatus.STARTED, JobStatus.RETRY] + ) + if user_id: + qs = qs.filter(user_id=user_id) + return qs + + @classmethod + def cleanup_old_jobs(cls, *, days: int = 30) -> int: + """ + Удалить старые завершённые задачи. + + Args: + days: Количество дней (задачи старше будут удалены) + + Returns: + Количество удалённых записей + """ + from datetime import timedelta + + from apps.core.models import JobStatus + from django.utils import timezone + + cutoff = timezone.now() - timedelta(days=days) + deleted, _ = ( + cls.get_queryset() + .filter( + status__in=[JobStatus.SUCCESS, JobStatus.FAILURE, JobStatus.REVOKED], + completed_at__lt=cutoff, + ) + .delete() + ) + return deleted diff --git a/src/apps/core/signals.py b/src/apps/core/signals.py new file mode 100644 index 0000000..5d831d4 --- /dev/null +++ b/src/apps/core/signals.py @@ -0,0 +1,295 @@ +""" +Централизованная система управления сигналами Django. 
+ +Предоставляет удобный способ регистрации и управления сигналами +в одном месте для лучшей организации кода. +""" + +import logging +from collections.abc import Callable +from typing import Any + +from django.db.models.signals import ( + post_delete, + post_save, + pre_delete, + pre_save, +) +from django.dispatch import Signal + +logger = logging.getLogger(__name__) + + +class SignalDispatcher: + """ + Диспетчер сигналов для централизованной регистрации. + + Позволяет регистрировать все обработчики сигналов в одном месте, + что упрощает отладку и понимание потока данных. + + Пример использования в apps.py: + from apps.core.signals import signal_dispatcher + + class UserConfig(AppConfig): + def ready(self): + from apps.user.signals import register_signals + register_signals(signal_dispatcher) + + Пример в signals.py приложения: + def register_signals(dispatcher): + dispatcher.register( + signal=post_save, + sender='user.User', + handler=create_user_profile, + description="Создаёт профиль при создании пользователя", + ) + """ + + def __init__(self) -> None: + self._handlers: list[dict[str, Any]] = [] + self._connected = False + + def register( + self, + signal: Signal, + sender: str | type, + handler: Callable[..., Any], + description: str = "", + dispatch_uid: str | None = None, + ) -> None: + """ + Регистрирует обработчик сигнала. + + Args: + signal: Django signal (post_save, pre_delete, etc.) + sender: Модель-отправитель (строка 'app.Model' или класс) + handler: Функция-обработчик + description: Описание для документации + dispatch_uid: Уникальный ID для предотвращения дублирования + """ + self._handlers.append( + { + "signal": signal, + "sender": sender, + "handler": handler, + "description": description, + "dispatch_uid": dispatch_uid + or f"{handler.__module__}.{handler.__name__}", + } + ) + + logger.debug( + f"Зарегистрирован обработчик сигнала: {handler.__name__} " + f"для {sender} ({description})" + ) + + def connect_all(self) -> None: + """Подключает все зарегистрированные обработчики.""" + if self._connected: + logger.warning("Сигналы уже подключены") + return + + for handler_info in self._handlers: + sender = handler_info["sender"] + + # Если sender - строка, получаем модель + if isinstance(sender, str): + from django.apps import apps + + app_label, model_name = sender.split(".") + sender = apps.get_model(app_label, model_name) + + handler_info["signal"].connect( + handler_info["handler"], + sender=sender, + dispatch_uid=handler_info["dispatch_uid"], + ) + + logger.info( + f"Подключен обработчик: {handler_info['handler'].__name__} -> {sender}" + ) + + self._connected = True + + def disconnect_all(self) -> None: + """Отключает все обработчики (полезно для тестов).""" + for handler_info in self._handlers: + sender = handler_info["sender"] + + if isinstance(sender, str): + from django.apps import apps + + app_label, model_name = sender.split(".") + sender = apps.get_model(app_label, model_name) + + handler_info["signal"].disconnect( + handler_info["handler"], + sender=sender, + dispatch_uid=handler_info["dispatch_uid"], + ) + + self._connected = False + logger.info("Все обработчики сигналов отключены") + + def list_handlers(self) -> list[dict[str, Any]]: + """Возвращает список всех зарегистрированных обработчиков.""" + return [ + { + "signal": h["signal"].__class__.__name__, + "sender": str(h["sender"]), + "handler": f"{h['handler'].__module__}.{h['handler'].__name__}", + "description": h["description"], + } + for h in self._handlers + ] + + +# Глобальный экземпляр диспетчера 
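+# Handlers registered on it stay inactive until connect_all() is called
+# (typically from an AppConfig.ready()).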
+signal_dispatcher = SignalDispatcher() + + +# Декораторы для удобной регистрации +def on_post_save( + sender: str | type, + description: str = "", + dispatch_uid: str | None = None, +) -> Callable[[Callable[..., Any]], Callable[..., Any]]: + """ + Декоратор для регистрации обработчика post_save. + + Пример использования: + @on_post_save('user.User', description="Создаёт профиль") + def create_profile(sender, instance, created, **kwargs): + if created: + Profile.objects.create(user=instance) + """ + + def decorator(handler: Callable[..., Any]) -> Callable[..., Any]: + signal_dispatcher.register( + signal=post_save, + sender=sender, + handler=handler, + description=description, + dispatch_uid=dispatch_uid, + ) + return handler + + return decorator + + +def on_pre_save( + sender: str | type, + description: str = "", + dispatch_uid: str | None = None, +) -> Callable[[Callable[..., Any]], Callable[..., Any]]: + """ + Декоратор для регистрации обработчика pre_save. + + Пример использования: + @on_pre_save('blog.Article', description="Генерирует slug") + def generate_slug(sender, instance, **kwargs): + if not instance.slug: + instance.slug = slugify(instance.title) + """ + + def decorator(handler: Callable[..., Any]) -> Callable[..., Any]: + signal_dispatcher.register( + signal=pre_save, + sender=sender, + handler=handler, + description=description, + dispatch_uid=dispatch_uid, + ) + return handler + + return decorator + + +def on_post_delete( + sender: str | type, + description: str = "", + dispatch_uid: str | None = None, +) -> Callable[[Callable[..., Any]], Callable[..., Any]]: + """ + Декоратор для регистрации обработчика post_delete. + + Пример использования: + @on_post_delete('user.User', description="Удаляет связанные файлы") + def cleanup_user_files(sender, instance, **kwargs): + instance.avatar.delete(save=False) + """ + + def decorator(handler: Callable[..., Any]) -> Callable[..., Any]: + signal_dispatcher.register( + signal=post_delete, + sender=sender, + handler=handler, + description=description, + dispatch_uid=dispatch_uid, + ) + return handler + + return decorator + + +def on_pre_delete( + sender: str | type, + description: str = "", + dispatch_uid: str | None = None, +) -> Callable[[Callable[..., Any]], Callable[..., Any]]: + """ + Декоратор для регистрации обработчика pre_delete. + + Пример использования: + @on_pre_delete('blog.Article', description="Архивирует перед удалением") + def archive_before_delete(sender, instance, **kwargs): + ArchivedArticle.objects.create_from_article(instance) + """ + + def decorator(handler: Callable[..., Any]) -> Callable[..., Any]: + signal_dispatcher.register( + signal=pre_delete, + sender=sender, + handler=handler, + description=description, + dispatch_uid=dispatch_uid, + ) + return handler + + return decorator + + +# Пользовательские сигналы для бизнес-событий +user_registered = Signal() # Отправляется при регистрации пользователя +user_verified = Signal() # Отправляется при верификации email +password_changed = Signal() # Отправляется при смене пароля + + +def emit_user_registered(user: Any) -> None: + """ + Отправляет сигнал о регистрации пользователя. 
+ + Пример использования: + from apps.core.signals import emit_user_registered + + class UserService: + @classmethod + def register_user(cls, **data): + user = User.objects.create_user(**data) + emit_user_registered(user) + return user + """ + user_registered.send(sender=user.__class__, user=user) + logger.info(f"Отправлен сигнал user_registered для user_id={user.id}") + + +def emit_user_verified(user: Any) -> None: + """Отправляет сигнал о верификации email пользователя.""" + user_verified.send(sender=user.__class__, user=user) + logger.info(f"Отправлен сигнал user_verified для user_id={user.id}") + + +def emit_password_changed(user: Any) -> None: + """Отправляет сигнал о смене пароля.""" + password_changed.send(sender=user.__class__, user=user) + logger.info(f"Отправлен сигнал password_changed для user_id={user.id}") diff --git a/src/apps/core/tasks.py b/src/apps/core/tasks.py new file mode 100644 index 0000000..2a43479 --- /dev/null +++ b/src/apps/core/tasks.py @@ -0,0 +1,269 @@ +""" +Базовые классы для Celery задач. + +Предоставляет переиспользуемые базовые классы с логированием, +обработкой ошибок и retry логикой. +""" + +import logging +import time +from typing import Any + +from celery import Task +from django.db import transaction + +logger = logging.getLogger(__name__) + + +class BaseTask(Task): + """ + Базовый класс для всех Celery задач. + + Особенности: + - Автоматическое логирование начала/завершения + - Измерение времени выполнения + - Обработка ошибок с retry + - Логирование исключений + + Пример использования: + from config.celery import app + + @app.task(base=BaseTask, bind=True) + def my_task(self, arg1, arg2): + # Логика задачи + return result + """ + + # Настройки retry по умолчанию + autoretry_for = (Exception,) + retry_backoff = True + retry_backoff_max = 600 # Максимум 10 минут между retry + retry_jitter = True + max_retries = 3 + + # Не пересоздавать задачу при перезапуске worker'а + acks_late = True + reject_on_worker_lost = True + + def before_start( + self, + task_id: str, + args: tuple[Any, ...], + kwargs: dict[str, Any], + ) -> None: + """Вызывается перед началом выполнения задачи.""" + logger.info( + f"Задача {self.name}[{task_id}] запущена", + extra={ + "task_id": task_id, + "task_name": self.name, + "args": str(args)[:200], + "kwargs": str(kwargs)[:200], + }, + ) + + def on_success( + self, + retval: Any, + task_id: str, + args: tuple[Any, ...], + kwargs: dict[str, Any], + ) -> None: + """Вызывается при успешном завершении задачи.""" + logger.info( + f"Задача {self.name}[{task_id}] завершена успешно", + extra={ + "task_id": task_id, + "task_name": self.name, + "result": str(retval)[:200] if retval else None, + }, + ) + + def on_failure( + self, + exc: Exception, + task_id: str, + args: tuple[Any, ...], + kwargs: dict[str, Any], + einfo: Any, + ) -> None: + """Вызывается при ошибке выполнения задачи.""" + logger.error( + f"Задача {self.name}[{task_id}] завершена с ошибкой: {exc}", + extra={ + "task_id": task_id, + "task_name": self.name, + "exception": str(exc), + "args": str(args)[:200], + "kwargs": str(kwargs)[:200], + }, + exc_info=True, + ) + + def on_retry( + self, + exc: Exception, + task_id: str, + args: tuple[Any, ...], + kwargs: dict[str, Any], + einfo: Any, + ) -> None: + """Вызывается при повторной попытке выполнения.""" + logger.warning( + f"Задача {self.name}[{task_id}] будет повторена: {exc}", + extra={ + "task_id": task_id, + "task_name": self.name, + "exception": str(exc), + "retry_count": self.request.retries, + }, + ) + + +class 
TransactionalTask(BaseTask): + """ + Задача с поддержкой транзакций. + + Выполняет задачу в рамках database transaction. + При ошибке транзакция откатывается. + + Пример использования: + @app.task(base=TransactionalTask, bind=True) + def update_user_stats(self, user_id): + # Все операции в одной транзакции + user = User.objects.get(id=user_id) + user.stats.update() + user.save() + """ + + def __call__(self, *args: Any, **kwargs: Any) -> Any: + """Выполняет задачу в транзакции.""" + with transaction.atomic(): + return super().__call__(*args, **kwargs) + + +class IdempotentTask(BaseTask): + """ + Идемпотентная задача. + + Гарантирует, что задача с одинаковыми аргументами + не будет выполнена повторно в течение lock_timeout. + + Пример использования: + @app.task(base=IdempotentTask, bind=True) + def send_notification(self, user_id, message): + # Не отправит дважды одно уведомление + send_email(user_id, message) + """ + + lock_timeout = 3600 # 1 час по умолчанию + + def __call__(self, *args: Any, **kwargs: Any) -> Any: + """Выполняет задачу с проверкой идемпотентности.""" + from django.core.cache import cache + + # Формируем ключ блокировки + lock_key = ( + f"task_lock:{self.name}:{hash((args, tuple(sorted(kwargs.items()))))}" + ) + + # Пробуем получить блокировку + if not cache.add(lock_key, True, self.lock_timeout): + logger.info( + f"Задача {self.name} пропущена (идемпотентность)", + extra={"lock_key": lock_key}, + ) + return None + + try: + return super().__call__(*args, **kwargs) + finally: + # Не удаляем блокировку - она истечёт по таймауту + pass + + +class TimedTask(BaseTask): + """ + Задача с измерением времени выполнения. + + Логирует время выполнения и предупреждает о медленных задачах. + + Атрибуты: + slow_threshold: Порог для предупреждения (в секундах) + + Пример использования: + @app.task(base=TimedTask, bind=True) + def process_data(self): + # Время выполнения будет залогировано + process_heavy_data() + """ + + slow_threshold = 60 # 1 минута по умолчанию + + def __call__(self, *args: Any, **kwargs: Any) -> Any: + """Выполняет задачу с измерением времени.""" + start_time = time.time() + try: + return super().__call__(*args, **kwargs) + finally: + elapsed = time.time() - start_time + log_extra = { + "task_id": self.request.id, + "task_name": self.name, + "elapsed_seconds": round(elapsed, 2), + } + + if elapsed > self.slow_threshold: + logger.warning( + f"Задача {self.name} выполнялась {elapsed:.2f}с " + f"(порог: {self.slow_threshold}с)", + extra=log_extra, + ) + else: + logger.debug( + f"Задача {self.name} выполнена за {elapsed:.2f}с", + extra=log_extra, + ) + + +class PeriodicTask(TimedTask): + """ + Базовый класс для периодических задач. + + Объединяет TimedTask с дополнительной логикой + для периодических задач (beat). 
+ + Пример использования: + @app.task(base=PeriodicTask, bind=True) + def cleanup_old_data(self): + # Периодическая очистка + OldData.objects.filter(created_at__lt=threshold).delete() + + # В beat_schedule: + app.conf.beat_schedule = { + 'cleanup-every-day': { + 'task': 'tasks.cleanup_old_data', + 'schedule': crontab(hour=3, minute=0), + }, + } + """ + + # Периодические задачи обычно не требуют retry + max_retries = 1 + autoretry_for = () + + def before_start( + self, + task_id: str, + args: tuple[Any, ...], + kwargs: dict[str, Any], + ) -> None: + """Логирует запуск периодической задачи.""" + logger.info( + f"Периодическая задача {self.name}[{task_id}] запущена", + extra={ + "task_id": task_id, + "task_name": self.name, + "periodic": True, + }, + ) diff --git a/src/apps/core/urls.py b/src/apps/core/urls.py new file mode 100644 index 0000000..382194e --- /dev/null +++ b/src/apps/core/urls.py @@ -0,0 +1,14 @@ +""" +URL configuration for core app. +""" + +from apps.core.views import HealthCheckView, LivenessView, ReadinessView +from django.urls import path + +app_name = "core" + +urlpatterns = [ + path("", HealthCheckView.as_view(), name="health"), + path("live/", LivenessView.as_view(), name="liveness"), + path("ready/", ReadinessView.as_view(), name="readiness"), +] diff --git a/src/apps/core/views.py b/src/apps/core/views.py new file mode 100644 index 0000000..056376c --- /dev/null +++ b/src/apps/core/views.py @@ -0,0 +1,249 @@ +""" +Health check views for monitoring and orchestration. + +Provides endpoints for: +- Basic liveness check (is the app running?) +- Readiness check (is the app ready to serve traffic?) +- Detailed health check (DB, Redis, Celery status) +""" + +import logging +import time +from typing import Any + +from django.conf import settings +from django.db import connection +from rest_framework import status +from rest_framework.permissions import AllowAny +from rest_framework.request import Request +from rest_framework.response import Response +from rest_framework.views import APIView + +logger = logging.getLogger(__name__) + + +class HealthCheckView(APIView): + """ + Comprehensive health check endpoint. + + GET /api/health/ + Returns detailed status of all dependencies. 
+ + Response: + { + "status": "healthy" | "degraded" | "unhealthy", + "version": "1.0.0", + "checks": { + "database": {"status": "up", "latency_ms": 5}, + "redis": {"status": "up", "latency_ms": 2}, + "celery": {"status": "up"} + } + } + """ + + permission_classes = [AllowAny] + authentication_classes = [] # No auth required + + def get(self, request: Request) -> Response: + """Run all health checks and return status.""" + checks = {} + overall_status = "healthy" + + # Database check + db_check = self._check_database() + checks["database"] = db_check + if db_check["status"] != "up": + overall_status = "unhealthy" + + # Redis check + redis_check = self._check_redis() + checks["redis"] = redis_check + if redis_check["status"] != "up" and overall_status == "healthy": + overall_status = "degraded" + + # Celery check (optional, may be slow) + if request.query_params.get("include_celery", "").lower() == "true": + celery_check = self._check_celery() + checks["celery"] = celery_check + if celery_check["status"] != "up" and overall_status == "healthy": + overall_status = "degraded" + + response_data = { + "status": overall_status, + "version": getattr(settings, "APP_VERSION", "1.0.0"), + "checks": checks, + } + + # 503 only for unhealthy (critical services down) + # 200 for healthy and degraded (non-critical services down) + status_code = ( + status.HTTP_503_SERVICE_UNAVAILABLE + if overall_status == "unhealthy" + else status.HTTP_200_OK + ) + + return Response(response_data, status=status_code) + + def _check_database(self) -> dict[str, Any]: + """Check database connectivity.""" + start = time.time() + try: + with connection.cursor() as cursor: + cursor.execute("SELECT 1") + cursor.fetchone() + latency = (time.time() - start) * 1000 + return {"status": "up", "latency_ms": round(latency, 2)} + except Exception as e: + logger.error(f"Database health check failed: {e}") + return {"status": "down", "error": str(e)} + + def _check_redis(self) -> dict[str, Any]: + """Check Redis connectivity.""" + start = time.time() + try: + from django_redis import get_redis_connection + + redis_conn = get_redis_connection("default") + redis_conn.ping() + latency = (time.time() - start) * 1000 + return {"status": "up", "latency_ms": round(latency, 2)} + except ImportError: + return {"status": "skipped", "reason": "django_redis not installed"} + except Exception as e: + logger.warning(f"Redis health check failed: {e}") + return {"status": "down", "error": str(e)} + + def _check_celery(self) -> dict[str, Any]: + """Check Celery worker availability.""" + try: + from config.celery import app as celery_app + + inspector = celery_app.control.inspect(timeout=2.0) + active = inspector.active() + if active: + worker_count = len(active) + return {"status": "up", "workers": worker_count} + return {"status": "down", "error": "No active workers"} + except Exception as e: + logger.warning(f"Celery health check failed: {e}") + return {"status": "down", "error": str(e)} + + +class LivenessView(APIView): + """ + Kubernetes liveness probe endpoint. + + GET /api/health/live/ + Returns 200 if the application is running. + """ + + permission_classes = [AllowAny] + authentication_classes = [] + + def get(self, request: Request) -> Response: + """Simple liveness check.""" + return Response({"status": "alive"}, status=status.HTTP_200_OK) + + +class ReadinessView(APIView): + """ + Kubernetes readiness probe endpoint. + + GET /api/health/ready/ + Returns 200 if the application is ready to serve traffic. 
+ """ + + permission_classes = [AllowAny] + authentication_classes = [] + + def get(self, request: Request) -> Response: + """Check if app is ready to serve traffic.""" + # Check database connection + try: + with connection.cursor() as cursor: + cursor.execute("SELECT 1") + cursor.fetchone() + except Exception as e: + logger.error(f"Readiness check failed - database: {e}") + return Response( + {"status": "not_ready", "reason": "database unavailable"}, + status=status.HTTP_503_SERVICE_UNAVAILABLE, + ) + + return Response({"status": "ready"}, status=status.HTTP_200_OK) + + +class BackgroundJobStatusView(APIView): + """ + Получение статуса фоновой задачи. + + GET /api/v1/jobs/{task_id}/ + Возвращает статус, прогресс и результат задачи. + + Response: + { + "id": "uuid", + "task_id": "celery-task-id", + "status": "pending|started|success|failure|revoked", + "progress": 75, + "progress_message": "Обработка данных...", + "result": {...}, + "error": "", + "is_finished": false + } + """ + + from rest_framework.permissions import IsAuthenticated + + permission_classes = [IsAuthenticated] + + def get(self, request: Request, task_id: str) -> Response: + """Получить статус задачи по task_id.""" + from apps.core.serializers import BackgroundJobSerializer + from apps.core.services import BackgroundJobService + + job = BackgroundJobService.get_by_task_id(task_id) + + # Проверка доступа: только владелец или админ + if job.user_id and job.user_id != request.user.id and not request.user.is_staff: + return Response( + {"detail": "Нет доступа к этой задаче"}, + status=status.HTTP_403_FORBIDDEN, + ) + + serializer = BackgroundJobSerializer(job) + return Response(serializer.data) + + +class BackgroundJobListView(APIView): + """ + Список фоновых задач пользователя. + + GET /api/v1/jobs/ + Возвращает список задач текущего пользователя. + + Query params: + status: Фильтр по статусу (pending, started, success, failure) + limit: Количество записей (по умолчанию 50) + """ + + from rest_framework.permissions import IsAuthenticated + + permission_classes = [IsAuthenticated] + + def get(self, request: Request) -> Response: + """Получить список задач пользователя.""" + from apps.core.serializers import BackgroundJobListSerializer + from apps.core.services import BackgroundJobService + + status_filter = request.query_params.get("status") + limit = min(int(request.query_params.get("limit", 50)), 100) + + jobs = BackgroundJobService.get_user_jobs( + user_id=request.user.id, + status=status_filter, + limit=limit, + ) + + serializer = BackgroundJobListSerializer(jobs, many=True) + return Response(serializer.data) diff --git a/src/apps/core/viewsets.py b/src/apps/core/viewsets.py new file mode 100644 index 0000000..99d17c1 --- /dev/null +++ b/src/apps/core/viewsets.py @@ -0,0 +1,468 @@ +""" +Базовые ViewSet классы для API. + +Предоставляет переиспользуемые ViewSet с общей логикой. 
+""" + +import logging +from typing import Any, Generic, TypeVar + +from apps.core.pagination import StandardPagination +from apps.core.response import api_error_response, api_response +from django.db.models import Model, QuerySet +from django_filters import rest_framework as filters +from rest_framework import status, viewsets +from rest_framework.filters import OrderingFilter, SearchFilter +from rest_framework.permissions import IsAuthenticated +from rest_framework.request import Request +from rest_framework.response import Response +from rest_framework.serializers import Serializer + +logger = logging.getLogger(__name__) + +M = TypeVar("M", bound=Model) + + +class BaseViewSet(viewsets.ModelViewSet, Generic[M]): + """ + Базовый ViewSet с общей логикой для CRUD операций. + + Особенности: + - Унифицированный формат ответов + - Стандартная пагинация + - Фильтрация, поиск, сортировка + - Логирование операций + - Автоматическая оптимизация запросов + - Обработка ошибок + + Пример использования: + class ArticleViewSet(BaseViewSet[Article]): + queryset = Article.objects.all() + serializer_class = ArticleSerializer + filterset_class = ArticleFilter + search_fields = ['title', 'content'] + ordering_fields = ['created_at', 'title'] + + # Оптимизация запросов (декларативно) + select_related_fields = ['author', 'category'] + prefetch_related_fields = ['tags', 'comments'] + defer_fields = ['full_text'] # Тяжёлые поля + """ + + pagination_class = StandardPagination + permission_classes = [IsAuthenticated] + filter_backends = [ + filters.DjangoFilterBackend, + SearchFilter, + OrderingFilter, + ] + ordering = ["-created_at"] + + # Оптимизация запросов (декларативный подход) + select_related_fields: list[str] = [] + prefetch_related_fields: list[str] = [] + only_fields: list[str] = [] # Только эти поля (list) + defer_fields: list[str] = [] # Исключить эти поля (detail) + + # Можно переопределить для разных action'ов + serializer_classes: dict[str, type[Serializer[Any]]] = {} + + def get_serializer_class(self) -> type[Serializer[Any]]: + """Возвращает serializer в зависимости от action.""" + if self.action in self.serializer_classes: + return self.serializer_classes[self.action] + return super().get_serializer_class() + + def get_queryset(self) -> QuerySet[M]: + """Возвращает базовый queryset с оптимизациями.""" + queryset = super().get_queryset() + + # Декларативные оптимизации (новый стиль) + if self.select_related_fields: + queryset = queryset.select_related(*self.select_related_fields) + + if self.prefetch_related_fields: + queryset = queryset.prefetch_related(*self.prefetch_related_fields) + + # only/defer только для list + if self.action == "list": + if self.only_fields: + queryset = queryset.only(*self.only_fields) + elif self.defer_fields: + queryset = queryset.defer(*self.defer_fields) + + # Старый стиль (обратная совместимость) + if ( + hasattr(self, "select_related_fields") + and not self.select_related_fields + and hasattr(self, "_select_related") + ): + queryset = queryset.select_related(*self._select_related) + + if ( + hasattr(self, "prefetch_related_fields") + and not self.prefetch_related_fields + and hasattr(self, "_prefetch_related") + ): + queryset = queryset.prefetch_related(*self._prefetch_related) + + return queryset + + def list(self, request: Request, *args: Any, **kwargs: Any) -> Response: + """Получение списка объектов.""" + queryset = self.filter_queryset(self.get_queryset()) + + page = self.paginate_queryset(queryset) + if page is not None: + serializer = 
self.get_serializer(page, many=True) + return self.get_paginated_response(serializer.data) + + serializer = self.get_serializer(queryset, many=True) + return api_response(serializer.data) + + def retrieve(self, request: Request, *args: Any, **kwargs: Any) -> Response: + """Получение одного объекта.""" + instance = self.get_object() + serializer = self.get_serializer(instance) + return api_response(serializer.data) + + def create(self, request: Request, *args: Any, **kwargs: Any) -> Response: + """Создание объекта.""" + serializer = self.get_serializer(data=request.data) + serializer.is_valid(raise_exception=True) + self.perform_create(serializer) + + logger.info( + f"Created {self.get_queryset().model.__name__}", + extra={ + "model": self.get_queryset().model.__name__, + "user_id": request.user.id if request.user.is_authenticated else None, + }, + ) + + return api_response( + serializer.data, + status_code=status.HTTP_201_CREATED, + ) + + def perform_create(self, serializer: Serializer[Any]) -> M: + """Выполняет создание объекта. Можно переопределить для добавления логики.""" + return serializer.save() + + def update(self, request: Request, *args: Any, **kwargs: Any) -> Response: + """Полное обновление объекта.""" + partial = kwargs.pop("partial", False) + instance = self.get_object() + serializer = self.get_serializer(instance, data=request.data, partial=partial) + serializer.is_valid(raise_exception=True) + self.perform_update(serializer) + + logger.info( + f"Updated {self.get_queryset().model.__name__}", + extra={ + "model": self.get_queryset().model.__name__, + "instance_id": instance.pk, + "user_id": request.user.id if request.user.is_authenticated else None, + }, + ) + + return api_response(serializer.data) + + def perform_update(self, serializer: Serializer[Any]) -> M: + """Выполняет обновление объекта.""" + return serializer.save() + + def destroy(self, request: Request, *args: Any, **kwargs: Any) -> Response: + """Удаление объекта.""" + instance = self.get_object() + instance_id = instance.pk + + self.perform_destroy(instance) + + logger.info( + f"Deleted {self.get_queryset().model.__name__}", + extra={ + "model": self.get_queryset().model.__name__, + "instance_id": instance_id, + "user_id": request.user.id if request.user.is_authenticated else None, + }, + ) + + return Response(status=status.HTTP_204_NO_CONTENT) + + def perform_destroy(self, instance: M) -> None: + """Выполняет удаление объекта.""" + instance.delete() + + +class ReadOnlyViewSet(viewsets.ReadOnlyModelViewSet, Generic[M]): + """ + ViewSet только для чтения. + + Предоставляет только list и retrieve действия. 
+ + Пример использования: + class PublicArticleViewSet(ReadOnlyViewSet[Article]): + queryset = Article.objects.filter(is_published=True) + serializer_class = ArticleSerializer + permission_classes = [AllowAny] + """ + + pagination_class = StandardPagination + filter_backends = [ + filters.DjangoFilterBackend, + SearchFilter, + OrderingFilter, + ] + ordering = ["-created_at"] + + def list(self, request: Request, *args: Any, **kwargs: Any) -> Response: + """Получение списка объектов.""" + queryset = self.filter_queryset(self.get_queryset()) + + page = self.paginate_queryset(queryset) + if page is not None: + serializer = self.get_serializer(page, many=True) + return self.get_paginated_response(serializer.data) + + serializer = self.get_serializer(queryset, many=True) + return api_response(serializer.data) + + def retrieve(self, request: Request, *args: Any, **kwargs: Any) -> Response: + """Получение одного объекта.""" + instance = self.get_object() + serializer = self.get_serializer(instance) + return api_response(serializer.data) + + +class OwnerViewSet(BaseViewSet[M]): + """ + ViewSet с фильтрацией по владельцу. + + Автоматически фильтрует queryset по текущему пользователю + и устанавливает владельца при создании. + + Атрибуты: + owner_field: Имя поля владельца (по умолчанию 'user') + + Пример использования: + class MyTaskViewSet(OwnerViewSet[Task]): + queryset = Task.objects.all() + serializer_class = TaskSerializer + owner_field = 'owner' # или 'user' + """ + + owner_field = "user" + + def get_queryset(self) -> QuerySet[M]: + """Фильтрует queryset по текущему пользователю.""" + queryset = super().get_queryset() + + if self.request.user.is_authenticated: + filter_kwargs = {self.owner_field: self.request.user} + queryset = queryset.filter(**filter_kwargs) + + return queryset + + def perform_create(self, serializer: Serializer[Any]) -> M: + """Устанавливает владельца при создании.""" + return serializer.save(**{self.owner_field: self.request.user}) + + +class BulkMixin: + """ + Миксин для массовых операций в ViewSet. + + Добавляет возможность создания/обновления/удаления + нескольких объектов за один запрос. + + Пример использования: + class ArticleViewSet(BulkMixin, BaseViewSet[Article]): + ... + + # POST /articles/bulk_create/ + # {"items": [{"title": "A"}, {"title": "B"}]} + + # PATCH /articles/bulk_update/ + # {"items": [{"id": 1, "title": "A updated"}, {"id": 2, "title": "B updated"}]} + + # DELETE /articles/bulk_delete/ + # {"ids": [1, 2, 3]} + + Для использования добавьте в urls.py: + from rest_framework.decorators import action + + class MyViewSet(BulkMixin, BaseViewSet[MyModel]): + @action(detail=False, methods=['post']) + def bulk_create(self, request): + return super().bulk_create(request) + + @action(detail=False, methods=['patch']) + def bulk_update(self, request): + return super().bulk_update(request) + + @action(detail=False, methods=['delete']) + def bulk_delete(self, request): + return super().bulk_delete(request) + """ + + # Максимальное количество объектов для bulk операций + bulk_max_items: int = 100 + + def bulk_create(self, request: Request) -> Response: + """ + Массовое создание объектов. 
+ + Ожидает: {"items": [{...}, {...}]} + """ + items = request.data.get("items", []) + + if not items: + return api_error_response( + [{"code": "invalid_data", "message": "Список items пуст"}], + status_code=status.HTTP_400_BAD_REQUEST, + ) + + if len(items) > self.bulk_max_items: + return api_error_response( + [ + { + "code": "too_many_items", + "message": f"Максимум {self.bulk_max_items} объектов за запрос", + } + ], + status_code=status.HTTP_400_BAD_REQUEST, + ) + + serializer = self.get_serializer(data=items, many=True) # type: ignore + serializer.is_valid(raise_exception=True) + instances = serializer.save() + + logger.info( + f"Bulk created {len(instances)} {self.get_queryset().model.__name__}", + extra={ + "model": self.get_queryset().model.__name__, + "count": len(instances), + "user_id": request.user.id if request.user.is_authenticated else None, + }, + ) + + return api_response( + self.get_serializer(instances, many=True).data, # type: ignore + status_code=status.HTTP_201_CREATED, + ) + + def bulk_update(self, request: Request) -> Response: + """ + Массовое обновление объектов. + + Ожидает: {"items": [{"id": 1, "field": "value"}, ...]} + """ + items = request.data.get("items", []) + + if not items: + return api_error_response( + [{"code": "invalid_data", "message": "Список items пуст"}], + status_code=status.HTTP_400_BAD_REQUEST, + ) + + if len(items) > self.bulk_max_items: + return api_error_response( + [ + { + "code": "too_many_items", + "message": f"Максимум {self.bulk_max_items} объектов за запрос", + } + ], + status_code=status.HTTP_400_BAD_REQUEST, + ) + + # Собираем ID и получаем объекты + ids = [item.get("id") for item in items if item.get("id")] + if not ids: + return api_error_response( + [ + { + "code": "missing_ids", + "message": "Все элементы должны содержать id", + } + ], + status_code=status.HTTP_400_BAD_REQUEST, + ) + + instances_dict = {obj.pk: obj for obj in self.get_queryset().filter(pk__in=ids)} # type: ignore + + updated_instances = [] + errors = [] + + for item in items: + item_id = item.get("id") + if item_id not in instances_dict: + errors.append({"id": item_id, "error": "Объект не найден"}) + continue + + instance = instances_dict[item_id] + serializer = self.get_serializer(instance, data=item, partial=True) # type: ignore + + if serializer.is_valid(): + serializer.save() + updated_instances.append(serializer.data) + else: + errors.append({"id": item_id, "errors": serializer.errors}) + + logger.info( + f"Bulk updated {len(updated_instances)} {self.get_queryset().model.__name__}", + extra={ + "model": self.get_queryset().model.__name__, + "count": len(updated_instances), + "errors": len(errors), + "user_id": request.user.id if request.user.is_authenticated else None, + }, + ) + + return api_response( + { + "updated": updated_instances, + "errors": errors, + } + ) + + def bulk_delete(self, request: Request) -> Response: + """ + Массовое удаление объектов. 
+ + Ожидает: {"ids": [1, 2, 3]} + """ + ids = request.data.get("ids", []) + + if not ids: + return api_error_response( + [{"code": "invalid_data", "message": "Список ids пуст"}], + status_code=status.HTTP_400_BAD_REQUEST, + ) + + if len(ids) > self.bulk_max_items: + return api_error_response( + [ + { + "code": "too_many_items", + "message": f"Максимум {self.bulk_max_items} объектов за запрос", + } + ], + status_code=status.HTTP_400_BAD_REQUEST, + ) + + queryset = self.get_queryset().filter(pk__in=ids) # type: ignore + count = queryset.count() + queryset.delete() + + logger.info( + f"Bulk deleted {count} {self.get_queryset().model.__name__}", + extra={ + "model": self.get_queryset().model.__name__, + "count": count, + "user_id": request.user.id if request.user.is_authenticated else None, + }, + ) + + return api_response({"deleted": count}) diff --git a/src/apps/user/__init__.py b/src/apps/user/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/src/apps/user/apps.py b/src/apps/user/apps.py new file mode 100644 index 0000000..7107b88 --- /dev/null +++ b/src/apps/user/apps.py @@ -0,0 +1,10 @@ +from django.apps import AppConfig + + +class UserConfig(AppConfig): + default_auto_field = "django.db.models.BigAutoField" + name = "apps.user" + verbose_name = "User Management" + + def ready(self): + import apps.user.signals # noqa diff --git a/src/apps/user/migrations/0001_initial.py b/src/apps/user/migrations/0001_initial.py new file mode 100644 index 0000000..b04bc0c --- /dev/null +++ b/src/apps/user/migrations/0001_initial.py @@ -0,0 +1,71 @@ +# Generated by Django 3.2.25 on 2026-01-19 12:19 + +from django.conf import settings +import django.contrib.auth.models +import django.contrib.auth.validators +from django.db import migrations, models +import django.db.models.deletion +import django.utils.timezone + + +class Migration(migrations.Migration): + + initial = True + + dependencies = [ + ('auth', '0012_alter_user_first_name_max_length'), + ] + + operations = [ + migrations.CreateModel( + name='User', + fields=[ + ('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), + ('password', models.CharField(max_length=128, verbose_name='password')), + ('last_login', models.DateTimeField(blank=True, null=True, verbose_name='last login')), + ('is_superuser', models.BooleanField(default=False, help_text='Designates that this user has all permissions without explicitly assigning them.', verbose_name='superuser status')), + ('username', models.CharField(error_messages={'unique': 'A user with that username already exists.'}, help_text='Required. 150 characters or fewer. Letters, digits and @/./+/-/_ only.', max_length=150, unique=True, validators=[django.contrib.auth.validators.UnicodeUsernameValidator()], verbose_name='username')), + ('first_name', models.CharField(blank=True, max_length=150, verbose_name='first name')), + ('last_name', models.CharField(blank=True, max_length=150, verbose_name='last name')), + ('is_staff', models.BooleanField(default=False, help_text='Designates whether the user can log into this admin site.', verbose_name='staff status')), + ('is_active', models.BooleanField(default=True, help_text='Designates whether this user should be treated as active. Unselect this instead of deleting accounts.', verbose_name='active')), + ('date_joined', models.DateTimeField(default=django.utils.timezone.now, verbose_name='date joined')), + ('email', models.EmailField(help_text='Required. 
Must be unique.', max_length=254, unique=True, verbose_name='email address')), + ('phone', models.CharField(blank=True, help_text='Phone number in international format', max_length=20, null=True, verbose_name='phone number')), + ('is_verified', models.BooleanField(default=False, help_text='Designates whether the user has verified their email.', verbose_name='email verified')), + ('created_at', models.DateTimeField(auto_now_add=True, verbose_name='created at')), + ('updated_at', models.DateTimeField(auto_now=True, verbose_name='updated at')), + ('groups', models.ManyToManyField(blank=True, help_text='', related_name='custom_user_set', related_query_name='custom_user', to='auth.Group', verbose_name='groups')), + ('user_permissions', models.ManyToManyField(blank=True, help_text='Specific permissions for this user.', related_name='custom_user_set', related_query_name='custom_user', to='auth.Permission', verbose_name='user permissions')), + ], + options={ + 'verbose_name': 'user', + 'verbose_name_plural': 'users', + 'db_table': 'users', + 'ordering': ['-created_at'], + }, + managers=[ + ('objects', django.contrib.auth.models.UserManager()), + ], + ), + migrations.CreateModel( + name='Profile', + fields=[ + ('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), + ('first_name', models.CharField(blank=True, max_length=50, null=True, verbose_name='first name')), + ('last_name', models.CharField(blank=True, max_length=50, null=True, verbose_name='last name')), + ('bio', models.TextField(blank=True, help_text='Short biography or description', null=True, verbose_name='bio')), + ('avatar', models.ImageField(blank=True, help_text='User avatar image', null=True, upload_to='avatars/', verbose_name='avatar')), + ('date_of_birth', models.DateField(blank=True, null=True, verbose_name='date of birth')), + ('created_at', models.DateTimeField(auto_now_add=True, verbose_name='created at')), + ('updated_at', models.DateTimeField(auto_now=True, verbose_name='updated at')), + ('user', models.OneToOneField(on_delete=django.db.models.deletion.CASCADE, related_name='profile', to=settings.AUTH_USER_MODEL, verbose_name='user')), + ], + options={ + 'verbose_name': 'profile', + 'verbose_name_plural': 'profiles', + 'db_table': 'profiles', + 'ordering': ['-created_at'], + }, + ), + ] diff --git a/src/apps/user/migrations/__init__.py b/src/apps/user/migrations/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/src/apps/user/models.py b/src/apps/user/models.py new file mode 100644 index 0000000..20bd9f9 --- /dev/null +++ b/src/apps/user/models.py @@ -0,0 +1,109 @@ +from django.contrib.auth.models import AbstractUser +from django.db import models +from django.utils.translation import gettext_lazy as _ + + +class User(AbstractUser): + """Расширенная модель пользователя""" + + # Переопределяем группы и разрешения для избежания конфликта + groups = models.ManyToManyField( + "auth.Group", + verbose_name=_("groups"), + blank=True, + help_text=_(""), + related_name="custom_user_set", + related_query_name="custom_user", + ) + user_permissions = models.ManyToManyField( + "auth.Permission", + verbose_name=_("user permissions"), + blank=True, + help_text=_("Specific permissions for this user."), + related_name="custom_user_set", + related_query_name="custom_user", + ) + + email = models.EmailField( + _("email address"), unique=True, help_text=_("Required. 
Must be unique.") + ) + + phone = models.CharField( + _("phone number"), + max_length=20, + blank=True, + null=True, + help_text=_("Phone number in international format"), + ) + + is_verified = models.BooleanField( + _("email verified"), + default=False, + help_text=_("Designates whether the user has verified their email."), + ) + + created_at = models.DateTimeField(_("created at"), auto_now_add=True) + + updated_at = models.DateTimeField(_("updated at"), auto_now=True) + + USERNAME_FIELD = "email" + REQUIRED_FIELDS = ["username"] + + class Meta: + db_table = "users" + verbose_name = _("user") + verbose_name_plural = _("users") + ordering = ["-created_at"] + + def __str__(self): + return f"{self.username} ({self.email})" + + +class Profile(models.Model): + """Профиль пользователя (OneToOne связь с User)""" + + user = models.OneToOneField( + User, on_delete=models.CASCADE, related_name="profile", verbose_name=_("user") + ) + + first_name = models.CharField(_("first name"), max_length=50, blank=True, null=True) + + last_name = models.CharField(_("last name"), max_length=50, blank=True, null=True) + + bio = models.TextField( + _("bio"), blank=True, null=True, help_text=_("Short biography or description") + ) + + avatar = models.ImageField( + _("avatar"), + upload_to="avatars/", + blank=True, + null=True, + help_text=_("User avatar image"), + ) + + date_of_birth = models.DateField(_("date of birth"), blank=True, null=True) + + created_at = models.DateTimeField(_("created at"), auto_now_add=True) + + updated_at = models.DateTimeField(_("updated at"), auto_now=True) + + class Meta: + db_table = "profiles" + verbose_name = _("profile") + verbose_name_plural = _("profiles") + ordering = ["-created_at"] + + def __str__(self): + return f"Profile of {self.user.username}" + + @property + def full_name(self): + """Полное имя пользователя""" + if self.first_name and self.last_name: + return f"{self.first_name} {self.last_name}" + elif self.first_name: + return self.first_name + elif self.last_name: + return self.last_name + return self.user.username diff --git a/src/apps/user/serializers.py b/src/apps/user/serializers.py new file mode 100644 index 0000000..a0f13f4 --- /dev/null +++ b/src/apps/user/serializers.py @@ -0,0 +1,155 @@ +from django.contrib.auth import get_user_model +from rest_framework import serializers +from rest_framework.validators import UniqueValidator + +from .models import Profile + +User = get_user_model() + + +class UserRegistrationSerializer(serializers.ModelSerializer): + """Сериализатор для регистрации пользователя""" + + email = serializers.EmailField( + validators=[UniqueValidator(queryset=User.objects.all())], + help_text="Email пользователя (уникальный)", + ) + password = serializers.CharField( + write_only=True, min_length=8, help_text="Пароль (минимум 8 символов)" + ) + password_confirm = serializers.CharField( + write_only=True, min_length=8, help_text="Подтверждение пароля" + ) + + class Meta: + model = User + fields = ("email", "username", "password", "password_confirm", "phone") + extra_kwargs = { + "username": { + "validators": [UniqueValidator(queryset=User.objects.all())], + "help_text": "Username пользователя (уникальный)", + } + } + + def validate(self, attrs): + if attrs["password"] != attrs["password_confirm"]: + raise serializers.ValidationError("Пароли не совпадают") + return attrs + + def create(self, validated_data): + validated_data.pop("password_confirm") + password = validated_data.pop("password") + user = User.objects.create_user(**validated_data) + 
user.set_password(password) + user.save() + return user + + +class UserProfileSerializer(serializers.ModelSerializer): + """Сериализатор для профиля пользователя""" + + full_name = serializers.ReadOnlyField(help_text="Полное имя") + avatar = serializers.ImageField(required=False, allow_null=True) + + class Meta: + model = Profile + fields = ( + "id", + "first_name", + "last_name", + "full_name", + "bio", + "avatar", + "date_of_birth", + ) + read_only_fields = ("id",) + + +class UserSerializer(serializers.ModelSerializer): + """Сериализатор для пользователя""" + + profile = UserProfileSerializer(read_only=True) + + class Meta: + model = User + fields = ( + "id", + "email", + "username", + "phone", + "is_verified", + "profile", + "created_at", + "updated_at", + ) + read_only_fields = ("id", "is_verified", "created_at", "updated_at") + + +class UserUpdateSerializer(serializers.ModelSerializer): + """Сериализатор для обновления данных пользователя""" + + class Meta: + model = User + fields = ("username", "phone") + + +class ProfileUpdateSerializer(serializers.ModelSerializer): + """Сериализатор для обновления профиля""" + + class Meta: + model = Profile + fields = ("first_name", "last_name", "bio", "avatar", "date_of_birth") + + +class LoginSerializer(serializers.Serializer): + """Сериализатор для входа""" + + email = serializers.EmailField(help_text="Email пользователя") + password = serializers.CharField(help_text="Пароль") + + +class TokenSerializer(serializers.Serializer): + """Сериализатор для токенов""" + + access = serializers.CharField(help_text="Access token") + refresh = serializers.CharField(help_text="Refresh token") + + +class PasswordChangeSerializer(serializers.Serializer): + """Сериализатор для смены пароля""" + + old_password = serializers.CharField(help_text="Старый пароль") + new_password = serializers.CharField( + min_length=8, help_text="Новый пароль (минимум 8 символов)" + ) + new_password_confirm = serializers.CharField( + min_length=8, help_text="Подтверждение нового пароля" + ) + + def validate(self, attrs): + if attrs["new_password"] != attrs["new_password_confirm"]: + raise serializers.ValidationError("Новые пароли не совпадают") + return attrs + + +class PasswordResetRequestSerializer(serializers.Serializer): + """Сериализатор для запроса сброса пароля""" + + email = serializers.EmailField(help_text="Email пользователя") + + +class PasswordResetConfirmSerializer(serializers.Serializer): + """Сериализатор для подтверждения сброса пароля""" + + token = serializers.CharField(help_text="Токен сброса") + new_password = serializers.CharField( + min_length=8, help_text="Новый пароль (минимум 8 символов)" + ) + new_password_confirm = serializers.CharField( + min_length=8, help_text="Подтверждение нового пароля" + ) + + def validate(self, attrs): + if attrs["new_password"] != attrs["new_password_confirm"]: + raise serializers.ValidationError("Новые пароли не совпадают") + return attrs diff --git a/src/apps/user/services.py b/src/apps/user/services.py new file mode 100644 index 0000000..3398035 --- /dev/null +++ b/src/apps/user/services.py @@ -0,0 +1,240 @@ +from typing import Any + +from apps.core.exceptions import NotFoundError +from django.contrib.auth import get_user_model +from django.db import transaction +from rest_framework_simplejwt.tokens import RefreshToken + +from .models import Profile + +User = get_user_model() + + +class UserService: + """Сервисный слой для работы с пользователями""" + + @classmethod + def create_user( + cls, *, email: str, username: str, 
password: str, **extra_fields + ) -> User: + """ + Создает нового пользователя + + Args: + email: Email пользователя + username: Username пользователя + password: Пароль + **extra_fields: Дополнительные поля + + Returns: + User: Созданный пользователь + + Raises: + ValidationError: При некорректных данных + """ + with transaction.atomic(): + user = User.objects.create_user( + email=email, username=username, password=password, **extra_fields + ) + return user + + @classmethod + def get_user_by_email(cls, email: str) -> User: + """Получает пользователя по email + + Raises: + NotFoundError: Если пользователь не найден + """ + try: + return User.objects.get(email=email) + except User.DoesNotExist as e: + raise NotFoundError( + message=f"User with email={email} not found", + details={"email": email}, + ) from e + + @classmethod + def get_user_by_email_or_none(cls, email: str) -> User | None: + """Получает пользователя по email или None""" + try: + return User.objects.get(email=email) + except User.DoesNotExist: + return None + + @classmethod + def get_user_by_id(cls, user_id: int) -> User: + """Получает пользователя по ID + + Raises: + NotFoundError: Если пользователь не найден + """ + try: + return User.objects.get(id=user_id) + except User.DoesNotExist as e: + raise NotFoundError( + message=f"User with id={user_id} not found", + details={"user_id": user_id}, + ) from e + + @classmethod + def get_user_by_id_or_none(cls, user_id: int) -> User | None: + """Получает пользователя по ID или None""" + try: + return User.objects.get(id=user_id) + except User.DoesNotExist: + return None + + @classmethod + def update_user(cls, user_id: int, **fields) -> User: + """ + Обновляет данные пользователя + + Args: + user_id: ID пользователя + **fields: Поля для обновления + + Returns: + User: Обновленный пользователь + + Raises: + NotFoundError: Если пользователь не найден + """ + user = cls.get_user_by_id(user_id) + + for field, value in fields.items(): + setattr(user, field, value) + + user.save() + return user + + @classmethod + def delete_user(cls, user_id: int) -> None: + """ + Удаляет пользователя + + Args: + user_id: ID пользователя + + Raises: + NotFoundError: Если пользователь не найден + """ + user = cls.get_user_by_id(user_id) + user.delete() + + @classmethod + def get_tokens_for_user(cls, user: User) -> dict[str, str]: + """ + Генерирует JWT токены для пользователя + + Args: + user: Пользователь + + Returns: + Dict[str, str]: refresh и access токены + """ + refresh = RefreshToken.for_user(user) + return { + "refresh": str(refresh), + "access": str(refresh.access_token), + } + + @classmethod + def verify_email(cls, user_id: int) -> User: + """ + Подтверждает email пользователя + + Args: + user_id: ID пользователя + + Returns: + User: Обновленный пользователь + + Raises: + NotFoundError: Если пользователь не найден + """ + user = cls.get_user_by_id(user_id) + user.is_verified = True + user.save() + return user + + +class ProfileService: + """Сервисный слой для работы с профилями""" + + @classmethod + def get_profile_by_user_id(cls, user_id: int) -> Profile: + """Получает профиль по ID пользователя + + Raises: + NotFoundError: Если профиль не найден + """ + try: + return Profile.objects.select_related("user").get(user_id=user_id) + except Profile.DoesNotExist as e: + raise NotFoundError( + message=f"Profile for user_id={user_id} not found", + details={"user_id": user_id}, + ) from e + + @classmethod + def get_profile_by_user_id_or_none(cls, user_id: int) -> Profile | None: + """Получает профиль по ID 
пользователя или None""" + try: + return Profile.objects.select_related("user").get(user_id=user_id) + except Profile.DoesNotExist: + return None + + @classmethod + def update_profile(cls, user_id: int, **fields) -> Profile: + """ + Обновляет профиль пользователя + + Args: + user_id: ID пользователя + **fields: Поля для обновления + + Returns: + Profile: Обновленный профиль + + Raises: + NotFoundError: Если профиль не найден + """ + profile = cls.get_profile_by_user_id(user_id) + + for field, value in fields.items(): + setattr(profile, field, value) + + profile.save() + return profile + + @classmethod + def get_full_profile_data(cls, user_id: int) -> dict[str, Any]: + """ + Получает полные данные пользователя и профиля + + Args: + user_id: ID пользователя + + Returns: + Dict: Полные данные + + Raises: + NotFoundError: Если профиль не найден + """ + profile = cls.get_profile_by_user_id(user_id) + user = profile.user + return { + "id": user.id, + "email": user.email, + "username": user.username, + "is_verified": user.is_verified, + "phone": user.phone, + "first_name": profile.first_name, + "last_name": profile.last_name, + "full_name": profile.full_name, + "bio": profile.bio, + "avatar": profile.avatar.url if profile.avatar else None, + "date_of_birth": profile.date_of_birth, + "created_at": user.created_at, + "updated_at": user.updated_at, + } diff --git a/src/apps/user/signals.py b/src/apps/user/signals.py new file mode 100644 index 0000000..7959fc5 --- /dev/null +++ b/src/apps/user/signals.py @@ -0,0 +1,25 @@ +from django.contrib.auth import get_user_model +from django.db.models.signals import post_save +from django.dispatch import receiver + +from .models import Profile + +User = get_user_model() + + +@receiver(post_save, sender=User) +def create_user_profile(sender, instance, created, **kwargs): + """ + Автоматически создает профиль при создании пользователя + """ + if created: + Profile.objects.create(user=instance) + + +@receiver(post_save, sender=User) +def save_user_profile(sender, instance, **kwargs): + """ + Сохраняет профиль при сохранении пользователя + """ + if hasattr(instance, "profile"): + instance.profile.save() diff --git a/src/apps/user/urls.py b/src/apps/user/urls.py new file mode 100644 index 0000000..8de1a0a --- /dev/null +++ b/src/apps/user/urls.py @@ -0,0 +1,24 @@ +from django.urls import path +from rest_framework_simplejwt.views import TokenVerifyView + +from . 
import views + +app_name = "user" + +urlpatterns = [ + # Аутентификация + path("register/", views.RegisterView.as_view(), name="register"), + path("login/", views.LoginView.as_view(), name="login"), + path("logout/", views.LogoutView.as_view(), name="logout"), + path("token/refresh/", views.TokenRefreshView.as_view(), name="token_refresh"), + path("token/verify/", TokenVerifyView.as_view(), name="token_verify"), + # Пользовательские данные + path("me/", views.CurrentUserView.as_view(), name="current_user"), + path("me/update/", views.UserUpdateView.as_view(), name="user_update"), + path("profile/", views.ProfileDetailView.as_view(), name="profile_detail"), + path("profile/full/", views.user_profile_detail, name="profile_full"), + # Безопасность + path( + "password/change/", views.PasswordChangeView.as_view(), name="password_change" + ), +] diff --git a/src/apps/user/views.py b/src/apps/user/views.py new file mode 100644 index 0000000..0259014 --- /dev/null +++ b/src/apps/user/views.py @@ -0,0 +1,290 @@ +from django.contrib.auth import authenticate +from django.contrib.auth.hashers import check_password +from drf_yasg import openapi +from drf_yasg.utils import swagger_auto_schema +from rest_framework import generics, status +from rest_framework.decorators import api_view, permission_classes +from rest_framework.permissions import AllowAny, IsAuthenticated +from rest_framework.response import Response +from rest_framework.views import APIView +from rest_framework_simplejwt.tokens import RefreshToken + +from .serializers import ( + LoginSerializer, + PasswordChangeSerializer, + ProfileUpdateSerializer, + TokenSerializer, + UserRegistrationSerializer, + UserSerializer, + UserUpdateSerializer, +) +from .services import ProfileService, UserService + + +class RegisterView(APIView): + """Регистрация нового пользователя""" + + permission_classes = [AllowAny] + + @swagger_auto_schema( + request_body=UserRegistrationSerializer, responses={201: UserSerializer} + ) + def post(self, request): + serializer = UserRegistrationSerializer(data=request.data) + if serializer.is_valid(): + # Убираем password_confirm из данных для создания пользователя + user_data = serializer.validated_data.copy() + user_data.pop("password_confirm", None) + + user = UserService.create_user(**user_data) + user_serializer = UserSerializer(user) + tokens = UserService.get_tokens_for_user(user) + + return Response( + {"user": user_serializer.data, "tokens": tokens}, + status=status.HTTP_201_CREATED, + ) + + return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST) + + +class LoginView(APIView): + """Вход пользователя""" + + permission_classes = [AllowAny] + + @swagger_auto_schema(request_body=LoginSerializer, responses={200: TokenSerializer}) + def post(self, request): + serializer = LoginSerializer(data=request.data) + if serializer.is_valid(): + email = serializer.validated_data["email"] + password = serializer.validated_data["password"] + + user = authenticate(email=email, password=password) + if user: + tokens = UserService.get_tokens_for_user(user) + return Response(tokens, status=status.HTTP_200_OK) + else: + return Response( + {"error": "Неверные учетные данные"}, + status=status.HTTP_401_UNAUTHORIZED, + ) + + return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST) + + +class LogoutView(APIView): + """Выход пользователя""" + + permission_classes = [IsAuthenticated] + + @swagger_auto_schema( + manual_parameters=[ + openapi.Parameter( + "Authorization", + openapi.IN_HEADER, + description="Bearer ", 
+ type=openapi.TYPE_STRING, + required=True, + ) + ], + responses={200: "Успешный выход"}, + ) + def post(self, request): + refresh_token = request.data.get("refresh") + if not refresh_token: + return Response( + {"error": "Refresh token обязателен"}, + status=status.HTTP_400_BAD_REQUEST, + ) + try: + token = RefreshToken(refresh_token) + token.blacklist() + except Exception: + return Response( + {"error": "Неверный токен"}, status=status.HTTP_400_BAD_REQUEST + ) + return Response({"message": "Успешный выход"}, status=status.HTTP_200_OK) + + +class CurrentUserView(APIView): + """Получение данных текущего пользователя""" + + permission_classes = [IsAuthenticated] + + @swagger_auto_schema( + manual_parameters=[ + openapi.Parameter( + "Authorization", + openapi.IN_HEADER, + description="Bearer ", + type=openapi.TYPE_STRING, + required=True, + ) + ], + responses={200: UserSerializer}, + ) + def get(self, request): + serializer = UserSerializer(request.user) + return Response(serializer.data) + + +class UserUpdateView(APIView): + """Обновление данных пользователя""" + + permission_classes = [IsAuthenticated] + + @swagger_auto_schema( + request_body=UserUpdateSerializer, + manual_parameters=[ + openapi.Parameter( + "Authorization", + openapi.IN_HEADER, + description="Bearer ", + type=openapi.TYPE_STRING, + required=True, + ) + ], + responses={200: UserSerializer}, + ) + def patch(self, request): + serializer = UserUpdateSerializer(request.user, data=request.data, partial=True) + if serializer.is_valid(): + user = UserService.update_user(request.user.id, **serializer.validated_data) + user_serializer = UserSerializer(user) + return Response(user_serializer.data) + return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST) + + +class ProfileDetailView(generics.RetrieveUpdateAPIView): + """Получение и обновление профиля пользователя""" + + permission_classes = [IsAuthenticated] + serializer_class = ProfileUpdateSerializer + + def get_object(self): + profile = ProfileService.get_profile_by_user_id_or_none(self.request.user.id) + if not profile: + # Если профиль не существует, создаем его + from .models import Profile + + profile = Profile.objects.create(user=self.request.user) + return profile + + @swagger_auto_schema( + manual_parameters=[ + openapi.Parameter( + "Authorization", + openapi.IN_HEADER, + description="Bearer ", + type=openapi.TYPE_STRING, + required=True, + ) + ] + ) + def get(self, request, *args, **kwargs): + profile = self.get_object() + serializer = self.get_serializer(profile) + return Response(serializer.data) + + @swagger_auto_schema( + request_body=ProfileUpdateSerializer, + manual_parameters=[ + openapi.Parameter( + "Authorization", + openapi.IN_HEADER, + description="Bearer ", + type=openapi.TYPE_STRING, + required=True, + ) + ], + ) + def patch(self, request, *args, **kwargs): + profile = self.get_object() + serializer = self.get_serializer(profile, data=request.data, partial=True) + if serializer.is_valid(): + updated_profile = ProfileService.update_profile( + request.user.id, **serializer.validated_data + ) + return Response(ProfileUpdateSerializer(updated_profile).data) + return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST) + + +class PasswordChangeView(APIView): + """Смена пароля""" + + permission_classes = [IsAuthenticated] + + @swagger_auto_schema( + request_body=PasswordChangeSerializer, + manual_parameters=[ + openapi.Parameter( + "Authorization", + openapi.IN_HEADER, + description="Bearer ", + type=openapi.TYPE_STRING, + required=True, + ) + ], + responses={200: "Пароль успешно изменен"}, + ) + def post(self, request): + serializer =
PasswordChangeSerializer(data=request.data) + if serializer.is_valid(): + user = request.user + old_password = serializer.validated_data["old_password"] + + if check_password(old_password, user.password): + new_password = serializer.validated_data["new_password"] + user.set_password(new_password) + user.save() + return Response( + {"message": "Пароль успешно изменен"}, status=status.HTTP_200_OK + ) + else: + return Response( + {"error": "Неверный старый пароль"}, + status=status.HTTP_400_BAD_REQUEST, + ) + + return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST) + + +@api_view(["GET"]) +@permission_classes([IsAuthenticated]) +def user_profile_detail(request): + """Получение полных данных профиля пользователя""" + profile_data = ProfileService.get_full_profile_data(request.user.id) + return Response(profile_data) + + +class TokenRefreshView(APIView): + """Обновление access токена через refresh токен""" + + permission_classes = [AllowAny] + + @swagger_auto_schema( + request_body=openapi.Schema( + type=openapi.TYPE_OBJECT, + properties={ + "refresh": openapi.Schema( + type=openapi.TYPE_STRING, description="Refresh token" + ) + }, + required=["refresh"], + ), + responses={200: TokenSerializer}, + ) + def post(self, request): + refresh_token = request.data.get("refresh") + if not refresh_token: + return Response( + {"error": "Refresh token обязателен"}, + status=status.HTTP_400_BAD_REQUEST, + ) + + try: + refresh = RefreshToken(refresh_token) + return Response( + {"access": str(refresh.access_token), "refresh": str(refresh)} + ) + except Exception: + return Response( + {"error": "Неверный refresh token"}, status=status.HTTP_401_UNAUTHORIZED + ) diff --git a/src/config/__init__.py b/src/config/__init__.py new file mode 100644 index 0000000..53f4ccb --- /dev/null +++ b/src/config/__init__.py @@ -0,0 +1,3 @@ +from .celery import app as celery_app + +__all__ = ("celery_app",) diff --git a/src/config/api_v1_urls.py b/src/config/api_v1_urls.py new file mode 100644 index 0000000..a3abefd --- /dev/null +++ b/src/config/api_v1_urls.py @@ -0,0 +1,20 @@ +""" +API v1 URL configuration. + +All API endpoints are versioned under /api/v1/ +""" + +from apps.core.views import BackgroundJobListView, BackgroundJobStatusView +from django.urls import include, path + +app_name = "api_v1" + +jobs_urlpatterns = [ + path("", BackgroundJobListView.as_view(), name="job-list"), + path("/", BackgroundJobStatusView.as_view(), name="job-status"), +] + +urlpatterns = [ + path("users/", include("apps.user.urls")), + path("jobs/", include((jobs_urlpatterns, "jobs"))), +] diff --git a/src/config/asgi.py b/src/config/asgi.py new file mode 100644 index 0000000..c86952c --- /dev/null +++ b/src/config/asgi.py @@ -0,0 +1,16 @@ +""" +ASGI config for the project. + +It exposes the ASGI callable as a module-level variable named ``application``. + +For more information on this file, see +https://docs.djangoproject.com/en/3.2/howto/deployment/asgi/ +""" + +import os + +from django.core.asgi import get_asgi_application + +os.environ.setdefault("DJANGO_SETTINGS_MODULE", "config.settings.production") + +application = get_asgi_application() diff --git a/src/config/celery.py b/src/config/celery.py new file mode 100644 index 0000000..a45ae92 --- /dev/null +++ b/src/config/celery.py @@ -0,0 +1,42 @@ +""" +Celery configuration for the project. + +This module contains Celery configuration and task registration. +""" + +import os + +from celery import Celery + +# Set the default Django settings module for the 'celery' program. 
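+# NOTE: unlike config/wsgi.py and config/asgi.py (which default to the production settings),
+# the worker falls back to the development settings, so set DJANGO_SETTINGS_MODULE explicitly
+# in production environments.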
+os.environ.setdefault("DJANGO_SETTINGS_MODULE", "config.settings.development") + +app = Celery("project") + +# Using a string here means the worker doesn't have to serialize +# the configuration object to child processes. +# - namespace='CELERY' means all celery-related configuration keys +# should have a `CELERY_` prefix. +app.config_from_object("django.conf:settings", namespace="CELERY") + +# Load task modules from all registered Django apps. +app.autodiscover_tasks() + +# Configure Celery Beat schedule +app.conf.beat_schedule = { + "check-pending-scraping-jobs": { + "task": "apps.scraping.tasks.check_pending_jobs", + "schedule": 300.0, # Every 5 minutes + }, + "process-extracted-data": { + "task": "apps.data_processor.tasks.process_extracted_data", + "schedule": 600.0, # Every 10 minutes + }, +} + +app.conf.timezone = "UTC" + + +@app.task(bind=True) +def debug_task(self): + print(f"Request: {self.request!r}") diff --git a/src/config/settings/__init__.py b/src/config/settings/__init__.py new file mode 100644 index 0000000..18c525d --- /dev/null +++ b/src/config/settings/__init__.py @@ -0,0 +1,9 @@ +""" +Django settings module. +""" + +# This will be overridden by the specific settings file +try: + from .development import * +except ImportError: + from .base import * diff --git a/src/config/settings/base.py b/src/config/settings/base.py new file mode 100644 index 0000000..60f6b50 --- /dev/null +++ b/src/config/settings/base.py @@ -0,0 +1,271 @@ +""" +Base settings for Django project. + +Generated by 'django-admin startproject' using Django 3.2.25. +""" + +from pathlib import Path + +from decouple import Config, RepositoryEnv + +# Build paths inside the project like this: BASE_DIR / 'subdir'. +BASE_DIR = Path(__file__).resolve().parent.parent.parent + +# Application version +APP_VERSION = "1.0.0" + +# Load environment variables +ENV_FILE = BASE_DIR / ".env" +if ENV_FILE.exists(): + config = Config(RepositoryEnv(str(ENV_FILE))) +else: + from decouple import AutoConfig + + config = AutoConfig(search_path=BASE_DIR) + + +# Helper function for getting config values +def get_env(key, default=None): + return config(key, default=default) + + +# SECURITY WARNING: keep the secret key used in production secret! +SECRET_KEY = get_env( + "SECRET_KEY", "django-insecure-development-key-change-in-production" +) + +# SECURITY WARNING: don't run with debug turned on in production! 
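+# Values read from the environment arrive as strings, so DEBUG and ALLOWED_HOSTS are
+# normalised from their string form below.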
+DEBUG = get_env("DEBUG", True) +if isinstance(DEBUG, str): + DEBUG = DEBUG.lower() in ("true", "1", "yes") + +ALLOWED_HOSTS = get_env("ALLOWED_HOSTS", "localhost,127.0.0.1") +if isinstance(ALLOWED_HOSTS, str): + ALLOWED_HOSTS = ALLOWED_HOSTS.split(",") + +# Application definition +INSTALLED_APPS = [ + "django.contrib.admin", + "django.contrib.auth", + "django.contrib.contenttypes", + "django.contrib.sessions", + "django.contrib.messages", + "django.contrib.staticfiles", + # Third-party apps + "rest_framework", + "rest_framework_simplejwt.token_blacklist",  # required for RefreshToken.blacklist() on logout + "django_filters", + "corsheaders", + "django_celery_beat", + "django_celery_results", + "drf_yasg", + # Local apps + "apps.core", + "apps.user", +] + +MIDDLEWARE = [ + "apps.core.middleware.RequestIDMiddleware", + "corsheaders.middleware.CorsMiddleware", + "django.middleware.security.SecurityMiddleware", + "django.contrib.sessions.middleware.SessionMiddleware", + "django.middleware.common.CommonMiddleware", + "django.middleware.csrf.CsrfViewMiddleware", + "django.contrib.auth.middleware.AuthenticationMiddleware", + "django.contrib.messages.middleware.MessageMiddleware", + "django.middleware.clickjacking.XFrameOptionsMiddleware", +] + +ROOT_URLCONF = "config.urls" + +TEMPLATES = [ + { + "BACKEND": "django.template.backends.django.DjangoTemplates", + "DIRS": [BASE_DIR / "templates"], + "APP_DIRS": True, + "OPTIONS": { + "context_processors": [ + "django.template.context_processors.debug", + "django.template.context_processors.request", + "django.contrib.auth.context_processors.auth", + "django.contrib.messages.context_processors.messages", + ], + }, + }, +] + +WSGI_APPLICATION = "config.wsgi.application" + +# Database +DATABASES = { + "default": { + "ENGINE": "django.db.backends.postgresql", + "NAME": get_env("POSTGRES_DB", "project_db"), + "USER": get_env("POSTGRES_USER", "project_user"), + "PASSWORD": get_env("POSTGRES_PASSWORD", "project_password"), + "HOST": get_env("POSTGRES_HOST", "db"), + "PORT": int(get_env("POSTGRES_PORT", "5432")), + "OPTIONS": { + "client_encoding": "UTF8",  # "charset" is a MySQL-only option and is rejected by PostgreSQL + }, + }, +} + +# Cache configuration +CACHES = { + "default": { + "BACKEND": "django_redis.cache.RedisCache", + "LOCATION": get_env("REDIS_URL", "redis://localhost:6379/0"), + "OPTIONS": { + "CLIENT_CLASS": "django_redis.client.DefaultClient", + }, + }, +} + + +# Password validation +AUTH_PASSWORD_VALIDATORS = [ + { + "NAME": "django.contrib.auth.password_validation.UserAttributeSimilarityValidator", + }, + { + "NAME": "django.contrib.auth.password_validation.MinimumLengthValidator", + }, + { + "NAME": "django.contrib.auth.password_validation.CommonPasswordValidator", + }, + { + "NAME": "django.contrib.auth.password_validation.NumericPasswordValidator", + }, +] + +# Internationalization +LANGUAGE_CODE = "ru-RU" +TIME_ZONE = "UTC" +USE_I18N = True +USE_L10N = True +USE_TZ = True + +# Static files (CSS, JavaScript, Images) +STATIC_URL = "/static/" +STATIC_ROOT = BASE_DIR / "staticfiles" +STATICFILES_DIRS = [BASE_DIR / "static"] + +# Media files +MEDIA_URL = "/media/" +MEDIA_ROOT = BASE_DIR / "media" + +# Default primary key field type +DEFAULT_AUTO_FIELD = "django.db.models.BigAutoField" + +# Custom user model +AUTH_USER_MODEL = "user.User" + +# REST Framework settings +REST_FRAMEWORK = { + "DEFAULT_AUTHENTICATION_CLASSES": [ + "rest_framework_simplejwt.authentication.JWTAuthentication", + "rest_framework.authentication.SessionAuthentication", + ], + "DEFAULT_PERMISSION_CLASSES": [ + "rest_framework.permissions.IsAuthenticatedOrReadOnly", + ], + "DEFAULT_FILTER_BACKENDS": [
"django_filters.rest_framework.DjangoFilterBackend", + "rest_framework.filters.SearchFilter", + "rest_framework.filters.OrderingFilter", + ], + "DEFAULT_PAGINATION_CLASS": "apps.core.pagination.StandardPagination", + "PAGE_SIZE": 20, + "DEFAULT_RENDERER_CLASSES": [ + "rest_framework.renderers.JSONRenderer", + "rest_framework.renderers.BrowsableAPIRenderer", + ], + "EXCEPTION_HANDLER": "apps.core.exception_handler.custom_exception_handler", + # Rate limiting + "DEFAULT_THROTTLE_CLASSES": [ + "rest_framework.throttling.AnonRateThrottle", + "rest_framework.throttling.UserRateThrottle", + ], + "DEFAULT_THROTTLE_RATES": { + "anon": "100/hour", + "user": "1000/hour", + }, +} + +# JWT settings +from datetime import timedelta + +SIMPLE_JWT = { + "ACCESS_TOKEN_LIFETIME": timedelta(minutes=60), + "REFRESH_TOKEN_LIFETIME": timedelta(days=7), + "ROTATE_REFRESH_TOKENS": True, + "BLACKLIST_AFTER_ROTATION": True, + "UPDATE_LAST_LOGIN": True, + "ALGORITHM": "HS256", + "SIGNING_KEY": SECRET_KEY, + "VERIFYING_KEY": None, + "AUDIENCE": None, + "ISSUER": None, + "JWK_URL": None, + "LEEWAY": 0, + "AUTH_HEADER_TYPES": ("Bearer",), + "AUTH_HEADER_NAME": "HTTP_AUTHORIZATION", + "USER_ID_FIELD": "id", + "USER_ID_CLAIM": "user_id", + "USER_AUTHENTICATION_RULE": "rest_framework_simplejwt.authentication.default_user_authentication_rule", + "AUTH_TOKEN_CLASSES": ("rest_framework_simplejwt.tokens.AccessToken",), + "TOKEN_TYPE_CLAIM": "token_type", + "TOKEN_USER_CLASS": "rest_framework_simplejwt.models.TokenUser", + "JTI_CLAIM": "jti", + "SLIDING_TOKEN_REFRESH_EXP_CLAIM": "refresh_exp", + "SLIDING_TOKEN_LIFETIME": timedelta(minutes=5), + "SLIDING_TOKEN_REFRESH_LIFETIME": timedelta(days=1), +} + +# CORS settings +CORS_ALLOWED_ORIGINS = get_env( + "CORS_ALLOWED_ORIGINS", "http://localhost:3000,http://127.0.0.1:3000" +) +if isinstance(CORS_ALLOWED_ORIGINS, str): + CORS_ALLOWED_ORIGINS = CORS_ALLOWED_ORIGINS.split(",") +CORS_ALLOW_CREDENTIALS = True + +# Logging configuration +LOGGING = { + "version": 1, + "disable_existing_loggers": False, + "formatters": { + "verbose": { + "format": "{levelname} {asctime} {module} {process:d} {thread:d} {message}", + "style": "{", + }, + "simple": { + "format": "{levelname} {message}", + "style": "{", + }, + }, + "handlers": { + "file": { + "level": "INFO", + "class": "logging.FileHandler", + "filename": BASE_DIR / "logs/django.log", + "formatter": "verbose", + }, + "console": { + "level": "INFO", + "class": "logging.StreamHandler", + "formatter": "simple", + }, + }, + "root": { + "handlers": ["console", "file"], + "level": "INFO", + }, + "loggers": { + "django": { + "handlers": ["console", "file"], + "level": "INFO", + "propagate": False, + }, + }, +} diff --git a/src/config/settings/development.py b/src/config/settings/development.py new file mode 100644 index 0000000..b841587 --- /dev/null +++ b/src/config/settings/development.py @@ -0,0 +1,46 @@ +from .base import * + +# Development settings - unsuitable for production +# See https://docs.djangoproject.com/en/3.2/howto/deployment/checklist/ + +# SECURITY WARNING: keep the secret key used in production secret! +SECRET_KEY = "django-insecure-development-key-change-in-production" + +# SECURITY WARNING: don't run with debug turned on in production! 
+DEBUG = True + +ALLOWED_HOSTS = ["localhost", "127.0.0.1", "0.0.0.0", "testserver"] + +# Database for development +DATABASES = { + "default": { + "ENGINE": "django.db.backends.postgresql", + "NAME": "project_dev", + "USER": "postgres", + "PASSWORD": "postgres", + "HOST": "localhost", + "PORT": "5432", + } +} + +# Celery Configuration for Development +CELERY_BROKER_URL = "redis://localhost:6379/0" +CELERY_RESULT_BACKEND = "redis://localhost:6379/0" +CELERY_ACCEPT_CONTENT = ["json"] +CELERY_TASK_SERIALIZER = "json" +CELERY_RESULT_SERIALIZER = "json" +CELERY_TIMEZONE = "UTC" + +# Email backend for development +EMAIL_BACKEND = "django.core.mail.backends.console.EmailBackend" + +# Cache configuration for development +CACHES = { + "default": { + "BACKEND": "django_redis.cache.RedisCache", + "LOCATION": "redis://127.0.0.1:6379/1", + "OPTIONS": { + "CLIENT_CLASS": "django_redis.client.DefaultClient", + }, + } +} diff --git a/src/config/settings/production.py b/src/config/settings/production.py new file mode 100644 index 0000000..a85e324 --- /dev/null +++ b/src/config/settings/production.py @@ -0,0 +1,112 @@ +import os + +from .base import * + +# Production settings +# See https://docs.djangoproject.com/en/3.2/howto/deployment/checklist/ + +# SECURITY WARNING: keep the secret key used in production secret! +SECRET_KEY = os.getenv("SECRET_KEY") + +# SECURITY WARNING: don't run with debug turned on in production! +DEBUG = False + +ALLOWED_HOSTS = os.getenv("ALLOWED_HOSTS", "").split(",") + +# HTTPS settings +SECURE_SSL_REDIRECT = True +SECURE_PROXY_SSL_HEADER = ("HTTP_X_FORWARDED_PROTO", "https") +SECURE_HSTS_SECONDS = 31536000 +SECURE_HSTS_INCLUDE_SUBDOMAINS = True +SECURE_HSTS_PRELOAD = True + +# Session security +SESSION_COOKIE_SECURE = True +CSRF_COOKIE_SECURE = True + +# Database for production +DATABASES = { + "default": { + "ENGINE": "django.db.backends.postgresql", + "NAME": os.getenv("POSTGRES_DB"), + "USER": os.getenv("POSTGRES_USER"), + "PASSWORD": os.getenv("POSTGRES_PASSWORD"), + "HOST": os.getenv("POSTGRES_HOST"), + "PORT": os.getenv("POSTGRES_PORT", "5432"), + "OPTIONS": { + "sslmode": "require", + }, + } +} + +# Celery Configuration for Production +CELERY_BROKER_URL = os.getenv("REDIS_URL", "redis://redis:6379/0") +CELERY_RESULT_BACKEND = os.getenv("REDIS_URL", "redis://redis:6379/0") +CELERY_ACCEPT_CONTENT = ["json"] +CELERY_TASK_SERIALIZER = "json" +CELERY_RESULT_SERIALIZER = "json" +CELERY_TIMEZONE = "UTC" +CELERY_TASK_ALWAYS_EAGER = False +CELERY_WORKER_PREFETCH_MULTIPLIER = 1 + +# Cache configuration for production +CACHES = { + "default": { + "BACKEND": "django_redis.cache.RedisCache", + "LOCATION": os.getenv("REDIS_CACHE_URL", "redis://redis:6379/1"), + "OPTIONS": { + "CLIENT_CLASS": "django_redis.client.DefaultClient", + "CONNECTION_POOL_KWARGS": { + "max_connections": 20, + "retry_on_timeout": True, + }, + }, + } +} + +# Logging for production +LOGGING = { + "version": 1, + "disable_existing_loggers": False, + "formatters": { + "verbose": { + "format": "{levelname} {asctime} {module} {process:d} {thread:d} {message}", + "style": "{", + }, + }, + "handlers": { + "file": { + "level": "INFO", + "class": "logging.handlers.RotatingFileHandler", + "filename": "/var/log/django/app.log", + "maxBytes": 1024 * 1024 * 15, # 15MB + "backupCount": 10, + "formatter": "verbose", + }, + "mail_admins": { + "level": "ERROR", + "class": "django.utils.log.AdminEmailHandler", + }, + }, + "root": { + "handlers": ["file"], + "level": "INFO", + }, + "loggers": { + "django": { + "handlers": ["file"], + "level":
"INFO", + "propagate": False, + }, + "apps.data_processor": { + "handlers": ["file"], + "level": "INFO", + "propagate": False, + }, + "apps.scraping": { + "handlers": ["file"], + "level": "INFO", + "propagate": False, + }, + }, +} diff --git a/src/config/settings/test.py b/src/config/settings/test.py new file mode 100644 index 0000000..2e68d19 --- /dev/null +++ b/src/config/settings/test.py @@ -0,0 +1,105 @@ +from .base import * + +# Test settings +SECRET_KEY = "django-insecure-test-key-only-for-testing" # noqa: S105 + +DEBUG = True + +ALLOWED_HOSTS = ["localhost", "127.0.0.1", "0.0.0.0", "testserver"] # noqa: S104 + +# Use in-memory SQLite database for faster tests +DATABASES = { + "default": { + "ENGINE": "django.db.backends.sqlite3", + "NAME": ":memory:", + "TEST": { + "NAME": ":memory:", + }, + } +} + + +# Disable migrations for faster tests +class DisableMigrations: + def __contains__(self, item): + return True + + def __getitem__(self, item): + return None + + +MIGRATION_MODULES = DisableMigrations() + +# Cache configuration for tests (use local memory) +CACHES = { + "default": { + "BACKEND": "django.core.cache.backends.locmem.LocMemCache", + } +} + +# Email backend for tests +EMAIL_BACKEND = "django.core.mail.backends.locmem.EmailBackend" + +# Celery Configuration for Tests (use eager execution) +CELERY_TASK_ALWAYS_EAGER = True +CELERY_TASK_EAGER_PROPAGATES = True +CELERY_BROKER_URL = "memory://" +CELERY_RESULT_BACKEND = "cache+memory://" + +# Password hashers - use fast hasher for tests +PASSWORD_HASHERS = [ + "django.contrib.auth.hashers.MD5PasswordHasher", +] + +# Disable logging during tests +LOGGING = { + "version": 1, + "disable_existing_loggers": False, + "handlers": { + "null": { + "class": "logging.NullHandler", + }, + }, + "root": { + "handlers": ["null"], + }, + "loggers": { + "django": { + "handlers": ["null"], + "propagate": False, + }, + "django.request": { + "handlers": ["null"], + "propagate": False, + }, + }, +} + +# Media files for tests +MEDIA_ROOT = "/tmp/test_media" # noqa: S108 + +# Static files for tests +STATICFILES_STORAGE = "django.contrib.staticfiles.storage.StaticFilesStorage" + +# Disable CSRF for API tests and disable throttling +REST_FRAMEWORK = { + **globals().get("REST_FRAMEWORK", {}), + "DEFAULT_AUTHENTICATION_CLASSES": [ + "rest_framework_simplejwt.authentication.JWTAuthentication", + "rest_framework.authentication.SessionAuthentication", + ], + "TEST_REQUEST_DEFAULT_FORMAT": "json", + # Disable throttling for tests + "DEFAULT_THROTTLE_CLASSES": [], + "DEFAULT_THROTTLE_RATES": {}, +} + +# JWT settings for tests +from datetime import timedelta + +SIMPLE_JWT = { + **globals().get("SIMPLE_JWT", {}), + "ACCESS_TOKEN_LIFETIME": timedelta(minutes=5), + "REFRESH_TOKEN_LIFETIME": timedelta(days=1), + "ROTATE_REFRESH_TOKENS": True, +} diff --git a/src/config/urls.py b/src/config/urls.py new file mode 100644 index 0000000..aca36e0 --- /dev/null +++ b/src/config/urls.py @@ -0,0 +1,44 @@ +""" +URL Configuration for the project. + +The `urlpatterns` list routes URLs to views. 
+""" + +from django.conf import settings +from django.conf.urls.static import static +from django.contrib import admin +from django.urls import include, path +from drf_yasg import openapi +from drf_yasg.views import get_schema_view +from rest_framework import permissions + +# Swagger schema view +schema_view = get_schema_view( + openapi.Info( + title="Mostovik API", + default_version="v1", + description="API documentation for Mostovik project", + terms_of_service="https://www.google.com/policies/terms/", + contact=openapi.Contact(email="contact@mostovik.local"), + license=openapi.License(name="BSD License"), + ), + public=True, + permission_classes=(permissions.AllowAny,), +) + +urlpatterns = [ + path( + "", + schema_view.with_ui("swagger", cache_timeout=0), + name="schema-swagger-ui", + ), + path("admin/", admin.site.urls), + path("health/", include("apps.core.urls")), + path("api/v1/", include("config.api_v1_urls", namespace="api_v1")), + path("auth/", include("rest_framework.urls")), +] + +# Serve media files in development +if settings.DEBUG: + urlpatterns += static(settings.MEDIA_URL, document_root=settings.MEDIA_ROOT) + urlpatterns += static(settings.STATIC_URL, document_root=settings.STATIC_ROOT) diff --git a/src/config/wsgi.py b/src/config/wsgi.py new file mode 100644 index 0000000..e4b0f2f --- /dev/null +++ b/src/config/wsgi.py @@ -0,0 +1,16 @@ +""" +WSGI config for the project. + +It exposes the WSGI callable as a module-level variable named ``application``. + +For more information on this file, see +https://docs.djangoproject.com/en/3.2/howto/deployment/wsgi/ +""" + +import os + +from django.core.wsgi import get_wsgi_application + +os.environ.setdefault("DJANGO_SETTINGS_MODULE", "config.settings.production") + +application = get_wsgi_application() diff --git a/src/manage.py b/src/manage.py new file mode 100644 index 0000000..e58570e --- /dev/null +++ b/src/manage.py @@ -0,0 +1,22 @@ +#!/usr/bin/env python +"""Django's command-line utility for administrative tasks.""" +import os +import sys + + +def main(): + """Run administrative tasks.""" + os.environ.setdefault("DJANGO_SETTINGS_MODULE", "config.settings.development") + try: + from django.core.management import execute_from_command_line + except ImportError as exc: + raise ImportError( + "Couldn't import Django. Are you sure it's installed and " + "available on your PYTHONPATH environment variable? Did you " + "forget to activate a virtual environment?" + ) from exc + execute_from_command_line(sys.argv) + + +if __name__ == "__main__": + main() diff --git a/tests/README.md b/tests/README.md new file mode 100644 index 0000000..39c80d1 --- /dev/null +++ b/tests/README.md @@ -0,0 +1,368 @@ +# Тесты для mostovik-backend + +Этот документ описывает организацию и запуск тестов в проекте mostovik-backend. 
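+
+As a quick orientation, a minimal API test in the style used throughout this suite might look like the sketch below. It is illustrative only: the test name is made up, and it assumes the `authenticated_api_client` and `test_user` fixtures from `tests/conftest.py` described in the fixtures section below, plus the `/api/v1/users/me/` route wired up in `src/config/api_v1_urls.py` and `src/apps/user/urls.py`.
+
+```python
+import pytest
+
+
+@pytest.mark.django_db
+def test_current_user_endpoint(authenticated_api_client, test_user):
+    """The authenticated client can read its own user data."""
+    response = authenticated_api_client.get("/api/v1/users/me/")
+
+    assert response.status_code == 200
+    assert response.data["email"] == test_user.email
+```
+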
+ +## 📁 Структура тестов + +``` +tests/ +├── __init__.py # Корневой пакет тестов +├── conftest.py # Конфигурация pytest и общие фикстуры +├── README.md # Этот файл +└── apps/ # Тесты для Django приложений + ├── __init__.py + └── user/ # Тесты для приложения user + ├── __init__.py + ├── factories.py # Фабрики для создания тестовых данных + ├── test_models.py # Тесты моделей + ├── test_serializers.py # Тесты сериализаторов + ├── test_services.py # Тесты сервисного слоя + └── test_views.py # Тесты представлений (API views) +``` + +## 🚀 Запуск тестов + +### Быстрый старт + +```bash +# Запуск всех тестов (рекомендуемый способ) +make test + +# Запуск конкретных тестов +make test TARGET=user # Все тесты user app +make test TARGET=models # Только тесты моделей +make test TARGET=views # Только тесты представлений + +# Или напрямую через скрипт +python run_tests_simple.py +python run_tests_simple.py user +``` + +### Различные способы запуска + +#### 1. Через универсальную команду make test (рекомендуется) + +```bash +# Все тесты +make test + +# Конкретные группы тестов +make test TARGET=user # Все тесты user app +make test TARGET=models # Тесты моделей +make test TARGET=views # Тесты представлений +make test TARGET=serializers # Тесты сериализаторов +make test TARGET=services # Тесты сервисов + +# Можно также использовать полные имена +make test TARGET=test_models # То же что и models +make test TARGET=test_views # То же что и views +``` + +#### 2. Через улучшенный Django runner + +```bash +# Все тесты +python run_tests_simple.py + +# Конкретное приложение +python run_tests_simple.py user + +# Конкретные группы тестов +python run_tests_simple.py models +python run_tests_simple.py views +python run_tests_simple.py serializers +python run_tests_simple.py services + +# Полные имена файлов +python run_tests_simple.py test_models +python run_tests_simple.py test_views +``` + +#### 3. Через стандартный Django test runner + +```bash +# Все тесты +python run_tests.py + +# Конкретное приложение +python run_tests.py test tests.apps.user + +# Конкретный класс тестов +python run_tests.py test tests.apps.user.test_models.UserModelTest +``` + +#### 4. 
Через pytest (возможны проблемы с pdbpp) + +```bash +# Через скрипт-обертку +python run_pytest.py + +# Или напрямую, если настроен PYTHONPATH +export PYTHONPATH=src:$PYTHONPATH +export DJANGO_SETTINGS_MODULE=config.settings.test +pytest tests/ +``` + +## 🔧 Конфигурация + +### Настройки тестов + +Тесты используют специальные настройки Django из `src/config/settings/test.py`: + +- **База данных**: SQLite в памяти для быстрого выполнения +- **Кэш**: Local memory cache вместо Redis +- **Email**: Локальный backend для тестирования +- **Celery**: Синхронное выполнение задач +- **Миграции**: Отключены для ускорения +- **Логирование**: Отключено + +### Pytest конфигурация + +Основные настройки в `pytest.ini`: + +- Автоматическое обнаружение Django настроек +- Переиспользование тестовой базы данных +- Отчеты о покрытии кода +- Фильтрация предупреждений + +### Полезные опции pytest + +```bash +# Подробная информация (автоматически включена) +make test TARGET=models + +# Запуск конкретного файла напрямую +python run_tests_simple.py test_models + +# Все тесты с подробным выводом +python run_tests_simple.py +``` + +## 🏭 Фабрики тестовых данных + +### UserFactory + +```python +from tests.apps.user.factories import UserFactory + +# Создание обычного пользователя +user = UserFactory.create_user() + +# Создание пользователя с конкретными данными +user = UserFactory.create_user( + email="test@example.com", + username="testuser" +) + +# Создание суперпользователя +admin = UserFactory.create_superuser() +``` + +### ProfileFactory + +```python +from tests.apps.user.factories import ProfileFactory + +# Создание профиля с новым пользователем +profile = ProfileFactory.create_profile() + +# Создание профиля для существующего пользователя +profile = ProfileFactory.create_profile( + user=existing_user, + first_name="John", + last_name="Doe" +) +``` + +## 🧪 Фикстуры pytest + +Доступные фикстуры в `tests/conftest.py`: + +```python +def test_example(test_user, authenticated_api_client): + """Пример использования фикстур""" + # test_user - готовый тестовый пользователь + # authenticated_api_client - API клиент с авторизацией + response = authenticated_api_client.get('/api/user/profile/') + assert response.status_code == 200 +``` + +### Список фикстур + +- `api_client` - DRF APIClient +- `user_factory` - Фабрика пользователей +- `profile_factory` - Фабрика профилей +- `test_user` - Готовый тестовый пользователь +- `test_superuser` - Готовый суперпользователь +- `test_profile` - Готовый профиль +- `authenticated_api_client` - Авторизованный API клиент +- `admin_api_client` - API клиент с админскими правами + +## 📊 Маркеры тестов + +Используйте маркеры для категоризации тестов: + +```python +import pytest + +@pytest.mark.unit +def test_user_model(): + """Юнит тест модели""" + pass + +@pytest.mark.integration +def test_user_registration_flow(): + """Интеграционный тест""" + pass + +@pytest.mark.slow +def test_heavy_operation(): + """Медленный тест""" + pass +``` + +Запуск по маркерам: + +```bash +# Только юнит тесты +python run_pytest.py -m "unit" + +# Исключить медленные тесты +python run_pytest.py -m "not slow" + +# Тесты моделей +python run_pytest.py -m "models" +``` + +## 🔍 Отладка тестов + +### Просмотр вывода + +```bash +# Показать print statements +python run_pytest.py -s + +# Подробные ошибки +python run_pytest.py --tb=long + +# Показать локальные переменные при ошибке +python run_pytest.py --tb=long --showlocals +``` + +### Использование pdb + +```python +def test_something(): + import pdb; pdb.set_trace() + # ваш 
код тестирования +``` + +```bash +# Запуск с автоматическим pdb при ошибках +python run_pytest.py --pdb +``` + +## 📈 Покрытие кода + +### Генерация отчета + +```bash +# HTML отчет +make test-coverage + +# Или напрямую +python run_pytest.py --cov=src --cov-report=html:htmlcov + +# Открыть отчет в браузере +open htmlcov/index.html +``` + +### Просмотр в терминале + +```bash +python run_pytest.py --cov=src --cov-report=term-missing +``` + +## 🔧 Добавление новых тестов + +### Создание нового файла тестов + +1. Создайте файл в соответствующей папке: `tests/apps/{app_name}/test_{module}.py` +2. Импортируйте необходимые зависимости +3. Создайте классы тестов, наследуя от `TestCase` или используя функции pytest + +### Пример структуры теста + +```python +"""Tests for new module""" + +from django.test import TestCase +from tests.apps.user.factories import UserFactory + + +class NewModuleTest(TestCase): + """Tests for NewModule""" + + def setUp(self): + """Подготовка данных для тестов""" + self.user = UserFactory.create_user() + + def test_something(self): + """Test description""" + # Arrange + expected_value = "test" + + # Act + result = some_function() + + # Assert + self.assertEqual(result, expected_value) +``` + +## 🚨 Решение проблем + +### Частые ошибки + +1. **Ошибка импорта**: Проверьте, что `PYTHONPATH` включает папку `src` +2. **База данных**: Убедитесь, что используются тестовые настройки +3. **Миграции**: В тестах миграции отключены, но модели должны быть синхронизированы + +### Очистка тестовых данных + +```bash +# Очистка кеша и временных файлов +make clean + +# Пересоздание тестовой базы данных +rm -f test_db.sqlite3 +python run_pytest.py --create-db +``` + +## 📚 Полезные ссылки + +- [Django Testing Documentation](https://docs.djangoproject.com/en/3.2/topics/testing/) +- [Pytest Documentation](https://docs.pytest.org/) +- [pytest-django](https://pytest-django.readthedocs.io/) +- [DRF Testing](https://www.django-rest-framework.org/api-guide/testing/) +- [Factory Boy](https://factoryboy.readthedocs.io/) + +## 🚀 Быстрая справка команд + +```bash +# Главная команда - make test с опциональным TARGET +make test # Все тесты +make test TARGET=user # User app (77 тестов) +make test TARGET=models # Модели (16 тестов) +make test TARGET=views # Представления (20 тестов) +make test TARGET=serializers # Сериализаторы (22 теста) +make test TARGET=services # Сервисы (18 тестов) +``` + +## 🤝 Рекомендации + +1. **Используйте make test** - это основная и самая удобная команда +2. **Именование**: Используйте описательные имена для тестов +3. **Изоляция**: Каждый тест должен быть независимым +4. **Покрытие**: Стремитесь к покрытию не менее 80% +5. **Быстрота**: Избегайте медленных операций в юнит тестах +6. **Читаемость**: Тесты должны быть понятными и хорошо документированными \ No newline at end of file diff --git a/tests/__init__.py b/tests/__init__.py new file mode 100644 index 0000000..84b82af --- /dev/null +++ b/tests/__init__.py @@ -0,0 +1,5 @@ +""" +Test suite for mostovik-backend project + +This package contains all tests organized by application structure. +""" diff --git a/tests/apps/__init__.py b/tests/apps/__init__.py new file mode 100644 index 0000000..d7f25c6 --- /dev/null +++ b/tests/apps/__init__.py @@ -0,0 +1,5 @@ +""" +Tests for applications + +This package contains tests for all Django applications. 
+""" diff --git a/tests/apps/core/__init__.py b/tests/apps/core/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/tests/apps/core/test_background_jobs.py b/tests/apps/core/test_background_jobs.py new file mode 100644 index 0000000..b226195 --- /dev/null +++ b/tests/apps/core/test_background_jobs.py @@ -0,0 +1,236 @@ +"""Тесты для BackgroundJob.""" + +from apps.core.models import BackgroundJob, JobStatus +from apps.core.services import BackgroundJobService +from django.test import TestCase +from faker import Faker + +fake = Faker() + + +class BackgroundJobModelTest(TestCase): + """Тесты для модели BackgroundJob.""" + + def test_create_job(self): + """Тест создания задачи.""" + task_id = fake.uuid4() + job = BackgroundJob.objects.create( + task_id=task_id, + task_name="apps.test.tasks.my_task", + ) + self.assertEqual(job.task_id, task_id) + self.assertEqual(job.status, JobStatus.PENDING) + self.assertEqual(job.progress, 0) + + def test_mark_started(self): + """Тест отметки о начале выполнения.""" + job = BackgroundJob.objects.create( + task_id=fake.uuid4(), + task_name="test.task", + ) + job.mark_started() + + self.assertEqual(job.status, JobStatus.STARTED) + self.assertIsNotNone(job.started_at) + + def test_update_progress(self): + """Тест обновления прогресса.""" + job = BackgroundJob.objects.create( + task_id=fake.uuid4(), + task_name="test.task", + ) + job.update_progress(50, "Обработка данных...") + + self.assertEqual(job.progress, 50) + self.assertEqual(job.progress_message, "Обработка данных...") + + def test_complete(self): + """Тест успешного завершения.""" + job = BackgroundJob.objects.create( + task_id=fake.uuid4(), + task_name="test.task", + ) + result = {"processed": 100, "errors": 0} + job.complete(result=result) + + self.assertEqual(job.status, JobStatus.SUCCESS) + self.assertEqual(job.progress, 100) + self.assertEqual(job.result, result) + self.assertIsNotNone(job.completed_at) + + def test_fail(self): + """Тест завершения с ошибкой.""" + job = BackgroundJob.objects.create( + task_id=fake.uuid4(), + task_name="test.task", + ) + job.fail("Something went wrong", "Traceback...") + + self.assertEqual(job.status, JobStatus.FAILURE) + self.assertEqual(job.error, "Something went wrong") + self.assertEqual(job.traceback, "Traceback...") + self.assertIsNotNone(job.completed_at) + + def test_revoke(self): + """Тест отмены задачи.""" + job = BackgroundJob.objects.create( + task_id=fake.uuid4(), + task_name="test.task", + ) + job.revoke() + + self.assertEqual(job.status, JobStatus.REVOKED) + self.assertIsNotNone(job.completed_at) + + def test_is_finished_property(self): + """Тест свойства is_finished.""" + job = BackgroundJob.objects.create( + task_id=fake.uuid4(), + task_name="test.task", + ) + self.assertFalse(job.is_finished) + + job.complete() + self.assertTrue(job.is_finished) + + def test_is_successful_property(self): + """Тест свойства is_successful.""" + job = BackgroundJob.objects.create( + task_id=fake.uuid4(), + task_name="test.task", + ) + self.assertFalse(job.is_successful) + + job.complete() + self.assertTrue(job.is_successful) + + def test_duration_property(self): + """Тест свойства duration.""" + job = BackgroundJob.objects.create( + task_id=fake.uuid4(), + task_name="test.task", + ) + self.assertIsNone(job.duration) + + job.mark_started() + job.complete() + self.assertIsNotNone(job.duration) + self.assertGreaterEqual(job.duration, 0) + + +class BackgroundJobServiceTest(TestCase): + """Тесты для BackgroundJobService.""" + + def test_create_job(self): + 
"""Тест создания задачи через сервис.""" + task_id = fake.uuid4() + job = BackgroundJobService.create_job( + task_id=task_id, + task_name="apps.test.tasks.my_task", + user_id=1, + meta={"key": "value"}, + ) + self.assertEqual(job.task_id, task_id) + self.assertEqual(job.user_id, 1) + self.assertEqual(job.meta, {"key": "value"}) + + def test_get_by_task_id(self): + """Тест получения задачи по task_id.""" + task_id = fake.uuid4() + created_job = BackgroundJobService.create_job( + task_id=task_id, + task_name="test.task", + ) + found_job = BackgroundJobService.get_by_task_id(task_id) + self.assertEqual(created_job.id, found_job.id) + + def test_get_by_task_id_not_found(self): + """Тест получения несуществующей задачи.""" + from apps.core.exceptions import NotFoundError + + with self.assertRaises(NotFoundError): + BackgroundJobService.get_by_task_id("non-existent-task-id") + + def test_get_by_task_id_or_none(self): + """Тест получения задачи или None.""" + result = BackgroundJobService.get_by_task_id_or_none("non-existent") + self.assertIsNone(result) + + task_id = fake.uuid4() + BackgroundJobService.create_job( + task_id=task_id, + task_name="test.task", + ) + result = BackgroundJobService.get_by_task_id_or_none(task_id) + self.assertIsNotNone(result) + + def test_get_user_jobs(self): + """Тест получения задач пользователя.""" + user_id = 123 + # Создаём несколько задач + for i in range(3): + BackgroundJobService.create_job( + task_id=f"task-{user_id}-{i}", + task_name="test.task", + user_id=user_id, + ) + # И одну задачу другого пользователя + BackgroundJobService.create_job( + task_id="task-other-user", + task_name="test.task", + user_id=999, + ) + + jobs = BackgroundJobService.get_user_jobs(user_id) + self.assertEqual(len(jobs), 3) + + def test_get_user_jobs_with_status_filter(self): + """Тест фильтрации по статусу.""" + user_id = 456 + job1 = BackgroundJobService.create_job( + task_id="task-pending", + task_name="test.task", + user_id=user_id, + ) + job2 = BackgroundJobService.create_job( + task_id="task-success", + task_name="test.task", + user_id=user_id, + ) + job2.complete() + + pending_jobs = BackgroundJobService.get_user_jobs( + user_id, status=JobStatus.PENDING + ) + self.assertEqual(len(pending_jobs), 1) + + success_jobs = BackgroundJobService.get_user_jobs( + user_id, status=JobStatus.SUCCESS + ) + self.assertEqual(len(success_jobs), 1) + + def test_get_active_jobs(self): + """Тест получения активных задач.""" + # Создаём задачи с разными статусами + job_pending = BackgroundJobService.create_job( + task_id="job-active-pending", + task_name="test.task", + ) + job_started = BackgroundJobService.create_job( + task_id="job-active-started", + task_name="test.task", + ) + job_started.mark_started() + + job_success = BackgroundJobService.create_job( + task_id="job-active-success", + task_name="test.task", + ) + job_success.complete() + + active_jobs = list(BackgroundJobService.get_active_jobs()) + active_task_ids = [j.task_id for j in active_jobs] + + self.assertIn("job-active-pending", active_task_ids) + self.assertIn("job-active-started", active_task_ids) + self.assertNotIn("job-active-success", active_task_ids) diff --git a/tests/apps/core/test_bulk_operations.py b/tests/apps/core/test_bulk_operations.py new file mode 100644 index 0000000..5656d0e --- /dev/null +++ b/tests/apps/core/test_bulk_operations.py @@ -0,0 +1,186 @@ +"""Тесты для BulkOperationsMixin и QueryOptimizerMixin.""" + +from apps.core.models import BackgroundJob +from apps.core.services import ( + BulkOperationsMixin, 
+ QueryOptimizerMixin, +) +from django.test import TestCase +from faker import Faker + +fake = Faker() + + +class BulkOperationsMixinTest(TestCase): + """Тесты для BulkOperationsMixin.""" + + def test_mixin_has_bulk_create_chunked(self): + """Проверка наличия метода bulk_create_chunked.""" + self.assertTrue(hasattr(BulkOperationsMixin, "bulk_create_chunked")) + + def test_mixin_has_bulk_update_or_create(self): + """Проверка наличия метода bulk_update_or_create.""" + self.assertTrue(hasattr(BulkOperationsMixin, "bulk_update_or_create")) + + def test_mixin_has_bulk_delete(self): + """Проверка наличия метода bulk_delete.""" + self.assertTrue(hasattr(BulkOperationsMixin, "bulk_delete")) + + def test_mixin_has_bulk_update_fields(self): + """Проверка наличия метода bulk_update_fields.""" + self.assertTrue(hasattr(BulkOperationsMixin, "bulk_update_fields")) + + +class QueryOptimizerMixinTest(TestCase): + """Тесты для QueryOptimizerMixin.""" + + def test_mixin_has_get_optimized_queryset(self): + """Проверка наличия метода get_optimized_queryset.""" + self.assertTrue(hasattr(QueryOptimizerMixin, "get_optimized_queryset")) + + def test_mixin_has_apply_optimizations(self): + """Проверка наличия метода apply_optimizations.""" + self.assertTrue(hasattr(QueryOptimizerMixin, "apply_optimizations")) + + def test_mixin_has_get_list_queryset(self): + """Проверка наличия метода get_list_queryset.""" + self.assertTrue(hasattr(QueryOptimizerMixin, "get_list_queryset")) + + def test_mixin_has_get_detail_queryset(self): + """Проверка наличия метода get_detail_queryset.""" + self.assertTrue(hasattr(QueryOptimizerMixin, "get_detail_queryset")) + + def test_mixin_has_with_counts(self): + """Проверка наличия метода with_counts.""" + self.assertTrue(hasattr(QueryOptimizerMixin, "with_counts")) + + def test_mixin_has_with_exists(self): + """Проверка наличия метода with_exists.""" + self.assertTrue(hasattr(QueryOptimizerMixin, "with_exists")) + + def test_mixin_default_attributes(self): + """Проверка атрибутов по умолчанию.""" + self.assertEqual(QueryOptimizerMixin.select_related, []) + self.assertEqual(QueryOptimizerMixin.prefetch_related, []) + self.assertEqual(QueryOptimizerMixin.default_only, []) + self.assertEqual(QueryOptimizerMixin.default_defer, []) + + +class BulkOperationsIntegrationTest(TestCase): + """Интеграционные тесты для bulk операций с BackgroundJob.""" + + def test_bulk_create_chunked(self): + """Тест массового создания чанками.""" + # Создаём тестовый сервис с BulkOperationsMixin + class TestService(BulkOperationsMixin): + model = BackgroundJob + + # Создаём 10 объектов чанками по 3 + jobs = [ + BackgroundJob( + task_id=f"bulk-chunk-{i}", + task_name="test.bulk.task", + ) + for i in range(10) + ] + + count = TestService.bulk_create_chunked(jobs, chunk_size=3) + self.assertEqual(count, 10) + + # Проверяем что все созданы + self.assertEqual(BackgroundJob.objects.filter(task_name="test.bulk.task").count(), 10) + + def test_bulk_delete(self): + """Тест массового удаления.""" + class TestService(BulkOperationsMixin): + model = BackgroundJob + + # Создаём несколько задач + jobs = [] + for i in range(5): + job = BackgroundJob.objects.create( + task_id=f"bulk-delete-{i}", + task_name="test.delete.task", + ) + jobs.append(job) + + # Удаляем первые 3 + ids_to_delete = [j.pk for j in jobs[:3]] + deleted = TestService.bulk_delete(ids_to_delete) + + self.assertEqual(deleted, 3) + self.assertEqual(BackgroundJob.objects.filter(task_name="test.delete.task").count(), 2) + + def test_bulk_update_fields(self): + 
"""Тест массового обновления полей.""" + class TestService(BulkOperationsMixin): + model = BackgroundJob + + # Создаём задачи + for i in range(5): + BackgroundJob.objects.create( + task_id=f"bulk-update-{i}", + task_name="test.update.task", + progress=0, + ) + + # Обновляем все задачи этого типа + updated = TestService.bulk_update_fields( + filters={"task_name": "test.update.task"}, + updates={"progress": 50}, + ) + + self.assertEqual(updated, 5) + + # Проверяем что обновились + for job in BackgroundJob.objects.filter(task_name="test.update.task"): + self.assertEqual(job.progress, 50) + + def test_bulk_update_or_create_creates(self): + """Тест upsert - создание новых.""" + class TestService(BulkOperationsMixin): + model = BackgroundJob + + items = [ + {"task_id": "upsert-new-1", "task_name": "upsert.task", "progress": 10}, + {"task_id": "upsert-new-2", "task_name": "upsert.task", "progress": 20}, + ] + + created, updated = TestService.bulk_update_or_create( + items=items, + unique_fields=["task_id"], + update_fields=["task_name", "progress"], + ) + + self.assertEqual(created, 2) + self.assertEqual(updated, 0) + + def test_bulk_update_or_create_updates(self): + """Тест upsert - обновление существующих.""" + class TestService(BulkOperationsMixin): + model = BackgroundJob + + # Создаём существующую запись + BackgroundJob.objects.create( + task_id="upsert-existing", + task_name="old.task", + progress=0, + ) + + items = [ + {"task_id": "upsert-existing", "task_name": "new.task", "progress": 100}, + ] + + created, updated = TestService.bulk_update_or_create( + items=items, + unique_fields=["task_id"], + update_fields=["task_name", "progress"], + ) + + self.assertEqual(created, 0) + self.assertEqual(updated, 1) + + # Проверяем обновление + job = BackgroundJob.objects.get(task_id="upsert-existing") + self.assertEqual(job.task_name, "new.task") + self.assertEqual(job.progress, 100) diff --git a/tests/apps/core/test_cache.py b/tests/apps/core/test_cache.py new file mode 100644 index 0000000..ed8ca70 --- /dev/null +++ b/tests/apps/core/test_cache.py @@ -0,0 +1,193 @@ +"""Tests for core cache utilities""" + +from apps.core.cache import ( + CacheManager, + _build_cache_key, + cache_method, + cache_result, +) +from django.core.cache import cache +from django.test import TestCase + + +class CacheResultDecoratorTest(TestCase): + """Tests for @cache_result decorator""" + + def setUp(self): + cache.clear() + self.call_count = 0 + + def test_result_is_cached(self): + """Test that function result is cached""" + + @cache_result(timeout=60, key_prefix="test") + def expensive_function(x): + self.call_count += 1 + return x * 2 + + # First call - should execute + result1 = expensive_function(5) + self.assertEqual(result1, 10) + self.assertEqual(self.call_count, 1) + + # Second call - should return cached result + result2 = expensive_function(5) + self.assertEqual(result2, 10) + self.assertEqual(self.call_count, 1) # Still 1, not called again + + def test_different_args_not_cached(self): + """Test that different arguments create different cache entries""" + + @cache_result(timeout=60, key_prefix="test") + def expensive_function(x): + self.call_count += 1 + return x * 2 + + result1 = expensive_function(5) + result2 = expensive_function(10) + + self.assertEqual(result1, 10) + self.assertEqual(result2, 20) + self.assertEqual(self.call_count, 2) + + def test_kwargs_included_in_cache_key(self): + """Test that kwargs are included in cache key""" + + @cache_result(timeout=60, key_prefix="test") + def expensive_function(x, 
multiplier=2): + self.call_count += 1 + return x * multiplier + + result1 = expensive_function(5, multiplier=2) + result2 = expensive_function(5, multiplier=3) + + self.assertEqual(result1, 10) + self.assertEqual(result2, 15) + self.assertEqual(self.call_count, 2) + + +class CacheMethodDecoratorTest(TestCase): + """Tests for @cache_method decorator""" + + def setUp(self): + cache.clear() + + def test_classmethod_caching(self): + """Test caching works with classmethod""" + call_count = {"value": 0} + + class MyService: + @classmethod + @cache_method(timeout=60, key_prefix="service") + def get_data(cls, item_id): + call_count["value"] += 1 + return {"id": item_id, "data": "test"} + + # First call + result1 = MyService.get_data(1) + self.assertEqual(result1["id"], 1) + self.assertEqual(call_count["value"], 1) + + # Second call - should be cached + result2 = MyService.get_data(1) + self.assertEqual(result2["id"], 1) + self.assertEqual(call_count["value"], 1) + + # Different argument + result3 = MyService.get_data(2) + self.assertEqual(result3["id"], 2) + self.assertEqual(call_count["value"], 2) + + +class CacheManagerTest(TestCase): + """Tests for CacheManager""" + + def setUp(self): + cache.clear() + self.manager = CacheManager("test_prefix") + + def test_set_and_get(self): + """Test basic set and get operations""" + self.manager.set("key1", "value1", timeout=60) + result = self.manager.get("key1") + self.assertEqual(result, "value1") + + def test_get_default(self): + """Test get returns default for missing key""" + result = self.manager.get("nonexistent", default="default_value") + self.assertEqual(result, "default_value") + + def test_delete(self): + """Test delete operation""" + self.manager.set("key1", "value1") + self.manager.delete("key1") + result = self.manager.get("key1") + self.assertIsNone(result) + + def test_get_or_set(self): + """Test get_or_set operation""" + call_count = {"value": 0} + + def compute_value(): + call_count["value"] += 1 + return "computed" + + # First call - should compute + result1 = self.manager.get_or_set("key1", compute_value) + self.assertEqual(result1, "computed") + self.assertEqual(call_count["value"], 1) + + # Second call - should return cached + result2 = self.manager.get_or_set("key1", compute_value) + self.assertEqual(result2, "computed") + self.assertEqual(call_count["value"], 1) + + def test_prefix_applied(self): + """Test that prefix is applied to keys""" + self.manager.set("mykey", "myvalue") + + # Direct cache access should use prefixed key + direct_result = cache.get("test_prefix:mykey") + self.assertEqual(direct_result, "myvalue") + + +class BuildCacheKeyTest(TestCase): + """Tests for _build_cache_key function""" + + def test_key_includes_function_name(self): + """Test cache key includes function name""" + + def my_function(): + pass + + key = _build_cache_key(my_function, "", (), {}) + self.assertIn("my_function", key) + + def test_key_includes_prefix(self): + """Test cache key includes prefix""" + + def my_function(): + pass + + key = _build_cache_key(my_function, "myprefix", (), {}) + self.assertTrue(key.startswith("myprefix:")) + + def test_different_args_different_keys(self): + """Test different arguments produce different keys""" + + def my_function(): + pass + + key1 = _build_cache_key(my_function, "", (1, 2), {}) + key2 = _build_cache_key(my_function, "", (1, 3), {}) + self.assertNotEqual(key1, key2) + + def test_different_kwargs_different_keys(self): + """Test different kwargs produce different keys""" + + def my_function(): + pass + + 
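+        # Hedged sketch of a key layout consistent with these assertions (the
+        # canonical logic is in apps/core/cache.py; the names below are assumptions):
+        #
+        #     import hashlib
+        #
+        #     def _build_cache_key(func, prefix, args, kwargs):
+        #         raw = f"{args!r}:{sorted(kwargs.items())!r}"
+        #         digest = hashlib.md5(raw.encode()).hexdigest()
+        #         base = f"{func.__name__}:{digest}"
+        #         return f"{prefix}:{base}" if prefix else base
+        #
+        # The tests only assert observable properties: the function name and
+        # prefix appear in the key, and distinct args/kwargs give distinct keys.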
key1 = _build_cache_key(my_function, "", (), {"a": 1}) + key2 = _build_cache_key(my_function, "", (), {"a": 2}) + self.assertNotEqual(key1, key2) diff --git a/tests/apps/core/test_exceptions.py b/tests/apps/core/test_exceptions.py new file mode 100644 index 0000000..f862d1a --- /dev/null +++ b/tests/apps/core/test_exceptions.py @@ -0,0 +1,152 @@ +"""Tests for core exceptions and exception handler""" + +from apps.core.exceptions import ( + AuthenticationError, + BadRequestError, + BaseAPIException, + BusinessLogicError, + ConflictError, + DuplicateError, + InternalError, + InvalidStateError, + NotFoundError, + PermissionDeniedError, + QuotaExceededError, + RateLimitError, + ServiceUnavailableError, + ValidationError, +) +from django.test import TestCase + + +class BaseAPIExceptionTest(TestCase): + """Tests for BaseAPIException""" + + def test_default_values(self): + """Test exception with default values""" + exc = BaseAPIException() + self.assertEqual(exc.message, "An error occurred") + self.assertEqual(exc.code, "error") + self.assertEqual(exc.status_code, 400) + self.assertIsNone(exc.details) + + def test_custom_values(self): + """Test exception with custom values""" + exc = BaseAPIException( + message="Custom message", + code="custom_code", + details={"field": "value"}, + ) + self.assertEqual(exc.message, "Custom message") + self.assertEqual(exc.code, "custom_code") + self.assertEqual(exc.details, {"field": "value"}) + + def test_to_dict(self): + """Test conversion to dictionary""" + exc = BaseAPIException( + message="Test message", + code="test_code", + details={"key": "value"}, + ) + result = exc.to_dict() + + self.assertEqual(result["message"], "Test message") + self.assertEqual(result["code"], "test_code") + self.assertEqual(result["details"], {"key": "value"}) + + def test_to_dict_without_details(self): + """Test to_dict without details""" + exc = BaseAPIException(message="Test") + result = exc.to_dict() + + self.assertNotIn("details", result) + + +class ClientErrorExceptionsTest(TestCase): + """Tests for client error exceptions (4xx)""" + + def test_validation_error(self): + """Test ValidationError defaults""" + exc = ValidationError() + self.assertEqual(exc.status_code, 400) + self.assertEqual(exc.code, "validation_error") + + def test_bad_request_error(self): + """Test BadRequestError defaults""" + exc = BadRequestError() + self.assertEqual(exc.status_code, 400) + self.assertEqual(exc.code, "bad_request") + + def test_authentication_error(self): + """Test AuthenticationError defaults""" + exc = AuthenticationError() + self.assertEqual(exc.status_code, 401) + self.assertEqual(exc.code, "authentication_error") + + def test_permission_denied_error(self): + """Test PermissionDeniedError defaults""" + exc = PermissionDeniedError() + self.assertEqual(exc.status_code, 403) + self.assertEqual(exc.code, "permission_denied") + + def test_not_found_error(self): + """Test NotFoundError defaults""" + exc = NotFoundError() + self.assertEqual(exc.status_code, 404) + self.assertEqual(exc.code, "not_found") + + def test_conflict_error(self): + """Test ConflictError defaults""" + exc = ConflictError() + self.assertEqual(exc.status_code, 409) + self.assertEqual(exc.code, "conflict") + + def test_rate_limit_error(self): + """Test RateLimitError defaults""" + exc = RateLimitError() + self.assertEqual(exc.status_code, 429) + self.assertEqual(exc.code, "rate_limit_exceeded") + + +class ServerErrorExceptionsTest(TestCase): + """Tests for server error exceptions (5xx)""" + + def 
test_internal_error(self): + """Test InternalError defaults""" + exc = InternalError() + self.assertEqual(exc.status_code, 500) + self.assertEqual(exc.code, "internal_error") + + def test_service_unavailable_error(self): + """Test ServiceUnavailableError defaults""" + exc = ServiceUnavailableError() + self.assertEqual(exc.status_code, 503) + self.assertEqual(exc.code, "service_unavailable") + + +class BusinessLogicExceptionsTest(TestCase): + """Tests for business logic exceptions""" + + def test_business_logic_error(self): + """Test BusinessLogicError defaults""" + exc = BusinessLogicError() + self.assertEqual(exc.status_code, 400) + self.assertEqual(exc.code, "business_error") + + def test_invalid_state_error(self): + """Test InvalidStateError defaults""" + exc = InvalidStateError() + self.assertEqual(exc.status_code, 400) + self.assertEqual(exc.code, "invalid_state") + + def test_duplicate_error(self): + """Test DuplicateError defaults""" + exc = DuplicateError() + self.assertEqual(exc.status_code, 409) + self.assertEqual(exc.code, "duplicate") + + def test_quota_exceeded_error(self): + """Test QuotaExceededError defaults""" + exc = QuotaExceededError() + self.assertEqual(exc.status_code, 400) + self.assertEqual(exc.code, "quota_exceeded") diff --git a/tests/apps/core/test_filters.py b/tests/apps/core/test_filters.py new file mode 100644 index 0000000..609761f --- /dev/null +++ b/tests/apps/core/test_filters.py @@ -0,0 +1,128 @@ +"""Tests for core filter utilities""" + +from apps.core.filters import ( + BaseFilterSet, + FilterMixin, + StandardOrderingFilter, + StandardSearchFilter, + get_filter_backends, +) +from django.test import TestCase +from django_filters import rest_framework as filters +from rest_framework.filters import OrderingFilter, SearchFilter + + +class BaseFilterSetTest(TestCase): + """Tests for BaseFilterSet""" + + def test_has_created_at_filters(self): + """Test BaseFilterSet has created_at filters""" + filter_fields = BaseFilterSet.declared_filters + + self.assertIn("created_at_after", filter_fields) + self.assertIn("created_at_before", filter_fields) + + def test_has_updated_at_filters(self): + """Test BaseFilterSet has updated_at filters""" + filter_fields = BaseFilterSet.declared_filters + + self.assertIn("updated_at_after", filter_fields) + self.assertIn("updated_at_before", filter_fields) + + def test_created_at_after_is_datetime_filter(self): + """Test created_at_after is DateTimeFilter""" + filter_field = BaseFilterSet.declared_filters["created_at_after"] + + self.assertIsInstance(filter_field, filters.DateTimeFilter) + self.assertEqual(filter_field.field_name, "created_at") + self.assertEqual(filter_field.lookup_expr, "gte") + + def test_created_at_before_is_datetime_filter(self): + """Test created_at_before is DateTimeFilter""" + filter_field = BaseFilterSet.declared_filters["created_at_before"] + + self.assertIsInstance(filter_field, filters.DateTimeFilter) + self.assertEqual(filter_field.field_name, "created_at") + self.assertEqual(filter_field.lookup_expr, "lte") + + +class StandardSearchFilterTest(TestCase): + """Tests for StandardSearchFilter""" + + def test_inherits_from_search_filter(self): + """Test StandardSearchFilter inherits from SearchFilter""" + self.assertTrue(issubclass(StandardSearchFilter, SearchFilter)) + + def test_search_param(self): + """Test search parameter name""" + filter_instance = StandardSearchFilter() + self.assertEqual(filter_instance.search_param, "search") + + def test_search_title(self): + """Test search title is in Russian""" 
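+        # Illustration of how these pieces are presumably combined on a viewset
+        # (ArticleViewSet/ArticleFilterSet are hypothetical, not part of this patch):
+        #
+        #     class ArticleViewSet(FilterMixin, viewsets.ModelViewSet):
+        #         filterset_class = ArticleFilterSet   # a BaseFilterSet subclass
+        #         search_fields = ["title", "body"]    # consumed by StandardSearchFilter
+        #         ordering_fields = ["created_at"]     # consumed by StandardOrderingFilter
+        #
+        # giving ?search=, ?ordering= and ?created_at_after= query parameters.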
+ filter_instance = StandardSearchFilter() + self.assertEqual(filter_instance.search_title, "Поиск") + + +class StandardOrderingFilterTest(TestCase): + """Tests for StandardOrderingFilter""" + + def test_inherits_from_ordering_filter(self): + """Test StandardOrderingFilter inherits from OrderingFilter""" + self.assertTrue(issubclass(StandardOrderingFilter, OrderingFilter)) + + def test_ordering_param(self): + """Test ordering parameter name""" + filter_instance = StandardOrderingFilter() + self.assertEqual(filter_instance.ordering_param, "ordering") + + def test_ordering_title(self): + """Test ordering title is in Russian""" + filter_instance = StandardOrderingFilter() + self.assertEqual(filter_instance.ordering_title, "Сортировка") + + +class GetFilterBackendsTest(TestCase): + """Tests for get_filter_backends function""" + + def test_returns_list(self): + """Test function returns a list""" + backends = get_filter_backends() + self.assertIsInstance(backends, list) + + def test_contains_django_filter_backend(self): + """Test list contains DjangoFilterBackend""" + backends = get_filter_backends() + self.assertIn(filters.DjangoFilterBackend, backends) + + def test_contains_search_filter(self): + """Test list contains StandardSearchFilter""" + backends = get_filter_backends() + self.assertIn(StandardSearchFilter, backends) + + def test_contains_ordering_filter(self): + """Test list contains StandardOrderingFilter""" + backends = get_filter_backends() + self.assertIn(StandardOrderingFilter, backends) + + +class FilterMixinTest(TestCase): + """Tests for FilterMixin""" + + def test_has_filter_backends(self): + """Test FilterMixin has filter_backends""" + self.assertTrue(hasattr(FilterMixin, "filter_backends")) + self.assertIsInstance(FilterMixin.filter_backends, list) + + def test_has_default_ordering(self): + """Test FilterMixin has default ordering""" + self.assertTrue(hasattr(FilterMixin, "ordering")) + self.assertEqual(FilterMixin.ordering, ["-created_at"]) + + def test_filter_backends_contains_required_backends(self): + """Test filter_backends contains all required backends""" + backends = FilterMixin.filter_backends + + self.assertIn(filters.DjangoFilterBackend, backends) + self.assertIn(StandardSearchFilter, backends) + self.assertIn(StandardOrderingFilter, backends) diff --git a/tests/apps/core/test_logging.py b/tests/apps/core/test_logging.py new file mode 100644 index 0000000..394085c --- /dev/null +++ b/tests/apps/core/test_logging.py @@ -0,0 +1,163 @@ +"""Tests for core logging utilities""" + +import json +import logging +from io import StringIO + +from apps.core.logging import ( + ContextLogger, + JSONFormatter, + get_json_logging_config, +) +from django.test import TestCase + + +class JSONFormatterTest(TestCase): + """Tests for JSONFormatter""" + + def setUp(self): + self.formatter = JSONFormatter() + self.logger = logging.getLogger("test_json") + self.logger.setLevel(logging.DEBUG) + + # Remove existing handlers + self.logger.handlers = [] + + # Add handler with JSONFormatter + self.stream = StringIO() + handler = logging.StreamHandler(self.stream) + handler.setFormatter(self.formatter) + self.logger.addHandler(handler) + + def test_output_is_valid_json(self): + """Test that output is valid JSON""" + self.logger.info("Test message") + output = self.stream.getvalue() + + # Should not raise + parsed = json.loads(output) + self.assertIsInstance(parsed, dict) + + def test_contains_required_fields(self): + """Test that output contains required fields""" + self.logger.info("Test 
message") + output = self.stream.getvalue() + parsed = json.loads(output) + + self.assertIn("timestamp", parsed) + self.assertIn("level", parsed) + self.assertIn("logger", parsed) + self.assertIn("message", parsed) + + def test_level_is_correct(self): + """Test that log level is correct""" + self.logger.warning("Warning message") + output = self.stream.getvalue() + parsed = json.loads(output) + + self.assertEqual(parsed["level"], "WARNING") + + def test_message_is_correct(self): + """Test that message is correct""" + self.logger.info("My test message") + output = self.stream.getvalue() + parsed = json.loads(output) + + self.assertEqual(parsed["message"], "My test message") + + def test_extra_fields_included(self): + """Test that extra fields are included""" + self.logger.info("Test message", extra={"user_id": 42, "action": "login"}) + output = self.stream.getvalue() + parsed = json.loads(output) + + self.assertIn("extra", parsed) + self.assertEqual(parsed["extra"]["user_id"], 42) + self.assertEqual(parsed["extra"]["action"], "login") + + def test_exception_info_included(self): + """Test that exception info is included""" + try: + raise ValueError("Test error") + except ValueError: + self.logger.exception("An error occurred") + + output = self.stream.getvalue() + parsed = json.loads(output) + + self.assertIn("exception", parsed) + self.assertEqual(parsed["exception"]["type"], "ValueError") + self.assertIn("Test error", parsed["exception"]["message"]) + + +class ContextLoggerTest(TestCase): + """Tests for ContextLogger""" + + def setUp(self): + self.context_logger = ContextLogger("test_context") + + def test_set_context(self): + """Test context is stored""" + self.context_logger.set_context(user_id=42, action="test") + + self.assertEqual(self.context_logger._context["user_id"], 42) + self.assertEqual(self.context_logger._context["action"], "test") + + def test_clear_context(self): + """Test context is cleared""" + self.context_logger.set_context(user_id=42) + self.context_logger.clear_context() + + self.assertEqual(self.context_logger._context, {}) + + def test_context_updated_not_replaced(self): + """Test that set_context updates rather than replaces""" + self.context_logger.set_context(user_id=42) + self.context_logger.set_context(action="test") + + self.assertEqual(self.context_logger._context["user_id"], 42) + self.assertEqual(self.context_logger._context["action"], "test") + + +class GetJsonLoggingConfigTest(TestCase): + """Tests for get_json_logging_config function""" + + def test_returns_dict(self): + """Test function returns a dictionary""" + config = get_json_logging_config() + self.assertIsInstance(config, dict) + + def test_has_required_keys(self): + """Test config has required keys""" + config = get_json_logging_config() + + self.assertIn("version", config) + self.assertIn("formatters", config) + self.assertIn("handlers", config) + self.assertIn("loggers", config) + + def test_json_formatter_configured(self): + """Test JSON formatter is configured""" + config = get_json_logging_config() + + self.assertIn("json", config["formatters"]) + self.assertEqual( + config["formatters"]["json"]["()"], + "apps.core.logging.JSONFormatter", + ) + + def test_log_level_applied(self): + """Test log level is applied""" + config = get_json_logging_config(log_level="DEBUG") + + self.assertEqual(config["root"]["level"], "DEBUG") + + def test_file_handler_added_when_path_provided(self): + """Test file handler is added when path is provided""" + config = 
get_json_logging_config(log_file="/var/log/test.log") + + self.assertIn("file", config["handlers"]) + self.assertEqual( + config["handlers"]["file"]["filename"], + "/var/log/test.log", + ) diff --git a/tests/apps/core/test_management_commands.py b/tests/apps/core/test_management_commands.py new file mode 100644 index 0000000..9288da3 --- /dev/null +++ b/tests/apps/core/test_management_commands.py @@ -0,0 +1,92 @@ +"""Тесты для базового класса management commands.""" + +from io import StringIO + +from apps.core.management.commands.base import BaseAppCommand +from django.core.management.base import CommandError +from django.test import TestCase + + +class TestCommand(BaseAppCommand): + """Тестовая команда для проверки BaseAppCommand.""" + + help = "Test command" + + def execute_command(self, *args, **options): + if options.get("fail"): + raise ValueError("Test error") + return "Success" + + +class BaseAppCommandTest(TestCase): + """Тесты для BaseAppCommand.""" + + def test_base_command_has_dry_run_argument(self): + """Проверка наличия аргумента --dry-run.""" + cmd = BaseAppCommand() + parser = cmd.create_parser("manage.py", "test") + # Парсер должен принимать --dry-run + args = parser.parse_args(["--dry-run"]) + self.assertTrue(args.dry_run) + + def test_base_command_has_silent_argument(self): + """Проверка наличия аргумента --silent.""" + cmd = BaseAppCommand() + parser = cmd.create_parser("manage.py", "test") + args = parser.parse_args(["--silent"]) + self.assertTrue(args.silent) + + def test_log_methods_exist(self): + """Проверка наличия методов логирования.""" + cmd = BaseAppCommand() + cmd.stdout = StringIO() + cmd.stderr = StringIO() + cmd.silent = False + cmd.verbosity = 2 + + # Методы должны существовать и не падать + cmd.log_info("Test info") + cmd.log_success("Test success") + cmd.log_warning("Test warning") + cmd.log_error("Test error") + cmd.log_debug("Test debug") + + def test_progress_iter(self): + """Тест итератора с прогрессом.""" + cmd = BaseAppCommand() + cmd.stdout = StringIO() + cmd.silent = True # Без вывода + + items = list(range(10)) + result = list(cmd.progress_iter(items, "Processing")) + + self.assertEqual(result, items) + + def test_confirm_in_dry_run(self): + """Тест подтверждения в dry-run режиме.""" + cmd = BaseAppCommand() + cmd.stdout = StringIO() + cmd.dry_run = True + cmd.silent = False + + # В dry-run confirm всегда возвращает True + result = cmd.confirm("Continue?") + self.assertTrue(result) + + def test_abort_raises_command_error(self): + """Тест прерывания команды.""" + cmd = BaseAppCommand() + + with self.assertRaises(CommandError): + cmd.abort("Test abort") + + def test_timed_operation(self): + """Тест контекстного менеджера для измерения времени.""" + cmd = BaseAppCommand() + cmd.stdout = StringIO() + cmd.verbosity = 2 + + with cmd.timed_operation("Test operation"): + pass # Операция + + # Не должно падать diff --git a/tests/apps/core/test_middleware.py b/tests/apps/core/test_middleware.py new file mode 100644 index 0000000..e6ad34b --- /dev/null +++ b/tests/apps/core/test_middleware.py @@ -0,0 +1,34 @@ +"""Tests for core middleware""" + +from django.urls import reverse +from rest_framework.test import APITestCase + + +class RequestIDMiddlewareTest(APITestCase): + """Tests for RequestIDMiddleware""" + + def test_request_id_generated(self): + """Test that request ID is generated and returned in response header""" + url = reverse("core:health") + response = self.client.get(url) + + self.assertIn("X-Request-ID", response) + 
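+        # Rough sketch of the behaviour verified by this test class (the actual
+        # implementation is in src/apps/core/middleware.py; this is an assumption):
+        #
+        #     import uuid
+        #
+        #     class RequestIDMiddleware:
+        #         def __init__(self, get_response):
+        #             self.get_response = get_response
+        #
+        #         def __call__(self, request):
+        #             rid = request.headers.get("X-Request-ID") or str(uuid.uuid4())
+        #             request.request_id = rid
+        #             response = self.get_response(request)
+        #             response["X-Request-ID"] = rid
+        #             return response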
self.assertIsNotNone(response["X-Request-ID"]) + # UUID format check (36 chars with hyphens) + self.assertEqual(len(response["X-Request-ID"]), 36) + + def test_request_id_passed_through(self): + """Test that provided X-Request-ID is passed through""" + url = reverse("core:health") + custom_id = "custom-request-id-12345" + response = self.client.get(url, HTTP_X_REQUEST_ID=custom_id) + + self.assertEqual(response["X-Request-ID"], custom_id) + + def test_different_requests_different_ids(self): + """Test that different requests get different IDs""" + url = reverse("core:health") + response1 = self.client.get(url) + response2 = self.client.get(url) + + self.assertNotEqual(response1["X-Request-ID"], response2["X-Request-ID"]) diff --git a/tests/apps/core/test_mixins.py b/tests/apps/core/test_mixins.py new file mode 100644 index 0000000..f92d2b7 --- /dev/null +++ b/tests/apps/core/test_mixins.py @@ -0,0 +1,110 @@ +"""Тесты для Model Mixins.""" + +from apps.core.mixins import ( + OrderableMixin, + SoftDeleteMixin, + StatusMixin, +) +from django.test import TestCase + + +class TimestampMixinTest(TestCase): + """Тесты для TimestampMixin.""" + + def test_created_at_auto_set(self): + """Проверка автоматической установки created_at.""" + # Используем BackgroundJob как пример модели с TimestampMixin + from apps.core.models import BackgroundJob + + job = BackgroundJob.objects.create( + task_id="test-task-1", + task_name="test.task", + ) + self.assertIsNotNone(job.created_at) + self.assertIsNotNone(job.updated_at) + + def test_updated_at_auto_update(self): + """Проверка автоматического обновления updated_at.""" + from apps.core.models import BackgroundJob + + job = BackgroundJob.objects.create( + task_id="test-task-2", + task_name="test.task", + ) + original_updated = job.updated_at + + # Обновляем запись + job.progress = 50 + job.save() + job.refresh_from_db() + + self.assertGreaterEqual(job.updated_at, original_updated) + + +class UUIDPrimaryKeyMixinTest(TestCase): + """Тесты для UUIDPrimaryKeyMixin.""" + + def test_uuid_auto_generated(self): + """Проверка автоматической генерации UUID.""" + from apps.core.models import BackgroundJob + + job = BackgroundJob.objects.create( + task_id="test-task-3", + task_name="test.task", + ) + self.assertIsNotNone(job.id) + # Проверяем что ID похож на UUID (строка 32+ символа с дефисами) + self.assertEqual(len(str(job.id)), 36) + + def test_uuid_unique(self): + """Проверка уникальности UUID.""" + from apps.core.models import BackgroundJob + + job1 = BackgroundJob.objects.create( + task_id="test-task-4a", + task_name="test.task", + ) + job2 = BackgroundJob.objects.create( + task_id="test-task-4b", + task_name="test.task", + ) + self.assertNotEqual(job1.id, job2.id) + + +class SoftDeleteMixinTest(TestCase): + """Тесты для SoftDeleteMixin.""" + + def test_soft_delete_mixin_fields(self): + """Проверка наличия полей is_deleted и deleted_at.""" + # Проверяем что поля определены в миксине + field_names = [f.name for f in SoftDeleteMixin._meta.get_fields()] + self.assertIn("is_deleted", field_names) + self.assertIn("deleted_at", field_names) + + def test_soft_delete_queryset_methods(self): + """Проверка методов SoftDeleteQuerySet.""" + from apps.core.mixins import SoftDeleteQuerySet + + # Проверяем что методы определены + self.assertTrue(hasattr(SoftDeleteQuerySet, "alive")) + self.assertTrue(hasattr(SoftDeleteQuerySet, "dead")) + self.assertTrue(hasattr(SoftDeleteQuerySet, "hard_delete")) + + +class StatusMixinTest(TestCase): + """Тесты для StatusMixin.""" + + def 
test_status_choices(self): + """Проверка наличия статусов.""" + self.assertEqual(StatusMixin.Status.DRAFT, "draft") + self.assertEqual(StatusMixin.Status.ACTIVE, "active") + self.assertEqual(StatusMixin.Status.INACTIVE, "inactive") + self.assertEqual(StatusMixin.Status.ARCHIVED, "archived") + + +class OrderableMixinTest(TestCase): + """Тесты для OrderableMixin.""" + + def test_orderable_mixin_has_order_field(self): + """Проверка наличия поля order.""" + self.assertTrue(hasattr(OrderableMixin, "order")) diff --git a/tests/apps/core/test_openapi.py b/tests/apps/core/test_openapi.py new file mode 100644 index 0000000..9bc164f --- /dev/null +++ b/tests/apps/core/test_openapi.py @@ -0,0 +1,133 @@ +"""Tests for core OpenAPI utilities""" + +from apps.core.openapi import ( + CommonParameters, + CommonResponses, + _get_status_description, + api_docs, + paginated_response, +) +from django.test import TestCase +from drf_yasg import openapi +from rest_framework import serializers + + +class DummySerializer(serializers.Serializer): + """Dummy serializer for testing""" + + id = serializers.IntegerField() + name = serializers.CharField() + + +class ApiDocsDecoratorTest(TestCase): + """Tests for @api_docs decorator""" + + def test_decorator_returns_function(self): + """Test decorator returns wrapped function""" + + @api_docs(summary="Test endpoint") + def my_view(request): + pass + + self.assertTrue(callable(my_view)) + + def test_decorator_preserves_function_name(self): + """Test decorator preserves original function name""" + + @api_docs(summary="Test endpoint") + def my_view(request): + pass + + self.assertEqual(my_view.__name__, "my_view") + + +class GetStatusDescriptionTest(TestCase): + """Tests for _get_status_description function""" + + def test_known_status_codes(self): + """Test known status codes return Russian descriptions""" + self.assertEqual(_get_status_description(200), "Успешный запрос") + self.assertEqual(_get_status_description(201), "Ресурс создан") + self.assertEqual(_get_status_description(400), "Некорректный запрос") + self.assertEqual(_get_status_description(401), "Не авторизован") + self.assertEqual(_get_status_description(403), "Доступ запрещён") + self.assertEqual(_get_status_description(404), "Ресурс не найден") + self.assertEqual(_get_status_description(500), "Внутренняя ошибка сервера") + + def test_unknown_status_code(self): + """Test unknown status code returns generic description""" + result = _get_status_description(418) + self.assertEqual(result, "HTTP 418") + + +class CommonResponsesTest(TestCase): + """Tests for CommonResponses class""" + + def test_success_response_type(self): + """Test SUCCESS is an openapi.Response""" + self.assertIsInstance(CommonResponses.SUCCESS, openapi.Response) + + def test_created_response_type(self): + """Test CREATED is an openapi.Response""" + self.assertIsInstance(CommonResponses.CREATED, openapi.Response) + + def test_not_found_response_type(self): + """Test NOT_FOUND is an openapi.Response""" + self.assertIsInstance(CommonResponses.NOT_FOUND, openapi.Response) + + def test_unauthorized_response_type(self): + """Test UNAUTHORIZED is an openapi.Response""" + self.assertIsInstance(CommonResponses.UNAUTHORIZED, openapi.Response) + + def test_validation_error_response_type(self): + """Test VALIDATION_ERROR is an openapi.Response""" + self.assertIsInstance(CommonResponses.VALIDATION_ERROR, openapi.Response) + + def test_server_error_response_type(self): + """Test SERVER_ERROR is an openapi.Response""" + 
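+        # One plausible way these canned responses are consumed together with the
+        # @api_docs decorator (illustrative; `retrieve_user` is hypothetical):
+        #
+        #     @api_docs(
+        #         summary="Get a user by id",
+        #         responses={200: CommonResponses.SUCCESS, 404: CommonResponses.NOT_FOUND},
+        #     )
+        #     def retrieve_user(request, pk):
+        #         ...
+        #
+        # The assertions here only check that each attribute is a drf_yasg
+        # openapi.Response, not how it is attached to a view.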
self.assertIsInstance(CommonResponses.SERVER_ERROR, openapi.Response) + + +class CommonParametersTest(TestCase): + """Tests for CommonParameters class""" + + def test_page_parameter(self): + """Test PAGE parameter configuration""" + self.assertEqual(CommonParameters.PAGE.name, "page") + self.assertEqual(CommonParameters.PAGE.in_, openapi.IN_QUERY) + self.assertEqual(CommonParameters.PAGE.type, openapi.TYPE_INTEGER) + + def test_page_size_parameter(self): + """Test PAGE_SIZE parameter configuration""" + self.assertEqual(CommonParameters.PAGE_SIZE.name, "page_size") + self.assertEqual(CommonParameters.PAGE_SIZE.in_, openapi.IN_QUERY) + + def test_search_parameter(self): + """Test SEARCH parameter configuration""" + self.assertEqual(CommonParameters.SEARCH.name, "search") + self.assertEqual(CommonParameters.SEARCH.type, openapi.TYPE_STRING) + + def test_ordering_parameter(self): + """Test ORDERING parameter configuration""" + self.assertEqual(CommonParameters.ORDERING.name, "ordering") + self.assertEqual(CommonParameters.ORDERING.type, openapi.TYPE_STRING) + + def test_id_parameter(self): + """Test ID parameter configuration""" + self.assertEqual(CommonParameters.ID.name, "id") + self.assertEqual(CommonParameters.ID.in_, openapi.IN_PATH) + self.assertTrue(CommonParameters.ID.required) + + +class PaginatedResponseTest(TestCase): + """Tests for paginated_response function""" + + def test_returns_response_object(self): + """Test function returns openapi.Response""" + result = paginated_response(DummySerializer) + self.assertIsInstance(result, openapi.Response) + + def test_response_has_description(self): + """Test response has description""" + result = paginated_response(DummySerializer) + self.assertEqual(result.description, "Пагинированный список") diff --git a/tests/apps/core/test_permissions.py b/tests/apps/core/test_permissions.py new file mode 100644 index 0000000..10d7dea --- /dev/null +++ b/tests/apps/core/test_permissions.py @@ -0,0 +1,252 @@ +"""Tests for core permissions""" + +from apps.core.permissions import ( + IsAdmin, + IsAdminOrReadOnly, + IsOwner, + IsOwnerOrAdmin, + IsOwnerOrReadOnly, + IsSuperuser, + IsVerified, +) +from django.contrib.auth import get_user_model +from django.test import RequestFactory, TestCase +from rest_framework.views import APIView + +from tests.apps.user.factories import UserFactory + +User = get_user_model() + + +class MockObject: + """Mock object for testing ownership""" + + def __init__(self, user=None, owner=None): + self.user = user + self.owner = owner + + +class IsOwnerTest(TestCase): + """Tests for IsOwner permission""" + + def setUp(self): + self.factory = RequestFactory() + self.permission = IsOwner() + self.user = UserFactory.create_user() + self.other_user = UserFactory.create_user() + + def test_owner_has_permission(self): + """Test owner has permission to object""" + request = self.factory.get("/") + request.user = self.user + obj = MockObject(user=self.user) + + result = self.permission.has_object_permission(request, APIView(), obj) + self.assertTrue(result) + + def test_non_owner_denied(self): + """Test non-owner is denied""" + request = self.factory.get("/") + request.user = self.other_user + obj = MockObject(user=self.user) + + result = self.permission.has_object_permission(request, APIView(), obj) + self.assertFalse(result) + + def test_owner_field_fallback(self): + """Test fallback to 'owner' field""" + request = self.factory.get("/") + request.user = self.user + obj = MockObject(owner=self.user) + + result = 
self.permission.has_object_permission(request, APIView(), obj) + self.assertTrue(result) + + +class IsOwnerOrReadOnlyTest(TestCase): + """Tests for IsOwnerOrReadOnly permission""" + + def setUp(self): + self.factory = RequestFactory() + self.permission = IsOwnerOrReadOnly() + self.user = UserFactory.create_user() + self.other_user = UserFactory.create_user() + + def test_safe_methods_allowed_for_all(self): + """Test GET/HEAD/OPTIONS allowed for non-owners""" + for method in ["get", "head", "options"]: + request = getattr(self.factory, method)("/") + request.user = self.other_user + obj = MockObject(user=self.user) + + result = self.permission.has_object_permission(request, APIView(), obj) + self.assertTrue(result, f"{method.upper()} should be allowed") + + def test_unsafe_methods_denied_for_non_owner(self): + """Test POST/PUT/PATCH/DELETE denied for non-owners""" + for method in ["post", "put", "patch", "delete"]: + request = getattr(self.factory, method)("/") + request.user = self.other_user + obj = MockObject(user=self.user) + + result = self.permission.has_object_permission(request, APIView(), obj) + self.assertFalse(result, f"{method.upper()} should be denied") + + def test_unsafe_methods_allowed_for_owner(self): + """Test unsafe methods allowed for owner""" + request = self.factory.put("/") + request.user = self.user + obj = MockObject(user=self.user) + + result = self.permission.has_object_permission(request, APIView(), obj) + self.assertTrue(result) + + +class IsAdminOrReadOnlyTest(TestCase): + """Tests for IsAdminOrReadOnly permission""" + + def setUp(self): + self.factory = RequestFactory() + self.permission = IsAdminOrReadOnly() + self.user = UserFactory.create_user() + self.admin = UserFactory.create_user(is_staff=True) + + def test_safe_methods_allowed_for_all(self): + """Test GET allowed for non-admins""" + request = self.factory.get("/") + request.user = self.user + + result = self.permission.has_permission(request, APIView()) + self.assertTrue(result) + + def test_unsafe_methods_denied_for_non_admin(self): + """Test POST denied for non-admins""" + request = self.factory.post("/") + request.user = self.user + + result = self.permission.has_permission(request, APIView()) + self.assertFalse(result) + + def test_unsafe_methods_allowed_for_admin(self): + """Test POST allowed for admins""" + request = self.factory.post("/") + request.user = self.admin + + result = self.permission.has_permission(request, APIView()) + self.assertTrue(result) + + +class IsAdminTest(TestCase): + """Tests for IsAdmin permission""" + + def setUp(self): + self.factory = RequestFactory() + self.permission = IsAdmin() + self.user = UserFactory.create_user() + self.admin = UserFactory.create_user(is_staff=True) + + def test_admin_has_permission(self): + """Test admin has permission""" + request = self.factory.get("/") + request.user = self.admin + + result = self.permission.has_permission(request, APIView()) + self.assertTrue(result) + + def test_non_admin_denied(self): + """Test non-admin is denied""" + request = self.factory.get("/") + request.user = self.user + + result = self.permission.has_permission(request, APIView()) + self.assertFalse(result) + + +class IsSuperuserTest(TestCase): + """Tests for IsSuperuser permission""" + + def setUp(self): + self.factory = RequestFactory() + self.permission = IsSuperuser() + self.user = UserFactory.create_user() + self.superuser = UserFactory.create_superuser() + + def test_superuser_has_permission(self): + """Test superuser has permission""" + request = 
self.factory.get("/") + request.user = self.superuser + + result = self.permission.has_permission(request, APIView()) + self.assertTrue(result) + + def test_non_superuser_denied(self): + """Test non-superuser is denied""" + request = self.factory.get("/") + request.user = self.user + + result = self.permission.has_permission(request, APIView()) + self.assertFalse(result) + + +class IsVerifiedTest(TestCase): + """Tests for IsVerified permission""" + + def setUp(self): + self.factory = RequestFactory() + self.permission = IsVerified() + self.user = UserFactory.create_user(is_verified=False) + self.verified_user = UserFactory.create_user(is_verified=True) + + def test_verified_user_has_permission(self): + """Test verified user has permission""" + request = self.factory.get("/") + request.user = self.verified_user + + result = self.permission.has_permission(request, APIView()) + self.assertTrue(result) + + def test_unverified_user_denied(self): + """Test unverified user is denied""" + request = self.factory.get("/") + request.user = self.user + + result = self.permission.has_permission(request, APIView()) + self.assertFalse(result) + + +class IsOwnerOrAdminTest(TestCase): + """Tests for IsOwnerOrAdmin permission""" + + def setUp(self): + self.factory = RequestFactory() + self.permission = IsOwnerOrAdmin() + self.user = UserFactory.create_user() + self.other_user = UserFactory.create_user() + self.admin = UserFactory.create_user(is_staff=True) + + def test_owner_has_permission(self): + """Test owner has permission""" + request = self.factory.get("/") + request.user = self.user + obj = MockObject(user=self.user) + + result = self.permission.has_object_permission(request, APIView(), obj) + self.assertTrue(result) + + def test_admin_has_permission(self): + """Test admin has permission to any object""" + request = self.factory.get("/") + request.user = self.admin + obj = MockObject(user=self.user) + + result = self.permission.has_object_permission(request, APIView(), obj) + self.assertTrue(result) + + def test_non_owner_non_admin_denied(self): + """Test non-owner non-admin is denied""" + request = self.factory.get("/") + request.user = self.other_user + obj = MockObject(user=self.user) + + result = self.permission.has_object_permission(request, APIView(), obj) + self.assertFalse(result) diff --git a/tests/apps/core/test_response.py b/tests/apps/core/test_response.py new file mode 100644 index 0000000..485d4f3 --- /dev/null +++ b/tests/apps/core/test_response.py @@ -0,0 +1,138 @@ +"""Tests for core response wrapper""" + +from apps.core.response import ( + api_created_response, + api_error_response, + api_no_content_response, + api_paginated_response, + api_response, +) +from django.test import TestCase +from rest_framework import status + + +class APIResponseTest(TestCase): + """Tests for api_response function""" + + def test_basic_response(self): + """Test basic successful response""" + response = api_response({"key": "value"}) + + self.assertEqual(response.status_code, status.HTTP_200_OK) + self.assertTrue(response.data["success"]) + self.assertEqual(response.data["data"], {"key": "value"}) + self.assertIsNone(response.data["errors"]) + + def test_response_with_request_id(self): + """Test response includes request ID in meta""" + response = api_response({"key": "value"}, request_id="test-id-123") + + self.assertEqual(response.data["meta"]["request_id"], "test-id-123") + + def test_response_with_custom_status(self): + """Test response with custom status code""" + response = api_response(None, 
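+            # Envelope shape implied by the assertions in this module (the
+            # canonical definition lives in apps/core/response.py):
+            #   {"success": bool, "data": Any, "errors": list | None,
+            #    "meta": {"request_id": ..., "pagination": ...}}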
status_code=status.HTTP_202_ACCEPTED) + + self.assertEqual(response.status_code, status.HTTP_202_ACCEPTED) + + def test_response_with_pagination(self): + """Test response includes pagination in meta""" + pagination = {"page": 1, "total": 100} + response = api_response([1, 2, 3], pagination=pagination) + + self.assertEqual(response.data["meta"]["pagination"], pagination) + + +class APIErrorResponseTest(TestCase): + """Tests for api_error_response function""" + + def test_basic_error_response(self): + """Test basic error response""" + errors = [{"code": "test_error", "message": "Test error message"}] + response = api_error_response(errors) + + self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST) + self.assertFalse(response.data["success"]) + self.assertIsNone(response.data["data"]) + self.assertEqual(response.data["errors"], errors) + + def test_error_response_with_custom_status(self): + """Test error response with custom status code""" + errors = [{"code": "not_found", "message": "Not found"}] + response = api_error_response(errors, status_code=status.HTTP_404_NOT_FOUND) + + self.assertEqual(response.status_code, status.HTTP_404_NOT_FOUND) + + def test_error_response_with_request_id(self): + """Test error response includes request ID""" + errors = [{"code": "error", "message": "Error"}] + response = api_error_response(errors, request_id="error-id-456") + + self.assertEqual(response.data["meta"]["request_id"], "error-id-456") + + +class APICreatedResponseTest(TestCase): + """Tests for api_created_response function""" + + def test_created_response(self): + """Test 201 created response""" + response = api_created_response({"id": 1, "name": "New item"}) + + self.assertEqual(response.status_code, status.HTTP_201_CREATED) + self.assertTrue(response.data["success"]) + self.assertEqual(response.data["data"]["id"], 1) + + +class APINoContentResponseTest(TestCase): + """Tests for api_no_content_response function""" + + def test_no_content_response(self): + """Test 204 no content response""" + response = api_no_content_response() + + self.assertEqual(response.status_code, status.HTTP_204_NO_CONTENT) + self.assertTrue(response.data["success"]) + self.assertIsNone(response.data["data"]) + + +class APIPaginatedResponseTest(TestCase): + """Tests for api_paginated_response function""" + + def test_paginated_response(self): + """Test paginated response with correct metadata""" + data = [{"id": 1}, {"id": 2}] + response = api_paginated_response( + data, page=1, page_size=10, total_count=25 + ) + + self.assertEqual(response.status_code, status.HTTP_200_OK) + self.assertEqual(response.data["data"], data) + + pagination = response.data["meta"]["pagination"] + self.assertEqual(pagination["page"], 1) + self.assertEqual(pagination["page_size"], 10) + self.assertEqual(pagination["total_count"], 25) + self.assertEqual(pagination["total_pages"], 3) + self.assertTrue(pagination["has_next"]) + self.assertFalse(pagination["has_previous"]) + + def test_paginated_response_last_page(self): + """Test paginated response on last page""" + response = api_paginated_response( + [{"id": 1}], page=3, page_size=10, total_count=25 + ) + + pagination = response.data["meta"]["pagination"] + self.assertFalse(pagination["has_next"]) + self.assertTrue(pagination["has_previous"]) + + def test_paginated_response_single_page(self): + """Test paginated response with single page""" + response = api_paginated_response( + [{"id": 1}], page=1, page_size=10, total_count=5 + ) + + pagination = response.data["meta"]["pagination"] + 
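+        # The pagination metadata asserted below follows standard ceiling
+        # arithmetic (assumed to match apps/core/response.py):
+        #
+        #     total_pages  = ceil(total_count / page_size)   # ceil(5 / 10) == 1
+        #     has_next     = page < total_pages              # 1 < 1 -> False
+        #     has_previous = page > 1                        # 1 > 1 -> False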
self.assertEqual(pagination["total_pages"], 1) + self.assertFalse(pagination["has_next"]) + self.assertFalse(pagination["has_previous"]) diff --git a/tests/apps/core/test_services.py b/tests/apps/core/test_services.py new file mode 100644 index 0000000..f4fe1a6 --- /dev/null +++ b/tests/apps/core/test_services.py @@ -0,0 +1,103 @@ +"""Tests for core services""" + +from apps.core.exceptions import NotFoundError +from apps.core.services import BaseService +from django.contrib.auth import get_user_model +from django.test import TestCase + +User = get_user_model() + + +class UserTestService(BaseService[User]): + """Test service using User model""" + + model = User + + +class BaseServiceTest(TestCase): + """Tests for BaseService""" + + def setUp(self): + self.user = User.objects.create_user( + username="testuser", + email="test@example.com", + password="testpass123", + ) + + def test_get_by_id_success(self): + """Test get_by_id returns entity""" + result = UserTestService.get_by_id(self.user.pk) + self.assertEqual(result.pk, self.user.pk) + self.assertEqual(result.email, self.user.email) + + def test_get_by_id_not_found(self): + """Test get_by_id raises NotFoundError for non-existent ID""" + with self.assertRaises(NotFoundError) as context: + UserTestService.get_by_id(99999) + + self.assertEqual(context.exception.code, "not_found") + self.assertIn("User", context.exception.message) + + def test_get_by_id_or_none_found(self): + """Test get_by_id_or_none returns entity when found""" + result = UserTestService.get_by_id_or_none(self.user.pk) + self.assertEqual(result.pk, self.user.pk) + + def test_get_by_id_or_none_not_found(self): + """Test get_by_id_or_none returns None when not found""" + result = UserTestService.get_by_id_or_none(99999) + self.assertIsNone(result) + + def test_get_all(self): + """Test get_all returns queryset""" + User.objects.create_user( + username="testuser2", + email="test2@example.com", + password="testpass123", + ) + + result = UserTestService.get_all() + self.assertEqual(result.count(), 2) + + def test_filter(self): + """Test filter returns filtered queryset""" + result = UserTestService.filter(email="test@example.com") + self.assertEqual(result.count(), 1) + self.assertEqual(result.first().email, "test@example.com") + + def test_exists_true(self): + """Test exists returns True when entity exists""" + result = UserTestService.exists(email="test@example.com") + self.assertTrue(result) + + def test_exists_false(self): + """Test exists returns False when entity does not exist""" + result = UserTestService.exists(email="nonexistent@example.com") + self.assertFalse(result) + + def test_count_all(self): + """Test count returns total count""" + result = UserTestService.count() + self.assertEqual(result, 1) + + def test_count_filtered(self): + """Test count with filter""" + result = UserTestService.count(email="test@example.com") + self.assertEqual(result, 1) + + result = UserTestService.count(email="nonexistent@example.com") + self.assertEqual(result, 0) + + def test_update(self): + """Test update modifies entity fields""" + UserTestService.update(self.user, username="updated_username") + + self.user.refresh_from_db() + self.assertEqual(self.user.username, "updated_username") + + def test_delete(self): + """Test delete removes entity""" + user_pk = self.user.pk + UserTestService.delete(self.user) + + self.assertFalse(User.objects.filter(pk=user_pk).exists()) diff --git a/tests/apps/core/test_signals.py b/tests/apps/core/test_signals.py new file mode 100644 index 
0000000..af7f76f --- /dev/null +++ b/tests/apps/core/test_signals.py @@ -0,0 +1,204 @@ +"""Tests for core signals utilities""" + + +from apps.core.signals import ( + SignalDispatcher, + emit_password_changed, + emit_user_registered, + emit_user_verified, + on_post_save, + on_pre_save, + password_changed, + user_registered, + user_verified, +) +from django.contrib.auth import get_user_model +from django.db.models.signals import post_save, pre_save +from django.test import TestCase + +from tests.apps.user.factories import UserFactory + +User = get_user_model() + + +class SignalDispatcherTest(TestCase): + """Tests for SignalDispatcher""" + + def setUp(self): + self.dispatcher = SignalDispatcher() + + def test_register_handler(self): + """Test handler registration""" + + def my_handler(sender, **kwargs): + pass + + self.dispatcher.register( + signal=post_save, + sender="user.User", + handler=my_handler, + description="Test handler", + ) + + self.assertEqual(len(self.dispatcher._handlers), 1) + self.assertEqual(self.dispatcher._handlers[0]["handler"], my_handler) + + def test_list_handlers(self): + """Test listing registered handlers""" + + def handler1(sender, **kwargs): + pass + + def handler2(sender, **kwargs): + pass + + self.dispatcher.register( + signal=post_save, + sender="user.User", + handler=handler1, + description="Handler 1", + ) + self.dispatcher.register( + signal=pre_save, + sender="user.User", + handler=handler2, + description="Handler 2", + ) + + handlers = self.dispatcher.list_handlers() + + self.assertEqual(len(handlers), 2) + self.assertEqual(handlers[0]["description"], "Handler 1") + self.assertEqual(handlers[1]["description"], "Handler 2") + + def test_connect_all(self): + """Test connecting all handlers""" + handler_called = {"value": False} + + def test_handler(sender, instance, created, **kwargs): + handler_called["value"] = True + + self.dispatcher.register( + signal=post_save, + sender=User, + handler=test_handler, + description="Test", + ) + + self.dispatcher.connect_all() + + # Create user to trigger signal + user = UserFactory.create_user() + + self.assertTrue(handler_called["value"]) + + # Cleanup + self.dispatcher.disconnect_all() + + def test_disconnect_all(self): + """Test disconnecting all handlers""" + handler_called = {"value": False} + + def test_handler(sender, instance, created, **kwargs): + handler_called["value"] = True + + self.dispatcher.register( + signal=post_save, + sender=User, + handler=test_handler, + description="Test", + ) + + self.dispatcher.connect_all() + self.dispatcher.disconnect_all() + + # Create user - handler should not be called + handler_called["value"] = False + user = UserFactory.create_user() + + self.assertFalse(handler_called["value"]) + + +class SignalDecoratorsTest(TestCase): + """Tests for signal decorators""" + + def test_on_post_save_registers_handler(self): + """Test @on_post_save registers handler""" + from apps.core.signals import signal_dispatcher + + initial_count = len(signal_dispatcher._handlers) + + @on_post_save("user.User", description="Test decorator") + def my_handler(sender, **kwargs): + pass + + new_count = len(signal_dispatcher._handlers) + self.assertEqual(new_count, initial_count + 1) + + def test_on_pre_save_registers_handler(self): + """Test @on_pre_save registers handler""" + from apps.core.signals import signal_dispatcher + + initial_count = len(signal_dispatcher._handlers) + + @on_pre_save("user.User", description="Test pre_save") + def my_pre_handler(sender, **kwargs): + pass + + new_count = 
len(signal_dispatcher._handlers) + self.assertEqual(new_count, initial_count + 1) + + +class CustomSignalsTest(TestCase): + """Tests for custom signals""" + + def setUp(self): + self.user = UserFactory.create_user() + + def test_emit_user_registered(self): + """Test user_registered signal emission""" + handler_called = {"value": False, "user": None} + + def handler(sender, user, **kwargs): + handler_called["value"] = True + handler_called["user"] = user + + user_registered.connect(handler) + + try: + emit_user_registered(self.user) + + self.assertTrue(handler_called["value"]) + self.assertEqual(handler_called["user"], self.user) + finally: + user_registered.disconnect(handler) + + def test_emit_user_verified(self): + """Test user_verified signal emission""" + handler_called = {"value": False} + + def handler(sender, user, **kwargs): + handler_called["value"] = True + + user_verified.connect(handler) + + try: + emit_user_verified(self.user) + self.assertTrue(handler_called["value"]) + finally: + user_verified.disconnect(handler) + + def test_emit_password_changed(self): + """Test password_changed signal emission""" + handler_called = {"value": False} + + def handler(sender, user, **kwargs): + handler_called["value"] = True + + password_changed.connect(handler) + + try: + emit_password_changed(self.user) + self.assertTrue(handler_called["value"]) + finally: + password_changed.disconnect(handler) diff --git a/tests/apps/core/test_tasks.py b/tests/apps/core/test_tasks.py new file mode 100644 index 0000000..a50b51c --- /dev/null +++ b/tests/apps/core/test_tasks.py @@ -0,0 +1,82 @@ +"""Tests for core Celery tasks""" + + +from apps.core.tasks import ( + BaseTask, + IdempotentTask, + PeriodicTask, + TimedTask, + TransactionalTask, +) +from celery import Task +from django.test import TestCase + + +class BaseTaskTest(TestCase): + """Tests for BaseTask""" + + def test_inherits_from_celery_task(self): + """Test BaseTask inherits from Celery Task""" + self.assertTrue(issubclass(BaseTask, Task)) + + def test_has_default_retry_settings(self): + """Test BaseTask has default retry settings""" + self.assertEqual(BaseTask.max_retries, 3) + self.assertTrue(BaseTask.retry_backoff) + self.assertEqual(BaseTask.retry_backoff_max, 600) + + def test_acks_late_enabled(self): + """Test acks_late is enabled""" + self.assertTrue(BaseTask.acks_late) + + def test_reject_on_worker_lost(self): + """Test reject_on_worker_lost is enabled""" + self.assertTrue(BaseTask.reject_on_worker_lost) + + +class TransactionalTaskTest(TestCase): + """Tests for TransactionalTask""" + + def test_inherits_from_base_task(self): + """Test TransactionalTask inherits from BaseTask""" + self.assertTrue(issubclass(TransactionalTask, BaseTask)) + + +class IdempotentTaskTest(TestCase): + """Tests for IdempotentTask""" + + def test_inherits_from_base_task(self): + """Test IdempotentTask inherits from BaseTask""" + self.assertTrue(issubclass(IdempotentTask, BaseTask)) + + def test_has_lock_timeout(self): + """Test IdempotentTask has lock_timeout attribute""" + self.assertEqual(IdempotentTask.lock_timeout, 3600) + + +class TimedTaskTest(TestCase): + """Tests for TimedTask""" + + def test_inherits_from_base_task(self): + """Test TimedTask inherits from BaseTask""" + self.assertTrue(issubclass(TimedTask, BaseTask)) + + def test_has_slow_threshold(self): + """Test TimedTask has slow_threshold attribute""" + self.assertEqual(TimedTask.slow_threshold, 60) + + +class PeriodicTaskTest(TestCase): + """Tests for PeriodicTask""" + + def 
test_inherits_from_timed_task(self): + """Test PeriodicTask inherits from TimedTask""" + self.assertTrue(issubclass(PeriodicTask, TimedTask)) + + def test_max_retries_is_one(self): + """Test max_retries is 1 for periodic tasks""" + self.assertEqual(PeriodicTask.max_retries, 1) + + def test_autoretry_for_is_empty(self): + """Test autoretry_for is empty for periodic tasks""" + self.assertEqual(PeriodicTask.autoretry_for, ()) diff --git a/tests/apps/core/test_views.py b/tests/apps/core/test_views.py new file mode 100644 index 0000000..cbf2400 --- /dev/null +++ b/tests/apps/core/test_views.py @@ -0,0 +1,101 @@ +"""Tests for core views (health checks)""" + +from django.urls import reverse +from rest_framework import status +from rest_framework.test import APITestCase + + +class HealthCheckViewTest(APITestCase): + """Tests for HealthCheckView""" + + def test_health_check_url_reverse(self): + """Test reverse URL resolution for health check""" + url = reverse("core:health") + self.assertEqual(url, "/health/") + + def test_health_check_success(self): + """Test health check returns healthy status""" + url = reverse("core:health") + response = self.client.get(url) + + self.assertEqual(response.status_code, status.HTTP_200_OK) + self.assertIn("status", response.data) + self.assertIn("version", response.data) + self.assertIn("checks", response.data) + self.assertIn("database", response.data["checks"]) + + def test_health_check_database_up(self): + """Test health check reports database as up""" + url = reverse("core:health") + response = self.client.get(url) + + self.assertEqual(response.data["checks"]["database"]["status"], "up") + self.assertIn("latency_ms", response.data["checks"]["database"]) + + +class LivenessViewTest(APITestCase): + """Tests for LivenessView""" + + def test_liveness_url_reverse(self): + """Test reverse URL resolution for liveness""" + url = reverse("core:liveness") + self.assertEqual(url, "/health/live/") + + def test_liveness_returns_alive(self): + """Test liveness probe returns alive status""" + url = reverse("core:liveness") + response = self.client.get(url) + + self.assertEqual(response.status_code, status.HTTP_200_OK) + self.assertEqual(response.data["status"], "alive") + + +class ReadinessViewTest(APITestCase): + """Tests for ReadinessView""" + + def test_readiness_url_reverse(self): + """Test reverse URL resolution for readiness""" + url = reverse("core:readiness") + self.assertEqual(url, "/health/ready/") + + def test_readiness_returns_ready(self): + """Test readiness probe returns ready when DB is available""" + url = reverse("core:readiness") + response = self.client.get(url) + + self.assertEqual(response.status_code, status.HTTP_200_OK) + self.assertEqual(response.data["status"], "ready") + + +class APIVersioningURLTest(APITestCase): + """Tests for API versioning URL structure""" + + def test_api_v1_user_register_reverse(self): + """Test reverse URL for user registration""" + url = reverse("api_v1:user:register") + self.assertEqual(url, "/api/v1/users/register/") + + def test_api_v1_user_login_reverse(self): + """Test reverse URL for user login""" + url = reverse("api_v1:user:login") + self.assertEqual(url, "/api/v1/users/login/") + + def test_api_v1_user_logout_reverse(self): + """Test reverse URL for user logout""" + url = reverse("api_v1:user:logout") + self.assertEqual(url, "/api/v1/users/logout/") + + def test_api_v1_user_current_user_reverse(self): + """Test reverse URL for current user""" + url = reverse("api_v1:user:current_user") + self.assertEqual(url, 
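+            # The nested "api_v1:user:*" names presumably come from
+            # src/config/api_v1_urls.py including the user app under the
+            # "user" namespace, e.g. path("users/", include((urls, "user"))).
+            # Only the resulting reverse paths are asserted in these tests.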
"/api/v1/users/me/") + + def test_api_v1_user_token_refresh_reverse(self): + """Test reverse URL for token refresh""" + url = reverse("api_v1:user:token_refresh") + self.assertEqual(url, "/api/v1/users/token/refresh/") + + def test_api_v1_user_password_change_reverse(self): + """Test reverse URL for password change""" + url = reverse("api_v1:user:password_change") + self.assertEqual(url, "/api/v1/users/password/change/") diff --git a/tests/apps/core/test_viewsets.py b/tests/apps/core/test_viewsets.py new file mode 100644 index 0000000..ab122d1 --- /dev/null +++ b/tests/apps/core/test_viewsets.py @@ -0,0 +1,86 @@ +"""Tests for core ViewSets""" + +from apps.core.pagination import StandardPagination +from apps.core.viewsets import ( + BaseViewSet, + BulkMixin, + OwnerViewSet, + ReadOnlyViewSet, +) +from django.test import TestCase +from rest_framework import viewsets +from rest_framework.permissions import IsAuthenticated + + +class BaseViewSetTest(TestCase): + """Tests for BaseViewSet""" + + def test_inherits_from_model_viewset(self): + """Test BaseViewSet inherits from ModelViewSet""" + self.assertTrue(issubclass(BaseViewSet, viewsets.ModelViewSet)) + + def test_has_pagination_class(self): + """Test BaseViewSet has pagination_class""" + self.assertEqual(BaseViewSet.pagination_class, StandardPagination) + + def test_has_permission_classes(self): + """Test BaseViewSet has permission_classes""" + self.assertIn(IsAuthenticated, BaseViewSet.permission_classes) + + def test_has_filter_backends(self): + """Test BaseViewSet has filter_backends""" + self.assertTrue(hasattr(BaseViewSet, "filter_backends")) + self.assertIsInstance(BaseViewSet.filter_backends, list) + self.assertTrue(len(BaseViewSet.filter_backends) > 0) + + def test_has_default_ordering(self): + """Test BaseViewSet has default ordering""" + self.assertEqual(BaseViewSet.ordering, ["-created_at"]) + + def test_has_serializer_classes_dict(self): + """Test BaseViewSet has serializer_classes dict""" + self.assertTrue(hasattr(BaseViewSet, "serializer_classes")) + self.assertIsInstance(BaseViewSet.serializer_classes, dict) + + +class ReadOnlyViewSetTest(TestCase): + """Tests for ReadOnlyViewSet""" + + def test_inherits_from_readonly_model_viewset(self): + """Test ReadOnlyViewSet inherits from ReadOnlyModelViewSet""" + self.assertTrue(issubclass(ReadOnlyViewSet, viewsets.ReadOnlyModelViewSet)) + + def test_has_pagination_class(self): + """Test ReadOnlyViewSet has pagination_class""" + self.assertEqual(ReadOnlyViewSet.pagination_class, StandardPagination) + + def test_has_filter_backends(self): + """Test ReadOnlyViewSet has filter_backends""" + self.assertTrue(hasattr(ReadOnlyViewSet, "filter_backends")) + self.assertTrue(len(ReadOnlyViewSet.filter_backends) > 0) + + +class OwnerViewSetTest(TestCase): + """Tests for OwnerViewSet""" + + def test_inherits_from_base_viewset(self): + """Test OwnerViewSet inherits from BaseViewSet""" + self.assertTrue(issubclass(OwnerViewSet, BaseViewSet)) + + def test_has_owner_field(self): + """Test OwnerViewSet has owner_field attribute""" + self.assertEqual(OwnerViewSet.owner_field, "user") + + +class BulkMixinTest(TestCase): + """Tests for BulkMixin""" + + def test_has_bulk_create_method(self): + """Test BulkMixin has bulk_create method""" + self.assertTrue(hasattr(BulkMixin, "bulk_create")) + self.assertTrue(callable(BulkMixin.bulk_create)) + + def test_has_bulk_delete_method(self): + """Test BulkMixin has bulk_delete method""" + self.assertTrue(hasattr(BulkMixin, "bulk_delete")) + 
self.assertTrue(callable(BulkMixin.bulk_delete)) diff --git a/tests/apps/user/__init__.py b/tests/apps/user/__init__.py new file mode 100644 index 0000000..4474db6 --- /dev/null +++ b/tests/apps/user/__init__.py @@ -0,0 +1,10 @@ +""" +Tests for user application + +This package contains all tests for the user app including: +- Model tests +- Serializer tests +- Service tests +- View tests +- Factory classes for test data generation +""" diff --git a/tests/apps/user/factories.py b/tests/apps/user/factories.py new file mode 100644 index 0000000..4fcd031 --- /dev/null +++ b/tests/apps/user/factories.py @@ -0,0 +1,93 @@ +"""Factories for creating test objects using factory_boy and faker""" + +import factory +from apps.user.models import Profile, User +from faker import Faker + +fake = Faker("ru_RU") + + +class UserFactory(factory.django.DjangoModelFactory): + """Factory for creating users""" + + class Meta: + model = User + skip_postgeneration_save = True + + email = factory.LazyAttribute(lambda _: fake.unique.email()) + username = factory.LazyAttribute(lambda _: fake.unique.user_name()) + phone = factory.LazyAttribute( + lambda _: f"+7{fake.numerify('##########')}" + ) + is_verified = False + is_staff = False + is_superuser = False + is_active = True + + @factory.lazy_attribute + def password(self): + return "testpass123" + + @classmethod + def _create(cls, model_class, *args, **kwargs): + """Override creation so the password is hashed correctly""" + password = kwargs.pop("password", "testpass123") + obj = super()._create(model_class, *args, **kwargs) + obj.set_password(password) + obj.save() + return obj + + @classmethod + def create_user(cls, **kwargs): + """Create a regular user (for backward compatibility)""" + return cls.create(**kwargs) + + @classmethod + def create_superuser(cls, **kwargs): + """Create a superuser""" + defaults = { + "is_staff": True, + "is_superuser": True, + } + defaults.update(kwargs) + return cls.create(**defaults) + + +class ProfileFactory(factory.django.DjangoModelFactory): + """Factory for creating profiles""" + + class Meta: + model = Profile + django_get_or_create = ("user",) # Use get_or_create to avoid duplicates + + user = factory.SubFactory(UserFactory) + first_name = factory.LazyAttribute(lambda _: fake.first_name()) + last_name = factory.LazyAttribute(lambda _: fake.last_name()) + bio = factory.LazyAttribute(lambda _: fake.text(max_nb_chars=200)) + date_of_birth = factory.LazyAttribute( + lambda _: fake.date_of_birth(minimum_age=18, maximum_age=80) + ) + + @classmethod + def create_profile(cls, user=None, **kwargs): + """Create a profile (for backward compatibility)""" + if user is not None: + # Check whether a profile already exists (a signal may have created it) + try: + profile = user.profile + # Update the existing profile + for key, value in kwargs.items(): + setattr(profile, key, value) + # Fill fields with faker if not provided + if "first_name" not in kwargs: + profile.first_name = fake.first_name() + if "last_name" not in kwargs: + profile.last_name = fake.last_name() + if "bio" not in kwargs: + profile.bio = fake.text(max_nb_chars=200) + profile.save() + return profile + except Profile.DoesNotExist: + pass + kwargs["user"] = user + return cls.create(**kwargs) diff --git a/tests/apps/user/test_models.py b/tests/apps/user/test_models.py new file mode 100644 index 0000000..1ae409a --- /dev/null +++ b/tests/apps/user/test_models.py @@ -0,0 +1,132 @@ +"""Tests for user models""" + +from django.test import
TestCase +from faker import Faker + +from .factories import ProfileFactory, UserFactory + +fake = Faker("ru_RU") + + +class UserModelTest(TestCase): + """Tests for User model""" + + def setUp(self): + self.user = UserFactory.create_user() + self.superuser = UserFactory.create_superuser() + + def test_user_creation(self): + """Test user creation""" + self.assertTrue(self.user.email) + self.assertTrue(self.user.username) + + def test_user_str_representation(self): + """Test user string representation""" + expected = f"{self.user.username} ({self.user.email})" + self.assertEqual(str(self.user), expected) + + def test_superuser_creation(self): + """Test superuser creation""" + self.assertTrue(self.superuser.is_staff) + self.assertTrue(self.superuser.is_superuser) + + def test_user_email_unique(self): + """Test email field is unique""" + self.assertTrue(self.user._meta.get_field("email").unique) + + def test_user_username_required(self): + """Test username is a required field""" + self.assertFalse(self.user._meta.get_field("username").blank) + + def test_user_phone_optional(self): + """Test phone field is optional""" + phone_field = self.user._meta.get_field("phone") + self.assertTrue(phone_field.blank) + self.assertTrue(phone_field.null) + + def test_user_is_verified_default_false(self): + """Test is_verified defaults to False""" + field = self.user._meta.get_field("is_verified") + self.assertFalse(field.default) + + +class ProfileModelTest(TestCase): + """Tests for Profile model""" + + def setUp(self): + # Create the profile through the factory so its fields are populated + self.user = UserFactory.create_user() + self.profile = ProfileFactory.create_profile(user=self.user) + + def test_profile_creation(self): + """Test profile creation""" + self.assertIsNotNone(self.profile.user) + # Check that the profile is linked to the user + self.assertEqual(self.profile.user, self.user) + # Check that the names were filled in by faker + self.assertIsNotNone(self.profile.first_name) + self.assertIsNotNone(self.profile.last_name) + self.assertTrue(len(self.profile.first_name) > 0) + self.assertTrue(len(self.profile.last_name) > 0) + + def test_profile_str_representation(self): + """Test profile string representation""" + expected = f"Profile of {self.profile.user.username}" + self.assertEqual(str(self.profile), expected) + + def test_profile_one_to_one_relationship(self): + """Test OneToOne relationship with User""" + self.assertIsNotNone(self.profile.user) + + def test_profile_first_name_optional(self): + """Test first_name field is optional""" + field = self.profile._meta.get_field("first_name") + self.assertTrue(field.blank) + self.assertTrue(field.null) + + def test_profile_last_name_optional(self): + """Test last_name field is optional""" + field = self.profile._meta.get_field("last_name") + self.assertTrue(field.blank) + self.assertTrue(field.null) + + def test_profile_bio_optional(self): + """Test bio field is optional""" + field = self.profile._meta.get_field("bio") + self.assertTrue(field.blank) + self.assertTrue(field.null) + + def test_profile_avatar_optional(self): + """Test avatar field is optional""" + field = self.profile._meta.get_field("avatar") + self.assertTrue(field.blank) + self.assertTrue(field.null) + + def test_profile_date_of_birth_optional(self): + """Test date_of_birth field is optional""" + field = self.profile._meta.get_field("date_of_birth") + self.assertTrue(field.blank) + self.assertTrue(field.null) + + def test_profile_full_name_property(self): + """Test full_name property""" + # Test with
both names + first_name = fake.first_name() + last_name = fake.last_name() + self.profile.first_name = first_name + self.profile.last_name = last_name + self.assertEqual(self.profile.full_name, f"{first_name} {last_name}") + + # Test with only first name + self.profile.last_name = "" + self.assertEqual(self.profile.full_name, first_name) + + # Test with only last name + self.profile.first_name = "" + self.profile.last_name = last_name + self.assertEqual(self.profile.full_name, last_name) + + # Test with no names (fallback to username) + self.profile.first_name = "" + self.profile.last_name = "" + self.assertEqual(self.profile.full_name, self.profile.user.username) diff --git a/tests/apps/user/test_serializers.py b/tests/apps/user/test_serializers.py new file mode 100644 index 0000000..e3ada24 --- /dev/null +++ b/tests/apps/user/test_serializers.py @@ -0,0 +1,291 @@ +"""Tests for user serializers""" + +from apps.user.serializers import ( + LoginSerializer, + PasswordChangeSerializer, + ProfileUpdateSerializer, + TokenSerializer, + UserRegistrationSerializer, + UserSerializer, + UserUpdateSerializer, +) +from django.contrib.auth import get_user_model +from django.test import TestCase +from faker import Faker + +from .factories import ProfileFactory, UserFactory + +User = get_user_model() +fake = Faker("ru_RU") + + +class UserRegistrationSerializerTest(TestCase): + """Tests for UserRegistrationSerializer""" + + def setUp(self): + self.password = fake.password(length=12, special_chars=False) + self.user_data = { + "email": fake.unique.email(), + "username": fake.unique.user_name(), + "password": self.password, + "password_confirm": self.password, + "phone": f"+7{fake.numerify('##########')}", + } + + def test_valid_registration_data(self): + """Test valid registration data""" + serializer = UserRegistrationSerializer(data=self.user_data) + self.assertTrue(serializer.is_valid()) + + def test_passwords_do_not_match(self): + """Test validation fails when passwords don't match""" + data = self.user_data.copy() + data["password_confirm"] = fake.password(length=12, special_chars=False) + + serializer = UserRegistrationSerializer(data=data) + + self.assertFalse(serializer.is_valid()) + self.assertIn("non_field_errors", serializer.errors) + + def test_short_password(self): + """Test validation fails with short password""" + short_password = fake.pystr(min_chars=3, max_chars=5) + data = self.user_data.copy() + data["password"] = short_password + data["password_confirm"] = short_password + + serializer = UserRegistrationSerializer(data=data) + + self.assertFalse(serializer.is_valid()) + self.assertIn("password", serializer.errors) + + def test_duplicate_email(self): + """Test validation fails with duplicate email""" + existing_user = UserFactory.create_user() + data = self.user_data.copy() + data["email"] = existing_user.email + + serializer = UserRegistrationSerializer(data=data) + + self.assertFalse(serializer.is_valid()) + self.assertIn("email", serializer.errors) + + def test_duplicate_username(self): + """Test validation fails with duplicate username""" + existing_user = UserFactory.create_user() + data = self.user_data.copy() + data["username"] = existing_user.username + + serializer = UserRegistrationSerializer(data=data) + + self.assertFalse(serializer.is_valid()) + self.assertIn("username", serializer.errors) + + def test_create_user(self): + """Test user creation through serializer""" + serializer = UserRegistrationSerializer(data=self.user_data) + self.assertTrue(serializer.is_valid()) + + 
user = serializer.save() + + self.assertIsInstance(user, User) + self.assertEqual(user.email, self.user_data["email"]) + self.assertEqual(user.username, self.user_data["username"]) + self.assertTrue(user.check_password(self.user_data["password"])) + + +class UserSerializerTest(TestCase): + """Tests for UserSerializer""" + + def setUp(self): + self.user = UserFactory.create_user() + ProfileFactory.create_profile(user=self.user) + + def test_user_serialization(self): + """Test user serialization""" + serializer = UserSerializer(self.user) + data = serializer.data + + self.assertEqual(data["id"], self.user.id) + self.assertEqual(data["email"], self.user.email) + self.assertEqual(data["username"], self.user.username) + self.assertEqual(data["phone"], self.user.phone) + self.assertEqual(data["is_verified"], self.user.is_verified) + self.assertIn("profile", data) + self.assertIn("created_at", data) + self.assertIn("updated_at", data) + + def test_read_only_fields(self): + """Test that read-only fields are not writable""" + read_only_fields = ["id", "is_verified", "created_at", "updated_at"] + serializer = UserSerializer() + + for field_name in read_only_fields: + self.assertIn(field_name, serializer.Meta.read_only_fields) + + +class UserUpdateSerializerTest(TestCase): + """Tests for UserUpdateSerializer""" + + def setUp(self): + self.user = UserFactory.create_user() + + def test_valid_update_data(self): + """Test valid update data""" + update_data = { + "username": fake.unique.user_name(), + "phone": f"+7{fake.numerify('##########')}", + } + + serializer = UserUpdateSerializer(self.user, data=update_data, partial=True) + self.assertTrue(serializer.is_valid()) + + updated_user = serializer.save() + self.assertEqual(updated_user.username, update_data["username"]) + self.assertEqual(updated_user.phone, update_data["phone"]) + + def test_fields_allowed(self): + """Test only allowed fields can be updated""" + serializer = UserUpdateSerializer() + allowed_fields = ["username", "phone"] + + self.assertEqual(set(serializer.Meta.fields), set(allowed_fields)) + + +class ProfileUpdateSerializerTest(TestCase): + """Tests for ProfileUpdateSerializer""" + + def setUp(self): + self.user = UserFactory.create_user() + self.profile = ProfileFactory.create_profile(user=self.user) + + def test_valid_profile_update_data(self): + """Test valid profile update data""" + update_data = { + "first_name": fake.first_name(), + "last_name": fake.last_name(), + "bio": fake.text(max_nb_chars=200), + "date_of_birth": str(fake.date_of_birth(minimum_age=18, maximum_age=80)), + } + + serializer = ProfileUpdateSerializer( + self.profile, data=update_data, partial=True + ) + self.assertTrue(serializer.is_valid()) + + updated_profile = serializer.save() + self.assertEqual(updated_profile.first_name, update_data["first_name"]) + self.assertEqual(updated_profile.last_name, update_data["last_name"]) + self.assertEqual(updated_profile.bio, update_data["bio"]) + + def test_fields_allowed(self): + """Test only allowed fields can be updated""" + serializer = ProfileUpdateSerializer() + allowed_fields = ["first_name", "last_name", "bio", "avatar", "date_of_birth"] + + self.assertEqual(set(serializer.Meta.fields), set(allowed_fields)) + + +class LoginSerializerTest(TestCase): + """Tests for LoginSerializer""" + + def setUp(self): + self.login_data = { + "email": fake.email(), + "password": fake.password(length=12, special_chars=False), + } + + def test_valid_login_data(self): + """Test valid login data""" + serializer = 
LoginSerializer(data=self.login_data) + self.assertTrue(serializer.is_valid()) + + def test_missing_email(self): + """Test validation fails without email""" + data = {"password": fake.password(length=12, special_chars=False)} + serializer = LoginSerializer(data=data) + self.assertFalse(serializer.is_valid()) + self.assertIn("email", serializer.errors) + + def test_missing_password(self): + """Test validation fails without password""" + data = {"email": fake.email()} + serializer = LoginSerializer(data=data) + self.assertFalse(serializer.is_valid()) + self.assertIn("password", serializer.errors) + + +class TokenSerializerTest(TestCase): + """Tests for TokenSerializer""" + + def test_valid_token_data(self): + """Test valid token data""" + token_data = { + "access": fake.pystr(min_chars=50, max_chars=100), + "refresh": fake.pystr(min_chars=50, max_chars=100), + } + + serializer = TokenSerializer(data=token_data) + self.assertTrue(serializer.is_valid()) + + def test_missing_access_token(self): + """Test validation fails without access token""" + data = {"refresh": fake.pystr(min_chars=50, max_chars=100)} + serializer = TokenSerializer(data=data) + self.assertFalse(serializer.is_valid()) + self.assertIn("access", serializer.errors) + + def test_missing_refresh_token(self): + """Test validation fails without refresh token""" + data = {"access": fake.pystr(min_chars=50, max_chars=100)} + serializer = TokenSerializer(data=data) + self.assertFalse(serializer.is_valid()) + self.assertIn("refresh", serializer.errors) + + +class PasswordChangeSerializerTest(TestCase): + """Tests for PasswordChangeSerializer""" + + def setUp(self): + self.old_password = fake.password(length=12, special_chars=False) + self.new_password = fake.password(length=12, special_chars=False) + self.password_data = { + "old_password": self.old_password, + "new_password": self.new_password, + "new_password_confirm": self.new_password, + } + + def test_valid_password_change_data(self): + """Test valid password change data""" + serializer = PasswordChangeSerializer(data=self.password_data) + self.assertTrue(serializer.is_valid()) + + def test_passwords_do_not_match(self): + """Test validation fails when new passwords don't match""" + data = self.password_data.copy() + data["new_password_confirm"] = fake.password(length=12, special_chars=False) + + serializer = PasswordChangeSerializer(data=data) + + self.assertFalse(serializer.is_valid()) + self.assertIn("non_field_errors", serializer.errors) + + def test_short_new_password(self): + """Test validation fails with short new password""" + short_password = fake.pystr(min_chars=3, max_chars=5) + data = self.password_data.copy() + data["new_password"] = short_password + data["new_password_confirm"] = short_password + + serializer = PasswordChangeSerializer(data=data) + + self.assertFalse(serializer.is_valid()) + self.assertIn("new_password", serializer.errors) + + def test_missing_old_password(self): + """Test validation fails without old password""" + new_password = fake.password(length=12, special_chars=False) + data = {"new_password": new_password, "new_password_confirm": new_password} + serializer = PasswordChangeSerializer(data=data) + self.assertFalse(serializer.is_valid()) + self.assertIn("old_password", serializer.errors) diff --git a/tests/apps/user/test_services.py b/tests/apps/user/test_services.py new file mode 100644 index 0000000..4c1d030 --- /dev/null +++ b/tests/apps/user/test_services.py @@ -0,0 +1,224 @@ +"""Tests for user services""" + +from apps.core.exceptions import 
NotFoundError +from apps.user.services import ProfileService, UserService +from django.contrib.auth import get_user_model +from django.test import TestCase +from faker import Faker +from rest_framework_simplejwt.tokens import RefreshToken + +from .factories import ProfileFactory, UserFactory + +User = get_user_model() +fake = Faker("ru_RU") + + +class UserServiceTest(TestCase): + """Tests for UserService""" + + def setUp(self): + self.user = UserFactory.create_user() + self.user_data = { + "email": fake.unique.email(), + "username": fake.unique.user_name(), + "password": fake.password(length=12, special_chars=False), + } + + def test_create_user_success(self): + """Test successful user creation""" + user = UserService.create_user(**self.user_data) + + self.assertIsInstance(user, User) + self.assertEqual(user.email, self.user_data["email"]) + self.assertEqual(user.username, self.user_data["username"]) + self.assertTrue(user.check_password(self.user_data["password"])) + self.assertFalse(user.is_verified) # Default value + + def test_create_user_with_extra_fields(self): + """Test user creation with extra fields""" + extra_data = self.user_data.copy() + extra_data["email"] = fake.unique.email() + extra_data["username"] = fake.unique.user_name() + extra_data["phone"] = f"+7{fake.numerify('##########')}" + extra_data["is_verified"] = True + + user = UserService.create_user(**extra_data) + + self.assertEqual(user.phone, extra_data["phone"]) + self.assertTrue(user.is_verified) + + def test_get_user_by_email_found(self): + """Test getting user by existing email""" + found_user = UserService.get_user_by_email(self.user.email) + self.assertEqual(found_user, self.user) + + def test_get_user_by_email_not_found(self): + """Test getting user by non-existing email raises NotFoundError""" + nonexistent_email = fake.unique.email() + with self.assertRaises(NotFoundError) as context: + UserService.get_user_by_email(nonexistent_email) + + self.assertEqual(context.exception.code, "not_found") + self.assertIn("email", context.exception.message) + + def test_get_user_by_email_or_none_not_found(self): + """Test getting user by non-existing email returns None""" + found_user = UserService.get_user_by_email_or_none(fake.unique.email()) + self.assertIsNone(found_user) + + def test_get_user_by_id_found(self): + """Test getting user by existing ID""" + found_user = UserService.get_user_by_id(self.user.id) + self.assertEqual(found_user, self.user) + + def test_get_user_by_id_not_found(self): + """Test getting user by non-existing ID raises NotFoundError""" + nonexistent_id = fake.pyint(min_value=900000, max_value=999999) + with self.assertRaises(NotFoundError) as context: + UserService.get_user_by_id(nonexistent_id) + + self.assertEqual(context.exception.code, "not_found") + self.assertIn(str(nonexistent_id), context.exception.message) + + def test_get_user_by_id_or_none_not_found(self): + """Test getting user by non-existing ID returns None""" + found_user = UserService.get_user_by_id_or_none( + fake.pyint(min_value=900000, max_value=999999) + ) + self.assertIsNone(found_user) + + def test_update_user_success(self): + """Test successful user update""" + new_data = { + "username": fake.unique.user_name(), + "phone": f"+7{fake.numerify('##########')}", + } + + updated_user = UserService.update_user(self.user.id, **new_data) + + self.assertIsNotNone(updated_user) + self.assertEqual(updated_user.username, new_data["username"]) + self.assertEqual(updated_user.phone, new_data["phone"]) + + def 
test_update_user_not_found(self): + """Test updating non-existing user raises NotFoundError""" + nonexistent_id = fake.pyint(min_value=900000, max_value=999999) + with self.assertRaises(NotFoundError): + UserService.update_user(nonexistent_id, username=fake.user_name()) + + def test_delete_user_success(self): + """Test successful user deletion""" + user_id = self.user.id + UserService.delete_user(user_id) + + # Verify user is deleted + with self.assertRaises(NotFoundError): + UserService.get_user_by_id(user_id) + + def test_delete_user_not_found(self): + """Test deleting non-existing user raises NotFoundError""" + nonexistent_id = fake.pyint(min_value=900000, max_value=999999) + with self.assertRaises(NotFoundError): + UserService.delete_user(nonexistent_id) + + def test_get_tokens_for_user(self): + """Test JWT token generation""" + tokens = UserService.get_tokens_for_user(self.user) + + self.assertIn("refresh", tokens) + self.assertIn("access", tokens) + self.assertIsInstance(tokens["refresh"], str) + self.assertIsInstance(tokens["access"], str) + + # Verify tokens are valid + refresh = RefreshToken(tokens["refresh"]) + self.assertEqual(refresh["user_id"], self.user.id) + + def test_verify_email_success(self): + """Test successful email verification""" + self.user.is_verified = False + self.user.save() + + user = UserService.verify_email(self.user.id) + + self.assertEqual(user.id, self.user.id) + self.user.refresh_from_db() + self.assertTrue(self.user.is_verified) + + def test_verify_email_not_found(self): + """Test email verification for non-existing user raises NotFoundError""" + nonexistent_id = fake.pyint(min_value=900000, max_value=999999) + with self.assertRaises(NotFoundError): + UserService.verify_email(nonexistent_id) + + +class ProfileServiceTest(TestCase): + """Tests for ProfileService""" + + def setUp(self): + self.user = UserFactory.create_user() + self.profile = ProfileFactory.create_profile(user=self.user) + self.profile_data = { + "first_name": fake.first_name(), + "last_name": fake.last_name(), + "bio": fake.text(max_nb_chars=200), + "date_of_birth": str(fake.date_of_birth(minimum_age=18, maximum_age=80)), + } + + def test_get_profile_by_user_id_found(self): + """Test getting profile by existing user ID""" + found_profile = ProfileService.get_profile_by_user_id(self.user.id) + self.assertEqual(found_profile, self.profile) + # Check that user is selected related + self.assertIsNotNone(found_profile.user) + + def test_get_profile_by_user_id_not_found(self): + """Test getting profile by non-existing user ID raises NotFoundError""" + nonexistent_id = fake.pyint(min_value=900000, max_value=999999) + with self.assertRaises(NotFoundError) as context: + ProfileService.get_profile_by_user_id(nonexistent_id) + + self.assertEqual(context.exception.code, "not_found") + + def test_get_profile_by_user_id_or_none_not_found(self): + """Test getting profile by non-existing user ID returns None""" + nonexistent_id = fake.pyint(min_value=900000, max_value=999999) + found_profile = ProfileService.get_profile_by_user_id_or_none(nonexistent_id) + self.assertIsNone(found_profile) + + def test_update_profile_success(self): + """Test successful profile update""" + updated_profile = ProfileService.update_profile( + self.user.id, **self.profile_data + ) + + self.assertIsNotNone(updated_profile) + self.assertEqual(updated_profile.first_name, self.profile_data["first_name"]) + self.assertEqual(updated_profile.last_name, self.profile_data["last_name"]) + self.assertEqual(updated_profile.bio, 
self.profile_data["bio"]) + + def test_update_profile_not_found(self): + """Test updating profile for non-existing user raises NotFoundError""" + nonexistent_id = fake.pyint(min_value=900000, max_value=999999) + with self.assertRaises(NotFoundError): + ProfileService.update_profile(nonexistent_id, first_name=fake.first_name()) + + def test_get_full_profile_data_success(self): + """Test getting full profile data""" + profile_data = ProfileService.get_full_profile_data(self.user.id) + + self.assertIsNotNone(profile_data) + self.assertEqual(profile_data["id"], self.user.id) + self.assertEqual(profile_data["email"], self.user.email) + self.assertEqual(profile_data["username"], self.user.username) + self.assertEqual(profile_data["first_name"], self.profile.first_name) + self.assertEqual(profile_data["last_name"], self.profile.last_name) + self.assertEqual(profile_data["full_name"], self.profile.full_name) + self.assertEqual(profile_data["bio"], self.profile.bio) + self.assertEqual(profile_data["is_verified"], self.user.is_verified) + + def test_get_full_profile_data_not_found(self): + """Test getting full profile data for non-existing user raises NotFoundError""" + nonexistent_id = fake.pyint(min_value=900000, max_value=999999) + with self.assertRaises(NotFoundError): + ProfileService.get_full_profile_data(nonexistent_id) diff --git a/tests/apps/user/test_views.py b/tests/apps/user/test_views.py new file mode 100644 index 0000000..4e3aa44 --- /dev/null +++ b/tests/apps/user/test_views.py @@ -0,0 +1,312 @@ +"""Tests for user DRF views""" + +from apps.user.models import Profile +from apps.user.services import UserService +from django.contrib.auth import get_user_model +from django.urls import reverse +from faker import Faker +from rest_framework import status +from rest_framework.test import APITestCase + +from .factories import ProfileFactory, UserFactory + +User = get_user_model() +fake = Faker("ru_RU") + + +class RegisterViewTest(APITestCase): + """Tests for RegisterView""" + + def setUp(self): + self.register_url = reverse("api_v1:user:register") + self.password = fake.password(length=12, special_chars=False) + self.user_data = { + "email": fake.unique.email(), + "username": fake.unique.user_name(), + "password": self.password, + "password_confirm": self.password, + "phone": f"+7{fake.numerify('##########')}", + } + + def test_register_success(self): + """Test successful user registration""" + response = self.client.post(self.register_url, self.user_data, format="json") + + self.assertEqual(response.status_code, status.HTTP_201_CREATED) + self.assertIn("user", response.data) + self.assertIn("tokens", response.data) + self.assertIn("refresh", response.data["tokens"]) + self.assertIn("access", response.data["tokens"]) + + # Verify user was created + self.assertTrue(User.objects.filter(email=self.user_data["email"]).exists()) + + def test_register_passwords_do_not_match(self): + """Test registration fails when passwords don't match""" + data = self.user_data.copy() + data["password_confirm"] = fake.password(length=12, special_chars=False) + + response = self.client.post(self.register_url, data, format="json") + + self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST) + self.assertIn("non_field_errors", response.data) + + def test_register_duplicate_email(self): + """Test registration fails with duplicate email""" + # Create existing user + existing_user = UserFactory.create_user() + + # Use the same email as existing user + data = self.user_data.copy() + data["email"] = 
existing_user.email + + response = self.client.post(self.register_url, data, format="json") + + self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST) + self.assertIn("email", response.data) + + def test_register_short_password(self): + """Test registration fails with short password""" + short_password = fake.pystr(min_chars=3, max_chars=5) + data = self.user_data.copy() + data["password"] = short_password + data["password_confirm"] = short_password + + response = self.client.post(self.register_url, data, format="json") + + self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST) + self.assertIn("password", response.data) + + +class LoginViewTest(APITestCase): + """Tests for LoginView""" + + def setUp(self): + self.login_url = reverse("api_v1:user:login") + self.password = fake.password(length=12, special_chars=False) + self.user = UserFactory.create_user(password=self.password) + + self.login_data = {"email": self.user.email, "password": self.password} + + def test_login_success(self): + """Test successful login""" + response = self.client.post(self.login_url, self.login_data, format="json") + + self.assertEqual(response.status_code, status.HTTP_200_OK) + self.assertIn("refresh", response.data) + self.assertIn("access", response.data) + + def test_login_invalid_credentials(self): + """Test login fails with invalid credentials""" + data = self.login_data.copy() + data["password"] = fake.password(length=12, special_chars=False) + + response = self.client.post(self.login_url, data, format="json") + + self.assertEqual(response.status_code, status.HTTP_401_UNAUTHORIZED) + self.assertIn("error", response.data) + + def test_login_nonexistent_user(self): + """Test login fails for nonexistent user""" + data = { + "email": fake.unique.email(), + "password": fake.password(length=12, special_chars=False), + } + + response = self.client.post(self.login_url, data, format="json") + + self.assertEqual(response.status_code, status.HTTP_401_UNAUTHORIZED) + + +class CurrentUserViewTest(APITestCase): + """Tests for CurrentUserView""" + + def setUp(self): + self.user = UserFactory.create_user() + ProfileFactory.create_profile(user=self.user) + self.current_user_url = reverse("api_v1:user:current_user") + self.tokens = UserService.get_tokens_for_user(self.user) + self.client.credentials(HTTP_AUTHORIZATION=f"Bearer {self.tokens['access']}") + + def test_get_current_user_authenticated(self): + """Test getting current user when authenticated""" + response = self.client.get(self.current_user_url) + + self.assertEqual(response.status_code, status.HTTP_200_OK) + self.assertEqual(response.data["id"], self.user.id) + self.assertEqual(response.data["email"], self.user.email) + self.assertIn("profile", response.data) + + def test_get_current_user_unauthenticated(self): + """Test getting current user when unauthenticated""" + self.client.credentials() # Remove auth header + response = self.client.get(self.current_user_url) + + self.assertEqual(response.status_code, status.HTTP_401_UNAUTHORIZED) + + +class UserUpdateViewTest(APITestCase): + """Tests for UserUpdateView""" + + def setUp(self): + self.user = UserFactory.create_user() + self.update_url = reverse("api_v1:user:user_update") + self.tokens = UserService.get_tokens_for_user(self.user) + self.client.credentials(HTTP_AUTHORIZATION=f"Bearer {self.tokens['access']}") + + self.update_data = { + "username": fake.unique.user_name(), + "phone": f"+7{fake.numerify('##########')}", + } + + def test_update_user_success(self): + """Test successful user 
update""" + response = self.client.patch(self.update_url, self.update_data, format="json") + + self.assertEqual(response.status_code, status.HTTP_200_OK) + self.assertEqual(response.data["username"], self.update_data["username"]) + self.assertEqual(response.data["phone"], self.update_data["phone"]) + + # Verify in database + self.user.refresh_from_db() + self.assertEqual(self.user.username, self.update_data["username"]) + + def test_update_user_unauthenticated(self): + """Test user update fails when unauthenticated""" + self.client.credentials() # Remove auth header + response = self.client.patch(self.update_url, self.update_data, format="json") + + self.assertEqual(response.status_code, status.HTTP_401_UNAUTHORIZED) + + +class ProfileDetailViewTest(APITestCase): + """Tests for ProfileDetailView""" + + def setUp(self): + self.user = UserFactory.create_user() + self.profile = ProfileFactory.create_profile(user=self.user) + self.profile_url = reverse("api_v1:user:profile_detail") + self.tokens = UserService.get_tokens_for_user(self.user) + self.client.credentials(HTTP_AUTHORIZATION=f"Bearer {self.tokens['access']}") + + self.update_data = { + "first_name": fake.first_name(), + "last_name": fake.last_name(), + "bio": fake.text(max_nb_chars=200), + } + + def test_get_profile_success(self): + """Test successful profile retrieval""" + response = self.client.get(self.profile_url) + + self.assertEqual(response.status_code, status.HTTP_200_OK) + self.assertEqual(response.data["first_name"], self.profile.first_name) + + def test_update_profile_success(self): + """Test successful profile update""" + response = self.client.patch(self.profile_url, self.update_data, format="json") + + self.assertEqual(response.status_code, status.HTTP_200_OK) + self.assertEqual(response.data["first_name"], self.update_data["first_name"]) + self.assertEqual(response.data["last_name"], self.update_data["last_name"]) + + # Verify in database + self.profile.refresh_from_db() + self.assertEqual(self.profile.first_name, self.update_data["first_name"]) + + def test_profile_created_if_not_exists(self): + """Test profile is created if it doesn't exist""" + # Delete existing profile + self.profile.delete() + + response = self.client.get(self.profile_url) + + self.assertEqual(response.status_code, status.HTTP_200_OK) + # Profile should be created automatically + self.assertTrue(Profile.objects.filter(user=self.user).exists()) + + +class PasswordChangeViewTest(APITestCase): + """Tests for PasswordChangeView""" + + def setUp(self): + self.old_password = fake.password(length=12, special_chars=False) + self.new_password = fake.password(length=12, special_chars=False) + self.user = UserFactory.create_user(password=self.old_password) + self.password_change_url = reverse("api_v1:user:password_change") + self.tokens = UserService.get_tokens_for_user(self.user) + self.client.credentials(HTTP_AUTHORIZATION=f"Bearer {self.tokens['access']}") + + self.password_data = { + "old_password": self.old_password, + "new_password": self.new_password, + "new_password_confirm": self.new_password, + } + + def test_change_password_success(self): + """Test successful password change""" + response = self.client.post( + self.password_change_url, self.password_data, format="json" + ) + + self.assertEqual(response.status_code, status.HTTP_200_OK) + self.assertIn("message", response.data) + + # Verify password was changed + self.user.refresh_from_db() + self.assertTrue(self.user.check_password(self.new_password)) + + def 
test_change_password_wrong_old_password(self): + """Test password change fails with wrong old password""" + data = self.password_data.copy() + data["old_password"] = fake.password(length=12, special_chars=False) + + response = self.client.post(self.password_change_url, data, format="json") + + self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST) + self.assertIn("error", response.data) + + def test_change_password_passwords_do_not_match(self): + """Test password change fails when new passwords don't match""" + data = self.password_data.copy() + data["new_password_confirm"] = fake.password(length=12, special_chars=False) + + response = self.client.post(self.password_change_url, data, format="json") + + self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST) + self.assertIn("non_field_errors", response.data) + + +class TokenRefreshViewTest(APITestCase): + """Tests for TokenRefreshView""" + + def setUp(self): + self.user = UserFactory.create_user() + self.refresh_url = reverse("api_v1:user:token_refresh") + self.tokens = UserService.get_tokens_for_user(self.user) + + def test_refresh_token_success(self): + """Test successful token refresh""" + data = {"refresh": self.tokens["refresh"]} + response = self.client.post(self.refresh_url, data, format="json") + + self.assertEqual(response.status_code, status.HTTP_200_OK) + self.assertIn("access", response.data) + self.assertIn("refresh", response.data) + # The returned refresh token may equal the original or be rotated, depending on whether refresh-token rotation is enabled + + def test_refresh_token_invalid(self): + """Test token refresh fails with invalid refresh token""" + data = {"refresh": fake.pystr(min_chars=20, max_chars=50)} + response = self.client.post(self.refresh_url, data, format="json") + + self.assertEqual(response.status_code, status.HTTP_401_UNAUTHORIZED) + self.assertIn("error", response.data) + + def test_refresh_token_missing(self): + """Test token refresh fails without refresh token""" + response = self.client.post(self.refresh_url, {}, format="json") + + self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST) + self.assertIn("error", response.data)
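
Reviewer note (not part of the patch): new test modules are expected to build on the factories added in tests/apps/user/factories.py above. The following is a minimal, illustrative sketch of that usage; the test class, test name, and the absolute import path are assumptions made for this example, while the default password and helper behaviour are taken from the factory code in this commit.

"""Illustrative sketch only -- not part of the commit above."""
from django.test import TestCase

# Hypothetical absolute import; the test modules in this commit use the
# relative form "from .factories import ProfileFactory, UserFactory".
from tests.apps.user.factories import ProfileFactory, UserFactory


class ExampleFactoryUsageTest(TestCase):
    """Hypothetical test showing how the commit's factories are intended to be used."""

    def test_create_verified_user_with_profile(self):
        # create_user() goes through UserFactory._create(), which hashes the
        # default password "testpass123" via set_password().
        user = UserFactory.create_user(is_verified=True)

        # create_profile() updates a profile that a signal may already have
        # created for the user, or builds a new one with the given fields.
        profile = ProfileFactory.create_profile(user=user, bio="Example bio")

        self.assertTrue(user.check_password("testpass123"))
        self.assertTrue(user.is_verified)
        self.assertEqual(profile.user, user)
        self.assertEqual(profile.bio, "Example bio")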