diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index e831679..9594179 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -5,17 +5,17 @@ repos: - id: ruff name: ruff lint (src only) files: ^src/.*\.py$ - args: [--config=ruff.toml, --fix, --exit-non-zero-on-fix] + args: [--fix, --exit-non-zero-on-fix] exclude: | (?x)^( src/.*/migrations/.*| src/.*/__pycache__/.* )$ - + - id: ruff-format name: ruff format (src only) files: ^src/.*\.py$ - args: [--config=ruff.toml] + args: [] exclude: | (?x)^( src/.*/migrations/.*| @@ -28,18 +28,18 @@ repos: - id: trailing-whitespace name: check trailing whitespace (src only) files: ^src/.*\.(py|txt|md|yaml|yml)$ - + - id: end-of-file-fixer name: fix end of file (src only) files: ^src/.*\.(py|txt|md|yaml|yml)$ - + - id: check-yaml name: check yaml syntax (src only) files: ^src/.*\.ya?ml$ - + - id: check-added-large-files name: check large files - args: ['--maxkb=500'] + args: ["--maxkb=500"] - repo: local hooks: @@ -53,4 +53,4 @@ repos: (?x)^( src/.*/migrations/.*| src/.*/__pycache__/.* - )$ \ No newline at end of file + )$ diff --git a/.qoder/rules/main.md b/.qoder/rules/main.md index cd92a0e..de8d1c1 100644 --- a/.qoder/rules/main.md +++ b/.qoder/rules/main.md @@ -1,9 +1,6 @@ --- trigger: always_on --- ---- -trigger: always_on ---- ## 0) Язык и стиль общения (СТРОГО) - ИИ-агент **ВСЕГДА отвечает на русском языке** @@ -264,5 +261,162 @@ systemctl restart apache2 - объясняет, почему оно не подходит - запрашивает разрешение в чате -## 15) Структура проекта -- +## 15) Структура проекта и миксины (ОБЯЗАТЕЛЬНО) + +### 15.0 Правило Core-First (КРИТИЧНО) +**ПЕРЕД созданием любого нового компонента** агент ОБЯЗАН проверить модуль `apps.core`: + +``` +src/apps/core/ +├── mixins.py # Model mixins (TimestampMixin, SoftDeleteMixin, etc.) 
+├── services.py # BaseService, BackgroundJobService +├── views.py # Health checks, BackgroundJob API +├── viewsets.py # BaseViewSet, ReadOnlyViewSet +├── exceptions.py # APIError, NotFoundError, ValidationError +├── permissions.py # IsOwner, IsAdminOrReadOnly, etc. +├── pagination.py # CursorPagination +├── filters.py # BaseFilterSet +├── cache.py # cache_result, invalidate_cache +├── tasks.py # BaseTask для Celery +├── logging.py # StructuredLogger +├── middleware.py # RequestIDMiddleware +├── signals.py # SignalDispatcher +├── responses.py # APIResponse wrapper +├── openapi.py # api_docs decorator +└── management/commands/base.py # BaseAppCommand +``` + +**Порядок действий:** +1. Проверить `apps.core` на наличие нужного базового класса/миксина +2. Наследоваться от существующего, а не создавать с нуля +3. Если нужного нет — обсудить добавление в core + +❌ **ЗАПРЕЩЕНО:** создавать дублирующую функциональность в app-модулях + +--- + +### 15.1 Model Mixins +При создании моделей **ОБЯЗАТЕЛЬНО** использовать миксины из `apps.core.mixins`: + +| Миксин | Когда использовать | Поля | +|--------|-------------------|------| +| `TimestampMixin` | **ВСЕГДА** для любой модели | `created_at`, `updated_at` | +| `UUIDPrimaryKeyMixin` | Когда нужен UUID вместо int ID | `id` (UUID) | +| `SoftDeleteMixin` | Когда нельзя физически удалять | `is_deleted`, `deleted_at` | +| `AuditMixin` | Когда нужно знать кто создал/изменил | `created_by`, `updated_by` | +| `OrderableMixin` | Для сортируемых списков | `order` | +| `StatusMixin` | Для моделей со статусами | `status` | +| `SlugMixin` | Для URL-friendly идентификаторов | `slug` | + +**Пример правильного использования:** +```python +from apps.core.mixins import TimestampMixin, SoftDeleteMixin, AuditMixin + +class Document(TimestampMixin, SoftDeleteMixin, AuditMixin, models.Model): + """Документ с историей и мягким удалением.""" + title = models.CharField(max_length=200) + + class Meta: + ordering = ['-created_at'] +``` + +**Порядок 
наследования миксинов:** +1. `UUIDPrimaryKeyMixin` (если нужен) +2. `TimestampMixin` +3. `SoftDeleteMixin` (если нужен) +4. `AuditMixin` (если нужен) +5. `OrderableMixin` / `StatusMixin` / `SlugMixin` +6. `models.Model` (последним) + +--- + +### 15.2 Management Commands +Все management commands наследуются от `BaseAppCommand`: + +```python +from apps.core.management.commands.base import BaseAppCommand + +class Command(BaseAppCommand): + help = 'Описание команды' + use_transaction = True # Обернуть в транзакцию + + def add_arguments(self, parser): + super().add_arguments(parser) # Добавляет --dry-run, --silent + parser.add_argument('--my-arg', type=str) + + def execute_command(self, *args, **options): + items = MyModel.objects.all() + + for item in self.progress_iter(items, desc="Обработка"): + if not self.dry_run: + self.process(item) + + return "Обработано успешно" +``` + +**Возможности BaseAppCommand:** +- `--dry-run` — тестовый запуск без изменений +- `--silent` — минимальный вывод +- `self.progress_iter()` — прогресс-бар +- `self.timed_operation()` — измерение времени +- `self.confirm()` — подтверждение +- `self.log_info/success/warning/error()` — логирование + +--- + +### 15.3 Background Jobs (Celery) +Для отслеживания статуса фоновых задач использовать `BackgroundJob`: + +```python +# В сервисе при запуске задачи +from apps.core.services import BackgroundJobService + +job = BackgroundJobService.create_job( + task_id=task.id, + task_name="apps.myapp.tasks.process_data", + user_id=request.user.id, +) + +# В Celery таске +from apps.core.models import BackgroundJob + +@shared_task(bind=True) +def my_task(self, data): + job = BackgroundJob.objects.get(task_id=self.request.id) + job.mark_started() + + for i, item in enumerate(items): + process(item) + job.update_progress(i * 100 // len(items), "Обработка...") + + job.complete(result={"processed": len(items)}) +``` + +**API эндпоинты:** +- `GET /api/v1/jobs/` — список задач пользователя +- `GET 
/api/v1/jobs/{task_id}/` — статус конкретной задачи + +--- + +### 15.4 Factories (тестирование) +Все фабрики используют `factory_boy` + `faker`: + +```python +import factory +from faker import Faker + +fake = Faker("ru_RU") + +class MyModelFactory(factory.django.DjangoModelFactory): + class Meta: + model = MyModel + + name = factory.LazyAttribute(lambda _: fake.word()) + email = factory.LazyAttribute(lambda _: fake.unique.email()) +``` + +**Правила:** +- Никакого хардкода в тестах (`"test@example.com"` → `fake.email()`) +- Использовать `fake.unique.*` для уникальных полей +- Локаль: `Faker("ru_RU")` для русских данных + diff --git a/CHANGELOG.md b/CHANGELOG.md new file mode 100644 index 0000000..5717a2b --- /dev/null +++ b/CHANGELOG.md @@ -0,0 +1,91 @@ +# Changelog + +Все значимые изменения в проекте документируются в этом файле. + +Формат основан на [Keep a Changelog](https://keepachangelog.com/ru/1.0.0/). + +--- + +## [0.2.0] - 2026-01-21 + +### Добавлено + +#### Core Module (`apps.core`) +- **Model Mixins** (`mixins.py`): + - `TimestampMixin` — автоматические `created_at`, `updated_at` + - `UUIDPrimaryKeyMixin` — UUID вместо auto-increment ID + - `SoftDeleteMixin` — мягкое удаление с `is_deleted`, `deleted_at` + - `AuditMixin` — отслеживание `created_by`, `updated_by` + - `OrderableMixin` — поле `order` для сортировки + - `StatusMixin` — статусы draft/active/inactive/archived + - `SlugMixin` — URL-friendly идентификаторы + +- **Base Services** (`services.py`): + - `BaseService` — базовый CRUD сервис + - `BaseReadOnlyService` — только чтение + - `TransactionMixin` — управление транзакциями + - `BulkOperationsMixin` — массовые операции (bulk_create_chunked, bulk_update_or_create, bulk_delete, bulk_update_fields) + - `QueryOptimizerMixin` — декларативная оптимизация запросов (select_related, prefetch_related, only, defer) + - `BackgroundJobService` — управление фоновыми задачами + +- **Base ViewSets** (`viewsets.py`): + - `BaseViewSet` — базовый CRUD ViewSet с 
пагинацией, фильтрацией, логированием + - `ReadOnlyViewSet` — только чтение + - `OwnerViewSet` — фильтрация по владельцу + - `BulkMixin` — массовые операции через API (bulk_create, bulk_update, bulk_delete) + +- **Background Job Tracking** (`models.py`): + - Модель `BackgroundJob` для отслеживания Celery задач + - API endpoints: `GET /api/v1/jobs/`, `GET /api/v1/jobs/{task_id}/` + - Статусы: pending, started, success, failure, revoked, retry + - Прогресс выполнения и результаты + +- **Management Commands** (`management/commands/base.py`): + - `BaseAppCommand` — базовый класс для команд + - Поддержка `--dry-run`, `--silent` + - Прогресс-бар, измерение времени, логирование + +- **Прочее**: + - `exceptions.py` — кастомные исключения API + - `permissions.py` — базовые permissions (IsOwner, IsAdminOrReadOnly и др.) + - `pagination.py` — CursorPagination + - `filters.py` — BaseFilterSet + - `cache.py` — декораторы кэширования + - `tasks.py` — BaseTask для Celery + - `logging.py` — структурированное логирование + - `middleware.py` — RequestIDMiddleware + - `signals.py` — SignalDispatcher + - `response.py` — унифицированные API ответы + - `openapi.py` — декоратор для документации + +#### Тестирование +- Переход на `factory_boy` + `faker` (вместо model_bakery + uuid) +- Фабрики: `UserFactory`, `ProfileFactory` +- 297 тестов + +#### Конфигурация +- API versioning: `/api/v1/` +- Отдельный `test.py` для настроек тестов +- Обновлён `pyproject.toml` +- Правила разработки в `.qoder/rules/main.md` + +### Изменено +- Структура тестов перенесена в `/tests/apps/` +- Удалён `conftest.py` (pytest не используется) +- Обновлены URLs с namespace + +### Удалено +- `ruff.toml` (конфигурация перенесена в pyproject.toml) +- `CI_CD_SUMMARY.md` +- `custom_test_runner.py` + +--- + +## [0.1.0] - 2026-01-20 + +### Добавлено +- Начальная структура проекта +- Приложение `user` с JWT аутентификацией +- Базовые модели User и Profile +- Docker конфигурация +- Pre-commit hooks diff --git 
a/CI_CD_SUMMARY.md b/CI_CD_SUMMARY.md deleted file mode 100644 index 700f754..0000000 --- a/CI_CD_SUMMARY.md +++ /dev/null @@ -1,120 +0,0 @@ -# CI/CD Pipeline Summary - -## Что было сделано - -### 1. Реализован Gitea Actions Pipeline -Создан файл конфигурации: `.gitea/workflows/ci-cd.yml` - -Этапы pipeline: -- **lint** - проверка кода с помощью ruff -- **test** - запуск тестов Django с PostgreSQL и Redis -- **build** - сборка Docker образов (web и celery) -- **push** - пуш образов в Gitea Container Registry - -### 2. Настройка зависимостей -- Созданы файлы `requirements.txt` и `requirements-dev.txt` из `pyproject.toml` -- Удалены проблемные зависимости (ipdb, pdbpp) из-за конфликтов -- Обновлена конфигурация ruff для игнорирования Django-специфичных ошибок - -### 3. Исправления в коде -- Обновлен `ruff.toml` для корректной работы с Django -- Добавлен `TEST_RUNNER` в настройки Django -- Исправлен кастомный test runner - -### 4. Docker-образы -- Проверена сборка обоих образов: - - `mostovik-web` - основное приложение - - `mostovik-celery` - worker и beat сервисы - -## Как использовать - -### 1. Настройка Gitea -В настройках репозитория установите: - -**Secrets:** -``` -GITEA_USERNAME = ваш_пользователь -GITEA_TOKEN = ваш_токен_доступа -``` - -**Variables:** -``` -GITEA_REGISTRY_URL = адрес_вашего_gitea_registry -``` - -### 2. Локальная проверка -Перед коммитом запускайте: -```bash -# Линтинг -uv run ruff check . -uv run ruff format . --check - -# Тесты (если нужно) -uv run python run_tests.py - -# Pre-commit -pre-commit run --all-files -``` - -### 3. 
Pipeline запускается автоматически -- При пуше в ветки `main` и `develop` -- При создании Pull Request'ов - -## Особенности реализации - -### Линтинг -- Используется **ruff** как основной линтер -- Проверяются все файлы проекта -- Автоматическое форматирование отключено в CI (только проверка) - -### Тестирование -- Запускаются в изолированном окружении -- Используются сервисы PostgreSQL и Redis -- Для обхода проблем с ipdb используется специальный скрипт `run_tests.py` - -### Сборка Docker -- Используются существующие Dockerfile'ы -- Кэширование слоев для ускорения сборки -- Multi-stage сборка не используется (по требованиям проекта) - -### Пуш образов -- Только для веток `main` и `develop` -- Теги: - - `latest` для основной ветки - - `{branch-name}` для feature-веток - - `{commit-sha}` для каждого коммита - -## Безопасность -- Все токены хранятся в Secrets -- Переменные окружения не попадают в логи -- Используются минимальные права для сервисов - -## Мониторинг -Pipeline можно отслеживать в интерфейсе Gitea: -`Repository → Actions` - -Каждый job имеет подробные логи для диагностики проблем. - -## Возможные проблемы и решения - -### 1. Ошибки линтинга -```bash -# Локальное исправление -uv run ruff check . --fix -uv run ruff format . -``` - -### 2. Проблемы с тестами -- Проверьте запущены ли PostgreSQL и Redis -- Убедитесь в корректности переменных окружения -- Проверьте наличие миграций - -### 3. Ошибки сборки Docker -```bash -# Локальная проверка -docker build -f docker/Dockerfile.web -t test-web . -docker build -f docker/Dockerfile.celery -t test-celery . 
-``` - -## Поддержка -Документация по настройке находится в `docs/ci-cd-setup.md` diff --git a/Makefile b/Makefile index 2cbdd30..20281b2 100644 --- a/Makefile +++ b/Makefile @@ -4,16 +4,37 @@ help: @echo "Доступные команды:" + @echo "" + @echo "🔧 Установка и настройка:" @echo " make install - Установка зависимостей" + @echo "" + @echo "🐳 Docker управление:" @echo " make dev-up - Запуск разработческого окружения (Docker)" @echo " make dev-down - Остановка разработческого окружения" - @echo " make migrate - Выполнение миграций Django" - @echo " make createsuperuser - Создание суперпользователя" - @echo " make test - Запуск тестов" + @echo " make logs - Просмотр логов (Docker)" + @echo "" + @echo "🧪 Тестирование:" + @echo " make test - Запуск тестов (по умолчанию все)" + + @echo " Примеры:" + @echo " make test # Все тесты" + @echo " make test TARGET=user # Только user app" + @echo " make test TARGET=test_models # Только модели" + @echo "" + @echo "🔍 Качество кода:" @echo " make lint - Проверка кода линтерами" @echo " make format - Форматирование кода" + @echo " make type-check - Проверка типов" + @echo " make security-check - Проверка безопасности" + @echo " make pre-commit - Запуск pre-commit hooks" + @echo "" + @echo "🗄️ База данных:" + @echo " make migrate - Выполнение миграций Django" + @echo " make createsuperuser - Создание суперпользователя" + @echo "" + @echo "🛠️ Утилиты:" @echo " make shell - Запуск Django shell" - @echo " make logs - Просмотр логов (Docker)" + @echo " make setup-dev - Настройка окружения разработки" @echo " make clean - Очистка временных файлов" install: @@ -27,36 +48,102 @@ dev-up: dev-down: docker-compose down +# Универсальная команда для тестирования с поддержкой аргументов +# Использование: +# make test # Все тесты +# make test TARGET=user # Тесты user app +# make test TARGET=models # Тесты моделей +# make test TARGET=views # Тесты представлений +# make test TARGET=serializers # Тесты сериализаторов +# make test TARGET=services # Тесты 
сервисов test: - pytest src/ -v + @if [ "$(TARGET)" ]; then \ + echo "🧪 Запуск тестов: $(TARGET)"; \ + python run_tests_simple.py $(TARGET); \ + else \ + echo "🧪 Запуск всех тестов..."; \ + python run_tests_simple.py; \ + fi lint: - flake8 src/ + @echo "🔍 Проверка кода линтерами..." + ruff check src/ black --check src/ isort --check-only src/ + @echo "✅ Линтинг завершен" format: + @echo "🎨 Форматирование кода..." black src/ isort src/ - flake8 src/ + ruff check --fix src/ + @echo "✅ Форматирование завершено" migrate: + @echo "🗄️ Выполнение миграций..." cd src && python manage.py makemigrations cd src && python manage.py migrate createsuperuser: + @echo "👤 Создание суперпользователя..." cd src && python manage.py createsuperuser shell: + @echo "🐚 Запуск Django shell..." cd src && python manage.py shell logs: + @echo "📋 Просмотр логов..." docker-compose logs -f +# Дополнительные команды для тестирования +test-cov: + @echo "🧪 Запуск тестов с покрытием..." + python run_tests_simple.py --coverage + +test-fast: + @echo "🚀 Быстрые тесты (без медленных)..." + python run_tests_simple.py --fast + +test-parallel: + @echo "⚡ Параллельный запуск тестов..." + python run_tests_simple.py --parallel=auto + +test-failfast: + @echo "❌ Тесты с остановкой при первой ошибке..." + python run_tests_simple.py --failfast + +# Дополнительные команды для качества кода +type-check: + @echo "🔍 Проверка типов с mypy..." + mypy src/ + +security-check: + @echo "🔒 Проверка безопасности..." + bandit -r src/ -f json -o bandit-report.json || bandit -r src/ + +pre-commit: + @echo "🔧 Запуск pre-commit hooks..." + pre-commit run --all-files + +# Установка и настройка +setup-dev: + @echo "⚙️ Настройка окружения разработки..." + pre-commit install + @echo "✅ Окружение настроено" + clean: + @echo "🧹 Очистка временных файлов..." find . -type f -name "*.pyc" -delete find . 
-type d -name "__pycache__" -delete rm -rf *.log rm -rf htmlcov/ rm -rf .coverage - rm -rf .pytest_cache/ \ No newline at end of file + rm -rf coverage.xml + rm -rf bandit-report.json + rm -rf .pytest_cache/ + rm -rf .mypy_cache/ + rm -rf tests/__pycache__/ + rm -rf tests/apps/__pycache__/ + rm -rf tests/apps/user/__pycache__/ + @echo "✅ Очистка завершена" diff --git a/README.md b/README.md index d672c80..2d9f74c 100644 --- a/README.md +++ b/README.md @@ -17,21 +17,24 @@ ## Структура проекта ``` -src/ -├── config/ # Конфигурация Django -│ ├── settings/ # Настройки (base, dev, prod) -│ ├── celery.py # Конфигурация Celery -│ └── urls.py # URL маршруты -├── apps/ -│ ├── data_processor/ # Приложение обработки данных -│ ├── scraping/ # Приложение веб-скрапинга -│ └── api/ # API endpoints -└── manage.py # Управление Django - -docker/ # Docker конфигурации -deploy/ # Файлы развертывания -requirements.txt # Основные зависимости -requirements-dev.txt # Зависимости для разработки +mostovik-backend/ +├── src/ # Исходный код Django +│ ├── config/ # Конфигурация Django +│ │ ├── settings/ # Настройки (base, dev, prod, test) +│ │ ├── celery.py # Конфигурация Celery +│ │ └── urls.py # URL маршруты +│ ├── apps/ # Django приложения +│ │ └── user/ # Приложение пользователей +│ └── manage.py # Управление Django +├── tests/ # Тесты (в корне проекта) +│ ├── apps/user/ # Тесты для user app +│ ├── conftest.py # Конфигурация pytest +│ └── README.md # Документация по тестам +├── docker/ # Docker конфигурации +├── deploy/ # Файлы развертывания +├── pyproject.toml # Конфигурация проекта и инструментов +├── Makefile # Команды для разработки +└── docker-compose.yml # Docker Compose для разработки ``` ## Быстрый старт (локальная разработка) @@ -44,12 +47,17 @@ curl -LsSf https://astral.sh/uv/install.sh | sh source $HOME/.cargo/env # Создание виртуального окружения с uv -uv venv venv -source venv/bin/activate +uv venv .venv +source .venv/bin/activate # Установка зависимостей через uv -uv pip 
install -r requirements.txt -uv pip install -r requirements-dev.txt +uv pip install -e ".[dev]" + +# Или через Makefile +make install + +# Настройка окружения разработки (pre-commit hooks) +make setup-dev ``` ### 2. Настройка окружения @@ -207,20 +215,40 @@ celery -A config flower ### Запуск тестов ```bash # Запуск всех тестов -pytest +make test # Запуск с покрытием -pytest --cov=src +make test-cov -# Запуск линтеров -flake8 src/ -black src/ +# Запуск только быстрых тестов +make test-fast + +# Запуск тестов конкретного модуля +make test TARGET=user + +# Линтинг и форматирование +make lint +make format + +# Проверка типов +make type-check + +# Проверка безопасности +make security-check ``` ### Создание миграций ```bash +# Через Makefile +make migrate + +# Или напрямую +cd src python manage.py makemigrations python manage.py migrate + +# Создание суперпользователя +make createsuperuser ``` ### Работа с задачами Celery @@ -234,6 +262,39 @@ result = process_extracted_data.delay() print(result.id) # ID задачи ``` +## Конфигурация инструментов + +Все конфигурации инструментов разработки централизованы в файле `pyproject.toml`: + +- **pytest**: настройки тестирования +- **coverage**: отчеты о покрытии кода +- **ruff**: линтинг и форматирование +- **black**: форматирование кода +- **isort**: сортировка импортов +- **mypy**: проверка типов +- **bandit**: проверка безопасности + +### Полезные команды Make + +```bash +# Качество кода +make lint # Проверка линтерами +make format # Форматирование кода +make type-check # Проверка типов +make security-check # Проверка безопасности +make pre-commit # Запуск всех pre-commit hooks + +# Тестирование +make test # Все тесты +make test-cov # Тесты с покрытием +make test-fast # Только быстрые тесты + +# Разработка +make shell # Django shell +make migrate # Миграции +make clean # Очистка временных файлов +``` + ## Безопасность - Все секретные ключи хранятся в переменных окружения diff --git a/pyproject.toml b/pyproject.toml index 
808a65f..fb661b2 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -45,6 +45,8 @@ dependencies = [ "coreapi>=2.3.3", "django-rest-swagger>=2.2.0", "model-bakery>=1.17.0", + "faker>=40.1.2", + "factory-boy>=3.3.0", ] [project.optional-dependencies] @@ -52,43 +54,39 @@ dev = [ # WSGI server "gunicorn==21.2.0", "gevent==23.9.1", - + # Development "django-extensions==3.2.3", "werkzeug==3.0.1", "django-debug-toolbar==4.2.0", - + # Testing "pytest==7.4.4", "pytest-django==4.7.0", "pytest-cov==4.1.0", "factory-boy==3.3.0", "coverage==7.4.0", - + # Linters and formatters "flake8==6.1.0", "black==23.12.1", "isort==5.13.2", "ruff==0.1.14", - + # Documentation "sphinx==7.2.6", "sphinx-rtd-theme==2.0.0", - + # Monitoring "flower==2.0.1", - + # CLI tools "click==8.1.7", "typer==0.9.0", - - # Debugging (removed due to compatibility issues) - # "ipdb==0.13.13", - # "pdbpp==0.10.3", - + # Additional tools "watchdog==3.0.0", - + # Pre-commit hooks "pre-commit==3.6.0", ] @@ -100,32 +98,101 @@ build-backend = "setuptools.build_meta" [tool.setuptools] packages = ["src"] +# ================================================================================== +# PYTEST CONFIGURATION +# ================================================================================== +[tool.pytest.ini_options] +DJANGO_SETTINGS_MODULE = "config.settings.test" +python_paths = ["src"] +testpaths = ["tests"] +addopts = [ + "--verbose", + "--tb=short", + "--reuse-db", + "--nomigrations", + "--strict-markers", + "--strict-config", + "--color=yes", +] + +markers = [ + "slow: marks tests as slow (deselect with '-m \"not slow\"')", + "integration: marks tests as integration tests", + "unit: marks tests as unit tests", + "models: marks tests for models", + "views: marks tests for views", + "serializers: marks tests for serializers", + "services: marks tests for services", + "factories: marks tests for factories", +] + +filterwarnings = [ + "ignore::django.utils.deprecation.RemovedInDjango40Warning", + 
"ignore::django.utils.deprecation.RemovedInDjango41Warning", + "ignore::DeprecationWarning", + "ignore::PendingDeprecationWarning", +] + +norecursedirs = [ + ".git", + ".venv", + "__pycache__", + "*.egg-info", + ".pytest_cache", + "node_modules", + "migrations", +] + +# ================================================================================== +# COVERAGE CONFIGURATION +# ================================================================================== +[tool.coverage.run] +source = ["src"] +omit = [ + "*/migrations/*", + "*/tests/*", + "*/venv/*", + "*/virtualenv/*", + "*/site-packages/*", + "manage.py", + "*/settings/*", + "*/config/wsgi.py", + "*/config/asgi.py", + "*/__pycache__/*", +] +branch = true +relative_files = true + +[tool.coverage.report] +exclude_lines = [ + "pragma: no cover", + "def __repr__", + "if self.debug:", + "if settings.DEBUG", + "raise AssertionError", + "raise NotImplementedError", + "if 0:", + "if __name__ == .__main__.:", + "class .*\\bProtocol\\):", + "@(abc\\.)?abstractmethod", +] +show_missing = true +skip_covered = false +precision = 2 + +[tool.coverage.html] +directory = "htmlcov" + +[tool.coverage.xml] +output = "coverage.xml" + +# ================================================================================== +# RUFF CONFIGURATION (Linting and Code Quality) +# ================================================================================== [tool.ruff] -# Enable pycodestyle (`E`) and Pyflakes (`F`) codes by default. -lint.select = [ - "E", # pycodestyle errors - "W", # pycodestyle warnings - "F", # pyflakes - "I", # isort - "C", # mccabe - "B", # flake8-bugbear - "Q", # flake8-quotes - "DJ", # flake8-django -] +line-length = 88 +target-version = "py311" -lint.extend-ignore = [ - "E501", # line too long, handled by formatter - "DJ01", # Missing docstring (too strict for Django) -] - -# Allow autofix for all enabled rules (when `--fix`) is provided. 
-lint.fixable = ["ALL"] -lint.unfixable = [] - -# Allow unused variables when underscore-prefixed. -lint.dummy-variable-rgx = "^(_+|(_+[a-zA-Z0-9_]*[a-zA-Z0-9]+?))$" - -# Exclude a variety of commonly ignored directories. exclude = [ ".bzr", ".direnv", @@ -152,11 +219,40 @@ exclude = [ "*/__pycache__/*", ] -# Same as Black. -line-length = 88 +[tool.ruff.lint] +# Enable pycodestyle (`E`) and Pyflakes (`F`) codes by default. +select = [ + "E", # pycodestyle errors + "W", # pycodestyle warnings + "F", # pyflakes + "I", # isort + "C", # mccabe + "B", # flake8-bugbear + "Q", # flake8-quotes + "DJ", # flake8-django + "UP", # pyupgrade + "S", # bandit security + "T20", # flake8-print + "SIM", # flake8-simplify +] -# Assume Python 3.11. -target-version = "py311" +extend-ignore = [ + "E501", # line too long, handled by formatter + "DJ01", # Missing docstring (too strict for Django) + "DJ001", # null=True on string fields (architectural decision) + "F403", # star imports (common in Django settings) + "F405", # name may be undefined from star imports (Django settings) + "E402", # module level import not at top (Django settings) + "S101", # Use of assert (common in tests) + "T201", # print statements (useful for debugging) +] + +# Allow autofix for all enabled rules (when `--fix`) is provided. +fixable = ["ALL"] +unfixable = [] + +# Allow unused variables when underscore-prefixed. +dummy-variable-rgx = "^(_+|(_+[a-zA-Z0-9_]*[a-zA-Z0-9]+?))$" [tool.ruff.lint.mccabe] # Unlike Flake8, default to a complexity level of 10. 
@@ -165,10 +261,19 @@ max-complexity = 10 [tool.ruff.lint.per-file-ignores] # Ignore `E402` (import violations) in all `__init__.py` files "__init__.py" = ["E402"] +# Ignore star imports and related errors in settings +"src/config/settings/*" = ["F403", "F405", "E402"] +# Ignore star imports in test runner files +"check_tests.py" = ["F403"] +"run_tests.py" = ["F403"] +"run_tests_simple.py" = ["F403"] # Ignore complexity issues in tests -"tests/*" = ["C901"] -"**/test_*" = ["C901"] -"**/tests.py" = ["C901"] +"tests/*" = ["C901", "S101"] +"**/test_*" = ["C901", "S101"] +"**/tests.py" = ["C901", "S101"] +# Ignore security warnings in test factories +"tests/**/factories.py" = ["S311"] +"**/factories.py" = ["S311"] [tool.ruff.format] # Like Black, use double quotes for strings. @@ -183,6 +288,132 @@ skip-magic-trailing-comma = false # Like Black, automatically detect the appropriate line ending. line-ending = "auto" +# ================================================================================== +# BLACK CONFIGURATION (Code Formatting) +# ================================================================================== +[tool.black] +line-length = 88 +target-version = ['py311'] +include = '\.pyi?$' +extend-exclude = ''' +/( + # directories + \.eggs + | \.git + | \.hg + | \.mypy_cache + | \.tox + | \.venv + | _build + | buck-out + | build + | dist + | migrations +)/ +''' + +# ================================================================================== +# ISORT CONFIGURATION (Import Sorting) +# ================================================================================== +[tool.isort] +profile = "black" +multi_line_output = 3 +line_length = 88 +include_trailing_comma = true +force_grid_wrap = 0 +use_parentheses = true +ensure_newline_before_comments = true +src_paths = ["src"] +skip = ["migrations"] +known_django = ["django"] +known_third_party = [ + "celery", + "redis", + "requests", + "pandas", + "numpy", + "scrapy", + "selenium", + "beautifulsoup4", + 
"rest_framework", + "django_filters", + "corsheaders", + "drf_yasg", + "model_bakery", + "factory", + "pytest", +] +sections = [ + "FUTURE", + "STDLIB", + "DJANGO", + "THIRDPARTY", + "FIRSTPARTY", + "LOCALFOLDER", +] + +# ================================================================================== +# MYPY CONFIGURATION (Type Checking) +# ================================================================================== +[tool.mypy] +python_version = "3.11" +check_untyped_defs = true +ignore_missing_imports = true +warn_unused_ignores = true +warn_redundant_casts = true +warn_unused_configs = true +warn_return_any = true +warn_unreachable = true +strict_optional = true +no_implicit_reexport = true +show_error_codes = true +plugins = ["mypy_django_plugin.main"] + +[[tool.mypy.overrides]] +module = "*.migrations.*" +ignore_errors = true + +[[tool.mypy.overrides]] +module = "tests.*" +disallow_untyped_defs = false + +[tool.django-stubs] +django_settings_module = "config.settings.development" + +# ================================================================================== +# BANDIT CONFIGURATION (Security) +# ================================================================================== +[tool.bandit] +exclude_dirs = ["tests", "migrations"] +tests = ["B201", "B301"] +skips = ["B101", "B601"] + +# ================================================================================== +# PYLINT CONFIGURATION +# ================================================================================== +[tool.pylint.messages_control] +disable = [ + "C0114", # missing-module-docstring + "C0115", # missing-class-docstring + "C0116", # missing-function-docstring + "R0903", # too-few-public-methods (Django models) + "R0901", # too-many-ancestors (Django views) + "W0613", # unused-argument (Django views) + "C0103", # invalid-name (Django field names) +] + +[tool.pylint.format] +max-line-length = 88 + +[tool.pylint.design] +max-args = 10 +max-locals = 25 +max-returns = 10 
+max-branches = 20 + +# ================================================================================== +# DEPENDENCY GROUPS (Alternative to optional-dependencies) +# ================================================================================== [dependency-groups] dev = [ "gunicorn==21.2.0", @@ -204,8 +435,38 @@ dev = [ "flower==2.0.1", "click==8.1.7", "typer==0.9.0", - "ipdb==0.13.13", - "pdbpp==0.10.3", "watchdog==3.0.0", "pre-commit==3.6.0", + "mypy==1.8.0", + "django-stubs==4.2.7", + "types-requests==2.31.0.20240125", + "bandit==1.7.5", +] + +test = [ + "pytest==7.4.4", + "pytest-django==4.7.0", + "pytest-cov==4.1.0", + "pytest-xdist==3.5.0", + "pytest-mock==3.12.0", + "factory-boy==3.3.0", + "model-bakery>=1.17.0", + "coverage==7.4.0", +] + +docs = [ + "sphinx==7.2.6", + "sphinx-rtd-theme==2.0.0", + "sphinx-autodoc-typehints==1.25.2", + "myst-parser==2.0.0", +] + +lint = [ + "ruff==0.1.14", + "black==23.12.1", + "isort==5.13.2", + "mypy==1.8.0", + "django-stubs==4.2.7", + "bandit==1.7.5", + "pre-commit==3.6.0", ] diff --git a/ruff.toml b/ruff.toml deleted file mode 100644 index 5eb1053..0000000 --- a/ruff.toml +++ /dev/null @@ -1,90 +0,0 @@ -# Enable pycodestyle (`E`) and Pyflakes (`F`) codes by default. -select = [ - "E", # pycodestyle errors - "W", # pycodestyle warnings - "F", # pyflakes - "I", # isort - "C", # mccabe - "B", # flake8-bugbear - "Q", # flake8-quotes - "DJ", # flake8-django -] - -extend-ignore = [ - "E501", # line too long, handled by formatter - "DJ01", # Missing docstring (too strict for Django) - "DJ001", # null=True on string fields (architectural decision) - "F403", # star imports (common in Django settings) - "F405", # name may be undefined from star imports (Django settings) - "E402", # module level import not at top (Django settings) -] - -# Allow autofix for all enabled rules (when `--fix`) is provided. -fixable = ["ALL"] -unfixable = [] - -# Exclude a variety of commonly ignored directories. 
-exclude = [ - ".bzr", - ".direnv", - ".eggs", - ".git", - ".git-rewrite", - ".hg", - ".mypy_cache", - ".nox", - ".pants.d", - ".pytype", - ".ruff_cache", - ".svn", - ".tox", - ".venv", - "__pypackages__", - "_build", - "buck-out", - "build", - "dist", - "node_modules", - "venv", - "*/migrations/*", - "*/__pycache__/*", -] - -# Same as Black. -line-length = 88 - -# Allow unused variables when underscore-prefixed. -dummy-variable-rgx = "^(_+|(_+[a-zA-Z0-9_]*[a-zA-Z0-9]+?))$" - -# Assume Python 3.11. -target-version = "py311" - -[mccabe] -# Unlike Flake8, default to a complexity level of 10. -max-complexity = 10 - -[per-file-ignores] -# Ignore `E402` (import violations) in all `__init__.py` files -"__init__.py" = ["E402"] -# Ignore star imports and related errors in settings -"src/config/settings/*" = ["F403", "F405", "E402"] -# Ignore star imports in test runner files -"check_tests.py" = ["F403"] -"run_tests.py" = ["F403"] -# Ignore complexity issues in tests -"tests/*" = ["C901"] -"**/test_*" = ["C901"] -"**/tests.py" = ["C901"] - -[format] -# Like Black, use double quotes for strings. -quote-style = "double" - -# Like Black, indent with spaces, rather than tabs. -indent-style = "space" - -# Like Black, respect magic trailing commas. -skip-magic-trailing-comma = false - -# Like Black, automatically detect the appropriate line ending. 
-line-ending = "auto" \ No newline at end of file diff --git a/run_tests.py b/run_tests.py old mode 100755 new mode 100644 index 38c02f8..db61014 --- a/run_tests.py +++ b/run_tests.py @@ -1,31 +1,268 @@ #!/usr/bin/env python -"""Скрипт для запуска тестов с обходом проблемы ipdb""" +""" +Простой скрипт для запуска тестов, обходящий проблемы с pytest и pdbpp +Использует стандартный Django test runner с улучшенными возможностями +Поддерживает coverage и дополнительные опции +""" import os import sys +from io import StringIO +import argparse import django -# Монкипатчим ipdb до импорта Django -sys.modules["ipdb"] = type("MockModule", (), {"__getattr__": lambda s, n: None})() -# Настройка Django -sys.path.insert(0, os.path.join(os.path.dirname(__file__), "src")) -os.environ.setdefault("DJANGO_SETTINGS_MODULE", "config.settings.development") -django.setup() +def setup_django(): + """Настройка Django окружения""" + # Монкипатчим проблематичные модули + sys.modules["ipdb"] = type("MockModule", (), {"__getattr__": lambda s, n: None})() + + # Добавляем src в PYTHONPATH + src_path = os.path.join(os.path.dirname(__file__), "src") + if src_path not in sys.path: + sys.path.insert(0, src_path) + + # Устанавливаем настройки Django (принудительно для тестов) + os.environ["DJANGO_SETTINGS_MODULE"] = "config.settings.test" + + # Инициализируем Django + django.setup() + + +def run_tests_with_args(test_args, options): + """Запуск тестов с заданными аргументами""" + from django.conf import settings + from django.test.utils import get_runner + + # Получаем test runner + TestRunner = get_runner(settings) + + # Настройки для test runner + runner_kwargs = { + "verbosity": options.verbose, + "interactive": False, + "keepdb": options.keepdb, + "failfast": options.failfast, + } + + # Добавляем parallel если указано + if options.parallel: + runner_kwargs["parallel"] = options.parallel + + test_runner = TestRunner(**runner_kwargs) + + # Запускаем тесты + failures = 
test_runner.run_tests(test_args) + return failures + + +def parse_arguments(): + """Парсинг аргументов командной строки""" + parser = argparse.ArgumentParser(description="Запуск Django тестов с дополнительными возможностями") + + parser.add_argument( + "targets", + nargs="*", + help="Цели тестирования (по умолчанию: все тесты)", + default=["tests"] + ) + + parser.add_argument( + "--coverage", "--cov", + action="store_true", + help="Запуск тестов с измерением покрытия кода" + ) + + parser.add_argument( + "--fast", + action="store_true", + help="Запуск только быстрых тестов (исключает медленные)" + ) + + parser.add_argument( + "--failfast", + action="store_true", + help="Остановка при первой ошибке" + ) + + parser.add_argument( + "--verbose", "-v", + action="count", + default=2, + help="Уровень детализации вывода" + ) + + parser.add_argument( + "--keepdb", + action="store_true", + help="Сохранить тестовую базу данных" + ) + + parser.add_argument( + "--parallel", + type=int, + metavar="N", + help="Запуск тестов в N параллельных процессах" + ) + + args = parser.parse_args() + + # Преобразуем пути для удобства использования + test_targets = [] + + for target in args.targets: + # Преобразование путей файлов в модули Django + if target.endswith(".py"): + # Убираем расширение .py + target = target[:-3] + + # Заменяем слеши на точки для модульных путей + if "/" in target: + target = target.replace("/", ".") + + # Добавляем префикс tests если его нет + if not target.startswith("tests"): + if target == "user": + # Если просто "user", запускаем все тесты user app + target = "tests.apps.user" + elif target in ["models", "views", "serializers", "services"]: + # Если это простые ключевые слова, добавляем test_ префикс + target = f"tests.apps.user.test_{target}" + elif ( + "test_" in target + or "models" in target + or "views" in target + or "serializers" in target + or "services" in target + ): + # Если это конкретный файл тестов с префиксом или содержит ключевые слова + if not 
target.startswith("test_"): + target = f"tests.apps.user.test_{target}" + else: + target = f"tests.apps.user.{target}" + else: + # Общий случай + target = f"tests.{target}" + + test_targets.append(target) + + args.targets = test_targets if test_targets else ["tests"] + return args + + +def print_test_info(test_targets, options): + """Вывод информации о запуске тестов""" + print("🧪 Запуск тестов (Django test runner)...") + + if test_targets == ["tests"]: + print("📁 Цель: Все тесты в проекте") + else: + print(f"📁 Цели: {', '.join(test_targets)}") + + print(f"⚙️ Настройки Django: {os.environ.get('DJANGO_SETTINGS_MODULE')}") + print(f"📦 Путь к исходникам: {os.path.join(os.path.dirname(__file__), 'src')}") + + # Дополнительные опции + options_info = [] + if options.coverage: + options_info.append("📊 Измерение покрытия") + if options.fast: + options_info.append("🚀 Только быстрые тесты") + if options.failfast: + options_info.append("❌ Остановка при первой ошибке") + if options.keepdb: + options_info.append("💾 Сохранение тестовой БД") + if options.parallel: + options_info.append(f"⚡ Параллельность: {options.parallel}") + + if options_info: + print("🔧 Опции:", " | ".join(options_info)) + + print("-" * 60) + + +def setup_coverage(): + """Настройка coverage""" + try: + import coverage + cov = coverage.Coverage(config_file="pyproject.toml") + cov.start() + return cov + except ImportError: + print("⚠️ Модуль coverage не установлен. 
Измерение покрытия недоступно.") + return None + + +def finalize_coverage(cov): + """Завершение измерения покрытия""" + if cov: + cov.stop() + cov.save() + + print("\n📊 Отчет о покрытии кода:") + print("-" * 40) + cov.report() + + # Создание HTML отчета + try: + cov.html_report() + print("\n📄 HTML отчет создан в директории: htmlcov/") + except Exception as e: + print(f"⚠️ Не удалось создать HTML отчет: {e}") + + +def main(): + """Основная функция""" + cov = None + try: + # Парсинг аргументов + options = parse_arguments() + + # Настройка coverage если нужно + if options.coverage: + cov = setup_coverage() + + # Настройка Django + setup_django() + + # Настройка фильтрации тестов + if options.fast: + os.environ["PYTEST_CURRENT_TEST_FILTER"] = "not slow" + + # Вывод информации + print_test_info(options.targets, options) + + # Запуск тестов + failures = run_tests_with_args(options.targets, options) + + # Завершение coverage + if cov: + finalize_coverage(cov) + + # Результат + if failures: + print(f"\n❌ Тесты завершились с ошибками: {failures} неудачных тестов") + sys.exit(1) + else: + print(f"\n✅ Все тесты прошли успешно!") + if cov: + print("📊 Отчет о покрытии сохранен") + sys.exit(0) + + except KeyboardInterrupt: + print("\n❌ Тесты прерваны пользователем") + if cov: + cov.stop() + sys.exit(1) + except Exception as e: + print(f"\n❌ Ошибка при запуске тестов: {e}") + if cov: + cov.stop() + import traceback + traceback.print_exc() + sys.exit(1) -# Теперь можем безопасно импортировать и запускать тесты -from django.core.management import execute_from_command_line if __name__ == "__main__": - # Добавляем аргументы командной строки - args = sys.argv[1:] # Убираем имя скрипта - if not args: - # По умолчанию запускаем все тесты user app - args = ["test", "apps.user"] - - # Подготовка аргументов для Django - django_args = ["manage.py"] + args - sys.argv = django_args - - execute_from_command_line(sys.argv) + main() diff --git a/src/apps/core/__init__.py 
b/src/apps/core/__init__.py new file mode 100644 index 0000000..d8ed62a --- /dev/null +++ b/src/apps/core/__init__.py @@ -0,0 +1 @@ +default_app_config = "apps.core.apps.CoreConfig" diff --git a/src/apps/core/apps.py b/src/apps/core/apps.py new file mode 100644 index 0000000..9c3e6ff --- /dev/null +++ b/src/apps/core/apps.py @@ -0,0 +1,7 @@ +from django.apps import AppConfig + + +class CoreConfig(AppConfig): + default_auto_field = "django.db.models.BigAutoField" + name = "apps.core" + verbose_name = "Core" diff --git a/src/apps/core/cache.py b/src/apps/core/cache.py new file mode 100644 index 0000000..dc2cc31 --- /dev/null +++ b/src/apps/core/cache.py @@ -0,0 +1,254 @@ +""" +Декораторы кэширования для сервисного слоя. + +Предоставляет удобные декораторы для кэширования результатов методов. +""" + +import functools +import hashlib +import json +import logging +from collections.abc import Callable +from typing import Any, TypeVar + +from django.core.cache import cache + +logger = logging.getLogger(__name__) + +F = TypeVar("F", bound=Callable[..., Any]) + + +def cache_result( + timeout: int = 300, + key_prefix: str = "", + key_builder: Callable[..., str] | None = None, +) -> Callable[[F], F]: + """ + Декоратор для кэширования результата функции/метода. 
+ + Args: + timeout: Время жизни кэша в секундах (по умолчанию 5 минут) + key_prefix: Префикс для ключа кэша + key_builder: Функция для построения ключа кэша + + Пример использования: + class UserService: + @classmethod + @cache_result(timeout=600, key_prefix="user") + def get_user_stats(cls, user_id: int) -> dict: + # Тяжёлый запрос + return heavy_calculation(user_id) + + Ключ кэша формируется как: + {key_prefix}:{function_name}:{hash_of_args} + """ + + def decorator(func: F) -> F: + @functools.wraps(func) + def wrapper(*args: Any, **kwargs: Any) -> Any: + # Формируем ключ кэша + if key_builder: + cache_key = key_builder(*args, **kwargs) + else: + cache_key = _build_cache_key(func, key_prefix, args, kwargs) + + # Пробуем получить из кэша + cached_value = cache.get(cache_key) + if cached_value is not None: + logger.debug(f"Cache hit: {cache_key}") + return cached_value + + # Вычисляем и кэшируем + logger.debug(f"Cache miss: {cache_key}") + result = func(*args, **kwargs) + cache.set(cache_key, result, timeout) + return result + + # Добавляем метод для инвалидации кэша + wrapper.invalidate = lambda *a, **kw: _invalidate_cache( # type: ignore + func, key_prefix, key_builder, a, kw + ) + return wrapper # type: ignore + + return decorator + + +def cache_method( + timeout: int = 300, + key_prefix: str = "", +) -> Callable[[F], F]: + """ + Декоратор для кэширования результата метода класса. + + Автоматически исключает self/cls из ключа кэша. 
+ + Пример использования: + class ArticleService: + @classmethod + @cache_method(timeout=300, key_prefix="articles") + def get_popular_articles(cls, limit: int = 10) -> list: + return Article.objects.order_by('-views')[:limit] + """ + + def decorator(func: F) -> F: + @functools.wraps(func) + def wrapper(*args: Any, **kwargs: Any) -> Any: + # Пропускаем self/cls (первый аргумент) + cache_args = args[1:] if args else args + + cache_key = _build_cache_key(func, key_prefix, cache_args, kwargs) + + cached_value = cache.get(cache_key) + if cached_value is not None: + logger.debug(f"Cache hit: {cache_key}") + return cached_value + + logger.debug(f"Cache miss: {cache_key}") + result = func(*args, **kwargs) + cache.set(cache_key, result, timeout) + return result + + return wrapper # type: ignore + + return decorator + + +def invalidate_cache(key_pattern: str) -> None: + """ + Инвалидирует кэш по паттерну ключа. + + Args: + key_pattern: Паттерн ключа для удаления + + Пример: + invalidate_cache("user:get_user_stats:*") + """ + # Для простых backend'ов (locmem, db) + # Для Redis можно использовать SCAN + DELETE + try: + cache.delete_pattern(key_pattern) # type: ignore + except AttributeError: + # Fallback для backend'ов без delete_pattern + cache.delete(key_pattern.replace("*", "")) + logger.warning( + f"Cache backend не поддерживает delete_pattern, " + f"удалён только точный ключ: {key_pattern}" + ) + + +def invalidate_prefix(prefix: str) -> None: + """ + Инвалидирует весь кэш с указанным префиксом. 
+ + Args: + prefix: Префикс ключей для удаления + + Пример: + invalidate_prefix("user") # Удалит все ключи user:* + """ + invalidate_cache(f"{prefix}:*") + + +def _build_cache_key( + func: Callable[..., Any], + prefix: str, + args: tuple[Any, ...], + kwargs: dict[str, Any], +) -> str: + """Формирует ключ кэша из функции и аргументов.""" + func_name = f"{func.__module__}.{func.__qualname__}" + + # Сериализуем аргументы + try: + args_str = json.dumps(args, sort_keys=True, default=str) + kwargs_str = json.dumps(kwargs, sort_keys=True, default=str) + except (TypeError, ValueError): + args_str = str(args) + kwargs_str = str(kwargs) + + # Хэшируем для компактности (usedforsecurity=False - не криптографическое использование) + args_hash = hashlib.md5( # noqa: S324 + f"{args_str}:{kwargs_str}".encode(), + usedforsecurity=False, + ).hexdigest()[:12] + + if prefix: + return f"{prefix}:{func_name}:{args_hash}" + return f"{func_name}:{args_hash}" + + +def _invalidate_cache( + func: Callable[..., Any], + prefix: str, + key_builder: Callable[..., str] | None, + args: tuple[Any, ...], + kwargs: dict[str, Any], +) -> None: + """Инвалидирует кэш для конкретного вызова.""" + if key_builder: + cache_key = key_builder(*args, **kwargs) + else: + cache_key = _build_cache_key(func, prefix, args, kwargs) + cache.delete(cache_key) + logger.debug(f"Cache invalidated: {cache_key}") + + +class CacheManager: + """ + Менеджер кэша для групповых операций. 
+ + Пример использования: + cache_manager = CacheManager("articles") + + # Кэшировать + cache_manager.set("popular", data, timeout=300) + + # Получить + data = cache_manager.get("popular") + + # Очистить весь префикс + cache_manager.clear() + """ + + def __init__(self, prefix: str): + self.prefix = prefix + + def _make_key(self, key: str) -> str: + """Формирует полный ключ с префиксом.""" + return f"{self.prefix}:{key}" + + def get(self, key: str, default: Any = None) -> Any: + """Получает значение из кэша.""" + return cache.get(self._make_key(key), default) + + def set(self, key: str, value: Any, timeout: int = 300) -> None: + """Сохраняет значение в кэш.""" + cache.set(self._make_key(key), value, timeout) + + def delete(self, key: str) -> None: + """Удаляет значение из кэша.""" + cache.delete(self._make_key(key)) + + def clear(self) -> None: + """Очищает весь кэш с данным префиксом.""" + invalidate_prefix(self.prefix) + + def get_or_set( + self, + key: str, + default_func: Callable[[], Any], + timeout: int = 300, + ) -> Any: + """ + Получает значение из кэша или вычисляет и сохраняет. + + Args: + key: Ключ кэша + default_func: Функция для вычисления значения + timeout: Время жизни кэша + """ + value = self.get(key) + if value is None: + value = default_func() + self.set(key, value, timeout) + return value diff --git a/src/apps/core/exception_handler.py b/src/apps/core/exception_handler.py new file mode 100644 index 0000000..00cda96 --- /dev/null +++ b/src/apps/core/exception_handler.py @@ -0,0 +1,152 @@ +""" +Custom exception handler for DRF. + +Converts all exceptions to a unified API response format. 
+""" + +import logging +from typing import Any + +from apps.core.exceptions import BaseAPIException +from apps.core.middleware import get_request_id +from apps.core.response import api_error_response +from django.core.exceptions import PermissionDenied +from django.http import Http404 +from rest_framework import status +from rest_framework.exceptions import APIException +from rest_framework.response import Response +from rest_framework.views import exception_handler as drf_exception_handler + +logger = logging.getLogger(__name__) + + +def custom_exception_handler( + exc: Exception, context: dict[str, Any] +) -> Response | None: + """ + Custom exception handler that wraps all errors in a unified format. + + Response format: + { + "success": false, + "data": null, + "errors": [ + { + "code": "error_code", + "message": "Human readable message", + "details": {...} // optional + } + ], + "meta": { + "request_id": "uuid" + } + } + """ + request_id = get_request_id() + + # Handle our custom exceptions + if isinstance(exc, BaseAPIException): + logger.warning( + f"[{request_id}] API Exception: {exc.code} - {exc.message}", + extra={ + "request_id": request_id, + "error_code": exc.code, + "error_details": exc.details, + }, + ) + return api_error_response( + errors=[exc.to_dict()], + status_code=exc.status_code, + request_id=request_id, + ) + + # Handle Django's Http404 + if isinstance(exc, Http404): + return api_error_response( + errors=[{"code": "not_found", "message": str(exc) or "Resource not found"}], + status_code=status.HTTP_404_NOT_FOUND, + request_id=request_id, + ) + + # Handle Django's PermissionDenied + if isinstance(exc, PermissionDenied): + return api_error_response( + errors=[ + { + "code": "permission_denied", + "message": str(exc) or "Permission denied", + } + ], + status_code=status.HTTP_403_FORBIDDEN, + request_id=request_id, + ) + + # Handle DRF exceptions + if isinstance(exc, APIException): + # Get the default DRF response first + response = 
drf_exception_handler(exc, context) + if response is not None: + errors = _parse_drf_errors(exc, response.data) + return api_error_response( + errors=errors, + status_code=response.status_code, + request_id=request_id, + ) + + # Unhandled exceptions - log and return 500 + logger.exception( + f"[{request_id}] Unhandled exception", + extra={"request_id": request_id, "exception_type": type(exc).__name__}, + ) + + return api_error_response( + errors=[ + { + "code": "internal_error", + "message": "An unexpected error occurred", + } + ], + status_code=status.HTTP_500_INTERNAL_SERVER_ERROR, + request_id=request_id, + ) + + +def _parse_drf_errors(exc: APIException, data: Any) -> list[dict[str, Any]]: + """Parse DRF exception data into our error format.""" + errors = [] + + # Get error code from exception + code = getattr(exc, "default_code", "error") + + if isinstance(data, dict): + # Validation errors with field details + if "detail" in data: + errors.append( + { + "code": code, + "message": str(data["detail"]), + } + ) + else: + # Field-level validation errors + field_errors = {} + for field, messages in data.items(): + if isinstance(messages, list): + field_errors[field] = [str(m) for m in messages] + else: + field_errors[field] = [str(messages)] + + errors.append( + { + "code": "validation_error", + "message": "Validation failed", + "details": {"fields": field_errors}, + } + ) + elif isinstance(data, list): + for item in data: + errors.append({"code": code, "message": str(item)}) + else: + errors.append({"code": code, "message": str(data)}) + + return errors diff --git a/src/apps/core/exceptions.py b/src/apps/core/exceptions.py new file mode 100644 index 0000000..7999f56 --- /dev/null +++ b/src/apps/core/exceptions.py @@ -0,0 +1,169 @@ +""" +Core exceptions for the application. + +Provides a hierarchy of business logic exceptions that are automatically +converted to appropriate API responses by the exception handler. 
+""" + +from typing import Any + + +class BaseAPIException(Exception): + """ + Base exception for all API-related errors. + + Attributes: + message: Human-readable error message + code: Machine-readable error code (e.g., 'validation_error') + status_code: HTTP status code + details: Additional error details (optional) + """ + + message: str = "An error occurred" + code: str = "error" + status_code: int = 400 + details: dict[str, Any] | None = None + + def __init__( + self, + message: str | None = None, + code: str | None = None, + details: dict[str, Any] | None = None, + ): + self.message = message or self.message + self.code = code or self.code + self.details = details + super().__init__(self.message) + + def to_dict(self) -> dict[str, Any]: + """Convert exception to dictionary for API response.""" + result = { + "code": self.code, + "message": self.message, + } + if self.details: + result["details"] = self.details + return result + + +# ============================================================================ +# Client Errors (4xx) +# ============================================================================ + + +class ValidationError(BaseAPIException): + """Raised when input data fails validation.""" + + message = "Validation error" + code = "validation_error" + status_code = 400 + + +class BadRequestError(BaseAPIException): + """Raised when request is malformed or invalid.""" + + message = "Bad request" + code = "bad_request" + status_code = 400 + + +class AuthenticationError(BaseAPIException): + """Raised when authentication fails.""" + + message = "Authentication failed" + code = "authentication_error" + status_code = 401 + + +class PermissionDeniedError(BaseAPIException): + """Raised when user lacks required permissions.""" + + message = "Permission denied" + code = "permission_denied" + status_code = 403 + + +class NotFoundError(BaseAPIException): + """Raised when requested resource is not found.""" + + message = "Resource not found" + code = 
"not_found" + status_code = 404 + + +class ConflictError(BaseAPIException): + """Raised when action conflicts with current state.""" + + message = "Conflict with current state" + code = "conflict" + status_code = 409 + + +class RateLimitError(BaseAPIException): + """Raised when rate limit is exceeded.""" + + message = "Rate limit exceeded" + code = "rate_limit_exceeded" + status_code = 429 + + +# ============================================================================ +# Server Errors (5xx) +# ============================================================================ + + +class InternalError(BaseAPIException): + """Raised for unexpected internal errors.""" + + message = "Internal server error" + code = "internal_error" + status_code = 500 + + +class ServiceUnavailableError(BaseAPIException): + """Raised when a dependent service is unavailable.""" + + message = "Service temporarily unavailable" + code = "service_unavailable" + status_code = 503 + + +# ============================================================================ +# Business Logic Errors +# ============================================================================ + + +class BusinessLogicError(BaseAPIException): + """ + Base class for business logic errors. + + Use this for domain-specific errors that should return 400/422. 
+ """ + + message = "Business logic error" + code = "business_error" + status_code = 400 + + +class InvalidStateError(BusinessLogicError): + """Raised when entity is in invalid state for requested operation.""" + + message = "Invalid state for this operation" + code = "invalid_state" + status_code = 400 + + +class DuplicateError(BusinessLogicError): + """Raised when attempting to create a duplicate resource.""" + + message = "Resource already exists" + code = "duplicate" + status_code = 409 + + +class QuotaExceededError(BusinessLogicError): + """Raised when a resource quota is exceeded.""" + + message = "Quota exceeded" + code = "quota_exceeded" + status_code = 400 diff --git a/src/apps/core/filters.py b/src/apps/core/filters.py new file mode 100644 index 0000000..aea1434 --- /dev/null +++ b/src/apps/core/filters.py @@ -0,0 +1,121 @@ +""" +Настройка фильтрации для API. + +Предоставляет базовые классы и утилиты для фильтрации данных. +""" + +from typing import Any + +from django.db.models import QuerySet +from django_filters import rest_framework as filters +from rest_framework.filters import OrderingFilter, SearchFilter + + +class BaseFilterSet(filters.FilterSet): + """ + Базовый класс для фильтров с общими полями. + + Автоматически добавляет фильтрацию по датам создания/обновления, + если модель имеет соответствующие поля. 
+ + Пример использования: + class ArticleFilter(BaseFilterSet): + title = filters.CharFilter(lookup_expr='icontains') + + class Meta: + model = Article + fields = ['title', 'status', 'author'] + """ + + created_at_after = filters.DateTimeFilter( + field_name="created_at", + lookup_expr="gte", + label="Создано после", + ) + created_at_before = filters.DateTimeFilter( + field_name="created_at", + lookup_expr="lte", + label="Создано до", + ) + updated_at_after = filters.DateTimeFilter( + field_name="updated_at", + lookup_expr="gte", + label="Обновлено после", + ) + updated_at_before = filters.DateTimeFilter( + field_name="updated_at", + lookup_expr="lte", + label="Обновлено до", + ) + + +class StandardSearchFilter(SearchFilter): + """ + Расширенный фильтр поиска с настраиваемыми параметрами. + + Пример использования: + class MyViewSet(viewsets.ModelViewSet): + filter_backends = [StandardSearchFilter] + search_fields = ['title', 'description', 'author__username'] + """ + + search_param = "search" + search_title = "Поиск" + search_description = "Поиск по текстовым полям" + + +class StandardOrderingFilter(OrderingFilter): + """ + Расширенный фильтр сортировки. + + Пример использования: + class MyViewSet(viewsets.ModelViewSet): + filter_backends = [StandardOrderingFilter] + ordering_fields = ['created_at', 'title', 'price'] + ordering = ['-created_at'] # Сортировка по умолчанию + """ + + ordering_param = "ordering" + ordering_title = "Сортировка" + ordering_description = "Поле для сортировки (префикс '-' для убывания)" + + +def get_filter_backends() -> list[type]: + """ + Возвращает стандартный набор filter backends. + + Используется для быстрой настройки ViewSet: + class MyViewSet(viewsets.ModelViewSet): + filter_backends = get_filter_backends() + """ + return [ + filters.DjangoFilterBackend, + StandardSearchFilter, + StandardOrderingFilter, + ] + + +class FilterMixin: + """ + Миксин для добавления стандартной фильтрации к ViewSet. 
+ + Автоматически настраивает filter_backends и базовые параметры. + + Пример использования: + class MyViewSet(FilterMixin, viewsets.ModelViewSet): + filterset_class = MyFilterSet + search_fields = ['title', 'description'] + ordering_fields = ['created_at', 'title'] + """ + + filter_backends = [ + filters.DjangoFilterBackend, + StandardSearchFilter, + StandardOrderingFilter, + ] + ordering = ["-created_at"] # Сортировка по умолчанию + + def get_queryset(self) -> QuerySet[Any]: + """Возвращает queryset с применёнными фильтрами.""" + queryset = super().get_queryset() # type: ignore + return queryset diff --git a/src/apps/core/logging.py b/src/apps/core/logging.py new file mode 100644 index 0000000..c8c8341 --- /dev/null +++ b/src/apps/core/logging.py @@ -0,0 +1,287 @@ +""" +Настройка структурированного логирования. + +Предоставляет JSON-форматтер и утилиты для production логов. +""" + +import json +import logging +import traceback +from datetime import UTC, datetime +from typing import Any + +from apps.core.middleware import get_request_id + + +class JSONFormatter(logging.Formatter): + """ + Форматтер логов в JSON формате. + + Формирует структурированные логи для удобного парсинга + в системах мониторинга (ELK, Grafana Loki, etc.). 
+ + Пример вывода: + { + "timestamp": "2024-01-15T10:30:45.123456Z", + "level": "INFO", + "logger": "apps.user.services", + "message": "User created", + "request_id": "abc-123", + "user_id": 42, + "extra": {"email": "user@example.com"} + } + """ + + def format(self, record: logging.LogRecord) -> str: + """Форматирует запись лога в JSON.""" + log_data: dict[str, Any] = { + "timestamp": datetime.now(UTC).isoformat(), + "level": record.levelname, + "logger": record.name, + "message": record.getMessage(), + } + + # Добавляем request_id если доступен + request_id = get_request_id() + if request_id: + log_data["request_id"] = request_id + + # Добавляем информацию о месте вызова + log_data["location"] = { + "file": record.filename, + "line": record.lineno, + "function": record.funcName, + } + + # Добавляем extra данные + extra_fields = {} + for key, value in record.__dict__.items(): + if key not in { + "name", + "msg", + "args", + "created", + "filename", + "funcName", + "levelname", + "levelno", + "lineno", + "module", + "msecs", + "pathname", + "process", + "processName", + "relativeCreated", + "stack_info", + "exc_info", + "exc_text", + "thread", + "threadName", + "message", + "taskName", + }: + extra_fields[key] = value + + if extra_fields: + log_data["extra"] = extra_fields + + # Добавляем информацию об исключении + if record.exc_info: + log_data["exception"] = { + "type": record.exc_info[0].__name__ if record.exc_info[0] else None, + "message": str(record.exc_info[1]) if record.exc_info[1] else None, + "traceback": traceback.format_exception(*record.exc_info), + } + + return json.dumps(log_data, ensure_ascii=False, default=str) + + +class ContextLogger: + """ + Логгер с автоматическим добавлением контекста. 
+ + Пример использования: + logger = ContextLogger(__name__) + logger.set_context(user_id=42, action="login") + logger.info("User logged in") # Автоматически добавит user_id и action + """ + + def __init__(self, name: str): + self._logger = logging.getLogger(name) + self._context: dict[str, Any] = {} + + def set_context(self, **kwargs: Any) -> None: + """Устанавливает контекст для всех последующих логов.""" + self._context.update(kwargs) + + def clear_context(self) -> None: + """Очищает контекст.""" + self._context.clear() + + def _log( + self, + level: int, + message: str, + *args: Any, + exc_info: bool = False, + **kwargs: Any, + ) -> None: + """Логирует сообщение с контекстом.""" + extra = {**self._context, **kwargs.pop("extra", {})} + self._logger.log( + level, message, *args, extra=extra, exc_info=exc_info, **kwargs + ) + + def debug(self, message: str, *args: Any, **kwargs: Any) -> None: + """Логирует DEBUG сообщение.""" + self._log(logging.DEBUG, message, *args, **kwargs) + + def info(self, message: str, *args: Any, **kwargs: Any) -> None: + """Логирует INFO сообщение.""" + self._log(logging.INFO, message, *args, **kwargs) + + def warning(self, message: str, *args: Any, **kwargs: Any) -> None: + """Логирует WARNING сообщение.""" + self._log(logging.WARNING, message, *args, **kwargs) + + def error( + self, message: str, *args: Any, exc_info: bool = True, **kwargs: Any + ) -> None: + """Логирует ERROR сообщение.""" + self._log(logging.ERROR, message, *args, exc_info=exc_info, **kwargs) + + def exception(self, message: str, *args: Any, **kwargs: Any) -> None: + """Логирует исключение.""" + self._log(logging.ERROR, message, *args, exc_info=True, **kwargs) + + +def get_json_logging_config( + log_level: str = "INFO", + log_file: str | None = None, +) -> dict[str, Any]: + """ + Возвращает конфигурацию логирования для production. 
+ + Args: + log_level: Уровень логирования + log_file: Путь к файлу логов (опционально) + + Пример использования в settings.py: + from apps.core.logging import get_json_logging_config + + LOGGING = get_json_logging_config( + log_level="INFO", + log_file="/var/log/app/app.log", + ) + """ + handlers = { + "console": { + "class": "logging.StreamHandler", + "formatter": "json", + }, + } + + root_handlers = ["console"] + + if log_file: + handlers["file"] = { + "class": "logging.handlers.RotatingFileHandler", + "filename": log_file, + "maxBytes": 10 * 1024 * 1024, # 10 MB + "backupCount": 5, + "formatter": "json", + } + root_handlers.append("file") + + return { + "version": 1, + "disable_existing_loggers": False, + "formatters": { + "json": { + "()": "apps.core.logging.JSONFormatter", + }, + "standard": { + "format": "%(asctime)s [%(levelname)s] %(name)s: %(message)s", + }, + }, + "handlers": handlers, + "root": { + "handlers": root_handlers, + "level": log_level, + }, + "loggers": { + "django": { + "handlers": root_handlers, + "level": "WARNING", + "propagate": False, + }, + "django.request": { + "handlers": root_handlers, + "level": "WARNING", + "propagate": False, + }, + "celery": { + "handlers": root_handlers, + "level": "INFO", + "propagate": False, + }, + "apps": { + "handlers": root_handlers, + "level": log_level, + "propagate": False, + }, + }, + } + + +def log_request( + logger: logging.Logger, + request: Any, + response: Any | None = None, + duration_ms: float | None = None, +) -> None: + """ + Логирует HTTP запрос/ответ. 
+ + Пример использования: + from apps.core.logging import log_request + + def my_middleware(get_response): + def middleware(request): + start = time.time() + response = get_response(request) + duration = (time.time() - start) * 1000 + log_request(logger, request, response, duration) + return response + return middleware + """ + extra: dict[str, Any] = { + "method": request.method, + "path": request.path, + "user_id": getattr(request.user, "id", None) + if hasattr(request, "user") + else None, + } + + if response: + extra["status_code"] = response.status_code + + if duration_ms: + extra["duration_ms"] = round(duration_ms, 2) + + request_id = get_request_id() + if request_id: + extra["request_id"] = request_id + + message = f"{request.method} {request.path}" + if response: + message += f" -> {response.status_code}" + if duration_ms: + message += f" ({duration_ms:.0f}ms)" + + if response and response.status_code >= 500: + logger.error(message, extra=extra) + elif response and response.status_code >= 400: + logger.warning(message, extra=extra) + else: + logger.info(message, extra=extra) diff --git a/src/apps/core/management/__init__.py b/src/apps/core/management/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/src/apps/core/management/commands/__init__.py b/src/apps/core/management/commands/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/src/apps/core/management/commands/base.py b/src/apps/core/management/commands/base.py new file mode 100644 index 0000000..ed775a4 --- /dev/null +++ b/src/apps/core/management/commands/base.py @@ -0,0 +1,252 @@ +""" +Базовый класс для management commands. 
+ +Предоставляет: +- Структурированное логирование +- Отображение прогресса +- Обработку ошибок +- Измерение времени выполнения +- Dry-run режим +""" + +import logging +import time +from abc import abstractmethod +from collections.abc import Generator +from contextlib import contextmanager +from typing import Any + +from django.core.management.base import BaseCommand, CommandError +from django.db import transaction + +logger = logging.getLogger(__name__) + + +class BaseAppCommand(BaseCommand): + """ + Базовый класс для management commands проекта. + + Возможности: + - Автоматическое логирование начала и завершения + - Измерение времени выполнения + - Поддержка dry-run режима + - Прогресс-бар для итераций + - Транзакционное выполнение + - Обработка ошибок с правильными кодами выхода + + Использование: + class Command(BaseAppCommand): + help = 'Описание команды' + + def add_arguments(self, parser): + super().add_arguments(parser) # Добавляет --dry-run + parser.add_argument('--my-arg', type=str) + + def execute_command(self, *args, **options): + # Основная логика команды + items = MyModel.objects.all() + + for item in self.progress_iter(items, desc="Обработка"): + self.process_item(item) + + return "Обработано успешно" + """ + + # Переопределяемые атрибуты + requires_migrations_checks = True + requires_system_checks = "__all__" + use_transaction = False # Обернуть в транзакцию + + def add_arguments(self, parser) -> None: + """Добавление базовых аргументов.""" + parser.add_argument( + "--dry-run", + action="store_true", + default=False, + help="Режим тестового запуска без изменений в базе данных", + ) + parser.add_argument( + "--silent", + action="store_true", + default=False, + help="Минимальный вывод (только ошибки)", + ) + + def handle(self, *args: Any, **options: Any) -> str | None: + """Основной обработчик команды.""" + self.dry_run = options.get("dry_run", False) + self.silent = options.get("silent", False) + self.verbosity = options.get("verbosity", 1) + + 
command_name = self.__class__.__module__.split(".")[-1] + + # Логирование старта + self.log_info(f"Запуск команды: {command_name}") + if self.dry_run: + self.log_warning("Режим dry-run: изменения НЕ будут сохранены") + + start_time = time.time() + + try: + if self.use_transaction: + with transaction.atomic(): + result = self._execute_with_rollback(*args, **options) + else: + result = self.execute_command(*args, **options) + + # Логирование успеха + elapsed = time.time() - start_time + self.log_success(f"Команда завершена за {elapsed:.2f}с") + + return result + + except CommandError: + raise + except Exception as e: + elapsed = time.time() - start_time + self.log_error(f"Ошибка после {elapsed:.2f}с: {e}") + logger.exception("Command failed", extra={"command": command_name}) + raise CommandError(str(e)) from e + + def _execute_with_rollback(self, *args: Any, **options: Any) -> str | None: + """Выполнение с откатом в dry-run режиме.""" + result = self.execute_command(*args, **options) + + if self.dry_run: + # Откатываем транзакцию в dry-run + transaction.set_rollback(True) + self.log_warning("Dry-run: транзакция откачена") + + return result + + @abstractmethod + def execute_command(self, *args: Any, **options: Any) -> str | None: + """ + Основная логика команды. Переопределяется в наследниках. 
+ + Returns: + Строка с результатом или None + """ + raise NotImplementedError("Метод execute_command должен быть реализован") + + # ==================== Методы вывода ==================== + + def log_info(self, message: str) -> None: + """Информационное сообщение.""" + if not self.silent: + self.stdout.write(message) + logger.info(message) + + def log_success(self, message: str) -> None: + """Сообщение об успехе (зелёное).""" + if not self.silent: + self.stdout.write(self.style.SUCCESS(message)) + logger.info(message) + + def log_warning(self, message: str) -> None: + """Предупреждение (жёлтое).""" + if not self.silent: + self.stdout.write(self.style.WARNING(message)) + logger.warning(message) + + def log_error(self, message: str) -> None: + """Ошибка (красное).""" + self.stderr.write(self.style.ERROR(message)) + logger.error(message) + + def log_debug(self, message: str) -> None: + """Отладочное сообщение (только при verbosity >= 2).""" + if self.verbosity >= 2: + self.stdout.write(self.style.HTTP_INFO(message)) + logger.debug(message) + + # ==================== Прогресс ==================== + + def progress_iter( + self, + iterable, + desc: str = "Обработка", + total: int | None = None, + ) -> Generator: + """ + Итератор с отображением прогресса. 
+ + Args: + iterable: Итерируемый объект + desc: Описание операции + total: Общее количество (если известно) + + Yields: + Элементы итератора + + Использование: + for item in self.progress_iter(items, "Обработка записей"): + process(item) + """ + if total is None: + try: + total = len(iterable) + except TypeError: + total = None + + processed = 0 + last_percent = -1 + + for item in iterable: + yield item + processed += 1 + + if total and not self.silent: + percent = int(processed * 100 / total) + if percent != last_percent and percent % 10 == 0: + self.stdout.write(f"{desc}: {percent}% ({processed}/{total})") + last_percent = percent + + if not self.silent: + self.log_info(f"{desc}: завершено ({processed} элементов)") + + @contextmanager + def timed_operation(self, operation_name: str) -> Generator: + """ + Контекстный менеджер для измерения времени операции. + + Использование: + with self.timed_operation("Загрузка данных"): + load_data() + """ + start = time.time() + self.log_debug(f"Начало: {operation_name}") + + try: + yield + finally: + elapsed = time.time() - start + self.log_debug(f"Завершено: {operation_name} ({elapsed:.2f}с)") + + # ==================== Утилиты ==================== + + def confirm(self, message: str) -> bool: + """ + Запрос подтверждения у пользователя. 
+ + Args: + message: Текст вопроса + + Returns: + True если пользователь подтвердил + """ + if self.dry_run: + self.log_warning(f"[Dry-run] Пропуск подтверждения: {message}") + return True + + self.stdout.write(f"\n{message} [y/N]: ", ending="") + response = input().strip().lower() + return response in ("y", "yes", "да", "д") + + def abort(self, message: str) -> None: + """Прерывание команды с сообщением.""" + raise CommandError(message) + + def check_dry_run(self) -> bool: + """Проверка режима dry-run (для условного выполнения).""" + return self.dry_run diff --git a/src/apps/core/middleware.py b/src/apps/core/middleware.py new file mode 100644 index 0000000..8effb6e --- /dev/null +++ b/src/apps/core/middleware.py @@ -0,0 +1,131 @@ +""" +Core middleware components. + +Provides Request ID tracking and other cross-cutting concerns. +""" + +import logging +import threading +import uuid + +from django.utils.deprecation import MiddlewareMixin + +logger = logging.getLogger(__name__) + +# Thread-local storage for request context +_request_context = threading.local() + + +def get_request_id() -> str | None: + """Get current request ID from thread-local storage.""" + return getattr(_request_context, "request_id", None) + + +def get_current_request(): + """Get current request from thread-local storage.""" + return getattr(_request_context, "request", None) + + +class RequestIDMiddleware(MiddlewareMixin): + """ + Middleware that generates or extracts a unique request ID for each request. + + The request ID is: + 1. Extracted from X-Request-ID header if present + 2. Generated as UUID4 if not present + 3. Added to response headers + 4. 
Available via get_request_id() for logging + + Usage in logging: + from apps.core.middleware import get_request_id + logger.info(f"[{get_request_id()}] Processing request") + """ + + REQUEST_ID_HEADER = "X-Request-ID" + + def process_request(self, request): + """Extract or generate request ID and store in thread-local.""" + request_id = request.headers.get(self.REQUEST_ID_HEADER) + + if not request_id: + request_id = str(uuid.uuid4()) + + request.request_id = request_id + _request_context.request_id = request_id + _request_context.request = request + + return None + + def process_response(self, request, response): + """Add request ID to response headers.""" + request_id = getattr(request, "request_id", None) + if request_id: + response[self.REQUEST_ID_HEADER] = request_id + + # Clean up thread-local storage + _request_context.request_id = None + _request_context.request = None + + return response + + def process_exception(self, request, exception): + """Log exception with request ID.""" + request_id = getattr(request, "request_id", "unknown") + logger.error(f"[{request_id}] Unhandled exception: {exception}") + return None + + +class RequestLoggingMiddleware(MiddlewareMixin): + """ + Middleware for logging request/response details. 
+ + Logs: + - Request method, path, and request ID + - Response status code and timing (if available) + """ + + def process_request(self, request): + """Log incoming request.""" + import time + + request._start_time = time.time() + request_id = getattr(request, "request_id", "N/A") + + logger.info( + f"[{request_id}] {request.method} {request.path} - Started", + extra={ + "request_id": request_id, + "method": request.method, + "path": request.path, + "user": getattr(request.user, "id", None) + if hasattr(request, "user") + else None, + }, + ) + return None + + def process_response(self, request, response): + """Log response details.""" + import time + + duration = None + if hasattr(request, "_start_time"): + duration = time.time() - request._start_time + + request_id = getattr(request, "request_id", "N/A") + + logger.info( + f"[{request_id}] {request.method} {request.path} - " + f"{response.status_code} ({duration:.3f}s)" + if duration + else f"[{request_id}] {request.method} {request.path} - " + f"{response.status_code}", + extra={ + "request_id": request_id, + "method": request.method, + "path": request.path, + "status_code": response.status_code, + "duration": duration, + }, + ) + return response diff --git a/src/apps/core/migrations/0001_background_job.py b/src/apps/core/migrations/0001_background_job.py new file mode 100644 index 0000000..42a6386 --- /dev/null +++ b/src/apps/core/migrations/0001_background_job.py @@ -0,0 +1,52 @@ +# Generated by Django 3.2.25 on 2026-01-21 10:19 + +from django.db import migrations, models +import uuid + + +class Migration(migrations.Migration): + + initial = True + + dependencies = [ + ] + + operations = [ + migrations.CreateModel( + name='BackgroundJob', + fields=[ + ('created_at', models.DateTimeField(auto_now_add=True, db_index=True, help_text='Дата и время создания записи', verbose_name='создано')), + ('updated_at', models.DateTimeField(auto_now=True, help_text='Дата и время последнего обновления', 
verbose_name='обновлено')), + ('id', models.UUIDField(default=uuid.uuid4, editable=False, primary_key=True, serialize=False)), + ('task_id', models.CharField(db_index=True, help_text='Идентификатор задачи в Celery', max_length=255, unique=True, verbose_name='ID задачи Celery')), + ('task_name', models.CharField(db_index=True, help_text='Полное имя задачи (например, apps.myapp.tasks.process_data)', max_length=255, verbose_name='имя задачи')), + ('status', models.CharField(choices=[('pending', 'Ожидает'), ('started', 'Выполняется'), ('success', 'Успешно'), ('failure', 'Ошибка'), ('revoked', 'Отменена'), ('retry', 'Повтор')], db_index=True, default='pending', max_length=20, verbose_name='статус')), + ('progress', models.PositiveSmallIntegerField(default=0, help_text='Прогресс выполнения в процентах (0-100)', verbose_name='прогресс')), + ('progress_message', models.CharField(blank=True, default='', max_length=500, verbose_name='сообщение о прогрессе')), + ('result', models.JSONField(blank=True, help_text='Результат выполнения задачи (JSON)', null=True, verbose_name='результат')), + ('error', models.TextField(blank=True, default='', help_text='Текст ошибки при неудачном выполнении', verbose_name='ошибка')), + ('traceback', models.TextField(blank=True, default='', help_text='Полный traceback ошибки', verbose_name='traceback')), + ('started_at', models.DateTimeField(blank=True, null=True, verbose_name='время начала')), + ('completed_at', models.DateTimeField(blank=True, null=True, verbose_name='время завершения')), + ('user_id', models.PositiveIntegerField(blank=True, db_index=True, help_text='ID пользователя, запустившего задачу', null=True, verbose_name='ID пользователя')), + ('meta', models.JSONField(blank=True, default=dict, help_text='Дополнительные данные задачи', verbose_name='метаданные')), + ], + options={ + 'verbose_name': 'фоновая задача', + 'verbose_name_plural': 'фоновые задачи', + 'ordering': ['-created_at'], + }, + ), + migrations.AddIndex( + 
model_name='backgroundjob', + index=models.Index(fields=['status', 'created_at'], name='core_backgr_status_e66a68_idx'), + ), + migrations.AddIndex( + model_name='backgroundjob', + index=models.Index(fields=['user_id', 'status'], name='core_backgr_user_id_d81ce2_idx'), + ), + migrations.AddIndex( + model_name='backgroundjob', + index=models.Index(fields=['task_name', 'status'], name='core_backgr_task_na_fb8e14_idx'), + ), + ] diff --git a/src/apps/core/migrations/__init__.py b/src/apps/core/migrations/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/src/apps/core/mixins.py b/src/apps/core/mixins.py new file mode 100644 index 0000000..68bc228 --- /dev/null +++ b/src/apps/core/mixins.py @@ -0,0 +1,391 @@ +""" +Миксины для моделей Django. + +Предоставляют переиспользуемые поля и поведение для моделей: +- TimestampMixin: created_at, updated_at +- UUIDPrimaryKeyMixin: UUID вместо auto-increment ID +- SoftDeleteMixin: мягкое удаление (is_deleted + deleted_at) +- AuditMixin: created_by, updated_by (кто создал/изменил) +- OrderableMixin: поле order для сортировки +""" + +import uuid +from typing import TYPE_CHECKING + +from django.conf import settings +from django.db import models +from django.utils import timezone +from django.utils.translation import gettext_lazy as _ + +if TYPE_CHECKING: + from django.db.models import QuerySet + + +class TimestampMixin(models.Model): + """ + Миксин для автоматических временных меток. 
+ + Поля: + created_at: Дата и время создания (автоматически) + updated_at: Дата и время последнего обновления (автоматически) + + Использование: + class MyModel(TimestampMixin, models.Model): + name = models.CharField(max_length=100) + + class Meta: + ordering = ['-created_at'] + """ + + created_at = models.DateTimeField( + _("создано"), + auto_now_add=True, + db_index=True, + help_text=_("Дата и время создания записи"), + ) + updated_at = models.DateTimeField( + _("обновлено"), + auto_now=True, + help_text=_("Дата и время последнего обновления"), + ) + + class Meta: + abstract = True + + +class UUIDPrimaryKeyMixin(models.Model): + """ + Миксин для использования UUID в качестве первичного ключа. + + Преимущества: + - Глобальная уникальность + - Безопасность (нельзя угадать ID) + - Возможность генерации на клиенте + + Использование: + class MyModel(UUIDPrimaryKeyMixin, models.Model): + name = models.CharField(max_length=100) + + Примечание: + При использовании этого миксина НЕ нужно определять поле id. + """ + + id = models.UUIDField( + primary_key=True, + default=uuid.uuid4, + editable=False, + verbose_name=_("ID"), + ) + + class Meta: + abstract = True + + +class SoftDeleteQuerySet(models.QuerySet): + """QuerySet с поддержкой мягкого удаления.""" + + def delete(self) -> tuple[int, dict[str, int]]: + """Мягкое удаление всех объектов в QuerySet.""" + return self.update(is_deleted=True, deleted_at=timezone.now()), {} + + def hard_delete(self) -> tuple[int, dict[str, int]]: + """Полное удаление из базы данных.""" + return super().delete() + + def alive(self) -> "QuerySet": + """Только активные (не удалённые) записи.""" + return self.filter(is_deleted=False) + + def dead(self) -> "QuerySet": + """Только удалённые записи.""" + return self.filter(is_deleted=True) + + +class SoftDeleteManager(models.Manager): + """ + Менеджер с поддержкой мягкого удаления. + + По умолчанию возвращает только активные записи. + Для доступа ко всем записям используйте `all_objects`. 
+ """ + + def get_queryset(self) -> SoftDeleteQuerySet: + """Возвращает только активные записи.""" + return SoftDeleteQuerySet(self.model, using=self._db).alive() + + def all_with_deleted(self) -> SoftDeleteQuerySet: + """Возвращает все записи, включая удалённые.""" + return SoftDeleteQuerySet(self.model, using=self._db) + + def deleted_only(self) -> SoftDeleteQuerySet: + """Возвращает только удалённые записи.""" + return SoftDeleteQuerySet(self.model, using=self._db).dead() + + +class AllObjectsManager(models.Manager): + """Менеджер для доступа ко всем записям (включая удалённые).""" + + def get_queryset(self) -> SoftDeleteQuerySet: + return SoftDeleteQuerySet(self.model, using=self._db) + + +class SoftDeleteMixin(models.Model): + """ + Миксин для мягкого удаления записей. + + Вместо физического удаления записи помечаются как удалённые. + Это позволяет: + - Восстанавливать удалённые данные + - Вести историю удалений + - Сохранять ссылочную целостность + + Поля: + is_deleted: Флаг удаления + deleted_at: Дата и время удаления + + Менеджеры: + objects: Только активные записи (по умолчанию) + all_objects: Все записи, включая удалённые + + Использование: + class MyModel(SoftDeleteMixin, models.Model): + name = models.CharField(max_length=100) + + # Мягкое удаление + obj.delete() # is_deleted=True, deleted_at=now() + + # Восстановление + obj.restore() + + # Полное удаление + obj.hard_delete() + + # Получить все записи (включая удалённые) + MyModel.all_objects.all() + + # Получить только удалённые + MyModel.all_objects.deleted_only() + """ + + is_deleted = models.BooleanField( + _("удалено"), + default=False, + db_index=True, + help_text=_("Помечена ли запись как удалённая"), + ) + deleted_at = models.DateTimeField( + _("дата удаления"), + null=True, + blank=True, + help_text=_("Дата и время удаления записи"), + ) + + # Менеджеры + objects = SoftDeleteManager() + all_objects = AllObjectsManager() + + class Meta: + abstract = True + + def delete(self, using=None, 
keep_parents=False) -> tuple[int, dict[str, int]]: + """Мягкое удаление записи.""" + self.is_deleted = True + self.deleted_at = timezone.now() + self.save(update_fields=["is_deleted", "deleted_at"]) + return 1, {self._meta.label: 1} + + def hard_delete(self, using=None, keep_parents=False) -> tuple[int, dict[str, int]]: + """Полное удаление записи из базы данных.""" + return super().delete(using=using, keep_parents=keep_parents) + + def restore(self) -> None: + """Восстановление удалённой записи.""" + self.is_deleted = False + self.deleted_at = None + self.save(update_fields=["is_deleted", "deleted_at"]) + + @property + def is_active(self) -> bool: + """Проверка, активна ли запись (не удалена).""" + return not self.is_deleted + + +class AuditMixin(models.Model): + """ + Миксин для отслеживания автора создания и изменения. + + Поля: + created_by: Пользователь, создавший запись + updated_by: Пользователь, последний изменивший запись + + Использование: + class MyModel(AuditMixin, TimestampMixin, models.Model): + name = models.CharField(max_length=100) + + # В сервисе или view: + obj = MyModel.objects.create(name="Test", created_by=request.user) + obj.updated_by = request.user + obj.save() + + Примечание: + Поля created_by и updated_by нужно заполнять вручную + (в сервисе или через middleware). + """ + + created_by = models.ForeignKey( + settings.AUTH_USER_MODEL, + on_delete=models.SET_NULL, + null=True, + blank=True, + related_name="%(app_label)s_%(class)s_created", + verbose_name=_("создано пользователем"), + help_text=_("Пользователь, создавший запись"), + ) + updated_by = models.ForeignKey( + settings.AUTH_USER_MODEL, + on_delete=models.SET_NULL, + null=True, + blank=True, + related_name="%(app_label)s_%(class)s_updated", + verbose_name=_("обновлено пользователем"), + help_text=_("Пользователь, последний изменивший запись"), + ) + + class Meta: + abstract = True + + +class OrderableMixin(models.Model): + """ + Миксин для упорядочивания записей. 
+ + Поля: + order: Порядковый номер для сортировки + + Использование: + class MenuItem(OrderableMixin, models.Model): + name = models.CharField(max_length=100) + + class Meta: + ordering = ['order'] + + # Перемещение элемента + item.move_up() + item.move_down() + item.move_to(5) + """ + + order = models.PositiveIntegerField( + _("порядок"), + default=0, + db_index=True, + help_text=_("Порядковый номер для сортировки"), + ) + + class Meta: + abstract = True + ordering = ["order"] + + def move_up(self) -> None: + """Переместить элемент вверх (уменьшить order).""" + if self.order > 0: + self.order -= 1 + self.save(update_fields=["order"]) + + def move_down(self) -> None: + """Переместить элемент вниз (увеличить order).""" + self.order += 1 + self.save(update_fields=["order"]) + + def move_to(self, position: int) -> None: + """Переместить элемент на указанную позицию.""" + if position >= 0: + self.order = position + self.save(update_fields=["order"]) + + +class SlugMixin(models.Model): + """ + Миксин для URL-friendly идентификатора. + + Поля: + slug: Уникальный slug для URL + + Использование: + class Article(SlugMixin, models.Model): + title = models.CharField(max_length=200) + + def save(self, *args, **kwargs): + if not self.slug: + self.slug = slugify(self.title) + super().save(*args, **kwargs) + """ + + slug = models.SlugField( + _("slug"), + max_length=255, + unique=True, + db_index=True, + help_text=_("URL-friendly идентификатор"), + ) + + class Meta: + abstract = True + + +class StatusMixin(models.Model): + """ + Миксин для статусов с типичными значениями. + + Использование: + class Order(StatusMixin, models.Model): + total = models.DecimalField(...) 
+ + order = Order.objects.create(total=100) + order.activate() + order.deactivate() + """ + + class Status(models.TextChoices): + DRAFT = "draft", _("Черновик") + ACTIVE = "active", _("Активно") + INACTIVE = "inactive", _("Неактивно") + ARCHIVED = "archived", _("В архиве") + + status = models.CharField( + _("статус"), + max_length=20, + choices=Status.choices, + default=Status.DRAFT, + db_index=True, + ) + + class Meta: + abstract = True + + def activate(self) -> None: + """Активировать запись.""" + self.status = self.Status.ACTIVE + self.save(update_fields=["status"]) + + def deactivate(self) -> None: + """Деактивировать запись.""" + self.status = self.Status.INACTIVE + self.save(update_fields=["status"]) + + def archive(self) -> None: + """Отправить в архив.""" + self.status = self.Status.ARCHIVED + self.save(update_fields=["status"]) + + @property + def is_draft(self) -> bool: + return self.status == self.Status.DRAFT + + @property + def is_active_status(self) -> bool: + return self.status == self.Status.ACTIVE + + @property + def is_archived(self) -> bool: + return self.status == self.Status.ARCHIVED diff --git a/src/apps/core/models.py b/src/apps/core/models.py new file mode 100644 index 0000000..9b4cb7c --- /dev/null +++ b/src/apps/core/models.py @@ -0,0 +1,237 @@ +""" +Background Job Tracking - отслеживание статуса Celery задач. 
"""
Background job tracking -- persistence of Celery task status.

Provides:
- a model storing per-task information (this module),
- a service layer for managing tasks (services.py),
- API endpoints exposing task status (views.py).
"""

import uuid
from typing import Any

from apps.core.mixins import TimestampMixin
from django.db import models
from django.utils import timezone
from django.utils.translation import gettext_lazy as _


class JobStatus(models.TextChoices):
    """Lifecycle states of a background task."""

    PENDING = "pending", _("Ожидает")
    STARTED = "started", _("Выполняется")
    SUCCESS = "success", _("Успешно")
    FAILURE = "failure", _("Ошибка")
    REVOKED = "revoked", _("Отменена")
    RETRY = "retry", _("Повтор")


class BackgroundJob(TimestampMixin, models.Model):
    """
    Tracks the execution of a Celery background task.

    Supports:
    - execution status tracking,
    - storing either the result or the error,
    - reporting progress,
    - associating a task with a user.

    Usage inside a task:
        @shared_task(bind=True, base=TrackedTask)
        def my_task(self, data):
            job = BackgroundJob.objects.get(task_id=self.request.id)
            job.update_progress(50, "Processing...")
            ...  # task logic
            job.complete(result={"count": 100})
    """

    # UUID primary key: safe to expose in URLs/API responses.
    id = models.UUIDField(
        primary_key=True,
        default=uuid.uuid4,
        editable=False,
    )
    # Celery's task id; unique so a task maps to exactly one job row.
    task_id = models.CharField(
        _("ID задачи Celery"),
        max_length=255,
        unique=True,
        db_index=True,
        help_text=_("Идентификатор задачи в Celery"),
    )
    task_name = models.CharField(
        _("имя задачи"),
        max_length=255,
        db_index=True,
        help_text=_("Полное имя задачи (например, apps.myapp.tasks.process_data)"),
    )
    status = models.CharField(
        _("статус"),
        max_length=20,
        choices=JobStatus.choices,
        default=JobStatus.PENDING,
        db_index=True,
    )
    # 0..100; clamped in update_progress().
    progress = models.PositiveSmallIntegerField(
        _("прогресс"),
        default=0,
        help_text=_("Прогресс выполнения в процентах (0-100)"),
    )
    progress_message = models.CharField(
        _("сообщение о прогрессе"),
        max_length=500,
        blank=True,
        default="",
    )
    result = models.JSONField(
        _("результат"),
        null=True,
        blank=True,
        help_text=_("Результат выполнения задачи (JSON)"),
    )
    error = models.TextField(
        _("ошибка"),
        blank=True,
        default="",
        help_text=_("Текст ошибки при неудачном выполнении"),
    )
    traceback = models.TextField(
        _("traceback"),
        blank=True,
        default="",
        help_text=_("Полный traceback ошибки"),
    )
    started_at = models.DateTimeField(
        _("время начала"),
        null=True,
        blank=True,
    )
    completed_at = models.DateTimeField(
        _("время завершения"),
        null=True,
        blank=True,
    )
    # Optional link to a user; kept as a plain integer (no FK) so the core
    # app does not depend on the user model.
    user_id = models.PositiveIntegerField(
        _("ID пользователя"),
        null=True,
        blank=True,
        db_index=True,
        help_text=_("ID пользователя, запустившего задачу"),
    )
    # Free-form extra data attached to the job.
    meta = models.JSONField(
        _("метаданные"),
        default=dict,
        blank=True,
        help_text=_("Дополнительные данные задачи"),
    )

    class Meta:
        verbose_name = _("фоновая задача")
        verbose_name_plural = _("фоновые задачи")
        ordering = ["-created_at"]
        # Composite indexes for the common list/filter queries.
        indexes = [
            models.Index(fields=["status", "created_at"]),
            models.Index(fields=["user_id", "status"]),
            models.Index(fields=["task_name", "status"]),
        ]

    def __str__(self) -> str:
        return f"{self.task_name} ({self.status})"

    # ==================== Status-transition methods ====================

    def mark_started(self) -> None:
        """Mark the task as started and record the start time."""
        self.status = JobStatus.STARTED
        self.started_at = timezone.now()
        self.save(update_fields=["status", "started_at", "updated_at"])

    def update_progress(self, progress: int, message: str = "") -> None:
        """
        Update execution progress.

        Args:
            progress: Completion percentage (clamped to 0-100).
            message: Description of the current stage.
        """
        self.progress = min(max(progress, 0), 100)
        self.progress_message = message
        self.save(update_fields=["progress", "progress_message", "updated_at"])

    def complete(self, result: Any = None) -> None:
        """
        Mark the task as successfully finished.

        Args:
            result: Task result (must be JSON-serializable).
        """
        self.status = JobStatus.SUCCESS
        self.progress = 100
        self.result = result
        self.completed_at = timezone.now()
        self.save(
            update_fields=[
                "status",
                "progress",
                "result",
                "completed_at",
                "updated_at",
            ]
        )

    def fail(self, error: str, traceback_str: str = "") -> None:
        """
        Mark the task as finished with an error.

        Args:
            error: Error text.
            traceback_str: Full traceback.
        """
        self.status = JobStatus.FAILURE
        self.error = str(error)
        self.traceback = traceback_str
        self.completed_at = timezone.now()
        self.save(
            update_fields=[
                "status",
                "error",
                "traceback",
                "completed_at",
                "updated_at",
            ]
        )

    def revoke(self) -> None:
        """Mark the task as revoked (cancelled)."""
        self.status = JobStatus.REVOKED
        self.completed_at = timezone.now()
        self.save(update_fields=["status", "completed_at", "updated_at"])

    def mark_retry(self) -> None:
        """Mark that the task is scheduled for a retry."""
        self.status = JobStatus.RETRY
        self.save(update_fields=["status", "updated_at"])

    # ==================== Properties ====================

    @property
    def is_finished(self) -> bool:
        """True once the task reached a terminal state."""
        return self.status in (
            JobStatus.SUCCESS,
            JobStatus.FAILURE,
            JobStatus.REVOKED,
        )

    @property
    def is_successful(self) -> bool:
        """True when the task finished successfully."""
        return self.status == JobStatus.SUCCESS

    @property
    def duration(self) -> float | None:
        """Execution duration in seconds, or None until both timestamps exist."""
        if self.started_at and self.completed_at:
            return (self.completed_at - self.started_at).total_seconds()
        return None
+""" + +from typing import Any + +from drf_yasg import openapi +from drf_yasg.utils import swagger_auto_schema + + +def api_docs( + *, + summary: str, + description: str | None = None, + request_body: Any = None, + responses: dict[int, Any] | None = None, + tags: list[str] | None = None, + operation_id: str | None = None, + deprecated: bool = False, + security: list[dict[str, list[str]]] | None = None, + manual_parameters: list[openapi.Parameter] | None = None, +): + """ + Декоратор для документирования API эндпоинтов. + + Упрощённая обёртка над swagger_auto_schema с поддержкой + типовых паттернов документирования. + + Args: + summary: Краткое описание эндпоинта (отображается в списке) + description: Подробное описание (отображается при раскрытии) + request_body: Схема тела запроса (serializer или openapi.Schema) + responses: Словарь возможных ответов {status_code: schema} + tags: Теги для группировки в документации + operation_id: Уникальный идентификатор операции + deprecated: Пометить как устаревший + security: Требования безопасности + manual_parameters: Дополнительные параметры запроса + + Пример использования: + class UserView(APIView): + @api_docs( + summary="Получить текущего пользователя", + description="Возвращает данные аутентифицированного пользователя", + responses={ + 200: UserSerializer, + 401: "Не авторизован", + }, + tags=["Пользователи"], + ) + def get(self, request): + ... 
+ """ + # Преобразуем упрощённые responses в формат openapi + formatted_responses = {} + if responses: + for code, schema in responses.items(): + if isinstance(schema, str): + # Простое текстовое описание + formatted_responses[code] = openapi.Response(description=schema) + elif isinstance(schema, type): + # Serializer class + formatted_responses[code] = openapi.Response( + description=_get_status_description(code), + schema=schema, + ) + elif isinstance(schema, openapi.Response): + formatted_responses[code] = schema + else: + formatted_responses[code] = schema + + return swagger_auto_schema( + operation_summary=summary, + operation_description=description, + request_body=request_body, + responses=formatted_responses or None, + tags=tags, + operation_id=operation_id, + deprecated=deprecated, + security=security, + manual_parameters=manual_parameters, + ) + + +def _get_status_description(status_code: int) -> str: + """Возвращает описание HTTP статуса на русском.""" + descriptions = { + 200: "Успешный запрос", + 201: "Ресурс создан", + 204: "Успешно, без содержимого", + 400: "Некорректный запрос", + 401: "Не авторизован", + 403: "Доступ запрещён", + 404: "Ресурс не найден", + 409: "Конфликт", + 422: "Ошибка валидации", + 429: "Слишком много запросов", + 500: "Внутренняя ошибка сервера", + } + return descriptions.get(status_code, f"HTTP {status_code}") + + +# Предопределённые схемы ответов +class CommonResponses: + """ + Общие схемы ответов для документации. + + Пример использования: + @api_docs( + summary="Удалить ресурс", + responses={ + 204: CommonResponses.NO_CONTENT, + 404: CommonResponses.NOT_FOUND, + }, + ) + def delete(self, request, pk): + ... 
+ """ + + SUCCESS = openapi.Response( + description="Успешный запрос", + schema=openapi.Schema( + type=openapi.TYPE_OBJECT, + properties={ + "success": openapi.Schema(type=openapi.TYPE_BOOLEAN, default=True), + "data": openapi.Schema(type=openapi.TYPE_OBJECT), + }, + ), + ) + + CREATED = openapi.Response( + description="Ресурс успешно создан", + schema=openapi.Schema( + type=openapi.TYPE_OBJECT, + properties={ + "success": openapi.Schema(type=openapi.TYPE_BOOLEAN, default=True), + "data": openapi.Schema(type=openapi.TYPE_OBJECT), + }, + ), + ) + + NO_CONTENT = openapi.Response(description="Успешно, без содержимого") + + BAD_REQUEST = openapi.Response( + description="Некорректный запрос", + schema=openapi.Schema( + type=openapi.TYPE_OBJECT, + properties={ + "success": openapi.Schema(type=openapi.TYPE_BOOLEAN, default=False), + "errors": openapi.Schema( + type=openapi.TYPE_ARRAY, + items=openapi.Schema( + type=openapi.TYPE_OBJECT, + properties={ + "code": openapi.Schema(type=openapi.TYPE_STRING), + "message": openapi.Schema(type=openapi.TYPE_STRING), + }, + ), + ), + }, + ), + ) + + UNAUTHORIZED = openapi.Response( + description="Требуется аутентификация", + schema=openapi.Schema( + type=openapi.TYPE_OBJECT, + properties={ + "detail": openapi.Schema( + type=openapi.TYPE_STRING, + default="Учётные данные не предоставлены.", + ), + }, + ), + ) + + FORBIDDEN = openapi.Response( + description="Доступ запрещён", + schema=openapi.Schema( + type=openapi.TYPE_OBJECT, + properties={ + "detail": openapi.Schema( + type=openapi.TYPE_STRING, + default="У вас нет прав для выполнения этого действия.", + ), + }, + ), + ) + + NOT_FOUND = openapi.Response( + description="Ресурс не найден", + schema=openapi.Schema( + type=openapi.TYPE_OBJECT, + properties={ + "success": openapi.Schema(type=openapi.TYPE_BOOLEAN, default=False), + "errors": openapi.Schema( + type=openapi.TYPE_ARRAY, + items=openapi.Schema( + type=openapi.TYPE_OBJECT, + properties={ + "code": openapi.Schema( + 
type=openapi.TYPE_STRING, default="not_found" + ), + "message": openapi.Schema( + type=openapi.TYPE_STRING, default="Ресурс не найден" + ), + }, + ), + ), + }, + ), + ) + + VALIDATION_ERROR = openapi.Response( + description="Ошибка валидации", + schema=openapi.Schema( + type=openapi.TYPE_OBJECT, + properties={ + "success": openapi.Schema(type=openapi.TYPE_BOOLEAN, default=False), + "errors": openapi.Schema( + type=openapi.TYPE_ARRAY, + items=openapi.Schema( + type=openapi.TYPE_OBJECT, + properties={ + "code": openapi.Schema(type=openapi.TYPE_STRING), + "message": openapi.Schema(type=openapi.TYPE_STRING), + "details": openapi.Schema(type=openapi.TYPE_OBJECT), + }, + ), + ), + }, + ), + ) + + RATE_LIMITED = openapi.Response( + description="Превышен лимит запросов", + schema=openapi.Schema( + type=openapi.TYPE_OBJECT, + properties={ + "detail": openapi.Schema( + type=openapi.TYPE_STRING, + default="Превышен лимит запросов. Повторите позже.", + ), + }, + ), + ) + + SERVER_ERROR = openapi.Response( + description="Внутренняя ошибка сервера", + schema=openapi.Schema( + type=openapi.TYPE_OBJECT, + properties={ + "success": openapi.Schema(type=openapi.TYPE_BOOLEAN, default=False), + "errors": openapi.Schema( + type=openapi.TYPE_ARRAY, + items=openapi.Schema( + type=openapi.TYPE_OBJECT, + properties={ + "code": openapi.Schema( + type=openapi.TYPE_STRING, default="internal_error" + ), + "message": openapi.Schema( + type=openapi.TYPE_STRING, + default="Внутренняя ошибка сервера", + ), + }, + ), + ), + }, + ), + ) + + +# Параметры запроса +class CommonParameters: + """ + Общие параметры для документации API. + + Пример использования: + @api_docs( + summary="Список ресурсов", + manual_parameters=[ + CommonParameters.PAGE, + CommonParameters.PAGE_SIZE, + CommonParameters.SEARCH, + ], + ) + def get(self, request): + ... 
# Request parameters.
class CommonParameters:
    """
    Common request parameters for API documentation.

    Usage:
        @api_docs(
            summary="List resources",
            manual_parameters=[
                CommonParameters.PAGE,
                CommonParameters.PAGE_SIZE,
                CommonParameters.SEARCH,
            ],
        )
        def get(self, request):
            ...
    """

    PAGE = openapi.Parameter(
        name="page",
        in_=openapi.IN_QUERY,
        type=openapi.TYPE_INTEGER,
        description="Номер страницы",
        default=1,
    )

    PAGE_SIZE = openapi.Parameter(
        name="page_size",
        in_=openapi.IN_QUERY,
        type=openapi.TYPE_INTEGER,
        description="Количество элементов на странице",
        default=20,
    )

    SEARCH = openapi.Parameter(
        name="search",
        in_=openapi.IN_QUERY,
        type=openapi.TYPE_STRING,
        description="Поисковый запрос",
    )

    ORDERING = openapi.Parameter(
        name="ordering",
        in_=openapi.IN_QUERY,
        type=openapi.TYPE_STRING,
        description="Поле сортировки (префикс '-' для убывания)",
    )

    ID = openapi.Parameter(
        name="id",
        in_=openapi.IN_PATH,
        type=openapi.TYPE_INTEGER,
        description="ID ресурса",
        required=True,
    )


def paginated_response(serializer_class: type) -> openapi.Response:
    """
    Build the schema of a paginated response.

    Usage:
        @api_docs(
            summary="List users",
            responses={200: paginated_response(UserSerializer)},
        )
        def get(self, request):
            ...
    """
    return openapi.Response(
        description="Пагинированный список",
        schema=openapi.Schema(
            type=openapi.TYPE_OBJECT,
            properties={
                "success": openapi.Schema(type=openapi.TYPE_BOOLEAN, default=True),
                "data": openapi.Schema(
                    type=openapi.TYPE_ARRAY,
                    items=openapi.Schema(type=openapi.TYPE_OBJECT),
                ),
                "meta": openapi.Schema(
                    type=openapi.TYPE_OBJECT,
                    properties={
                        "pagination": openapi.Schema(
                            type=openapi.TYPE_OBJECT,
                            properties={
                                "count": openapi.Schema(type=openapi.TYPE_INTEGER),
                                "page": openapi.Schema(type=openapi.TYPE_INTEGER),
                                "page_size": openapi.Schema(type=openapi.TYPE_INTEGER),
                                "total_pages": openapi.Schema(
                                    type=openapi.TYPE_INTEGER
                                ),
                            },
                        ),
                    },
                ),
            },
        ),
    )


# --- src/apps/core/pagination.py ---
"""
Custom pagination classes for DRF.

Provides cursor-based and standard pagination with unified response format.
"""

from collections import OrderedDict
from typing import Any

from rest_framework.pagination import CursorPagination, PageNumberPagination
from rest_framework.response import Response
class StandardPagination(PageNumberPagination):
    """
    Standard page number pagination with unified response format.

    Query params:
        - page: Page number (default: 1)
        - page_size: Items per page (default: 20, max: 100)

    Response format:
        {
            "success": true,
            "data": [...],
            "errors": null,
            "meta": {
                "pagination": {
                    "page": 1,
                    "page_size": 20,
                    "total_count": 100,
                    "total_pages": 5,
                    "has_next": true,
                    "has_previous": false
                }
            }
        }
    """

    page_size = 20
    page_size_query_param = "page_size"
    max_page_size = 100

    def get_paginated_response(self, data: list[Any]) -> Response:
        """Return response in the unified envelope format."""
        return Response(
            OrderedDict(
                [
                    ("success", True),
                    ("data", data),
                    ("errors", None),
                    (
                        "meta",
                        {
                            "pagination": {
                                "page": self.page.number,
                                "page_size": self.get_page_size(self.request),
                                "total_count": self.page.paginator.count,
                                "total_pages": self.page.paginator.num_pages,
                                "has_next": self.page.has_next(),
                                "has_previous": self.page.has_previous(),
                            }
                        },
                    ),
                ]
            )
        )

    def get_paginated_response_schema(self, schema: dict) -> dict:
        """OpenAPI schema for the paginated response."""
        return {
            "type": "object",
            "properties": {
                "success": {"type": "boolean", "example": True},
                "data": schema,
                "errors": {"type": "null"},
                "meta": {
                    "type": "object",
                    "properties": {
                        "pagination": {
                            "type": "object",
                            "properties": {
                                "page": {"type": "integer", "example": 1},
                                "page_size": {"type": "integer", "example": 20},
                                "total_count": {"type": "integer", "example": 100},
                                "total_pages": {"type": "integer", "example": 5},
                                "has_next": {"type": "boolean", "example": True},
                                "has_previous": {"type": "boolean", "example": False},
                            },
                        },
                    },
                },
            },
        }


class StandardCursorPagination(CursorPagination):
    """
    Cursor-based pagination for large datasets.

    Benefits over offset pagination:
        - Consistent results even when data changes
        - Better performance on large tables
        - No "page drift" issues

    Query params:
        - cursor: Opaque cursor string for next/previous page
        - page_size: Items per page (default: 20, max: 100)

    Response format:
        {
            "success": true,
            "data": [...],
            "errors": null,
            "meta": {
                "pagination": {
                    "next_cursor": "...",
                    "previous_cursor": "...",
                    "page_size": 20
                }
            }
        }
    """

    page_size = 20
    page_size_query_param = "page_size"
    max_page_size = 100
    ordering = "-created_at"  # Default ordering, override per-view

    def get_paginated_response(self, data: list[Any]) -> Response:
        """Return response in the unified envelope format."""
        return Response(
            OrderedDict(
                [
                    ("success", True),
                    ("data", data),
                    ("errors", None),
                    (
                        "meta",
                        {
                            "pagination": {
                                "next_cursor": self.get_next_link(),
                                "previous_cursor": self.get_previous_link(),
                                "page_size": self.get_page_size(self.request),
                            }
                        },
                    ),
                ]
            )
        )

    def get_paginated_response_schema(self, schema: dict) -> dict:
        """OpenAPI schema for the cursor-paginated response."""
        return {
            "type": "object",
            "properties": {
                "success": {"type": "boolean", "example": True},
                "data": schema,
                "errors": {"type": "null"},
                "meta": {
                    "type": "object",
                    "properties": {
                        "pagination": {
                            "type": "object",
                            "properties": {
                                "next_cursor": {
                                    "type": "string",
                                    "nullable": True,
                                    "example": "cD0yMDIxLTAxLTAxKzAwJTNBMDAl",
                                },
                                "previous_cursor": {
                                    "type": "string",
                                    "nullable": True,
                                    "example": None,
                                },
                                "page_size": {"type": "integer", "example": 20},
                            },
                        },
                    },
                },
            },
        }
+ """ + + page_size = 50 + max_page_size = 200 + + +class SmallResultSetPagination(StandardPagination): + """ + Pagination for small result sets where total count is acceptable. + """ + + page_size = 10 + max_page_size = 50 diff --git a/src/apps/core/permissions.py b/src/apps/core/permissions.py new file mode 100644 index 0000000..daaf397 --- /dev/null +++ b/src/apps/core/permissions.py @@ -0,0 +1,128 @@ +""" +Базовые классы разрешений для API. + +Предоставляет переиспользуемые permission classes для контроля доступа. +""" + +from rest_framework import permissions +from rest_framework.request import Request +from rest_framework.views import APIView + + +class IsOwner(permissions.BasePermission): + """ + Разрешает доступ только владельцу объекта. + + Объект должен иметь атрибут `user` или `owner`, + который сравнивается с текущим пользователем. + + Пример использования: + class MyView(APIView): + permission_classes = [IsAuthenticated, IsOwner] + """ + + owner_field = "user" # Можно переопределить в подклассах + + def has_object_permission(self, request: Request, view: APIView, obj) -> bool: + """Проверяет, является ли пользователь владельцем объекта.""" + owner = getattr(obj, self.owner_field, None) + if owner is None: + owner = getattr(obj, "owner", None) + return owner == request.user + + +class IsOwnerOrReadOnly(permissions.BasePermission): + """ + Разрешает изменение только владельцу, остальным - только чтение. + + SAFE_METHODS (GET, HEAD, OPTIONS) доступны всем аутентифицированным. + Изменяющие методы (POST, PUT, PATCH, DELETE) - только владельцу. 
+ """ + + owner_field = "user" + + def has_object_permission(self, request: Request, view: APIView, obj) -> bool: + """Проверяет права на объект.""" + # Чтение разрешено всем + if request.method in permissions.SAFE_METHODS: + return True + + # Изменение - только владельцу + owner = getattr(obj, self.owner_field, None) + if owner is None: + owner = getattr(obj, "owner", None) + return owner == request.user + + +class IsAdminOrReadOnly(permissions.BasePermission): + """ + Разрешает изменение только админам, остальным - только чтение. + + SAFE_METHODS доступны всем (включая анонимных). + Изменяющие методы - только staff/superuser. + """ + + def has_permission(self, request: Request, view: APIView) -> bool: + """Проверяет права на уровне view.""" + if request.method in permissions.SAFE_METHODS: + return True + return request.user and request.user.is_staff + + +class IsAdmin(permissions.BasePermission): + """ + Разрешает доступ только администраторам (is_staff=True). + """ + + def has_permission(self, request: Request, view: APIView) -> bool: + """Проверяет, является ли пользователь администратором.""" + return request.user and request.user.is_staff + + +class IsSuperuser(permissions.BasePermission): + """ + Разрешает доступ только суперпользователям (is_superuser=True). + """ + + def has_permission(self, request: Request, view: APIView) -> bool: + """Проверяет, является ли пользователь суперпользователем.""" + return request.user and request.user.is_superuser + + +class IsVerified(permissions.BasePermission): + """ + Разрешает доступ только пользователям с подтверждённым email. + + Требует наличие поля `is_verified` у модели пользователя. 
+ """ + + message = "Email не подтверждён" + + def has_permission(self, request: Request, view: APIView) -> bool: + """Проверяет, подтверждён ли email пользователя.""" + return ( + request.user + and request.user.is_authenticated + and getattr(request.user, "is_verified", False) + ) + + +class IsOwnerOrAdmin(permissions.BasePermission): + """ + Разрешает доступ владельцу объекта или администратору. + + Полезно для случаев, когда админ должен иметь доступ + к любым объектам, а пользователь - только к своим. + """ + + owner_field = "user" + + def has_object_permission(self, request: Request, view: APIView, obj) -> bool: + """Проверяет права на объект.""" + if request.user and request.user.is_staff: + return True + + owner = getattr(obj, self.owner_field, None) + if owner is None: + owner = getattr(obj, "owner", None) + return owner == request.user diff --git a/src/apps/core/response.py b/src/apps/core/response.py new file mode 100644 index 0000000..b03a74b --- /dev/null +++ b/src/apps/core/response.py @@ -0,0 +1,160 @@ +""" +Unified API response wrapper. + +Provides consistent response format across all API endpoints: +{ + "success": bool, + "data": {...} | [...] | null, + "errors": [...] | null, + "meta": { + "request_id": "uuid", + "pagination": {...} // optional + } +} +""" + +from typing import Any + +from rest_framework import status +from rest_framework.response import Response + + +def api_response( + data: Any = None, + *, + status_code: int = status.HTTP_200_OK, + request_id: str | None = None, + pagination: dict[str, Any] | None = None, + headers: dict[str, str] | None = None, +) -> Response: + """ + Create a successful API response. 
+ + Args: + data: Response data (dict, list, or None) + status_code: HTTP status code (default 200) + request_id: Request tracking ID + pagination: Pagination metadata + headers: Additional response headers + + Returns: + DRF Response with unified format + """ + meta = {} + if request_id: + meta["request_id"] = request_id + if pagination: + meta["pagination"] = pagination + + response_data = { + "success": True, + "data": data, + "errors": None, + "meta": meta if meta else None, + } + + return Response(response_data, status=status_code, headers=headers) + + +def api_error_response( + errors: list[dict[str, Any]], + *, + status_code: int = status.HTTP_400_BAD_REQUEST, + request_id: str | None = None, + headers: dict[str, str] | None = None, +) -> Response: + """ + Create an error API response. + + Args: + errors: List of error dictionaries, each with 'code' and 'message' + status_code: HTTP status code (default 400) + request_id: Request tracking ID + headers: Additional response headers + + Returns: + DRF Response with unified error format + """ + meta = {} + if request_id: + meta["request_id"] = request_id + + response_data = { + "success": False, + "data": None, + "errors": errors, + "meta": meta if meta else None, + } + + return Response(response_data, status=status_code, headers=headers) + + +def api_created_response( + data: Any = None, + *, + request_id: str | None = None, + headers: dict[str, str] | None = None, +) -> Response: + """Shortcut for 201 Created response.""" + return api_response( + data, + status_code=status.HTTP_201_CREATED, + request_id=request_id, + headers=headers, + ) + + +def api_no_content_response( + *, + request_id: str | None = None, + headers: dict[str, str] | None = None, +) -> Response: + """Shortcut for 204 No Content response.""" + meta = {} + if request_id: + meta["request_id"] = request_id + + return Response( + {"success": True, "data": None, "errors": None, "meta": meta if meta else None}, + status=status.HTTP_204_NO_CONTENT, 
def api_no_content_response(
    *,
    request_id: str | None = None,
    headers: dict[str, str] | None = None,
) -> Response:
    """Shortcut for a 204 No Content response.

    Fix: the previous implementation serialized a JSON envelope into the
    204 response. Per RFC 9110 §15.3.5 a 204 response MUST NOT include a
    message body, so no body is produced; the ``request_id`` is surfaced
    via the ``X-Request-ID`` header instead.

    Args:
        request_id: Request tracking ID (emitted as X-Request-ID header).
        headers: Additional response headers.
    """
    response_headers = dict(headers) if headers else {}
    if request_id and "X-Request-ID" not in response_headers:
        response_headers["X-Request-ID"] = request_id

    return Response(
        status=status.HTTP_204_NO_CONTENT,
        headers=response_headers or None,
    )


def api_paginated_response(
    data: list[Any],
    *,
    page: int,
    page_size: int,
    total_count: int,
    request_id: str | None = None,
    headers: dict[str, str] | None = None,
) -> Response:
    """
    Create a paginated API response.

    Args:
        data: List of items for current page
        page: Current page number (1-based)
        page_size: Number of items per page
        total_count: Total number of items
        request_id: Request tracking ID
        headers: Additional response headers
    """
    # Ceiling division; guard against a zero/negative page_size.
    total_pages = (total_count + page_size - 1) // page_size if page_size > 0 else 0

    pagination = {
        "page": page,
        "page_size": page_size,
        "total_count": total_count,
        "total_pages": total_pages,
        "has_next": page < total_pages,
        "has_previous": page > 1,
    }

    return api_response(
        data,
        request_id=request_id,
        pagination=pagination,
        headers=headers,
    )


# --- src/apps/core/serializers.py ---
"""
Serializers for apps.core.

Contains serializers for:
- BackgroundJob -- background task status.
"""

from rest_framework import serializers


class BackgroundJobSerializer(serializers.Serializer):
    """
    Read-only representation of a background task's status.

    Used in API responses reporting task state.
    """

    id = serializers.UUIDField(read_only=True)
    task_id = serializers.CharField(read_only=True)
    task_name = serializers.CharField(read_only=True)
    status = serializers.CharField(read_only=True)
    progress = serializers.IntegerField(read_only=True)
    progress_message = serializers.CharField(read_only=True)
    result = serializers.JSONField(read_only=True)
    error = serializers.CharField(read_only=True)
    started_at = serializers.DateTimeField(read_only=True)
    completed_at = serializers.DateTimeField(read_only=True)
    created_at = serializers.DateTimeField(read_only=True)
    # Fix: DRF raises AssertionError when ``source`` equals the field name
    # ("It is redundant to specify `source='duration'`..."); the model's
    # ``duration`` property is resolved by field name automatically.
    duration = serializers.FloatField(read_only=True)

    # Computed flags exposed from model properties.
    is_finished = serializers.BooleanField(read_only=True)
    is_successful = serializers.BooleanField(read_only=True)


class BackgroundJobListSerializer(serializers.Serializer):
    """
    Compact serializer for task lists.
    """

    id = serializers.UUIDField(read_only=True)
    task_id = serializers.CharField(read_only=True)
    task_name = serializers.CharField(read_only=True)
    status = serializers.CharField(read_only=True)
    progress = serializers.IntegerField(read_only=True)
    created_at = serializers.DateTimeField(read_only=True)
    is_finished = serializers.BooleanField(read_only=True)
+""" + +import logging +from typing import Any, Generic, TypeVar + +from apps.core.exceptions import NotFoundError +from django.db import models, transaction +from django.db.models import QuerySet + +logger = logging.getLogger(__name__) + +# Type variable for model +M = TypeVar("M", bound=models.Model) + + +class BaseService(Generic[M]): + """ + Base service class providing common CRUD operations. + + Usage: + class UserService(BaseService[User]): + model = User + + @classmethod + def create_user(cls, *, email: str, password: str) -> User: + # Business logic here + user = cls.model.objects.create_user(email=email, password=password) + return user + """ + + model: type[M] + + @classmethod + def get_queryset(cls) -> QuerySet[M]: + """Get base queryset for the model. Override to add default filters.""" + return cls.model.objects.all() + + @classmethod + def get_by_id(cls, pk: Any) -> M: + """ + Get entity by primary key. + + Raises: + NotFoundError: If entity not found + """ + try: + return cls.get_queryset().get(pk=pk) + except cls.model.DoesNotExist as e: + raise NotFoundError( + message=f"{cls.model.__name__} with id={pk} not found", + code="not_found", + details={"model": cls.model.__name__, "id": pk}, + ) from e + + @classmethod + def get_by_id_or_none(cls, pk: Any) -> M | None: + """Get entity by primary key or None if not found.""" + try: + return cls.get_queryset().get(pk=pk) + except cls.model.DoesNotExist: + return None + + @classmethod + def get_all(cls) -> QuerySet[M]: + """Get all entities.""" + return cls.get_queryset() + + @classmethod + def filter(cls, **kwargs: Any) -> QuerySet[M]: + """Filter entities by given criteria.""" + return cls.get_queryset().filter(**kwargs) + + @classmethod + def exists(cls, **kwargs: Any) -> bool: + """Check if entity with given criteria exists.""" + return cls.get_queryset().filter(**kwargs).exists() + + @classmethod + def count(cls, **kwargs: Any) -> int: + """Count entities matching criteria.""" + if kwargs: + return 
cls.get_queryset().filter(**kwargs).count() + return cls.get_queryset().count() + + @classmethod + @transaction.atomic + def create(cls, **kwargs: Any) -> M: + """ + Create new entity. + + Override this method to add business logic before/after creation. + """ + return cls.model.objects.create(**kwargs) + + @classmethod + @transaction.atomic + def update(cls, instance: M, **kwargs: Any) -> M: + """ + Update entity fields. + + Override this method to add business logic before/after update. + """ + for field, value in kwargs.items(): + setattr(instance, field, value) + instance.save(update_fields=list(kwargs.keys())) + return instance + + @classmethod + @transaction.atomic + def delete(cls, instance: M) -> None: + """ + Delete entity. + + Override this method to implement soft delete or add business logic. + """ + instance.delete() + + @classmethod + @transaction.atomic + def bulk_create(cls, instances: list[M], **kwargs: Any) -> list[M]: + """Bulk create entities.""" + return cls.model.objects.bulk_create(instances, **kwargs) + + @classmethod + @transaction.atomic + def bulk_update(cls, instances: list[M], fields: list[str], **kwargs: Any) -> int: + """Bulk update entities.""" + return cls.model.objects.bulk_update(instances, fields, **kwargs) + + +class BaseReadOnlyService(Generic[M]): + """ + Read-only service for entities that should not be modified via API. + + Useful for reference data, logs, audit trails, etc. 
+ """ + + model: type[M] + + @classmethod + def get_queryset(cls) -> QuerySet[M]: + """Get base queryset for the model.""" + return cls.model.objects.all() + + @classmethod + def get_by_id(cls, pk: Any) -> M: + """Get entity by primary key.""" + try: + return cls.get_queryset().get(pk=pk) + except cls.model.DoesNotExist as e: + raise NotFoundError( + message=f"{cls.model.__name__} with id={pk} not found", + code="not_found", + ) from e + + @classmethod + def get_all(cls) -> QuerySet[M]: + """Get all entities.""" + return cls.get_queryset() + + @classmethod + def filter(cls, **kwargs: Any) -> QuerySet[M]: + """Filter entities by given criteria.""" + return cls.get_queryset().filter(**kwargs) + + +class TransactionMixin: + """ + Mixin providing transaction helpers for services. + + Usage: + class PaymentService(TransactionMixin, BaseService[Payment]): + @classmethod + def process_payment(cls, order_id: int) -> Payment: + with cls.atomic(): + # Multiple operations in single transaction + ... + """ + + @classmethod + def atomic(cls): + """Get atomic transaction context manager.""" + return transaction.atomic() + + @classmethod + def on_commit(cls, func): + """Register function to be called after transaction commits.""" + transaction.on_commit(func) + + @classmethod + def savepoint(cls): + """Create a savepoint within current transaction.""" + return transaction.savepoint() + + @classmethod + def savepoint_rollback(cls, sid): + """Rollback to a savepoint.""" + transaction.savepoint_rollback(sid) + + @classmethod + def savepoint_commit(cls, sid): + """Commit a savepoint.""" + transaction.savepoint_commit(sid) + + +class BulkOperationsMixin: + """ + Миксин для расширенных массовых операций. 
+ + Дополняет BaseService методами: + - bulk_create_chunked: создание чанками для больших данных + - bulk_update_or_create: upsert операция + - bulk_delete: удаление по списку ID + - bulk_update_fields: обновление полей по фильтру + + Использование: + class ProductService(BulkOperationsMixin, BaseService[Product]): + model = Product + + # Создание 10000 записей чанками по 500 + ProductService.bulk_create_chunked(products, chunk_size=500) + + # Upsert по уникальному полю + ProductService.bulk_update_or_create( + items=data, + unique_fields=['sku'], + update_fields=['price', 'quantity'] + ) + """ + + model: type[models.Model] + + @classmethod + @transaction.atomic + def bulk_create_chunked( + cls, + instances: list, + *, + chunk_size: int = 500, + ignore_conflicts: bool = False, + update_conflicts: bool = False, + update_fields: list[str] | None = None, + unique_fields: list[str] | None = None, + ) -> int: + """ + Массовое создание чанками для больших объёмов. + + Args: + instances: Список объектов для создания + chunk_size: Размер чанка (по умолчанию 500) + ignore_conflicts: Игнорировать конфликты + update_conflicts: Обновлять при конфликтах (upsert) + update_fields: Поля для обновления при конфликте + unique_fields: Уникальные поля для определения конфликта + + Returns: + Количество созданных записей + """ + total_created = 0 + + for i in range(0, len(instances), chunk_size): + chunk = instances[i : i + chunk_size] + kwargs = { + "ignore_conflicts": ignore_conflicts, + } + + # Django 4.1+ поддерживает update_conflicts + if update_conflicts and update_fields and unique_fields: + kwargs["update_conflicts"] = True + kwargs["update_fields"] = update_fields + kwargs["unique_fields"] = unique_fields + + created = cls.model.objects.bulk_create(chunk, **kwargs) + total_created += len(created) + + return total_created + + @classmethod + @transaction.atomic + def bulk_update_or_create( + cls, + items: list[dict], + *, + unique_fields: list[str], + update_fields: list[str], + 
create_defaults: dict | None = None, + ) -> tuple[int, int]: + """ + Upsert: обновить существующие или создать новые. + + Args: + items: Список словарей с данными + unique_fields: Поля для поиска существующих + update_fields: Поля для обновления + create_defaults: Значения по умолчанию для создания + + Returns: + (created_count, updated_count) + """ + created_count = 0 + updated_count = 0 + defaults = create_defaults or {} + + for item in items: + lookup = {field: item[field] for field in unique_fields} + update_data = { + field: item[field] for field in update_fields if field in item + } + + obj, created = cls.model.objects.update_or_create( + **lookup, + defaults={**update_data, **defaults}, + ) + + if created: + created_count += 1 + else: + updated_count += 1 + + return created_count, updated_count + + @classmethod + @transaction.atomic + def bulk_delete( + cls, + ids: list, + *, + hard_delete: bool = True, + ) -> int: + """ + Массовое удаление по списку ID. + + Args: + ids: Список ID для удаления + hard_delete: Физическое удаление (игнорирует SoftDelete) + + Returns: + Количество удалённых записей + """ + queryset = cls.model.objects.filter(pk__in=ids) + + if hard_delete: + # Для SoftDelete моделей используем all_objects + if hasattr(cls.model, "all_objects"): + queryset = cls.model.all_objects.filter(pk__in=ids) + deleted, _ = queryset.delete() + else: + # Мягкое удаление + from django.utils import timezone + + deleted = queryset.update(is_deleted=True, deleted_at=timezone.now()) + + return deleted + + @classmethod + @transaction.atomic + def bulk_update_fields( + cls, + filters: dict, + updates: dict, + ) -> int: + """ + Массовое обновление полей по фильтру. 
+ + Args: + filters: Фильтры для выборки + updates: Поля и значения для обновления + + Returns: + Количество обновлённых записей + + Пример: + ProductService.bulk_update_fields( + filters={'category': 'electronics'}, + updates={'discount': 10, 'is_featured': True} + ) + """ + return cls.model.objects.filter(**filters).update(**updates) + + +class QueryOptimizerMixin: + """ + Миксин для автоматической оптимизации запросов. + + Декларативный подход к select_related/prefetch_related. + + Атрибуты: + select_related: Список полей для select_related + prefetch_related: Список полей для prefetch_related + default_only: Поля для only() (ограничение столбцов) + default_defer: Поля для defer() (исключение столбцов) + + Использование: + class OrderService(QueryOptimizerMixin, BaseService[Order]): + model = Order + select_related = ['user', 'shipping_address'] + prefetch_related = ['items', 'items__product'] + default_defer = ['description', 'internal_notes'] + + # Автоматически применяет оптимизации + orders = OrderService.get_optimized_queryset() + """ + + model: type[models.Model] + select_related: list[str] = [] + prefetch_related: list[str] = [] + default_only: list[str] = [] + default_defer: list[str] = [] + + @classmethod + def get_optimized_queryset(cls) -> QuerySet: + """ + Получить оптимизированный queryset. + + Применяет все объявленные оптимизации. + """ + queryset = cls.model.objects.all() + return cls.apply_optimizations(queryset) + + @classmethod + def apply_optimizations( + cls, + queryset: QuerySet, + *, + include_select: bool = True, + include_prefetch: bool = True, + include_only: bool = True, + include_defer: bool = True, + ) -> QuerySet: + """ + Применить оптимизации к queryset. 
+ + Args: + queryset: Исходный queryset + include_select: Применять select_related + include_prefetch: Применять prefetch_related + include_only: Применять only() + include_defer: Применять defer() + """ + if include_select and cls.select_related: + queryset = queryset.select_related(*cls.select_related) + + if include_prefetch and cls.prefetch_related: + queryset = queryset.prefetch_related(*cls.prefetch_related) + + if include_only and cls.default_only: + queryset = queryset.only(*cls.default_only) + + if include_defer and cls.default_defer: + queryset = queryset.defer(*cls.default_defer) + + return queryset + + @classmethod + def get_list_queryset(cls) -> QuerySet: + """ + Queryset для списков (может исключать тяжёлые поля). + """ + return cls.apply_optimizations( + cls.model.objects.all(), + include_only=True, + include_defer=True, + ) + + @classmethod + def get_detail_queryset(cls) -> QuerySet: + """ + Queryset для детального просмотра (все поля). + """ + return cls.apply_optimizations( + cls.model.objects.all(), + include_only=False, + include_defer=False, + ) + + @classmethod + def with_counts(cls, queryset: QuerySet, *count_fields: str) -> QuerySet: + """ + Добавить аннотации Count. + + Args: + queryset: Исходный queryset + count_fields: Поля для подсчёта + + Пример: + # Добавит items_count и reviews_count + qs = ProductService.with_counts(qs, 'items', 'reviews') + """ + from django.db.models import Count + + annotations = {f"{field}_count": Count(field) for field in count_fields} + return queryset.annotate(**annotations) + + @classmethod + def with_exists(cls, queryset: QuerySet, **subqueries: QuerySet) -> QuerySet: + """ + Добавить аннотации Exists. 
+ + Пример: + from apps.reviews.models import Review + qs = ProductService.with_exists( + qs, + has_reviews=Review.objects.filter(product=OuterRef('pk')) + ) + """ + from django.db.models import Exists + + annotations = {name: Exists(subquery) for name, subquery in subqueries.items()} + return queryset.annotate(**annotations) + + +class BackgroundJobService(BaseReadOnlyService): + """ + Сервис для управления фоновыми задачами. + + Использование: + # Создание задачи + job = BackgroundJobService.create_job( + task_id="abc-123", + task_name="apps.myapp.tasks.process_data", + user_id=request.user.id, + ) + + # Получение статуса + job = BackgroundJobService.get_by_task_id("abc-123") + + # Список задач пользователя + jobs = BackgroundJobService.get_user_jobs(user_id=1) + """ + + # Импорт модели внутри методов для избежания circular import + + @classmethod + def get_model(cls): + """Ленивый импорт модели.""" + from apps.core.models import BackgroundJob + + return BackgroundJob + + @classmethod + def get_queryset(cls): + """Get base queryset.""" + return cls.get_model().objects.all() + + @classmethod + def create_job( + cls, + *, + task_id: str, + task_name: str, + user_id: int | None = None, + meta: dict | None = None, + ): + """ + Создать запись о фоновой задаче. + + Args: + task_id: ID задачи Celery + task_name: Имя задачи + user_id: ID пользователя (опционально) + meta: Дополнительные метаданные + + Returns: + BackgroundJob instance + """ + BackgroundJob = cls.get_model() + return BackgroundJob.objects.create( + task_id=task_id, + task_name=task_name, + user_id=user_id, + meta=meta or {}, + ) + + @classmethod + def get_by_task_id(cls, task_id: str): + """ + Получить задачу по ID Celery. 
+ + Raises: + NotFoundError: Если задача не найдена + """ + BackgroundJob = cls.get_model() + try: + return BackgroundJob.objects.get(task_id=task_id) + except BackgroundJob.DoesNotExist as e: + raise NotFoundError( + message=f"Job with task_id={task_id} not found", + code="job_not_found", + ) from e + + @classmethod + def get_by_task_id_or_none(cls, task_id: str): + """Получить задачу по ID или None.""" + BackgroundJob = cls.get_model() + try: + return BackgroundJob.objects.get(task_id=task_id) + except BackgroundJob.DoesNotExist: + return None + + @classmethod + def get_user_jobs( + cls, + user_id: int, + *, + status: str | None = None, + limit: int = 50, + ): + """ + Получить задачи пользователя. + + Args: + user_id: ID пользователя + status: Фильтр по статусу (опционально) + limit: Максимальное количество записей + + Returns: + QuerySet задач + """ + qs = cls.get_queryset().filter(user_id=user_id) + if status: + qs = qs.filter(status=status) + return qs[:limit] + + @classmethod + def get_active_jobs(cls, user_id: int | None = None): + """ + Получить активные (незавершённые) задачи. + + Args: + user_id: Фильтр по пользователю (опционально) + """ + from apps.core.models import JobStatus + + qs = cls.get_queryset().filter( + status__in=[JobStatus.PENDING, JobStatus.STARTED, JobStatus.RETRY] + ) + if user_id: + qs = qs.filter(user_id=user_id) + return qs + + @classmethod + def cleanup_old_jobs(cls, *, days: int = 30) -> int: + """ + Удалить старые завершённые задачи. 
+ + Args: + days: Количество дней (задачи старше будут удалены) + + Returns: + Количество удалённых записей + """ + from datetime import timedelta + + from apps.core.models import JobStatus + from django.utils import timezone + + cutoff = timezone.now() - timedelta(days=days) + deleted, _ = ( + cls.get_queryset() + .filter( + status__in=[JobStatus.SUCCESS, JobStatus.FAILURE, JobStatus.REVOKED], + completed_at__lt=cutoff, + ) + .delete() + ) + return deleted diff --git a/src/apps/core/signals.py b/src/apps/core/signals.py new file mode 100644 index 0000000..5d831d4 --- /dev/null +++ b/src/apps/core/signals.py @@ -0,0 +1,295 @@ +""" +Централизованная система управления сигналами Django. + +Предоставляет удобный способ регистрации и управления сигналами +в одном месте для лучшей организации кода. +""" + +import logging +from collections.abc import Callable +from typing import Any + +from django.db.models.signals import ( + post_delete, + post_save, + pre_delete, + pre_save, +) +from django.dispatch import Signal + +logger = logging.getLogger(__name__) + + +class SignalDispatcher: + """ + Диспетчер сигналов для централизованной регистрации. + + Позволяет регистрировать все обработчики сигналов в одном месте, + что упрощает отладку и понимание потока данных. 
+ + Пример использования в apps.py: + from apps.core.signals import signal_dispatcher + + class UserConfig(AppConfig): + def ready(self): + from apps.user.signals import register_signals + register_signals(signal_dispatcher) + + Пример в signals.py приложения: + def register_signals(dispatcher): + dispatcher.register( + signal=post_save, + sender='user.User', + handler=create_user_profile, + description="Создаёт профиль при создании пользователя", + ) + """ + + def __init__(self) -> None: + self._handlers: list[dict[str, Any]] = [] + self._connected = False + + def register( + self, + signal: Signal, + sender: str | type, + handler: Callable[..., Any], + description: str = "", + dispatch_uid: str | None = None, + ) -> None: + """ + Регистрирует обработчик сигнала. + + Args: + signal: Django signal (post_save, pre_delete, etc.) + sender: Модель-отправитель (строка 'app.Model' или класс) + handler: Функция-обработчик + description: Описание для документации + dispatch_uid: Уникальный ID для предотвращения дублирования + """ + self._handlers.append( + { + "signal": signal, + "sender": sender, + "handler": handler, + "description": description, + "dispatch_uid": dispatch_uid + or f"{handler.__module__}.{handler.__name__}", + } + ) + + logger.debug( + f"Зарегистрирован обработчик сигнала: {handler.__name__} " + f"для {sender} ({description})" + ) + + def connect_all(self) -> None: + """Подключает все зарегистрированные обработчики.""" + if self._connected: + logger.warning("Сигналы уже подключены") + return + + for handler_info in self._handlers: + sender = handler_info["sender"] + + # Если sender - строка, получаем модель + if isinstance(sender, str): + from django.apps import apps + + app_label, model_name = sender.split(".") + sender = apps.get_model(app_label, model_name) + + handler_info["signal"].connect( + handler_info["handler"], + sender=sender, + dispatch_uid=handler_info["dispatch_uid"], + ) + + logger.info( + f"Подключен обработчик: 
{handler_info['handler'].__name__} -> {sender}" + ) + + self._connected = True + + def disconnect_all(self) -> None: + """Отключает все обработчики (полезно для тестов).""" + for handler_info in self._handlers: + sender = handler_info["sender"] + + if isinstance(sender, str): + from django.apps import apps + + app_label, model_name = sender.split(".") + sender = apps.get_model(app_label, model_name) + + handler_info["signal"].disconnect( + handler_info["handler"], + sender=sender, + dispatch_uid=handler_info["dispatch_uid"], + ) + + self._connected = False + logger.info("Все обработчики сигналов отключены") + + def list_handlers(self) -> list[dict[str, Any]]: + """Возвращает список всех зарегистрированных обработчиков.""" + return [ + { + "signal": h["signal"].__class__.__name__, + "sender": str(h["sender"]), + "handler": f"{h['handler'].__module__}.{h['handler'].__name__}", + "description": h["description"], + } + for h in self._handlers + ] + + +# Глобальный экземпляр диспетчера +signal_dispatcher = SignalDispatcher() + + +# Декораторы для удобной регистрации +def on_post_save( + sender: str | type, + description: str = "", + dispatch_uid: str | None = None, +) -> Callable[[Callable[..., Any]], Callable[..., Any]]: + """ + Декоратор для регистрации обработчика post_save. + + Пример использования: + @on_post_save('user.User', description="Создаёт профиль") + def create_profile(sender, instance, created, **kwargs): + if created: + Profile.objects.create(user=instance) + """ + + def decorator(handler: Callable[..., Any]) -> Callable[..., Any]: + signal_dispatcher.register( + signal=post_save, + sender=sender, + handler=handler, + description=description, + dispatch_uid=dispatch_uid, + ) + return handler + + return decorator + + +def on_pre_save( + sender: str | type, + description: str = "", + dispatch_uid: str | None = None, +) -> Callable[[Callable[..., Any]], Callable[..., Any]]: + """ + Декоратор для регистрации обработчика pre_save. 
+ + Пример использования: + @on_pre_save('blog.Article', description="Генерирует slug") + def generate_slug(sender, instance, **kwargs): + if not instance.slug: + instance.slug = slugify(instance.title) + """ + + def decorator(handler: Callable[..., Any]) -> Callable[..., Any]: + signal_dispatcher.register( + signal=pre_save, + sender=sender, + handler=handler, + description=description, + dispatch_uid=dispatch_uid, + ) + return handler + + return decorator + + +def on_post_delete( + sender: str | type, + description: str = "", + dispatch_uid: str | None = None, +) -> Callable[[Callable[..., Any]], Callable[..., Any]]: + """ + Декоратор для регистрации обработчика post_delete. + + Пример использования: + @on_post_delete('user.User', description="Удаляет связанные файлы") + def cleanup_user_files(sender, instance, **kwargs): + instance.avatar.delete(save=False) + """ + + def decorator(handler: Callable[..., Any]) -> Callable[..., Any]: + signal_dispatcher.register( + signal=post_delete, + sender=sender, + handler=handler, + description=description, + dispatch_uid=dispatch_uid, + ) + return handler + + return decorator + + +def on_pre_delete( + sender: str | type, + description: str = "", + dispatch_uid: str | None = None, +) -> Callable[[Callable[..., Any]], Callable[..., Any]]: + """ + Декоратор для регистрации обработчика pre_delete. 
+ + Пример использования: + @on_pre_delete('blog.Article', description="Архивирует перед удалением") + def archive_before_delete(sender, instance, **kwargs): + ArchivedArticle.objects.create_from_article(instance) + """ + + def decorator(handler: Callable[..., Any]) -> Callable[..., Any]: + signal_dispatcher.register( + signal=pre_delete, + sender=sender, + handler=handler, + description=description, + dispatch_uid=dispatch_uid, + ) + return handler + + return decorator + + +# Пользовательские сигналы для бизнес-событий +user_registered = Signal() # Отправляется при регистрации пользователя +user_verified = Signal() # Отправляется при верификации email +password_changed = Signal() # Отправляется при смене пароля + + +def emit_user_registered(user: Any) -> None: + """ + Отправляет сигнал о регистрации пользователя. + + Пример использования: + from apps.core.signals import emit_user_registered + + class UserService: + @classmethod + def register_user(cls, **data): + user = User.objects.create_user(**data) + emit_user_registered(user) + return user + """ + user_registered.send(sender=user.__class__, user=user) + logger.info(f"Отправлен сигнал user_registered для user_id={user.id}") + + +def emit_user_verified(user: Any) -> None: + """Отправляет сигнал о верификации email пользователя.""" + user_verified.send(sender=user.__class__, user=user) + logger.info(f"Отправлен сигнал user_verified для user_id={user.id}") + + +def emit_password_changed(user: Any) -> None: + """Отправляет сигнал о смене пароля.""" + password_changed.send(sender=user.__class__, user=user) + logger.info(f"Отправлен сигнал password_changed для user_id={user.id}") diff --git a/src/apps/core/tasks.py b/src/apps/core/tasks.py new file mode 100644 index 0000000..2a43479 --- /dev/null +++ b/src/apps/core/tasks.py @@ -0,0 +1,269 @@ +""" +Базовые классы для Celery задач. + +Предоставляет переиспользуемые базовые классы с логированием, +обработкой ошибок и retry логикой. 
+""" + +import logging +import time +from typing import Any + +from celery import Task +from django.db import transaction + +logger = logging.getLogger(__name__) + + +class BaseTask(Task): + """ + Базовый класс для всех Celery задач. + + Особенности: + - Автоматическое логирование начала/завершения + - Измерение времени выполнения + - Обработка ошибок с retry + - Логирование исключений + + Пример использования: + from config.celery import app + + @app.task(base=BaseTask, bind=True) + def my_task(self, arg1, arg2): + # Логика задачи + return result + """ + + # Настройки retry по умолчанию + autoretry_for = (Exception,) + retry_backoff = True + retry_backoff_max = 600 # Максимум 10 минут между retry + retry_jitter = True + max_retries = 3 + + # Не пересоздавать задачу при перезапуске worker'а + acks_late = True + reject_on_worker_lost = True + + def before_start( + self, + task_id: str, + args: tuple[Any, ...], + kwargs: dict[str, Any], + ) -> None: + """Вызывается перед началом выполнения задачи.""" + logger.info( + f"Задача {self.name}[{task_id}] запущена", + extra={ + "task_id": task_id, + "task_name": self.name, + "args": str(args)[:200], + "kwargs": str(kwargs)[:200], + }, + ) + + def on_success( + self, + retval: Any, + task_id: str, + args: tuple[Any, ...], + kwargs: dict[str, Any], + ) -> None: + """Вызывается при успешном завершении задачи.""" + logger.info( + f"Задача {self.name}[{task_id}] завершена успешно", + extra={ + "task_id": task_id, + "task_name": self.name, + "result": str(retval)[:200] if retval else None, + }, + ) + + def on_failure( + self, + exc: Exception, + task_id: str, + args: tuple[Any, ...], + kwargs: dict[str, Any], + einfo: Any, + ) -> None: + """Вызывается при ошибке выполнения задачи.""" + logger.error( + f"Задача {self.name}[{task_id}] завершена с ошибкой: {exc}", + extra={ + "task_id": task_id, + "task_name": self.name, + "exception": str(exc), + "args": str(args)[:200], + "kwargs": str(kwargs)[:200], + }, + exc_info=True, + ) + + 
def on_retry( + self, + exc: Exception, + task_id: str, + args: tuple[Any, ...], + kwargs: dict[str, Any], + einfo: Any, + ) -> None: + """Вызывается при повторной попытке выполнения.""" + logger.warning( + f"Задача {self.name}[{task_id}] будет повторена: {exc}", + extra={ + "task_id": task_id, + "task_name": self.name, + "exception": str(exc), + "retry_count": self.request.retries, + }, + ) + + +class TransactionalTask(BaseTask): + """ + Задача с поддержкой транзакций. + + Выполняет задачу в рамках database transaction. + При ошибке транзакция откатывается. + + Пример использования: + @app.task(base=TransactionalTask, bind=True) + def update_user_stats(self, user_id): + # Все операции в одной транзакции + user = User.objects.get(id=user_id) + user.stats.update() + user.save() + """ + + def __call__(self, *args: Any, **kwargs: Any) -> Any: + """Выполняет задачу в транзакции.""" + with transaction.atomic(): + return super().__call__(*args, **kwargs) + + +class IdempotentTask(BaseTask): + """ + Идемпотентная задача. + + Гарантирует, что задача с одинаковыми аргументами + не будет выполнена повторно в течение lock_timeout. 
+ + Пример использования: + @app.task(base=IdempotentTask, bind=True) + def send_notification(self, user_id, message): + # Не отправит дважды одно уведомление + send_email(user_id, message) + """ + + lock_timeout = 3600 # 1 час по умолчанию + + def __call__(self, *args: Any, **kwargs: Any) -> Any: + """Выполняет задачу с проверкой идемпотентности.""" + from django.core.cache import cache + + # Формируем ключ блокировки + lock_key = ( + f"task_lock:{self.name}:{hash((args, tuple(sorted(kwargs.items()))))}" + ) + + # Пробуем получить блокировку + if not cache.add(lock_key, True, self.lock_timeout): + logger.info( + f"Задача {self.name} пропущена (идемпотентность)", + extra={"lock_key": lock_key}, + ) + return None + + try: + return super().__call__(*args, **kwargs) + finally: + # Не удаляем блокировку - она истечёт по таймауту + pass + + +class TimedTask(BaseTask): + """ + Задача с измерением времени выполнения. + + Логирует время выполнения и предупреждает о медленных задачах. + + Атрибуты: + slow_threshold: Порог для предупреждения (в секундах) + + Пример использования: + @app.task(base=TimedTask, bind=True) + def process_data(self): + # Время выполнения будет залогировано + process_heavy_data() + """ + + slow_threshold = 60 # 1 минута по умолчанию + + def __call__(self, *args: Any, **kwargs: Any) -> Any: + """Выполняет задачу с измерением времени.""" + start_time = time.time() + try: + return super().__call__(*args, **kwargs) + finally: + elapsed = time.time() - start_time + log_extra = { + "task_id": self.request.id, + "task_name": self.name, + "elapsed_seconds": round(elapsed, 2), + } + + if elapsed > self.slow_threshold: + logger.warning( + f"Задача {self.name} выполнялась {elapsed:.2f}с " + f"(порог: {self.slow_threshold}с)", + extra=log_extra, + ) + else: + logger.debug( + f"Задача {self.name} выполнена за {elapsed:.2f}с", + extra=log_extra, + ) + + +class PeriodicTask(TimedTask): + """ + Базовый класс для периодических задач. 
+ + Объединяет TimedTask с дополнительной логикой + для периодических задач (beat). + + Пример использования: + @app.task(base=PeriodicTask, bind=True) + def cleanup_old_data(self): + # Периодическая очистка + OldData.objects.filter(created_at__lt=threshold).delete() + + # В beat_schedule: + app.conf.beat_schedule = { + 'cleanup-every-day': { + 'task': 'tasks.cleanup_old_data', + 'schedule': crontab(hour=3, minute=0), + }, + } + """ + + # Периодические задачи обычно не требуют retry + max_retries = 1 + autoretry_for = () + + def before_start( + self, + task_id: str, + args: tuple[Any, ...], + kwargs: dict[str, Any], + ) -> None: + """Логирует запуск периодической задачи.""" + logger.info( + f"Периодическая задача {self.name}[{task_id}] запущена", + extra={ + "task_id": task_id, + "task_name": self.name, + "periodic": True, + }, + ) diff --git a/src/apps/core/urls.py b/src/apps/core/urls.py new file mode 100644 index 0000000..382194e --- /dev/null +++ b/src/apps/core/urls.py @@ -0,0 +1,14 @@ +""" +URL configuration for core app. +""" + +from apps.core.views import HealthCheckView, LivenessView, ReadinessView +from django.urls import path + +app_name = "core" + +urlpatterns = [ + path("", HealthCheckView.as_view(), name="health"), + path("live/", LivenessView.as_view(), name="liveness"), + path("ready/", ReadinessView.as_view(), name="readiness"), +] diff --git a/src/apps/core/views.py b/src/apps/core/views.py new file mode 100644 index 0000000..056376c --- /dev/null +++ b/src/apps/core/views.py @@ -0,0 +1,249 @@ +""" +Health check views for monitoring and orchestration. + +Provides endpoints for: +- Basic liveness check (is the app running?) +- Readiness check (is the app ready to serve traffic?) 
+- Detailed health check (DB, Redis, Celery status) +""" + +import logging +import time +from typing import Any + +from django.conf import settings +from django.db import connection +from rest_framework import status +from rest_framework.permissions import AllowAny +from rest_framework.request import Request +from rest_framework.response import Response +from rest_framework.views import APIView + +logger = logging.getLogger(__name__) + + +class HealthCheckView(APIView): + """ + Comprehensive health check endpoint. + + GET /api/health/ + Returns detailed status of all dependencies. + + Response: + { + "status": "healthy" | "degraded" | "unhealthy", + "version": "1.0.0", + "checks": { + "database": {"status": "up", "latency_ms": 5}, + "redis": {"status": "up", "latency_ms": 2}, + "celery": {"status": "up"} + } + } + """ + + permission_classes = [AllowAny] + authentication_classes = [] # No auth required + + def get(self, request: Request) -> Response: + """Run all health checks and return status.""" + checks = {} + overall_status = "healthy" + + # Database check + db_check = self._check_database() + checks["database"] = db_check + if db_check["status"] != "up": + overall_status = "unhealthy" + + # Redis check + redis_check = self._check_redis() + checks["redis"] = redis_check + if redis_check["status"] != "up" and overall_status == "healthy": + overall_status = "degraded" + + # Celery check (optional, may be slow) + if request.query_params.get("include_celery", "").lower() == "true": + celery_check = self._check_celery() + checks["celery"] = celery_check + if celery_check["status"] != "up" and overall_status == "healthy": + overall_status = "degraded" + + response_data = { + "status": overall_status, + "version": getattr(settings, "APP_VERSION", "1.0.0"), + "checks": checks, + } + + # 503 only for unhealthy (critical services down) + # 200 for healthy and degraded (non-critical services down) + status_code = ( + status.HTTP_503_SERVICE_UNAVAILABLE + if overall_status 
== "unhealthy" + else status.HTTP_200_OK + ) + + return Response(response_data, status=status_code) + + def _check_database(self) -> dict[str, Any]: + """Check database connectivity.""" + start = time.time() + try: + with connection.cursor() as cursor: + cursor.execute("SELECT 1") + cursor.fetchone() + latency = (time.time() - start) * 1000 + return {"status": "up", "latency_ms": round(latency, 2)} + except Exception as e: + logger.error(f"Database health check failed: {e}") + return {"status": "down", "error": str(e)} + + def _check_redis(self) -> dict[str, Any]: + """Check Redis connectivity.""" + start = time.time() + try: + from django_redis import get_redis_connection + + redis_conn = get_redis_connection("default") + redis_conn.ping() + latency = (time.time() - start) * 1000 + return {"status": "up", "latency_ms": round(latency, 2)} + except ImportError: + return {"status": "skipped", "reason": "django_redis not installed"} + except Exception as e: + logger.warning(f"Redis health check failed: {e}") + return {"status": "down", "error": str(e)} + + def _check_celery(self) -> dict[str, Any]: + """Check Celery worker availability.""" + try: + from config.celery import app as celery_app + + inspector = celery_app.control.inspect(timeout=2.0) + active = inspector.active() + if active: + worker_count = len(active) + return {"status": "up", "workers": worker_count} + return {"status": "down", "error": "No active workers"} + except Exception as e: + logger.warning(f"Celery health check failed: {e}") + return {"status": "down", "error": str(e)} + + +class LivenessView(APIView): + """ + Kubernetes liveness probe endpoint. + + GET /api/health/live/ + Returns 200 if the application is running. 
+ """ + + permission_classes = [AllowAny] + authentication_classes = [] + + def get(self, request: Request) -> Response: + """Simple liveness check.""" + return Response({"status": "alive"}, status=status.HTTP_200_OK) + + +class ReadinessView(APIView): + """ + Kubernetes readiness probe endpoint. + + GET /api/health/ready/ + Returns 200 if the application is ready to serve traffic. + """ + + permission_classes = [AllowAny] + authentication_classes = [] + + def get(self, request: Request) -> Response: + """Check if app is ready to serve traffic.""" + # Check database connection + try: + with connection.cursor() as cursor: + cursor.execute("SELECT 1") + cursor.fetchone() + except Exception as e: + logger.error(f"Readiness check failed - database: {e}") + return Response( + {"status": "not_ready", "reason": "database unavailable"}, + status=status.HTTP_503_SERVICE_UNAVAILABLE, + ) + + return Response({"status": "ready"}, status=status.HTTP_200_OK) + + +class BackgroundJobStatusView(APIView): + """ + Получение статуса фоновой задачи. + + GET /api/v1/jobs/{task_id}/ + Возвращает статус, прогресс и результат задачи. 
+ + Response: + { + "id": "uuid", + "task_id": "celery-task-id", + "status": "pending|started|success|failure|revoked", + "progress": 75, + "progress_message": "Обработка данных...", + "result": {...}, + "error": "", + "is_finished": false + } + """ + + from rest_framework.permissions import IsAuthenticated + + permission_classes = [IsAuthenticated] + + def get(self, request: Request, task_id: str) -> Response: + """Получить статус задачи по task_id.""" + from apps.core.serializers import BackgroundJobSerializer + from apps.core.services import BackgroundJobService + + job = BackgroundJobService.get_by_task_id(task_id) + + # Проверка доступа: только владелец или админ + if job.user_id and job.user_id != request.user.id and not request.user.is_staff: + return Response( + {"detail": "Нет доступа к этой задаче"}, + status=status.HTTP_403_FORBIDDEN, + ) + + serializer = BackgroundJobSerializer(job) + return Response(serializer.data) + + +class BackgroundJobListView(APIView): + """ + Список фоновых задач пользователя. + + GET /api/v1/jobs/ + Возвращает список задач текущего пользователя. 
+ + Query params: + status: Фильтр по статусу (pending, started, success, failure) + limit: Количество записей (по умолчанию 50) + """ + + from rest_framework.permissions import IsAuthenticated + + permission_classes = [IsAuthenticated] + + def get(self, request: Request) -> Response: + """Получить список задач пользователя.""" + from apps.core.serializers import BackgroundJobListSerializer + from apps.core.services import BackgroundJobService + + status_filter = request.query_params.get("status") + limit = min(int(request.query_params.get("limit", 50)), 100) + + jobs = BackgroundJobService.get_user_jobs( + user_id=request.user.id, + status=status_filter, + limit=limit, + ) + + serializer = BackgroundJobListSerializer(jobs, many=True) + return Response(serializer.data) diff --git a/src/apps/core/viewsets.py b/src/apps/core/viewsets.py new file mode 100644 index 0000000..99d17c1 --- /dev/null +++ b/src/apps/core/viewsets.py @@ -0,0 +1,468 @@ +""" +Базовые ViewSet классы для API. + +Предоставляет переиспользуемые ViewSet с общей логикой. +""" + +import logging +from typing import Any, Generic, TypeVar + +from apps.core.pagination import StandardPagination +from apps.core.response import api_error_response, api_response +from django.db.models import Model, QuerySet +from django_filters import rest_framework as filters +from rest_framework import status, viewsets +from rest_framework.filters import OrderingFilter, SearchFilter +from rest_framework.permissions import IsAuthenticated +from rest_framework.request import Request +from rest_framework.response import Response +from rest_framework.serializers import Serializer + +logger = logging.getLogger(__name__) + +M = TypeVar("M", bound=Model) + + +class BaseViewSet(viewsets.ModelViewSet, Generic[M]): + """ + Базовый ViewSet с общей логикой для CRUD операций. 
+ + Особенности: + - Унифицированный формат ответов + - Стандартная пагинация + - Фильтрация, поиск, сортировка + - Логирование операций + - Автоматическая оптимизация запросов + - Обработка ошибок + + Пример использования: + class ArticleViewSet(BaseViewSet[Article]): + queryset = Article.objects.all() + serializer_class = ArticleSerializer + filterset_class = ArticleFilter + search_fields = ['title', 'content'] + ordering_fields = ['created_at', 'title'] + + # Оптимизация запросов (декларативно) + select_related_fields = ['author', 'category'] + prefetch_related_fields = ['tags', 'comments'] + defer_fields = ['full_text'] # Тяжёлые поля + """ + + pagination_class = StandardPagination + permission_classes = [IsAuthenticated] + filter_backends = [ + filters.DjangoFilterBackend, + SearchFilter, + OrderingFilter, + ] + ordering = ["-created_at"] + + # Оптимизация запросов (декларативный подход) + select_related_fields: list[str] = [] + prefetch_related_fields: list[str] = [] + only_fields: list[str] = [] # Только эти поля (list) + defer_fields: list[str] = [] # Исключить эти поля (detail) + + # Можно переопределить для разных action'ов + serializer_classes: dict[str, type[Serializer[Any]]] = {} + + def get_serializer_class(self) -> type[Serializer[Any]]: + """Возвращает serializer в зависимости от action.""" + if self.action in self.serializer_classes: + return self.serializer_classes[self.action] + return super().get_serializer_class() + + def get_queryset(self) -> QuerySet[M]: + """Возвращает базовый queryset с оптимизациями.""" + queryset = super().get_queryset() + + # Декларативные оптимизации (новый стиль) + if self.select_related_fields: + queryset = queryset.select_related(*self.select_related_fields) + + if self.prefetch_related_fields: + queryset = queryset.prefetch_related(*self.prefetch_related_fields) + + # only/defer только для list + if self.action == "list": + if self.only_fields: + queryset = queryset.only(*self.only_fields) + elif self.defer_fields: 
+ queryset = queryset.defer(*self.defer_fields) + + # Старый стиль (обратная совместимость) + if ( + hasattr(self, "select_related_fields") + and not self.select_related_fields + and hasattr(self, "_select_related") + ): + queryset = queryset.select_related(*self._select_related) + + if ( + hasattr(self, "prefetch_related_fields") + and not self.prefetch_related_fields + and hasattr(self, "_prefetch_related") + ): + queryset = queryset.prefetch_related(*self._prefetch_related) + + return queryset + + def list(self, request: Request, *args: Any, **kwargs: Any) -> Response: + """Получение списка объектов.""" + queryset = self.filter_queryset(self.get_queryset()) + + page = self.paginate_queryset(queryset) + if page is not None: + serializer = self.get_serializer(page, many=True) + return self.get_paginated_response(serializer.data) + + serializer = self.get_serializer(queryset, many=True) + return api_response(serializer.data) + + def retrieve(self, request: Request, *args: Any, **kwargs: Any) -> Response: + """Получение одного объекта.""" + instance = self.get_object() + serializer = self.get_serializer(instance) + return api_response(serializer.data) + + def create(self, request: Request, *args: Any, **kwargs: Any) -> Response: + """Создание объекта.""" + serializer = self.get_serializer(data=request.data) + serializer.is_valid(raise_exception=True) + self.perform_create(serializer) + + logger.info( + f"Created {self.get_queryset().model.__name__}", + extra={ + "model": self.get_queryset().model.__name__, + "user_id": request.user.id if request.user.is_authenticated else None, + }, + ) + + return api_response( + serializer.data, + status_code=status.HTTP_201_CREATED, + ) + + def perform_create(self, serializer: Serializer[Any]) -> M: + """Выполняет создание объекта. 
Можно переопределить для добавления логики.""" + return serializer.save() + + def update(self, request: Request, *args: Any, **kwargs: Any) -> Response: + """Полное обновление объекта.""" + partial = kwargs.pop("partial", False) + instance = self.get_object() + serializer = self.get_serializer(instance, data=request.data, partial=partial) + serializer.is_valid(raise_exception=True) + self.perform_update(serializer) + + logger.info( + f"Updated {self.get_queryset().model.__name__}", + extra={ + "model": self.get_queryset().model.__name__, + "instance_id": instance.pk, + "user_id": request.user.id if request.user.is_authenticated else None, + }, + ) + + return api_response(serializer.data) + + def perform_update(self, serializer: Serializer[Any]) -> M: + """Выполняет обновление объекта.""" + return serializer.save() + + def destroy(self, request: Request, *args: Any, **kwargs: Any) -> Response: + """Удаление объекта.""" + instance = self.get_object() + instance_id = instance.pk + + self.perform_destroy(instance) + + logger.info( + f"Deleted {self.get_queryset().model.__name__}", + extra={ + "model": self.get_queryset().model.__name__, + "instance_id": instance_id, + "user_id": request.user.id if request.user.is_authenticated else None, + }, + ) + + return Response(status=status.HTTP_204_NO_CONTENT) + + def perform_destroy(self, instance: M) -> None: + """Выполняет удаление объекта.""" + instance.delete() + + +class ReadOnlyViewSet(viewsets.ReadOnlyModelViewSet, Generic[M]): + """ + ViewSet только для чтения. + + Предоставляет только list и retrieve действия. 
+ + Пример использования: + class PublicArticleViewSet(ReadOnlyViewSet[Article]): + queryset = Article.objects.filter(is_published=True) + serializer_class = ArticleSerializer + permission_classes = [AllowAny] + """ + + pagination_class = StandardPagination + filter_backends = [ + filters.DjangoFilterBackend, + SearchFilter, + OrderingFilter, + ] + ordering = ["-created_at"] + + def list(self, request: Request, *args: Any, **kwargs: Any) -> Response: + """Получение списка объектов.""" + queryset = self.filter_queryset(self.get_queryset()) + + page = self.paginate_queryset(queryset) + if page is not None: + serializer = self.get_serializer(page, many=True) + return self.get_paginated_response(serializer.data) + + serializer = self.get_serializer(queryset, many=True) + return api_response(serializer.data) + + def retrieve(self, request: Request, *args: Any, **kwargs: Any) -> Response: + """Получение одного объекта.""" + instance = self.get_object() + serializer = self.get_serializer(instance) + return api_response(serializer.data) + + +class OwnerViewSet(BaseViewSet[M]): + """ + ViewSet с фильтрацией по владельцу. + + Автоматически фильтрует queryset по текущему пользователю + и устанавливает владельца при создании. 
+ + Атрибуты: + owner_field: Имя поля владельца (по умолчанию 'user') + + Пример использования: + class MyTaskViewSet(OwnerViewSet[Task]): + queryset = Task.objects.all() + serializer_class = TaskSerializer + owner_field = 'owner' # или 'user' + """ + + owner_field = "user" + + def get_queryset(self) -> QuerySet[M]: + """Фильтрует queryset по текущему пользователю.""" + queryset = super().get_queryset() + + if self.request.user.is_authenticated: + filter_kwargs = {self.owner_field: self.request.user} + queryset = queryset.filter(**filter_kwargs) + + return queryset + + def perform_create(self, serializer: Serializer[Any]) -> M: + """Устанавливает владельца при создании.""" + return serializer.save(**{self.owner_field: self.request.user}) + + +class BulkMixin: + """ + Миксин для массовых операций в ViewSet. + + Добавляет возможность создания/обновления/удаления + нескольких объектов за один запрос. + + Пример использования: + class ArticleViewSet(BulkMixin, BaseViewSet[Article]): + ... + + # POST /articles/bulk_create/ + # {"items": [{"title": "A"}, {"title": "B"}]} + + # PATCH /articles/bulk_update/ + # {"items": [{"id": 1, "title": "A updated"}, {"id": 2, "title": "B updated"}]} + + # DELETE /articles/bulk_delete/ + # {"ids": [1, 2, 3]} + + Для использования добавьте в urls.py: + from rest_framework.decorators import action + + class MyViewSet(BulkMixin, BaseViewSet[MyModel]): + @action(detail=False, methods=['post']) + def bulk_create(self, request): + return super().bulk_create(request) + + @action(detail=False, methods=['patch']) + def bulk_update(self, request): + return super().bulk_update(request) + + @action(detail=False, methods=['delete']) + def bulk_delete(self, request): + return super().bulk_delete(request) + """ + + # Максимальное количество объектов для bulk операций + bulk_max_items: int = 100 + + def bulk_create(self, request: Request) -> Response: + """ + Массовое создание объектов. 
+ + Ожидает: {"items": [{...}, {...}]} + """ + items = request.data.get("items", []) + + if not items: + return api_error_response( + [{"code": "invalid_data", "message": "Список items пуст"}], + status_code=status.HTTP_400_BAD_REQUEST, + ) + + if len(items) > self.bulk_max_items: + return api_error_response( + [ + { + "code": "too_many_items", + "message": f"Максимум {self.bulk_max_items} объектов за запрос", + } + ], + status_code=status.HTTP_400_BAD_REQUEST, + ) + + serializer = self.get_serializer(data=items, many=True) # type: ignore + serializer.is_valid(raise_exception=True) + instances = serializer.save() + + logger.info( + f"Bulk created {len(instances)} {self.get_queryset().model.__name__}", + extra={ + "model": self.get_queryset().model.__name__, + "count": len(instances), + "user_id": request.user.id if request.user.is_authenticated else None, + }, + ) + + return api_response( + self.get_serializer(instances, many=True).data, # type: ignore + status_code=status.HTTP_201_CREATED, + ) + + def bulk_update(self, request: Request) -> Response: + """ + Массовое обновление объектов. 
+ + Ожидает: {"items": [{"id": 1, "field": "value"}, ...]} + """ + items = request.data.get("items", []) + + if not items: + return api_error_response( + [{"code": "invalid_data", "message": "Список items пуст"}], + status_code=status.HTTP_400_BAD_REQUEST, + ) + + if len(items) > self.bulk_max_items: + return api_error_response( + [ + { + "code": "too_many_items", + "message": f"Максимум {self.bulk_max_items} объектов за запрос", + } + ], + status_code=status.HTTP_400_BAD_REQUEST, + ) + + # Собираем ID и получаем объекты + ids = [item.get("id") for item in items if item.get("id")] + if not ids: + return api_error_response( + [ + { + "code": "missing_ids", + "message": "Все элементы должны содержать id", + } + ], + status_code=status.HTTP_400_BAD_REQUEST, + ) + + instances_dict = {obj.pk: obj for obj in self.get_queryset().filter(pk__in=ids)} # type: ignore + + updated_instances = [] + errors = [] + + for item in items: + item_id = item.get("id") + if item_id not in instances_dict: + errors.append({"id": item_id, "error": "Объект не найден"}) + continue + + instance = instances_dict[item_id] + serializer = self.get_serializer(instance, data=item, partial=True) # type: ignore + + if serializer.is_valid(): + serializer.save() + updated_instances.append(serializer.data) + else: + errors.append({"id": item_id, "errors": serializer.errors}) + + logger.info( + f"Bulk updated {len(updated_instances)} {self.get_queryset().model.__name__}", + extra={ + "model": self.get_queryset().model.__name__, + "count": len(updated_instances), + "errors": len(errors), + "user_id": request.user.id if request.user.is_authenticated else None, + }, + ) + + return api_response( + { + "updated": updated_instances, + "errors": errors, + } + ) + + def bulk_delete(self, request: Request) -> Response: + """ + Массовое удаление объектов. 
+ + Ожидает: {"ids": [1, 2, 3]} + """ + ids = request.data.get("ids", []) + + if not ids: + return api_error_response( + [{"code": "invalid_data", "message": "Список ids пуст"}], + status_code=status.HTTP_400_BAD_REQUEST, + ) + + if len(ids) > self.bulk_max_items: + return api_error_response( + [ + { + "code": "too_many_items", + "message": f"Максимум {self.bulk_max_items} объектов за запрос", + } + ], + status_code=status.HTTP_400_BAD_REQUEST, + ) + + queryset = self.get_queryset().filter(pk__in=ids) # type: ignore + count = queryset.count() + queryset.delete() + + logger.info( + f"Bulk deleted {count} {self.get_queryset().model.__name__}", + extra={ + "model": self.get_queryset().model.__name__, + "count": count, + "user_id": request.user.id if request.user.is_authenticated else None, + }, + ) + + return api_response({"deleted": count}) diff --git a/src/apps/user/services.py b/src/apps/user/services.py index b6b0a95..3398035 100644 --- a/src/apps/user/services.py +++ b/src/apps/user/services.py @@ -1,5 +1,6 @@ -from typing import Any, Dict, Optional +from typing import Any +from apps.core.exceptions import NotFoundError from django.contrib.auth import get_user_model from django.db import transaction from rest_framework_simplejwt.tokens import RefreshToken @@ -38,23 +39,53 @@ class UserService: return user @classmethod - def get_user_by_email(cls, email: str) -> Optional[User]: - """Получает пользователя по email""" + def get_user_by_email(cls, email: str) -> User: + """Получает пользователя по email + + Raises: + NotFoundError: Если пользователь не найден + """ + try: + return User.objects.get(email=email) + except User.DoesNotExist as e: + raise NotFoundError( + message=f"User with email={email} not found", + details={"email": email}, + ) from e + + @classmethod + def get_user_by_email_or_none(cls, email: str) -> User | None: + """Получает пользователя по email или None""" try: return User.objects.get(email=email) except User.DoesNotExist: return None 
@classmethod - def get_user_by_id(cls, user_id: int) -> Optional[User]: - """Получает пользователя по ID""" + def get_user_by_id(cls, user_id: int) -> User: + """Получает пользователя по ID + + Raises: + NotFoundError: Если пользователь не найден + """ + try: + return User.objects.get(id=user_id) + except User.DoesNotExist as e: + raise NotFoundError( + message=f"User with id={user_id} not found", + details={"user_id": user_id}, + ) from e + + @classmethod + def get_user_by_id_or_none(cls, user_id: int) -> User | None: + """Получает пользователя по ID или None""" try: return User.objects.get(id=user_id) except User.DoesNotExist: return None @classmethod - def update_user(cls, user_id: int, **fields) -> Optional[User]: + def update_user(cls, user_id: int, **fields) -> User: """ Обновляет данные пользователя @@ -63,11 +94,12 @@ class UserService: **fields: Поля для обновления Returns: - User: Обновленный пользователь или None + User: Обновленный пользователь + + Raises: + NotFoundError: Если пользователь не найден """ user = cls.get_user_by_id(user_id) - if not user: - return None for field, value in fields.items(): setattr(user, field, value) @@ -76,24 +108,21 @@ class UserService: return user @classmethod - def delete_user(cls, user_id: int) -> bool: + def delete_user(cls, user_id: int) -> None: """ Удаляет пользователя Args: user_id: ID пользователя - Returns: - bool: True если успешно удален + Raises: + NotFoundError: Если пользователь не найден """ user = cls.get_user_by_id(user_id) - if user: - user.delete() - return True - return False + user.delete() @classmethod - def get_tokens_for_user(cls, user: User) -> Dict[str, str]: + def get_tokens_for_user(cls, user: User) -> dict[str, str]: """ Генерирует JWT токены для пользователя @@ -110,7 +139,7 @@ class UserService: } @classmethod - def verify_email(cls, user_id: int) -> bool: + def verify_email(cls, user_id: int) -> User: """ Подтверждает email пользователя @@ -118,29 +147,45 @@ class UserService: user_id: ID 
пользователя Returns: - bool: True если успешно подтвержден + User: Обновленный пользователь + + Raises: + NotFoundError: Если пользователь не найден """ user = cls.get_user_by_id(user_id) - if user: - user.is_verified = True - user.save() - return True - return False + user.is_verified = True + user.save() + return user class ProfileService: """Сервисный слой для работы с профилями""" @classmethod - def get_profile_by_user_id(cls, user_id: int) -> Optional[Profile]: - """Получает профиль по ID пользователя""" + def get_profile_by_user_id(cls, user_id: int) -> Profile: + """Получает профиль по ID пользователя + + Raises: + NotFoundError: Если профиль не найден + """ + try: + return Profile.objects.select_related("user").get(user_id=user_id) + except Profile.DoesNotExist as e: + raise NotFoundError( + message=f"Profile for user_id={user_id} not found", + details={"user_id": user_id}, + ) from e + + @classmethod + def get_profile_by_user_id_or_none(cls, user_id: int) -> Profile | None: + """Получает профиль по ID пользователя или None""" try: return Profile.objects.select_related("user").get(user_id=user_id) except Profile.DoesNotExist: return None @classmethod - def update_profile(cls, user_id: int, **fields) -> Optional[Profile]: + def update_profile(cls, user_id: int, **fields) -> Profile: """ Обновляет профиль пользователя @@ -149,11 +194,12 @@ class ProfileService: **fields: Поля для обновления Returns: - Profile: Обновленный профиль или None + Profile: Обновленный профиль + + Raises: + NotFoundError: Если профиль не найден """ profile = cls.get_profile_by_user_id(user_id) - if not profile: - return None for field, value in fields.items(): setattr(profile, field, value) @@ -162,7 +208,7 @@ class ProfileService: return profile @classmethod - def get_full_profile_data(cls, user_id: int) -> Optional[Dict[str, Any]]: + def get_full_profile_data(cls, user_id: int) -> dict[str, Any]: """ Получает полные данные пользователя и профиля @@ -170,12 +216,12 @@ class 
ProfileService: user_id: ID пользователя Returns: - Dict: Полные данные или None + Dict: Полные данные + + Raises: + NotFoundError: Если профиль не найден """ profile = cls.get_profile_by_user_id(user_id) - if not profile: - return None - user = profile.user return { "id": user.id, diff --git a/src/apps/user/tests/__init__.py b/src/apps/user/tests/__init__.py deleted file mode 100644 index d839e12..0000000 --- a/src/apps/user/tests/__init__.py +++ /dev/null @@ -1 +0,0 @@ -"""Tests for user app""" diff --git a/src/apps/user/tests/factories.py b/src/apps/user/tests/factories.py deleted file mode 100644 index b9daa29..0000000 --- a/src/apps/user/tests/factories.py +++ /dev/null @@ -1,63 +0,0 @@ -import uuid - -from apps.user.models import Profile, User -from model_bakery import baker - - -class UserFactory: - """Фабрика для создания пользователей""" - - @staticmethod - def create_user(**kwargs): - """Создать обычного пользователя""" - unique_suffix = str(uuid.uuid4())[:8] - defaults = { - "email": f"test_{unique_suffix}@example.com", - "username": f"testuser_{unique_suffix}", - "phone": f"+7999{unique_suffix[:7]}", - } - defaults.update(kwargs) - return baker.make(User, **defaults) - - @staticmethod - def create_superuser(**kwargs): - """Создать суперпользователя""" - unique_suffix = str(uuid.uuid4())[:8] - defaults = { - "email": f"admin_{unique_suffix}@example.com", - "username": f"admin_{unique_suffix}", - "is_staff": True, - "is_superuser": True, - } - defaults.update(kwargs) - return baker.make(User, **defaults) - - -class ProfileFactory: - """Фабрика для создания профилей""" - - @staticmethod - def create_profile(user=None, **kwargs): - """Создать профиль""" - if user is None: - user = UserFactory.create_user() - - unique_suffix = str(uuid.uuid4())[:4] - defaults = { - "first_name": f"Иван_{unique_suffix}", - "last_name": f"Иванов_{unique_suffix}", - "bio": f"Тестовый профиль {unique_suffix}", - } - defaults.update(kwargs) - - # Проверяем, существует ли уже 
профиль - try: - profile = user.profile - # Обновляем существующий профиль - for key, value in defaults.items(): - setattr(profile, key, value) - profile.save() - return profile - except Profile.DoesNotExist: - # Создаем новый профиль - return baker.make(Profile, user=user, **defaults) diff --git a/src/apps/user/urls.py b/src/apps/user/urls.py index f81315e..8de1a0a 100644 --- a/src/apps/user/urls.py +++ b/src/apps/user/urls.py @@ -3,6 +3,8 @@ from rest_framework_simplejwt.views import TokenVerifyView from . import views +app_name = "user" + urlpatterns = [ # Аутентификация path("register/", views.RegisterView.as_view(), name="register"), diff --git a/src/apps/user/views.py b/src/apps/user/views.py index 2bfa658..0259014 100644 --- a/src/apps/user/views.py +++ b/src/apps/user/views.py @@ -147,12 +147,8 @@ class UserUpdateView(APIView): serializer = UserUpdateSerializer(request.user, data=request.data, partial=True) if serializer.is_valid(): user = UserService.update_user(request.user.id, **serializer.validated_data) - if user: - user_serializer = UserSerializer(user) - return Response(user_serializer.data) - return Response( - {"error": "Пользователь не найден"}, status=status.HTTP_404_NOT_FOUND - ) + user_serializer = UserSerializer(user) + return Response(user_serializer.data) return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST) @@ -163,7 +159,7 @@ class ProfileDetailView(generics.RetrieveUpdateAPIView): serializer_class = ProfileUpdateSerializer def get_object(self): - profile = ProfileService.get_profile_by_user_id(self.request.user.id) + profile = ProfileService.get_profile_by_user_id_or_none(self.request.user.id) if not profile: # Если профиль не существует, создаем его from .models import Profile @@ -206,11 +202,7 @@ class ProfileDetailView(generics.RetrieveUpdateAPIView): updated_profile = ProfileService.update_profile( request.user.id, **serializer.validated_data ) - if updated_profile: - return 
Response(ProfileUpdateSerializer(updated_profile).data) - return Response( - {"error": "Профиль не найден"}, status=status.HTTP_404_NOT_FOUND - ) + return Response(ProfileUpdateSerializer(updated_profile).data) return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST) @@ -259,9 +251,7 @@ class PasswordChangeView(APIView): def user_profile_detail(request): """Получение полных данных профиля пользователя""" profile_data = ProfileService.get_full_profile_data(request.user.id) - if profile_data: - return Response(profile_data) - return Response({"error": "Профиль не найден"}, status=status.HTTP_404_NOT_FOUND) + return Response(profile_data) class TokenRefreshView(APIView): diff --git a/src/config/api_v1_urls.py b/src/config/api_v1_urls.py new file mode 100644 index 0000000..a3abefd --- /dev/null +++ b/src/config/api_v1_urls.py @@ -0,0 +1,20 @@ +""" +API v1 URL configuration. + +All API endpoints are versioned under /api/v1/ +""" + +from apps.core.views import BackgroundJobListView, BackgroundJobStatusView +from django.urls import include, path + +app_name = "api_v1" + +jobs_urlpatterns = [ + path("", BackgroundJobListView.as_view(), name="job-list"), + path("/", BackgroundJobStatusView.as_view(), name="job-status"), +] + +urlpatterns = [ + path("users/", include("apps.user.urls")), + path("jobs/", include((jobs_urlpatterns, "jobs"))), +] diff --git a/src/config/custom_test_runner.py b/src/config/custom_test_runner.py deleted file mode 100644 index 3e95271..0000000 --- a/src/config/custom_test_runner.py +++ /dev/null @@ -1,27 +0,0 @@ -import sys - -from django.test.runner import DiscoverRunner - - -class CustomTestRunner(DiscoverRunner): - """Custom test runner that avoids ipdb import issues""" - - def __init__(self, *args, **kwargs): - # Отключаем использование ipdb - import os - - os.environ["PYTHONBREAKPOINT"] = "pdb.set_trace" - super().__init__(*args, **kwargs) - - def run_tests(self, test_labels, extra_tests=None, **kwargs): - # Проверяем, что ipdb не 
будет импортирован - # Создаем mock-модуль вместо None - mock_ipdb = type("MockModule", (), {"__getattr__": lambda s, n: None})() - sys.modules["ipdb"] = mock_ipdb - - try: - return super().run_tests(test_labels, extra_tests, **kwargs) - finally: - # Восстанавливаем модуль если был - if "ipdb" in sys.modules: - del sys.modules["ipdb"] diff --git a/src/config/settings/base.py b/src/config/settings/base.py index 8a061f8..60f6b50 100644 --- a/src/config/settings/base.py +++ b/src/config/settings/base.py @@ -11,6 +11,9 @@ from decouple import Config, RepositoryEnv # Build paths inside the project like this: BASE_DIR / 'subdir'. BASE_DIR = Path(__file__).resolve().parent.parent.parent +# Application version +APP_VERSION = "1.0.0" + # Load environment variables ENV_FILE = BASE_DIR / ".env" if ENV_FILE.exists(): @@ -50,15 +53,18 @@ INSTALLED_APPS = [ "django.contrib.staticfiles", # Third-party apps "rest_framework", + "django_filters", "corsheaders", "django_celery_beat", "django_celery_results", "drf_yasg", # Local apps + "apps.core", "apps.user", ] MIDDLEWARE = [ + "apps.core.middleware.RequestIDMiddleware", "corsheaders.middleware.CorsMiddleware", "django.middleware.security.SecurityMiddleware", "django.contrib.sessions.middleware.SessionMiddleware", @@ -104,6 +110,17 @@ DATABASES = { }, } +# Cache configuration +CACHES = { + "default": { + "BACKEND": "django_redis.cache.RedisCache", + "LOCATION": get_env("REDIS_URL", "redis://localhost:6379/0"), + "OPTIONS": { + "CLIENT_CLASS": "django_redis.client.DefaultClient", + }, + }, +} + # Password validation AUTH_PASSWORD_VALIDATORS = [ @@ -152,12 +169,27 @@ REST_FRAMEWORK = { "DEFAULT_PERMISSION_CLASSES": [ "rest_framework.permissions.IsAuthenticatedOrReadOnly", ], - "DEFAULT_PAGINATION_CLASS": "rest_framework.pagination.PageNumberPagination", + "DEFAULT_FILTER_BACKENDS": [ + "django_filters.rest_framework.DjangoFilterBackend", + "rest_framework.filters.SearchFilter", + "rest_framework.filters.OrderingFilter", + ], + 
"DEFAULT_PAGINATION_CLASS": "apps.core.pagination.StandardPagination", "PAGE_SIZE": 20, "DEFAULT_RENDERER_CLASSES": [ "rest_framework.renderers.JSONRenderer", "rest_framework.renderers.BrowsableAPIRenderer", ], + "EXCEPTION_HANDLER": "apps.core.exception_handler.custom_exception_handler", + # Rate limiting + "DEFAULT_THROTTLE_CLASSES": [ + "rest_framework.throttling.AnonRateThrottle", + "rest_framework.throttling.UserRateThrottle", + ], + "DEFAULT_THROTTLE_RATES": { + "anon": "100/hour", + "user": "1000/hour", + }, } # JWT settings @@ -237,6 +269,3 @@ LOGGING = { }, }, } - -# Test runner configuration -TEST_RUNNER = "config.custom_test_runner.CustomTestRunner" diff --git a/src/config/settings/test.py b/src/config/settings/test.py new file mode 100644 index 0000000..2e68d19 --- /dev/null +++ b/src/config/settings/test.py @@ -0,0 +1,105 @@ +from .base import * + +# Test settings +SECRET_KEY = "django-insecure-test-key-only-for-testing" # noqa: S105 + +DEBUG = True + +ALLOWED_HOSTS = ["localhost", "127.0.0.1", "0.0.0.0", "testserver"] # noqa: S104 + +# Use in-memory SQLite database for faster tests +DATABASES = { + "default": { + "ENGINE": "django.db.backends.sqlite3", + "NAME": ":memory:", + "TEST": { + "NAME": ":memory:", + }, + } +} + + +# Disable migrations for faster tests +class DisableMigrations: + def __contains__(self, item): + return True + + def __getitem__(self, item): + return None + + +MIGRATION_MODULES = DisableMigrations() + +# Cache configuration for tests (use local memory) +CACHES = { + "default": { + "BACKEND": "django.core.cache.backends.locmem.LocMemCache", + } +} + +# Email backend for tests +EMAIL_BACKEND = "django.core.mail.backends.locmem.EmailBackend" + +# Celery Configuration for Tests (use eager execution) +CELERY_TASK_ALWAYS_EAGER = True +CELERY_TASK_EAGER_PROPAGATES = True +CELERY_BROKER_URL = "memory://" +CELERY_RESULT_BACKEND = "cache+memory://" + +# Password hashers - use fast hasher for tests +PASSWORD_HASHERS = [ + 
"django.contrib.auth.hashers.MD5PasswordHasher", +] + +# Disable logging during tests +LOGGING = { + "version": 1, + "disable_existing_loggers": False, + "handlers": { + "null": { + "class": "logging.NullHandler", + }, + }, + "root": { + "handlers": ["null"], + }, + "loggers": { + "django": { + "handlers": ["null"], + "propagate": False, + }, + "django.request": { + "handlers": ["null"], + "propagate": False, + }, + }, +} + +# Media files for tests +MEDIA_ROOT = "/tmp/test_media" # noqa: S108 + +# Static files for tests +STATICFILES_STORAGE = "django.contrib.staticfiles.storage.StaticFilesStorage" + +# Disable CSRF for API tests and disable throttling +REST_FRAMEWORK = { + **globals().get("REST_FRAMEWORK", {}), + "DEFAULT_AUTHENTICATION_CLASSES": [ + "rest_framework_simplejwt.authentication.JWTAuthentication", + "rest_framework.authentication.SessionAuthentication", + ], + "TEST_REQUEST_DEFAULT_FORMAT": "json", + # Disable throttling for tests + "DEFAULT_THROTTLE_CLASSES": [], + "DEFAULT_THROTTLE_RATES": {}, +} + +# JWT settings for tests +from datetime import timedelta + +SIMPLE_JWT = { + **globals().get("SIMPLE_JWT", {}), + "ACCESS_TOKEN_LIFETIME": timedelta(minutes=5), + "REFRESH_TOKEN_LIFETIME": timedelta(days=1), + "ROTATE_REFRESH_TOKENS": True, +} diff --git a/src/config/urls.py b/src/config/urls.py index 76f409a..aca36e0 100644 --- a/src/config/urls.py +++ b/src/config/urls.py @@ -27,16 +27,15 @@ schema_view = get_schema_view( ) urlpatterns = [ - path("admin/", admin.site.urls), - path("api/users/", include("apps.user.urls")), - path("api-auth/", include("rest_framework.urls")), - # Swagger documentation path( - "swagger/", + "", schema_view.with_ui("swagger", cache_timeout=0), name="schema-swagger-ui", ), - path("redoc/", schema_view.with_ui("redoc", cache_timeout=0), name="schema-redoc"), + path("admin/", admin.site.urls), + path("health/", include("apps.core.urls")), + path("api/v1/", include("config.api_v1_urls", namespace="api_v1")), + path("auth/", 
include("rest_framework.urls")), ] # Serve media files in development diff --git a/tests/README.md b/tests/README.md new file mode 100644 index 0000000..39c80d1 --- /dev/null +++ b/tests/README.md @@ -0,0 +1,368 @@ +# Тесты для mostovik-backend + +Этот документ описывает организацию и запуск тестов в проекте mostovik-backend. + +## 📁 Структура тестов + +``` +tests/ +├── __init__.py # Корневой пакет тестов +├── conftest.py # Конфигурация pytest и общие фикстуры +├── README.md # Этот файл +└── apps/ # Тесты для Django приложений + ├── __init__.py + └── user/ # Тесты для приложения user + ├── __init__.py + ├── factories.py # Фабрики для создания тестовых данных + ├── test_models.py # Тесты моделей + ├── test_serializers.py # Тесты сериализаторов + ├── test_services.py # Тесты сервисного слоя + └── test_views.py # Тесты представлений (API views) +``` + +## 🚀 Запуск тестов + +### Быстрый старт + +```bash +# Запуск всех тестов (рекомендуемый способ) +make test + +# Запуск конкретных тестов +make test TARGET=user # Все тесты user app +make test TARGET=models # Только тесты моделей +make test TARGET=views # Только тесты представлений + +# Или напрямую через скрипт +python run_tests_simple.py +python run_tests_simple.py user +``` + +### Различные способы запуска + +#### 1. Через универсальную команду make test (рекомендуется) + +```bash +# Все тесты +make test + +# Конкретные группы тестов +make test TARGET=user # Все тесты user app +make test TARGET=models # Тесты моделей +make test TARGET=views # Тесты представлений +make test TARGET=serializers # Тесты сериализаторов +make test TARGET=services # Тесты сервисов + +# Можно также использовать полные имена +make test TARGET=test_models # То же что и models +make test TARGET=test_views # То же что и views +``` + +#### 2. 
Через улучшенный Django runner + +```bash +# Все тесты +python run_tests_simple.py + +# Конкретное приложение +python run_tests_simple.py user + +# Конкретные группы тестов +python run_tests_simple.py models +python run_tests_simple.py views +python run_tests_simple.py serializers +python run_tests_simple.py services + +# Полные имена файлов +python run_tests_simple.py test_models +python run_tests_simple.py test_views +``` + +#### 3. Через стандартный Django test runner + +```bash +# Все тесты +python run_tests.py + +# Конкретное приложение +python run_tests.py test tests.apps.user + +# Конкретный класс тестов +python run_tests.py test tests.apps.user.test_models.UserModelTest +``` + +#### 4. Через pytest (возможны проблемы с pdbpp) + +```bash +# Через скрипт-обертку +python run_pytest.py + +# Или напрямую, если настроен PYTHONPATH +export PYTHONPATH=src:$PYTHONPATH +export DJANGO_SETTINGS_MODULE=config.settings.test +pytest tests/ +``` + +## 🔧 Конфигурация + +### Настройки тестов + +Тесты используют специальные настройки Django из `src/config/settings/test.py`: + +- **База данных**: SQLite в памяти для быстрого выполнения +- **Кэш**: Local memory cache вместо Redis +- **Email**: Локальный backend для тестирования +- **Celery**: Синхронное выполнение задач +- **Миграции**: Отключены для ускорения +- **Логирование**: Отключено + +### Pytest конфигурация + +Основные настройки в `pytest.ini`: + +- Автоматическое обнаружение Django настроек +- Переиспользование тестовой базы данных +- Отчеты о покрытии кода +- Фильтрация предупреждений + +### Полезные опции pytest + +```bash +# Подробная информация (автоматически включена) +make test TARGET=models + +# Запуск конкретного файла напрямую +python run_tests_simple.py test_models + +# Все тесты с подробным выводом +python run_tests_simple.py +``` + +## 🏭 Фабрики тестовых данных + +### UserFactory + +```python +from tests.apps.user.factories import UserFactory + +# Создание обычного пользователя +user = 
UserFactory.create_user() + +# Создание пользователя с конкретными данными +user = UserFactory.create_user( + email="test@example.com", + username="testuser" +) + +# Создание суперпользователя +admin = UserFactory.create_superuser() +``` + +### ProfileFactory + +```python +from tests.apps.user.factories import ProfileFactory + +# Создание профиля с новым пользователем +profile = ProfileFactory.create_profile() + +# Создание профиля для существующего пользователя +profile = ProfileFactory.create_profile( + user=existing_user, + first_name="John", + last_name="Doe" +) +``` + +## 🧪 Фикстуры pytest + +Доступные фикстуры в `tests/conftest.py`: + +```python +def test_example(test_user, authenticated_api_client): + """Пример использования фикстур""" + # test_user - готовый тестовый пользователь + # authenticated_api_client - API клиент с авторизацией + response = authenticated_api_client.get('/api/user/profile/') + assert response.status_code == 200 +``` + +### Список фикстур + +- `api_client` - DRF APIClient +- `user_factory` - Фабрика пользователей +- `profile_factory` - Фабрика профилей +- `test_user` - Готовый тестовый пользователь +- `test_superuser` - Готовый суперпользователь +- `test_profile` - Готовый профиль +- `authenticated_api_client` - Авторизованный API клиент +- `admin_api_client` - API клиент с админскими правами + +## 📊 Маркеры тестов + +Используйте маркеры для категоризации тестов: + +```python +import pytest + +@pytest.mark.unit +def test_user_model(): + """Юнит тест модели""" + pass + +@pytest.mark.integration +def test_user_registration_flow(): + """Интеграционный тест""" + pass + +@pytest.mark.slow +def test_heavy_operation(): + """Медленный тест""" + pass +``` + +Запуск по маркерам: + +```bash +# Только юнит тесты +python run_pytest.py -m "unit" + +# Исключить медленные тесты +python run_pytest.py -m "not slow" + +# Тесты моделей +python run_pytest.py -m "models" +``` + +## 🔍 Отладка тестов + +### Просмотр вывода + +```bash +# Показать print 
statements +python run_pytest.py -s + +# Подробные ошибки +python run_pytest.py --tb=long + +# Показать локальные переменные при ошибке +python run_pytest.py --tb=long --showlocals +``` + +### Использование pdb + +```python +def test_something(): + import pdb; pdb.set_trace() + # ваш код тестирования +``` + +```bash +# Запуск с автоматическим pdb при ошибках +python run_pytest.py --pdb +``` + +## 📈 Покрытие кода + +### Генерация отчета + +```bash +# HTML отчет +make test-coverage + +# Или напрямую +python run_pytest.py --cov=src --cov-report=html:htmlcov + +# Открыть отчет в браузере +open htmlcov/index.html +``` + +### Просмотр в терминале + +```bash +python run_pytest.py --cov=src --cov-report=term-missing +``` + +## 🔧 Добавление новых тестов + +### Создание нового файла тестов + +1. Создайте файл в соответствующей папке: `tests/apps/{app_name}/test_{module}.py` +2. Импортируйте необходимые зависимости +3. Создайте классы тестов, наследуя от `TestCase` или используя функции pytest + +### Пример структуры теста + +```python +"""Tests for new module""" + +from django.test import TestCase +from tests.apps.user.factories import UserFactory + + +class NewModuleTest(TestCase): + """Tests for NewModule""" + + def setUp(self): + """Подготовка данных для тестов""" + self.user = UserFactory.create_user() + + def test_something(self): + """Test description""" + # Arrange + expected_value = "test" + + # Act + result = some_function() + + # Assert + self.assertEqual(result, expected_value) +``` + +## 🚨 Решение проблем + +### Частые ошибки + +1. **Ошибка импорта**: Проверьте, что `PYTHONPATH` включает папку `src` +2. **База данных**: Убедитесь, что используются тестовые настройки +3. 
**Миграции**: В тестах миграции отключены, но модели должны быть синхронизированы + +### Очистка тестовых данных + +```bash +# Очистка кеша и временных файлов +make clean + +# Пересоздание тестовой базы данных +rm -f test_db.sqlite3 +python run_pytest.py --create-db +``` + +## 📚 Полезные ссылки + +- [Django Testing Documentation](https://docs.djangoproject.com/en/3.2/topics/testing/) +- [Pytest Documentation](https://docs.pytest.org/) +- [pytest-django](https://pytest-django.readthedocs.io/) +- [DRF Testing](https://www.django-rest-framework.org/api-guide/testing/) +- [Factory Boy](https://factoryboy.readthedocs.io/) + +## 🚀 Быстрая справка команд + +```bash +# Главная команда - make test с опциональным TARGET +make test # Все тесты +make test TARGET=user # User app (77 тестов) +make test TARGET=models # Модели (16 тестов) +make test TARGET=views # Представления (20 тестов) +make test TARGET=serializers # Сериализаторы (22 теста) +make test TARGET=services # Сервисы (18 тестов) +``` + +## 🤝 Рекомендации + +1. **Используйте make test** - это основная и самая удобная команда +2. **Именование**: Используйте описательные имена для тестов +3. **Изоляция**: Каждый тест должен быть независимым +4. **Покрытие**: Стремитесь к покрытию не менее 80% +5. **Быстрота**: Избегайте медленных операций в юнит тестах +6. **Читаемость**: Тесты должны быть понятными и хорошо документированными \ No newline at end of file diff --git a/tests/__init__.py b/tests/__init__.py new file mode 100644 index 0000000..84b82af --- /dev/null +++ b/tests/__init__.py @@ -0,0 +1,5 @@ +""" +Test suite for mostovik-backend project + +This package contains all tests organized by application structure. +""" diff --git a/tests/apps/__init__.py b/tests/apps/__init__.py new file mode 100644 index 0000000..d7f25c6 --- /dev/null +++ b/tests/apps/__init__.py @@ -0,0 +1,5 @@ +""" +Tests for applications + +This package contains tests for all Django applications. 
+""" diff --git a/tests/apps/core/__init__.py b/tests/apps/core/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/tests/apps/core/test_background_jobs.py b/tests/apps/core/test_background_jobs.py new file mode 100644 index 0000000..b226195 --- /dev/null +++ b/tests/apps/core/test_background_jobs.py @@ -0,0 +1,236 @@ +"""Тесты для BackgroundJob.""" + +from apps.core.models import BackgroundJob, JobStatus +from apps.core.services import BackgroundJobService +from django.test import TestCase +from faker import Faker + +fake = Faker() + + +class BackgroundJobModelTest(TestCase): + """Тесты для модели BackgroundJob.""" + + def test_create_job(self): + """Тест создания задачи.""" + task_id = fake.uuid4() + job = BackgroundJob.objects.create( + task_id=task_id, + task_name="apps.test.tasks.my_task", + ) + self.assertEqual(job.task_id, task_id) + self.assertEqual(job.status, JobStatus.PENDING) + self.assertEqual(job.progress, 0) + + def test_mark_started(self): + """Тест отметки о начале выполнения.""" + job = BackgroundJob.objects.create( + task_id=fake.uuid4(), + task_name="test.task", + ) + job.mark_started() + + self.assertEqual(job.status, JobStatus.STARTED) + self.assertIsNotNone(job.started_at) + + def test_update_progress(self): + """Тест обновления прогресса.""" + job = BackgroundJob.objects.create( + task_id=fake.uuid4(), + task_name="test.task", + ) + job.update_progress(50, "Обработка данных...") + + self.assertEqual(job.progress, 50) + self.assertEqual(job.progress_message, "Обработка данных...") + + def test_complete(self): + """Тест успешного завершения.""" + job = BackgroundJob.objects.create( + task_id=fake.uuid4(), + task_name="test.task", + ) + result = {"processed": 100, "errors": 0} + job.complete(result=result) + + self.assertEqual(job.status, JobStatus.SUCCESS) + self.assertEqual(job.progress, 100) + self.assertEqual(job.result, result) + self.assertIsNotNone(job.completed_at) + + def test_fail(self): + """Тест завершения с 
ошибкой.""" + job = BackgroundJob.objects.create( + task_id=fake.uuid4(), + task_name="test.task", + ) + job.fail("Something went wrong", "Traceback...") + + self.assertEqual(job.status, JobStatus.FAILURE) + self.assertEqual(job.error, "Something went wrong") + self.assertEqual(job.traceback, "Traceback...") + self.assertIsNotNone(job.completed_at) + + def test_revoke(self): + """Тест отмены задачи.""" + job = BackgroundJob.objects.create( + task_id=fake.uuid4(), + task_name="test.task", + ) + job.revoke() + + self.assertEqual(job.status, JobStatus.REVOKED) + self.assertIsNotNone(job.completed_at) + + def test_is_finished_property(self): + """Тест свойства is_finished.""" + job = BackgroundJob.objects.create( + task_id=fake.uuid4(), + task_name="test.task", + ) + self.assertFalse(job.is_finished) + + job.complete() + self.assertTrue(job.is_finished) + + def test_is_successful_property(self): + """Тест свойства is_successful.""" + job = BackgroundJob.objects.create( + task_id=fake.uuid4(), + task_name="test.task", + ) + self.assertFalse(job.is_successful) + + job.complete() + self.assertTrue(job.is_successful) + + def test_duration_property(self): + """Тест свойства duration.""" + job = BackgroundJob.objects.create( + task_id=fake.uuid4(), + task_name="test.task", + ) + self.assertIsNone(job.duration) + + job.mark_started() + job.complete() + self.assertIsNotNone(job.duration) + self.assertGreaterEqual(job.duration, 0) + + +class BackgroundJobServiceTest(TestCase): + """Тесты для BackgroundJobService.""" + + def test_create_job(self): + """Тест создания задачи через сервис.""" + task_id = fake.uuid4() + job = BackgroundJobService.create_job( + task_id=task_id, + task_name="apps.test.tasks.my_task", + user_id=1, + meta={"key": "value"}, + ) + self.assertEqual(job.task_id, task_id) + self.assertEqual(job.user_id, 1) + self.assertEqual(job.meta, {"key": "value"}) + + def test_get_by_task_id(self): + """Тест получения задачи по task_id.""" + task_id = fake.uuid4() + 
created_job = BackgroundJobService.create_job( + task_id=task_id, + task_name="test.task", + ) + found_job = BackgroundJobService.get_by_task_id(task_id) + self.assertEqual(created_job.id, found_job.id) + + def test_get_by_task_id_not_found(self): + """Тест получения несуществующей задачи.""" + from apps.core.exceptions import NotFoundError + + with self.assertRaises(NotFoundError): + BackgroundJobService.get_by_task_id("non-existent-task-id") + + def test_get_by_task_id_or_none(self): + """Тест получения задачи или None.""" + result = BackgroundJobService.get_by_task_id_or_none("non-existent") + self.assertIsNone(result) + + task_id = fake.uuid4() + BackgroundJobService.create_job( + task_id=task_id, + task_name="test.task", + ) + result = BackgroundJobService.get_by_task_id_or_none(task_id) + self.assertIsNotNone(result) + + def test_get_user_jobs(self): + """Тест получения задач пользователя.""" + user_id = 123 + # Создаём несколько задач + for i in range(3): + BackgroundJobService.create_job( + task_id=f"task-{user_id}-{i}", + task_name="test.task", + user_id=user_id, + ) + # И одну задачу другого пользователя + BackgroundJobService.create_job( + task_id="task-other-user", + task_name="test.task", + user_id=999, + ) + + jobs = BackgroundJobService.get_user_jobs(user_id) + self.assertEqual(len(jobs), 3) + + def test_get_user_jobs_with_status_filter(self): + """Тест фильтрации по статусу.""" + user_id = 456 + job1 = BackgroundJobService.create_job( + task_id="task-pending", + task_name="test.task", + user_id=user_id, + ) + job2 = BackgroundJobService.create_job( + task_id="task-success", + task_name="test.task", + user_id=user_id, + ) + job2.complete() + + pending_jobs = BackgroundJobService.get_user_jobs( + user_id, status=JobStatus.PENDING + ) + self.assertEqual(len(pending_jobs), 1) + + success_jobs = BackgroundJobService.get_user_jobs( + user_id, status=JobStatus.SUCCESS + ) + self.assertEqual(len(success_jobs), 1) + + def test_get_active_jobs(self): + 
"""Тест получения активных задач.""" + # Создаём задачи с разными статусами + job_pending = BackgroundJobService.create_job( + task_id="job-active-pending", + task_name="test.task", + ) + job_started = BackgroundJobService.create_job( + task_id="job-active-started", + task_name="test.task", + ) + job_started.mark_started() + + job_success = BackgroundJobService.create_job( + task_id="job-active-success", + task_name="test.task", + ) + job_success.complete() + + active_jobs = list(BackgroundJobService.get_active_jobs()) + active_task_ids = [j.task_id for j in active_jobs] + + self.assertIn("job-active-pending", active_task_ids) + self.assertIn("job-active-started", active_task_ids) + self.assertNotIn("job-active-success", active_task_ids) diff --git a/tests/apps/core/test_bulk_operations.py b/tests/apps/core/test_bulk_operations.py new file mode 100644 index 0000000..5656d0e --- /dev/null +++ b/tests/apps/core/test_bulk_operations.py @@ -0,0 +1,186 @@ +"""Тесты для BulkOperationsMixin и QueryOptimizerMixin.""" + +from apps.core.models import BackgroundJob +from apps.core.services import ( + BulkOperationsMixin, + QueryOptimizerMixin, +) +from django.test import TestCase +from faker import Faker + +fake = Faker() + + +class BulkOperationsMixinTest(TestCase): + """Тесты для BulkOperationsMixin.""" + + def test_mixin_has_bulk_create_chunked(self): + """Проверка наличия метода bulk_create_chunked.""" + self.assertTrue(hasattr(BulkOperationsMixin, "bulk_create_chunked")) + + def test_mixin_has_bulk_update_or_create(self): + """Проверка наличия метода bulk_update_or_create.""" + self.assertTrue(hasattr(BulkOperationsMixin, "bulk_update_or_create")) + + def test_mixin_has_bulk_delete(self): + """Проверка наличия метода bulk_delete.""" + self.assertTrue(hasattr(BulkOperationsMixin, "bulk_delete")) + + def test_mixin_has_bulk_update_fields(self): + """Проверка наличия метода bulk_update_fields.""" + self.assertTrue(hasattr(BulkOperationsMixin, "bulk_update_fields")) + + 
class QueryOptimizerMixinTest(TestCase):
    """Smoke tests: QueryOptimizerMixin exposes the expected API surface."""

    def test_mixin_has_get_optimized_queryset(self):
        """Mixin defines get_optimized_queryset."""
        self.assertTrue(hasattr(QueryOptimizerMixin, "get_optimized_queryset"))

    def test_mixin_has_apply_optimizations(self):
        """Mixin defines apply_optimizations."""
        self.assertTrue(hasattr(QueryOptimizerMixin, "apply_optimizations"))

    def test_mixin_has_get_list_queryset(self):
        """Mixin defines get_list_queryset."""
        self.assertTrue(hasattr(QueryOptimizerMixin, "get_list_queryset"))

    def test_mixin_has_get_detail_queryset(self):
        """Mixin defines get_detail_queryset."""
        self.assertTrue(hasattr(QueryOptimizerMixin, "get_detail_queryset"))

    def test_mixin_has_with_counts(self):
        """Mixin defines with_counts."""
        self.assertTrue(hasattr(QueryOptimizerMixin, "with_counts"))

    def test_mixin_has_with_exists(self):
        """Mixin defines with_exists."""
        self.assertTrue(hasattr(QueryOptimizerMixin, "with_exists"))

    def test_mixin_default_attributes(self):
        """All optimization hints default to empty lists."""
        self.assertEqual(QueryOptimizerMixin.select_related, [])
        self.assertEqual(QueryOptimizerMixin.prefetch_related, [])
        self.assertEqual(QueryOptimizerMixin.default_only, [])
        self.assertEqual(QueryOptimizerMixin.default_defer, [])


class BulkOperationsIntegrationTest(TestCase):
    """Integration tests for bulk operations, using BackgroundJob as the model."""

    def test_bulk_create_chunked(self):
        """bulk_create_chunked() inserts every row when splitting into chunks."""
        # Test-local service wired to BackgroundJob via BulkOperationsMixin.
        class TestService(BulkOperationsMixin):
            model = BackgroundJob

        # Create 10 objects in chunks of 3.
        jobs = [
            BackgroundJob(
                task_id=f"bulk-chunk-{i}",
                task_name="test.bulk.task",
            )
            for i in range(10)
        ]

        count = TestService.bulk_create_chunked(jobs, chunk_size=3)
        self.assertEqual(count, 10)

        # All ten rows must exist.
        self.assertEqual(BackgroundJob.objects.filter(task_name="test.bulk.task").count(), 10)

    def test_bulk_delete(self):
        """bulk_delete() removes exactly the requested primary keys."""
        class TestService(BulkOperationsMixin):
            model = BackgroundJob

        # Create a few jobs.
        jobs = []
        for i in range(5):
            job = BackgroundJob.objects.create(
                task_id=f"bulk-delete-{i}",
                task_name="test.delete.task",
            )
            jobs.append(job)

        # Delete the first three.
        ids_to_delete = [j.pk for j in jobs[:3]]
        deleted = TestService.bulk_delete(ids_to_delete)

        self.assertEqual(deleted, 3)
        self.assertEqual(BackgroundJob.objects.filter(task_name="test.delete.task").count(), 2)

    def test_bulk_update_fields(self):
        """bulk_update_fields() applies the updates to every matching row."""
        class TestService(BulkOperationsMixin):
            model = BackgroundJob

        # Create jobs.
        for i in range(5):
            BackgroundJob.objects.create(
                task_id=f"bulk-update-{i}",
                task_name="test.update.task",
                progress=0,
            )

        # Update every job of this task type.
        updated = TestService.bulk_update_fields(
            filters={"task_name": "test.update.task"},
            updates={"progress": 50},
        )

        self.assertEqual(updated, 5)

        # Verify the new values were written.
        for job in BackgroundJob.objects.filter(task_name="test.update.task"):
            self.assertEqual(job.progress, 50)

    def test_bulk_update_or_create_creates(self):
        """Upsert path: rows that do not exist yet are created."""
        class TestService(BulkOperationsMixin):
            model = BackgroundJob

        items = [
            {"task_id": "upsert-new-1", "task_name": "upsert.task", "progress": 10},
            {"task_id": "upsert-new-2", "task_name": "upsert.task", "progress": 20},
        ]

        created, updated = TestService.bulk_update_or_create(
            items=items,
            unique_fields=["task_id"],
            update_fields=["task_name", "progress"],
        )

        self.assertEqual(created, 2)
        self.assertEqual(updated, 0)

    def test_bulk_update_or_create_updates(self):
        """Upsert path: existing rows are updated, not duplicated."""
        class TestService(BulkOperationsMixin):
            model = BackgroundJob

        # Create a pre-existing row.
        BackgroundJob.objects.create(
            task_id="upsert-existing",
            task_name="old.task",
            progress=0,
        )

        items = [
            {"task_id": "upsert-existing", "task_name": "new.task", "progress": 100},
        ]

        created, updated = TestService.bulk_update_or_create(
            items=items,
            unique_fields=["task_id"],
            update_fields=["task_name", "progress"],
        )

        self.assertEqual(created, 0)
        self.assertEqual(updated, 1)

        # Verify the update landed.
        job = BackgroundJob.objects.get(task_id="upsert-existing")
        self.assertEqual(job.task_name, "new.task")
        self.assertEqual(job.progress, 100)


# --- tests/apps/core/test_cache.py ---
"""Tests for core cache utilities"""

from apps.core.cache import (
    CacheManager,
    _build_cache_key,
    cache_method,
    cache_result,
)
from django.core.cache import cache
from django.test import TestCase


class CacheResultDecoratorTest(TestCase):
    """Tests for @cache_result decorator"""

    def setUp(self):
        # Start each test from an empty cache and a fresh invocation counter.
        cache.clear()
        self.call_count = 0

    def test_result_is_cached(self):
        """Test that function result is cached"""

        @cache_result(timeout=60, key_prefix="test")
        def expensive_function(x):
            self.call_count += 1
            return x * 2

        # First call - should execute
        result1 = expensive_function(5)
        self.assertEqual(result1, 10)
        self.assertEqual(self.call_count, 1)

        # Second call - should return cached result
        result2 = expensive_function(5)
        self.assertEqual(result2, 10)
        self.assertEqual(self.call_count, 1)  # Still 1, not called again

    def test_different_args_not_cached(self):
        """Test that different arguments create different cache entries"""

        @cache_result(timeout=60, key_prefix="test")
        def expensive_function(x):
            self.call_count += 1
            return x * 2

        result1 = expensive_function(5)
        result2 = expensive_function(10)

        self.assertEqual(result1, 10)
        self.assertEqual(result2, 20)
        self.assertEqual(self.call_count, 2)

    def test_kwargs_included_in_cache_key(self):
        """Test that kwargs are included in cache key"""

        @cache_result(timeout=60, key_prefix="test")
        def expensive_function(x, multiplier=2):
            self.call_count += 1
            return x * multiplier

        result1 = expensive_function(5, multiplier=2)
        result2 = expensive_function(5, multiplier=3)

        self.assertEqual(result1, 10)
        self.assertEqual(result2, 15)
        self.assertEqual(self.call_count, 2)


class CacheMethodDecoratorTest(TestCase):
    """Tests for @cache_method decorator"""

    def setUp(self):
        cache.clear()

    def test_classmethod_caching(self):
        """Test caching works with classmethod"""
        # Mutable dict so the inner classmethod can bump the counter.
        call_count = {"value": 0}

        class MyService:
            @classmethod
            @cache_method(timeout=60, key_prefix="service")
            def get_data(cls, item_id):
                call_count["value"] += 1
                return {"id": item_id, "data": "test"}

        # First call
        result1 = MyService.get_data(1)
        self.assertEqual(result1["id"], 1)
        self.assertEqual(call_count["value"], 1)

        # Second call - should be cached
        result2 = MyService.get_data(1)
        self.assertEqual(result2["id"], 1)
        self.assertEqual(call_count["value"], 1)

        # Different argument
        result3 = MyService.get_data(2)
        self.assertEqual(result3["id"], 2)
        self.assertEqual(call_count["value"], 2)


class CacheManagerTest(TestCase):
    """Tests for CacheManager"""

    def setUp(self):
        cache.clear()
        self.manager = CacheManager("test_prefix")

    def test_set_and_get(self):
        """Test basic set and get operations"""
        self.manager.set("key1", "value1", timeout=60)
        result = self.manager.get("key1")
        self.assertEqual(result, "value1")

    def test_get_default(self):
        """Test get returns default for missing key"""
        result = self.manager.get("nonexistent", default="default_value")
        self.assertEqual(result, "default_value")

    def test_delete(self):
        """Test delete operation"""
        self.manager.set("key1", "value1")
        self.manager.delete("key1")
        result = self.manager.get("key1")
        self.assertIsNone(result)

    def test_get_or_set(self):
        """Test get_or_set operation"""
        call_count = {"value": 0}

        def compute_value():
            call_count["value"] += 1
            return "computed"

        # First call - should compute
        result1 = self.manager.get_or_set("key1", compute_value)
        self.assertEqual(result1, "computed")
        self.assertEqual(call_count["value"], 1)

        # Second call - should return cached
        result2 = self.manager.get_or_set("key1", compute_value)
        self.assertEqual(result2, "computed")
        self.assertEqual(call_count["value"], 1)

    def test_prefix_applied(self):
        """Test that prefix is applied to keys"""
        self.manager.set("mykey", "myvalue")

        # Direct cache access should use prefixed key
        direct_result = cache.get("test_prefix:mykey")
        self.assertEqual(direct_result, "myvalue")


class BuildCacheKeyTest(TestCase):
    """Tests for _build_cache_key function"""

    def test_key_includes_function_name(self):
        """Test cache key includes function name"""

        def my_function():
            pass

        key = _build_cache_key(my_function, "", (), {})
        self.assertIn("my_function", key)

    def test_key_includes_prefix(self):
        """Test cache key includes prefix"""

        def my_function():
            pass

        key = _build_cache_key(my_function, "myprefix", (), {})
        self.assertTrue(key.startswith("myprefix:"))

    def test_different_args_different_keys(self):
        """Test different arguments produce different keys"""

        def my_function():
            pass

        key1 = _build_cache_key(my_function, "", (1, 2), {})
        key2 = _build_cache_key(my_function, "", (1, 3), {})
        self.assertNotEqual(key1, key2)

    def test_different_kwargs_different_keys(self):
        """Test different kwargs produce different keys"""

        def my_function():
            pass

        key1 = _build_cache_key(my_function, "", (), {"a": 1})
        key2 = _build_cache_key(my_function, "", (), {"a": 2})
        self.assertNotEqual(key1, key2)
# --- tests/apps/core/test_exceptions.py ---
"""Tests for core exceptions and exception handler"""

from apps.core.exceptions import (
    AuthenticationError,
    BadRequestError,
    BaseAPIException,
    BusinessLogicError,
    ConflictError,
    DuplicateError,
    InternalError,
    InvalidStateError,
    NotFoundError,
    PermissionDeniedError,
    QuotaExceededError,
    RateLimitError,
    ServiceUnavailableError,
    ValidationError,
)
from django.test import TestCase


class BaseAPIExceptionTest(TestCase):
    """Tests for BaseAPIException"""

    def test_default_values(self):
        """Test exception with default values"""
        exc = BaseAPIException()
        self.assertEqual(exc.message, "An error occurred")
        self.assertEqual(exc.code, "error")
        self.assertEqual(exc.status_code, 400)
        self.assertIsNone(exc.details)

    def test_custom_values(self):
        """Test exception with custom values"""
        exc = BaseAPIException(
            message="Custom message",
            code="custom_code",
            details={"field": "value"},
        )
        self.assertEqual(exc.message, "Custom message")
        self.assertEqual(exc.code, "custom_code")
        self.assertEqual(exc.details, {"field": "value"})

    def test_to_dict(self):
        """Test conversion to dictionary"""
        exc = BaseAPIException(
            message="Test message",
            code="test_code",
            details={"key": "value"},
        )
        result = exc.to_dict()

        self.assertEqual(result["message"], "Test message")
        self.assertEqual(result["code"], "test_code")
        self.assertEqual(result["details"], {"key": "value"})

    def test_to_dict_without_details(self):
        """Test to_dict without details"""
        exc = BaseAPIException(message="Test")
        result = exc.to_dict()

        # No details were supplied, so the key must be absent entirely.
        self.assertNotIn("details", result)


class ClientErrorExceptionsTest(TestCase):
    """Tests for client error exceptions (4xx)"""

    def test_validation_error(self):
        """Test ValidationError defaults"""
        exc = ValidationError()
        self.assertEqual(exc.status_code, 400)
        self.assertEqual(exc.code, "validation_error")

    def test_bad_request_error(self):
        """Test BadRequestError defaults"""
        exc = BadRequestError()
        self.assertEqual(exc.status_code, 400)
        self.assertEqual(exc.code, "bad_request")

    def test_authentication_error(self):
        """Test AuthenticationError defaults"""
        exc = AuthenticationError()
        self.assertEqual(exc.status_code, 401)
        self.assertEqual(exc.code, "authentication_error")

    def test_permission_denied_error(self):
        """Test PermissionDeniedError defaults"""
        exc = PermissionDeniedError()
        self.assertEqual(exc.status_code, 403)
        self.assertEqual(exc.code, "permission_denied")

    def test_not_found_error(self):
        """Test NotFoundError defaults"""
        exc = NotFoundError()
        self.assertEqual(exc.status_code, 404)
        self.assertEqual(exc.code, "not_found")

    def test_conflict_error(self):
        """Test ConflictError defaults"""
        exc = ConflictError()
        self.assertEqual(exc.status_code, 409)
        self.assertEqual(exc.code, "conflict")

    def test_rate_limit_error(self):
        """Test RateLimitError defaults"""
        exc = RateLimitError()
        self.assertEqual(exc.status_code, 429)
        self.assertEqual(exc.code, "rate_limit_exceeded")


class ServerErrorExceptionsTest(TestCase):
    """Tests for server error exceptions (5xx)"""

    def test_internal_error(self):
        """Test InternalError defaults"""
        exc = InternalError()
        self.assertEqual(exc.status_code, 500)
        self.assertEqual(exc.code, "internal_error")

    def test_service_unavailable_error(self):
        """Test ServiceUnavailableError defaults"""
        exc = ServiceUnavailableError()
        self.assertEqual(exc.status_code, 503)
        self.assertEqual(exc.code, "service_unavailable")


class BusinessLogicExceptionsTest(TestCase):
    """Tests for business logic exceptions"""

    def test_business_logic_error(self):
        """Test BusinessLogicError defaults"""
        exc = BusinessLogicError()
        self.assertEqual(exc.status_code, 400)
        self.assertEqual(exc.code, "business_error")

    def test_invalid_state_error(self):
        """Test InvalidStateError defaults"""
        exc = InvalidStateError()
        self.assertEqual(exc.status_code, 400)
        self.assertEqual(exc.code, "invalid_state")

    def test_duplicate_error(self):
        """Test DuplicateError defaults"""
        exc = DuplicateError()
        self.assertEqual(exc.status_code, 409)
        self.assertEqual(exc.code, "duplicate")

    def test_quota_exceeded_error(self):
        """Test QuotaExceededError defaults"""
        exc = QuotaExceededError()
        self.assertEqual(exc.status_code, 400)
        self.assertEqual(exc.code, "quota_exceeded")


# --- tests/apps/core/test_filters.py ---
"""Tests for core filter utilities"""

from apps.core.filters import (
    BaseFilterSet,
    FilterMixin,
    StandardOrderingFilter,
    StandardSearchFilter,
    get_filter_backends,
)
from django.test import TestCase
from django_filters import rest_framework as filters
from rest_framework.filters import OrderingFilter, SearchFilter


class BaseFilterSetTest(TestCase):
    """Tests for BaseFilterSet"""

    def test_has_created_at_filters(self):
        """Test BaseFilterSet has created_at filters"""
        filter_fields = BaseFilterSet.declared_filters

        self.assertIn("created_at_after", filter_fields)
        self.assertIn("created_at_before", filter_fields)

    def test_has_updated_at_filters(self):
        """Test BaseFilterSet has updated_at filters"""
        filter_fields = BaseFilterSet.declared_filters

        self.assertIn("updated_at_after", filter_fields)
        self.assertIn("updated_at_before", filter_fields)

    def test_created_at_after_is_datetime_filter(self):
        """Test created_at_after is DateTimeFilter"""
        filter_field = BaseFilterSet.declared_filters["created_at_after"]

        self.assertIsInstance(filter_field, filters.DateTimeFilter)
        self.assertEqual(filter_field.field_name, "created_at")
        self.assertEqual(filter_field.lookup_expr, "gte")

    def test_created_at_before_is_datetime_filter(self):
        """Test created_at_before is DateTimeFilter"""
        filter_field = BaseFilterSet.declared_filters["created_at_before"]

        self.assertIsInstance(filter_field, filters.DateTimeFilter)
        self.assertEqual(filter_field.field_name, "created_at")
        self.assertEqual(filter_field.lookup_expr, "lte")


class StandardSearchFilterTest(TestCase):
    """Tests for StandardSearchFilter"""

    def test_inherits_from_search_filter(self):
        """Test StandardSearchFilter inherits from SearchFilter"""
        self.assertTrue(issubclass(StandardSearchFilter, SearchFilter))

    def test_search_param(self):
        """Test search parameter name"""
        filter_instance = StandardSearchFilter()
        self.assertEqual(filter_instance.search_param, "search")

    def test_search_title(self):
        """Test search title is in Russian"""
        filter_instance = StandardSearchFilter()
        self.assertEqual(filter_instance.search_title, "Поиск")


class StandardOrderingFilterTest(TestCase):
    """Tests for StandardOrderingFilter"""

    def test_inherits_from_ordering_filter(self):
        """Test StandardOrderingFilter inherits from OrderingFilter"""
        self.assertTrue(issubclass(StandardOrderingFilter, OrderingFilter))

    def test_ordering_param(self):
        """Test ordering parameter name"""
        filter_instance = StandardOrderingFilter()
        self.assertEqual(filter_instance.ordering_param, "ordering")

    def test_ordering_title(self):
        """Test ordering title is in Russian"""
        filter_instance = StandardOrderingFilter()
        self.assertEqual(filter_instance.ordering_title, "Сортировка")


class GetFilterBackendsTest(TestCase):
    """Tests for get_filter_backends function"""

    def test_returns_list(self):
        """Test function returns a list"""
        backends = get_filter_backends()
        self.assertIsInstance(backends, list)

    def test_contains_django_filter_backend(self):
        """Test list contains DjangoFilterBackend"""
        backends = get_filter_backends()
        self.assertIn(filters.DjangoFilterBackend, backends)

    def test_contains_search_filter(self):
        """Test list contains StandardSearchFilter"""
        backends = get_filter_backends()
        self.assertIn(StandardSearchFilter, backends)

    def test_contains_ordering_filter(self):
        """Test list contains StandardOrderingFilter"""
        backends = get_filter_backends()
        self.assertIn(StandardOrderingFilter, backends)


class FilterMixinTest(TestCase):
    """Tests for FilterMixin"""

    def test_has_filter_backends(self):
        """Test FilterMixin has filter_backends"""
        self.assertTrue(hasattr(FilterMixin, "filter_backends"))
        self.assertIsInstance(FilterMixin.filter_backends, list)

    def test_has_default_ordering(self):
        """Test FilterMixin has default ordering"""
        self.assertTrue(hasattr(FilterMixin, "ordering"))
        self.assertEqual(FilterMixin.ordering, ["-created_at"])

    def test_filter_backends_contains_required_backends(self):
        """Test filter_backends contains all required backends"""
        backends = FilterMixin.filter_backends

        self.assertIn(filters.DjangoFilterBackend, backends)
        self.assertIn(StandardSearchFilter, backends)
        self.assertIn(StandardOrderingFilter, backends)


# --- tests/apps/core/test_logging.py ---
"""Tests for core logging utilities"""

import json
import logging
from io import StringIO

from apps.core.logging import (
    ContextLogger,
    JSONFormatter,
    get_json_logging_config,
)
from django.test import TestCase


class JSONFormatterTest(TestCase):
    """Tests for JSONFormatter"""

    def setUp(self):
        self.formatter = JSONFormatter()
        self.logger = logging.getLogger("test_json")
        self.logger.setLevel(logging.DEBUG)

        # Remove existing handlers
        self.logger.handlers = []

        # Add handler with
JSONFormatter + self.stream = StringIO() + handler = logging.StreamHandler(self.stream) + handler.setFormatter(self.formatter) + self.logger.addHandler(handler) + + def test_output_is_valid_json(self): + """Test that output is valid JSON""" + self.logger.info("Test message") + output = self.stream.getvalue() + + # Should not raise + parsed = json.loads(output) + self.assertIsInstance(parsed, dict) + + def test_contains_required_fields(self): + """Test that output contains required fields""" + self.logger.info("Test message") + output = self.stream.getvalue() + parsed = json.loads(output) + + self.assertIn("timestamp", parsed) + self.assertIn("level", parsed) + self.assertIn("logger", parsed) + self.assertIn("message", parsed) + + def test_level_is_correct(self): + """Test that log level is correct""" + self.logger.warning("Warning message") + output = self.stream.getvalue() + parsed = json.loads(output) + + self.assertEqual(parsed["level"], "WARNING") + + def test_message_is_correct(self): + """Test that message is correct""" + self.logger.info("My test message") + output = self.stream.getvalue() + parsed = json.loads(output) + + self.assertEqual(parsed["message"], "My test message") + + def test_extra_fields_included(self): + """Test that extra fields are included""" + self.logger.info("Test message", extra={"user_id": 42, "action": "login"}) + output = self.stream.getvalue() + parsed = json.loads(output) + + self.assertIn("extra", parsed) + self.assertEqual(parsed["extra"]["user_id"], 42) + self.assertEqual(parsed["extra"]["action"], "login") + + def test_exception_info_included(self): + """Test that exception info is included""" + try: + raise ValueError("Test error") + except ValueError: + self.logger.exception("An error occurred") + + output = self.stream.getvalue() + parsed = json.loads(output) + + self.assertIn("exception", parsed) + self.assertEqual(parsed["exception"]["type"], "ValueError") + self.assertIn("Test error", parsed["exception"]["message"]) + + 
+class ContextLoggerTest(TestCase): + """Tests for ContextLogger""" + + def setUp(self): + self.context_logger = ContextLogger("test_context") + + def test_set_context(self): + """Test context is stored""" + self.context_logger.set_context(user_id=42, action="test") + + self.assertEqual(self.context_logger._context["user_id"], 42) + self.assertEqual(self.context_logger._context["action"], "test") + + def test_clear_context(self): + """Test context is cleared""" + self.context_logger.set_context(user_id=42) + self.context_logger.clear_context() + + self.assertEqual(self.context_logger._context, {}) + + def test_context_updated_not_replaced(self): + """Test that set_context updates rather than replaces""" + self.context_logger.set_context(user_id=42) + self.context_logger.set_context(action="test") + + self.assertEqual(self.context_logger._context["user_id"], 42) + self.assertEqual(self.context_logger._context["action"], "test") + + +class GetJsonLoggingConfigTest(TestCase): + """Tests for get_json_logging_config function""" + + def test_returns_dict(self): + """Test function returns a dictionary""" + config = get_json_logging_config() + self.assertIsInstance(config, dict) + + def test_has_required_keys(self): + """Test config has required keys""" + config = get_json_logging_config() + + self.assertIn("version", config) + self.assertIn("formatters", config) + self.assertIn("handlers", config) + self.assertIn("loggers", config) + + def test_json_formatter_configured(self): + """Test JSON formatter is configured""" + config = get_json_logging_config() + + self.assertIn("json", config["formatters"]) + self.assertEqual( + config["formatters"]["json"]["()"], + "apps.core.logging.JSONFormatter", + ) + + def test_log_level_applied(self): + """Test log level is applied""" + config = get_json_logging_config(log_level="DEBUG") + + self.assertEqual(config["root"]["level"], "DEBUG") + + def test_file_handler_added_when_path_provided(self): + """Test file handler is added when 
path is provided""" + config = get_json_logging_config(log_file="/var/log/test.log") + + self.assertIn("file", config["handlers"]) + self.assertEqual( + config["handlers"]["file"]["filename"], + "/var/log/test.log", + ) diff --git a/tests/apps/core/test_management_commands.py b/tests/apps/core/test_management_commands.py new file mode 100644 index 0000000..9288da3 --- /dev/null +++ b/tests/apps/core/test_management_commands.py @@ -0,0 +1,92 @@ +"""Тесты для базового класса management commands.""" + +from io import StringIO + +from apps.core.management.commands.base import BaseAppCommand +from django.core.management.base import CommandError +from django.test import TestCase + + +class TestCommand(BaseAppCommand): + """Тестовая команда для проверки BaseAppCommand.""" + + help = "Test command" + + def execute_command(self, *args, **options): + if options.get("fail"): + raise ValueError("Test error") + return "Success" + + +class BaseAppCommandTest(TestCase): + """Тесты для BaseAppCommand.""" + + def test_base_command_has_dry_run_argument(self): + """Проверка наличия аргумента --dry-run.""" + cmd = BaseAppCommand() + parser = cmd.create_parser("manage.py", "test") + # Парсер должен принимать --dry-run + args = parser.parse_args(["--dry-run"]) + self.assertTrue(args.dry_run) + + def test_base_command_has_silent_argument(self): + """Проверка наличия аргумента --silent.""" + cmd = BaseAppCommand() + parser = cmd.create_parser("manage.py", "test") + args = parser.parse_args(["--silent"]) + self.assertTrue(args.silent) + + def test_log_methods_exist(self): + """Проверка наличия методов логирования.""" + cmd = BaseAppCommand() + cmd.stdout = StringIO() + cmd.stderr = StringIO() + cmd.silent = False + cmd.verbosity = 2 + + # Методы должны существовать и не падать + cmd.log_info("Test info") + cmd.log_success("Test success") + cmd.log_warning("Test warning") + cmd.log_error("Test error") + cmd.log_debug("Test debug") + + def test_progress_iter(self): + """Тест итератора с 
прогрессом.""" + cmd = BaseAppCommand() + cmd.stdout = StringIO() + cmd.silent = True # Без вывода + + items = list(range(10)) + result = list(cmd.progress_iter(items, "Processing")) + + self.assertEqual(result, items) + + def test_confirm_in_dry_run(self): + """Тест подтверждения в dry-run режиме.""" + cmd = BaseAppCommand() + cmd.stdout = StringIO() + cmd.dry_run = True + cmd.silent = False + + # В dry-run confirm всегда возвращает True + result = cmd.confirm("Continue?") + self.assertTrue(result) + + def test_abort_raises_command_error(self): + """Тест прерывания команды.""" + cmd = BaseAppCommand() + + with self.assertRaises(CommandError): + cmd.abort("Test abort") + + def test_timed_operation(self): + """Тест контекстного менеджера для измерения времени.""" + cmd = BaseAppCommand() + cmd.stdout = StringIO() + cmd.verbosity = 2 + + with cmd.timed_operation("Test operation"): + pass # Операция + + # Не должно падать diff --git a/tests/apps/core/test_middleware.py b/tests/apps/core/test_middleware.py new file mode 100644 index 0000000..e6ad34b --- /dev/null +++ b/tests/apps/core/test_middleware.py @@ -0,0 +1,34 @@ +"""Tests for core middleware""" + +from django.urls import reverse +from rest_framework.test import APITestCase + + +class RequestIDMiddlewareTest(APITestCase): + """Tests for RequestIDMiddleware""" + + def test_request_id_generated(self): + """Test that request ID is generated and returned in response header""" + url = reverse("core:health") + response = self.client.get(url) + + self.assertIn("X-Request-ID", response) + self.assertIsNotNone(response["X-Request-ID"]) + # UUID format check (36 chars with hyphens) + self.assertEqual(len(response["X-Request-ID"]), 36) + + def test_request_id_passed_through(self): + """Test that provided X-Request-ID is passed through""" + url = reverse("core:health") + custom_id = "custom-request-id-12345" + response = self.client.get(url, HTTP_X_REQUEST_ID=custom_id) + + self.assertEqual(response["X-Request-ID"], 
custom_id) + + def test_different_requests_different_ids(self): + """Test that different requests get different IDs""" + url = reverse("core:health") + response1 = self.client.get(url) + response2 = self.client.get(url) + + self.assertNotEqual(response1["X-Request-ID"], response2["X-Request-ID"]) diff --git a/tests/apps/core/test_mixins.py b/tests/apps/core/test_mixins.py new file mode 100644 index 0000000..f92d2b7 --- /dev/null +++ b/tests/apps/core/test_mixins.py @@ -0,0 +1,110 @@ +"""Тесты для Model Mixins.""" + +from apps.core.mixins import ( + OrderableMixin, + SoftDeleteMixin, + StatusMixin, +) +from django.test import TestCase + + +class TimestampMixinTest(TestCase): + """Тесты для TimestampMixin.""" + + def test_created_at_auto_set(self): + """Проверка автоматической установки created_at.""" + # Используем BackgroundJob как пример модели с TimestampMixin + from apps.core.models import BackgroundJob + + job = BackgroundJob.objects.create( + task_id="test-task-1", + task_name="test.task", + ) + self.assertIsNotNone(job.created_at) + self.assertIsNotNone(job.updated_at) + + def test_updated_at_auto_update(self): + """Проверка автоматического обновления updated_at.""" + from apps.core.models import BackgroundJob + + job = BackgroundJob.objects.create( + task_id="test-task-2", + task_name="test.task", + ) + original_updated = job.updated_at + + # Обновляем запись + job.progress = 50 + job.save() + job.refresh_from_db() + + self.assertGreaterEqual(job.updated_at, original_updated) + + +class UUIDPrimaryKeyMixinTest(TestCase): + """Тесты для UUIDPrimaryKeyMixin.""" + + def test_uuid_auto_generated(self): + """Проверка автоматической генерации UUID.""" + from apps.core.models import BackgroundJob + + job = BackgroundJob.objects.create( + task_id="test-task-3", + task_name="test.task", + ) + self.assertIsNotNone(job.id) + # Проверяем что ID похож на UUID (строка 32+ символа с дефисами) + self.assertEqual(len(str(job.id)), 36) + + def test_uuid_unique(self): + 
"""Проверка уникальности UUID.""" + from apps.core.models import BackgroundJob + + job1 = BackgroundJob.objects.create( + task_id="test-task-4a", + task_name="test.task", + ) + job2 = BackgroundJob.objects.create( + task_id="test-task-4b", + task_name="test.task", + ) + self.assertNotEqual(job1.id, job2.id) + + +class SoftDeleteMixinTest(TestCase): + """Тесты для SoftDeleteMixin.""" + + def test_soft_delete_mixin_fields(self): + """Проверка наличия полей is_deleted и deleted_at.""" + # Проверяем что поля определены в миксине + field_names = [f.name for f in SoftDeleteMixin._meta.get_fields()] + self.assertIn("is_deleted", field_names) + self.assertIn("deleted_at", field_names) + + def test_soft_delete_queryset_methods(self): + """Проверка методов SoftDeleteQuerySet.""" + from apps.core.mixins import SoftDeleteQuerySet + + # Проверяем что методы определены + self.assertTrue(hasattr(SoftDeleteQuerySet, "alive")) + self.assertTrue(hasattr(SoftDeleteQuerySet, "dead")) + self.assertTrue(hasattr(SoftDeleteQuerySet, "hard_delete")) + + +class StatusMixinTest(TestCase): + """Тесты для StatusMixin.""" + + def test_status_choices(self): + """Проверка наличия статусов.""" + self.assertEqual(StatusMixin.Status.DRAFT, "draft") + self.assertEqual(StatusMixin.Status.ACTIVE, "active") + self.assertEqual(StatusMixin.Status.INACTIVE, "inactive") + self.assertEqual(StatusMixin.Status.ARCHIVED, "archived") + + +class OrderableMixinTest(TestCase): + """Тесты для OrderableMixin.""" + + def test_orderable_mixin_has_order_field(self): + """Проверка наличия поля order.""" + self.assertTrue(hasattr(OrderableMixin, "order")) diff --git a/tests/apps/core/test_openapi.py b/tests/apps/core/test_openapi.py new file mode 100644 index 0000000..9bc164f --- /dev/null +++ b/tests/apps/core/test_openapi.py @@ -0,0 +1,133 @@ +"""Tests for core OpenAPI utilities""" + +from apps.core.openapi import ( + CommonParameters, + CommonResponses, + _get_status_description, + api_docs, + paginated_response, +) 
+from django.test import TestCase +from drf_yasg import openapi +from rest_framework import serializers + + +class DummySerializer(serializers.Serializer): + """Dummy serializer for testing""" + + id = serializers.IntegerField() + name = serializers.CharField() + + +class ApiDocsDecoratorTest(TestCase): + """Tests for @api_docs decorator""" + + def test_decorator_returns_function(self): + """Test decorator returns wrapped function""" + + @api_docs(summary="Test endpoint") + def my_view(request): + pass + + self.assertTrue(callable(my_view)) + + def test_decorator_preserves_function_name(self): + """Test decorator preserves original function name""" + + @api_docs(summary="Test endpoint") + def my_view(request): + pass + + self.assertEqual(my_view.__name__, "my_view") + + +class GetStatusDescriptionTest(TestCase): + """Tests for _get_status_description function""" + + def test_known_status_codes(self): + """Test known status codes return Russian descriptions""" + self.assertEqual(_get_status_description(200), "Успешный запрос") + self.assertEqual(_get_status_description(201), "Ресурс создан") + self.assertEqual(_get_status_description(400), "Некорректный запрос") + self.assertEqual(_get_status_description(401), "Не авторизован") + self.assertEqual(_get_status_description(403), "Доступ запрещён") + self.assertEqual(_get_status_description(404), "Ресурс не найден") + self.assertEqual(_get_status_description(500), "Внутренняя ошибка сервера") + + def test_unknown_status_code(self): + """Test unknown status code returns generic description""" + result = _get_status_description(418) + self.assertEqual(result, "HTTP 418") + + +class CommonResponsesTest(TestCase): + """Tests for CommonResponses class""" + + def test_success_response_type(self): + """Test SUCCESS is an openapi.Response""" + self.assertIsInstance(CommonResponses.SUCCESS, openapi.Response) + + def test_created_response_type(self): + """Test CREATED is an openapi.Response""" + 
self.assertIsInstance(CommonResponses.CREATED, openapi.Response) + + def test_not_found_response_type(self): + """Test NOT_FOUND is an openapi.Response""" + self.assertIsInstance(CommonResponses.NOT_FOUND, openapi.Response) + + def test_unauthorized_response_type(self): + """Test UNAUTHORIZED is an openapi.Response""" + self.assertIsInstance(CommonResponses.UNAUTHORIZED, openapi.Response) + + def test_validation_error_response_type(self): + """Test VALIDATION_ERROR is an openapi.Response""" + self.assertIsInstance(CommonResponses.VALIDATION_ERROR, openapi.Response) + + def test_server_error_response_type(self): + """Test SERVER_ERROR is an openapi.Response""" + self.assertIsInstance(CommonResponses.SERVER_ERROR, openapi.Response) + + +class CommonParametersTest(TestCase): + """Tests for CommonParameters class""" + + def test_page_parameter(self): + """Test PAGE parameter configuration""" + self.assertEqual(CommonParameters.PAGE.name, "page") + self.assertEqual(CommonParameters.PAGE.in_, openapi.IN_QUERY) + self.assertEqual(CommonParameters.PAGE.type, openapi.TYPE_INTEGER) + + def test_page_size_parameter(self): + """Test PAGE_SIZE parameter configuration""" + self.assertEqual(CommonParameters.PAGE_SIZE.name, "page_size") + self.assertEqual(CommonParameters.PAGE_SIZE.in_, openapi.IN_QUERY) + + def test_search_parameter(self): + """Test SEARCH parameter configuration""" + self.assertEqual(CommonParameters.SEARCH.name, "search") + self.assertEqual(CommonParameters.SEARCH.type, openapi.TYPE_STRING) + + def test_ordering_parameter(self): + """Test ORDERING parameter configuration""" + self.assertEqual(CommonParameters.ORDERING.name, "ordering") + self.assertEqual(CommonParameters.ORDERING.type, openapi.TYPE_STRING) + + def test_id_parameter(self): + """Test ID parameter configuration""" + self.assertEqual(CommonParameters.ID.name, "id") + self.assertEqual(CommonParameters.ID.in_, openapi.IN_PATH) + self.assertTrue(CommonParameters.ID.required) + + +class 
PaginatedResponseTest(TestCase): + """Tests for paginated_response function""" + + def test_returns_response_object(self): + """Test function returns openapi.Response""" + result = paginated_response(DummySerializer) + self.assertIsInstance(result, openapi.Response) + + def test_response_has_description(self): + """Test response has description""" + result = paginated_response(DummySerializer) + self.assertEqual(result.description, "Пагинированный список") diff --git a/tests/apps/core/test_permissions.py b/tests/apps/core/test_permissions.py new file mode 100644 index 0000000..10d7dea --- /dev/null +++ b/tests/apps/core/test_permissions.py @@ -0,0 +1,252 @@ +"""Tests for core permissions""" + +from apps.core.permissions import ( + IsAdmin, + IsAdminOrReadOnly, + IsOwner, + IsOwnerOrAdmin, + IsOwnerOrReadOnly, + IsSuperuser, + IsVerified, +) +from django.contrib.auth import get_user_model +from django.test import RequestFactory, TestCase +from rest_framework.views import APIView + +from tests.apps.user.factories import UserFactory + +User = get_user_model() + + +class MockObject: + """Mock object for testing ownership""" + + def __init__(self, user=None, owner=None): + self.user = user + self.owner = owner + + +class IsOwnerTest(TestCase): + """Tests for IsOwner permission""" + + def setUp(self): + self.factory = RequestFactory() + self.permission = IsOwner() + self.user = UserFactory.create_user() + self.other_user = UserFactory.create_user() + + def test_owner_has_permission(self): + """Test owner has permission to object""" + request = self.factory.get("/") + request.user = self.user + obj = MockObject(user=self.user) + + result = self.permission.has_object_permission(request, APIView(), obj) + self.assertTrue(result) + + def test_non_owner_denied(self): + """Test non-owner is denied""" + request = self.factory.get("/") + request.user = self.other_user + obj = MockObject(user=self.user) + + result = self.permission.has_object_permission(request, APIView(), obj) + 
self.assertFalse(result) + + def test_owner_field_fallback(self): + """Test fallback to 'owner' field""" + request = self.factory.get("/") + request.user = self.user + obj = MockObject(owner=self.user) + + result = self.permission.has_object_permission(request, APIView(), obj) + self.assertTrue(result) + + +class IsOwnerOrReadOnlyTest(TestCase): + """Tests for IsOwnerOrReadOnly permission""" + + def setUp(self): + self.factory = RequestFactory() + self.permission = IsOwnerOrReadOnly() + self.user = UserFactory.create_user() + self.other_user = UserFactory.create_user() + + def test_safe_methods_allowed_for_all(self): + """Test GET/HEAD/OPTIONS allowed for non-owners""" + for method in ["get", "head", "options"]: + request = getattr(self.factory, method)("/") + request.user = self.other_user + obj = MockObject(user=self.user) + + result = self.permission.has_object_permission(request, APIView(), obj) + self.assertTrue(result, f"{method.upper()} should be allowed") + + def test_unsafe_methods_denied_for_non_owner(self): + """Test POST/PUT/PATCH/DELETE denied for non-owners""" + for method in ["post", "put", "patch", "delete"]: + request = getattr(self.factory, method)("/") + request.user = self.other_user + obj = MockObject(user=self.user) + + result = self.permission.has_object_permission(request, APIView(), obj) + self.assertFalse(result, f"{method.upper()} should be denied") + + def test_unsafe_methods_allowed_for_owner(self): + """Test unsafe methods allowed for owner""" + request = self.factory.put("/") + request.user = self.user + obj = MockObject(user=self.user) + + result = self.permission.has_object_permission(request, APIView(), obj) + self.assertTrue(result) + + +class IsAdminOrReadOnlyTest(TestCase): + """Tests for IsAdminOrReadOnly permission""" + + def setUp(self): + self.factory = RequestFactory() + self.permission = IsAdminOrReadOnly() + self.user = UserFactory.create_user() + self.admin = UserFactory.create_user(is_staff=True) + + def 
test_safe_methods_allowed_for_all(self): + """Test GET allowed for non-admins""" + request = self.factory.get("/") + request.user = self.user + + result = self.permission.has_permission(request, APIView()) + self.assertTrue(result) + + def test_unsafe_methods_denied_for_non_admin(self): + """Test POST denied for non-admins""" + request = self.factory.post("/") + request.user = self.user + + result = self.permission.has_permission(request, APIView()) + self.assertFalse(result) + + def test_unsafe_methods_allowed_for_admin(self): + """Test POST allowed for admins""" + request = self.factory.post("/") + request.user = self.admin + + result = self.permission.has_permission(request, APIView()) + self.assertTrue(result) + + +class IsAdminTest(TestCase): + """Tests for IsAdmin permission""" + + def setUp(self): + self.factory = RequestFactory() + self.permission = IsAdmin() + self.user = UserFactory.create_user() + self.admin = UserFactory.create_user(is_staff=True) + + def test_admin_has_permission(self): + """Test admin has permission""" + request = self.factory.get("/") + request.user = self.admin + + result = self.permission.has_permission(request, APIView()) + self.assertTrue(result) + + def test_non_admin_denied(self): + """Test non-admin is denied""" + request = self.factory.get("/") + request.user = self.user + + result = self.permission.has_permission(request, APIView()) + self.assertFalse(result) + + +class IsSuperuserTest(TestCase): + """Tests for IsSuperuser permission""" + + def setUp(self): + self.factory = RequestFactory() + self.permission = IsSuperuser() + self.user = UserFactory.create_user() + self.superuser = UserFactory.create_superuser() + + def test_superuser_has_permission(self): + """Test superuser has permission""" + request = self.factory.get("/") + request.user = self.superuser + + result = self.permission.has_permission(request, APIView()) + self.assertTrue(result) + + def test_non_superuser_denied(self): + """Test non-superuser is denied""" + 
request = self.factory.get("/") + request.user = self.user + + result = self.permission.has_permission(request, APIView()) + self.assertFalse(result) + + +class IsVerifiedTest(TestCase): + """Tests for IsVerified permission""" + + def setUp(self): + self.factory = RequestFactory() + self.permission = IsVerified() + self.user = UserFactory.create_user(is_verified=False) + self.verified_user = UserFactory.create_user(is_verified=True) + + def test_verified_user_has_permission(self): + """Test verified user has permission""" + request = self.factory.get("/") + request.user = self.verified_user + + result = self.permission.has_permission(request, APIView()) + self.assertTrue(result) + + def test_unverified_user_denied(self): + """Test unverified user is denied""" + request = self.factory.get("/") + request.user = self.user + + result = self.permission.has_permission(request, APIView()) + self.assertFalse(result) + + +class IsOwnerOrAdminTest(TestCase): + """Tests for IsOwnerOrAdmin permission""" + + def setUp(self): + self.factory = RequestFactory() + self.permission = IsOwnerOrAdmin() + self.user = UserFactory.create_user() + self.other_user = UserFactory.create_user() + self.admin = UserFactory.create_user(is_staff=True) + + def test_owner_has_permission(self): + """Test owner has permission""" + request = self.factory.get("/") + request.user = self.user + obj = MockObject(user=self.user) + + result = self.permission.has_object_permission(request, APIView(), obj) + self.assertTrue(result) + + def test_admin_has_permission(self): + """Test admin has permission to any object""" + request = self.factory.get("/") + request.user = self.admin + obj = MockObject(user=self.user) + + result = self.permission.has_object_permission(request, APIView(), obj) + self.assertTrue(result) + + def test_non_owner_non_admin_denied(self): + """Test non-owner non-admin is denied""" + request = self.factory.get("/") + request.user = self.other_user + obj = MockObject(user=self.user) + + 
result = self.permission.has_object_permission(request, APIView(), obj) + self.assertFalse(result) diff --git a/tests/apps/core/test_response.py b/tests/apps/core/test_response.py new file mode 100644 index 0000000..485d4f3 --- /dev/null +++ b/tests/apps/core/test_response.py @@ -0,0 +1,138 @@ +"""Tests for core response wrapper""" + +from apps.core.response import ( + api_created_response, + api_error_response, + api_no_content_response, + api_paginated_response, + api_response, +) +from django.test import TestCase +from rest_framework import status + + +class APIResponseTest(TestCase): + """Tests for api_response function""" + + def test_basic_response(self): + """Test basic successful response""" + response = api_response({"key": "value"}) + + self.assertEqual(response.status_code, status.HTTP_200_OK) + self.assertTrue(response.data["success"]) + self.assertEqual(response.data["data"], {"key": "value"}) + self.assertIsNone(response.data["errors"]) + + def test_response_with_request_id(self): + """Test response includes request ID in meta""" + response = api_response({"key": "value"}, request_id="test-id-123") + + self.assertEqual(response.data["meta"]["request_id"], "test-id-123") + + def test_response_with_custom_status(self): + """Test response with custom status code""" + response = api_response(None, status_code=status.HTTP_202_ACCEPTED) + + self.assertEqual(response.status_code, status.HTTP_202_ACCEPTED) + + def test_response_with_pagination(self): + """Test response includes pagination in meta""" + pagination = {"page": 1, "total": 100} + response = api_response([1, 2, 3], pagination=pagination) + + self.assertEqual(response.data["meta"]["pagination"], pagination) + + +class APIErrorResponseTest(TestCase): + """Tests for api_error_response function""" + + def test_basic_error_response(self): + """Test basic error response""" + errors = [{"code": "test_error", "message": "Test error message"}] + response = api_error_response(errors) + + 
self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST) + self.assertFalse(response.data["success"]) + self.assertIsNone(response.data["data"]) + self.assertEqual(response.data["errors"], errors) + + def test_error_response_with_custom_status(self): + """Test error response with custom status code""" + errors = [{"code": "not_found", "message": "Not found"}] + response = api_error_response(errors, status_code=status.HTTP_404_NOT_FOUND) + + self.assertEqual(response.status_code, status.HTTP_404_NOT_FOUND) + + def test_error_response_with_request_id(self): + """Test error response includes request ID""" + errors = [{"code": "error", "message": "Error"}] + response = api_error_response(errors, request_id="error-id-456") + + self.assertEqual(response.data["meta"]["request_id"], "error-id-456") + + +class APICreatedResponseTest(TestCase): + """Tests for api_created_response function""" + + def test_created_response(self): + """Test 201 created response""" + response = api_created_response({"id": 1, "name": "New item"}) + + self.assertEqual(response.status_code, status.HTTP_201_CREATED) + self.assertTrue(response.data["success"]) + self.assertEqual(response.data["data"]["id"], 1) + + +class APINoContentResponseTest(TestCase): + """Tests for api_no_content_response function""" + + def test_no_content_response(self): + """Test 204 no content response""" + response = api_no_content_response() + + self.assertEqual(response.status_code, status.HTTP_204_NO_CONTENT) + self.assertTrue(response.data["success"]) + self.assertIsNone(response.data["data"]) + + +class APIPaginatedResponseTest(TestCase): + """Tests for api_paginated_response function""" + + def test_paginated_response(self): + """Test paginated response with correct metadata""" + data = [{"id": 1}, {"id": 2}] + response = api_paginated_response( + data, page=1, page_size=10, total_count=25 + ) + + self.assertEqual(response.status_code, status.HTTP_200_OK) + self.assertEqual(response.data["data"], data) + + 
pagination = response.data["meta"]["pagination"] + self.assertEqual(pagination["page"], 1) + self.assertEqual(pagination["page_size"], 10) + self.assertEqual(pagination["total_count"], 25) + self.assertEqual(pagination["total_pages"], 3) + self.assertTrue(pagination["has_next"]) + self.assertFalse(pagination["has_previous"]) + + def test_paginated_response_last_page(self): + """Test paginated response on last page""" + response = api_paginated_response( + [{"id": 1}], page=3, page_size=10, total_count=25 + ) + + pagination = response.data["meta"]["pagination"] + self.assertFalse(pagination["has_next"]) + self.assertTrue(pagination["has_previous"]) + + def test_paginated_response_single_page(self): + """Test paginated response with single page""" + response = api_paginated_response( + [{"id": 1}], page=1, page_size=10, total_count=5 + ) + + pagination = response.data["meta"]["pagination"] + self.assertEqual(pagination["total_pages"], 1) + self.assertFalse(pagination["has_next"]) + self.assertFalse(pagination["has_previous"]) diff --git a/tests/apps/core/test_services.py b/tests/apps/core/test_services.py new file mode 100644 index 0000000..f4fe1a6 --- /dev/null +++ b/tests/apps/core/test_services.py @@ -0,0 +1,103 @@ +"""Tests for core services""" + +from apps.core.exceptions import NotFoundError +from apps.core.services import BaseService +from django.contrib.auth import get_user_model +from django.test import TestCase + +User = get_user_model() + + +class UserTestService(BaseService[User]): + """Test service using User model""" + + model = User + + +class BaseServiceTest(TestCase): + """Tests for BaseService""" + + def setUp(self): + self.user = User.objects.create_user( + username="testuser", + email="test@example.com", + password="testpass123", + ) + + def test_get_by_id_success(self): + """Test get_by_id returns entity""" + result = UserTestService.get_by_id(self.user.pk) + self.assertEqual(result.pk, self.user.pk) + self.assertEqual(result.email, 
self.user.email) + + def test_get_by_id_not_found(self): + """Test get_by_id raises NotFoundError for non-existent ID""" + with self.assertRaises(NotFoundError) as context: + UserTestService.get_by_id(99999) + + self.assertEqual(context.exception.code, "not_found") + self.assertIn("User", context.exception.message) + + def test_get_by_id_or_none_found(self): + """Test get_by_id_or_none returns entity when found""" + result = UserTestService.get_by_id_or_none(self.user.pk) + self.assertEqual(result.pk, self.user.pk) + + def test_get_by_id_or_none_not_found(self): + """Test get_by_id_or_none returns None when not found""" + result = UserTestService.get_by_id_or_none(99999) + self.assertIsNone(result) + + def test_get_all(self): + """Test get_all returns queryset""" + User.objects.create_user( + username="testuser2", + email="test2@example.com", + password="testpass123", + ) + + result = UserTestService.get_all() + self.assertEqual(result.count(), 2) + + def test_filter(self): + """Test filter returns filtered queryset""" + result = UserTestService.filter(email="test@example.com") + self.assertEqual(result.count(), 1) + self.assertEqual(result.first().email, "test@example.com") + + def test_exists_true(self): + """Test exists returns True when entity exists""" + result = UserTestService.exists(email="test@example.com") + self.assertTrue(result) + + def test_exists_false(self): + """Test exists returns False when entity does not exist""" + result = UserTestService.exists(email="nonexistent@example.com") + self.assertFalse(result) + + def test_count_all(self): + """Test count returns total count""" + result = UserTestService.count() + self.assertEqual(result, 1) + + def test_count_filtered(self): + """Test count with filter""" + result = UserTestService.count(email="test@example.com") + self.assertEqual(result, 1) + + result = UserTestService.count(email="nonexistent@example.com") + self.assertEqual(result, 0) + + def test_update(self): + """Test update modifies entity 
fields""" + UserTestService.update(self.user, username="updated_username") + + self.user.refresh_from_db() + self.assertEqual(self.user.username, "updated_username") + + def test_delete(self): + """Test delete removes entity""" + user_pk = self.user.pk + UserTestService.delete(self.user) + + self.assertFalse(User.objects.filter(pk=user_pk).exists()) diff --git a/tests/apps/core/test_signals.py b/tests/apps/core/test_signals.py new file mode 100644 index 0000000..af7f76f --- /dev/null +++ b/tests/apps/core/test_signals.py @@ -0,0 +1,204 @@ +"""Tests for core signals utilities""" + + +from apps.core.signals import ( + SignalDispatcher, + emit_password_changed, + emit_user_registered, + emit_user_verified, + on_post_save, + on_pre_save, + password_changed, + user_registered, + user_verified, +) +from django.contrib.auth import get_user_model +from django.db.models.signals import post_save, pre_save +from django.test import TestCase + +from tests.apps.user.factories import UserFactory + +User = get_user_model() + + +class SignalDispatcherTest(TestCase): + """Tests for SignalDispatcher""" + + def setUp(self): + self.dispatcher = SignalDispatcher() + + def test_register_handler(self): + """Test handler registration""" + + def my_handler(sender, **kwargs): + pass + + self.dispatcher.register( + signal=post_save, + sender="user.User", + handler=my_handler, + description="Test handler", + ) + + self.assertEqual(len(self.dispatcher._handlers), 1) + self.assertEqual(self.dispatcher._handlers[0]["handler"], my_handler) + + def test_list_handlers(self): + """Test listing registered handlers""" + + def handler1(sender, **kwargs): + pass + + def handler2(sender, **kwargs): + pass + + self.dispatcher.register( + signal=post_save, + sender="user.User", + handler=handler1, + description="Handler 1", + ) + self.dispatcher.register( + signal=pre_save, + sender="user.User", + handler=handler2, + description="Handler 2", + ) + + handlers = self.dispatcher.list_handlers() + + 
self.assertEqual(len(handlers), 2) + self.assertEqual(handlers[0]["description"], "Handler 1") + self.assertEqual(handlers[1]["description"], "Handler 2") + + def test_connect_all(self): + """Test connecting all handlers""" + handler_called = {"value": False} + + def test_handler(sender, instance, created, **kwargs): + handler_called["value"] = True + + self.dispatcher.register( + signal=post_save, + sender=User, + handler=test_handler, + description="Test", + ) + + self.dispatcher.connect_all() + + # Create user to trigger signal + user = UserFactory.create_user() + + self.assertTrue(handler_called["value"]) + + # Cleanup + self.dispatcher.disconnect_all() + + def test_disconnect_all(self): + """Test disconnecting all handlers""" + handler_called = {"value": False} + + def test_handler(sender, instance, created, **kwargs): + handler_called["value"] = True + + self.dispatcher.register( + signal=post_save, + sender=User, + handler=test_handler, + description="Test", + ) + + self.dispatcher.connect_all() + self.dispatcher.disconnect_all() + + # Create user - handler should not be called + handler_called["value"] = False + user = UserFactory.create_user() + + self.assertFalse(handler_called["value"]) + + +class SignalDecoratorsTest(TestCase): + """Tests for signal decorators""" + + def test_on_post_save_registers_handler(self): + """Test @on_post_save registers handler""" + from apps.core.signals import signal_dispatcher + + initial_count = len(signal_dispatcher._handlers) + + @on_post_save("user.User", description="Test decorator") + def my_handler(sender, **kwargs): + pass + + new_count = len(signal_dispatcher._handlers) + self.assertEqual(new_count, initial_count + 1) + + def test_on_pre_save_registers_handler(self): + """Test @on_pre_save registers handler""" + from apps.core.signals import signal_dispatcher + + initial_count = len(signal_dispatcher._handlers) + + @on_pre_save("user.User", description="Test pre_save") + def my_pre_handler(sender, **kwargs): + pass 
+ + new_count = len(signal_dispatcher._handlers) + self.assertEqual(new_count, initial_count + 1) + + +class CustomSignalsTest(TestCase): + """Tests for custom signals""" + + def setUp(self): + self.user = UserFactory.create_user() + + def test_emit_user_registered(self): + """Test user_registered signal emission""" + handler_called = {"value": False, "user": None} + + def handler(sender, user, **kwargs): + handler_called["value"] = True + handler_called["user"] = user + + user_registered.connect(handler) + + try: + emit_user_registered(self.user) + + self.assertTrue(handler_called["value"]) + self.assertEqual(handler_called["user"], self.user) + finally: + user_registered.disconnect(handler) + + def test_emit_user_verified(self): + """Test user_verified signal emission""" + handler_called = {"value": False} + + def handler(sender, user, **kwargs): + handler_called["value"] = True + + user_verified.connect(handler) + + try: + emit_user_verified(self.user) + self.assertTrue(handler_called["value"]) + finally: + user_verified.disconnect(handler) + + def test_emit_password_changed(self): + """Test password_changed signal emission""" + handler_called = {"value": False} + + def handler(sender, user, **kwargs): + handler_called["value"] = True + + password_changed.connect(handler) + + try: + emit_password_changed(self.user) + self.assertTrue(handler_called["value"]) + finally: + password_changed.disconnect(handler) diff --git a/tests/apps/core/test_tasks.py b/tests/apps/core/test_tasks.py new file mode 100644 index 0000000..a50b51c --- /dev/null +++ b/tests/apps/core/test_tasks.py @@ -0,0 +1,82 @@ +"""Tests for core Celery tasks""" + + +from apps.core.tasks import ( + BaseTask, + IdempotentTask, + PeriodicTask, + TimedTask, + TransactionalTask, +) +from celery import Task +from django.test import TestCase + + +class BaseTaskTest(TestCase): + """Tests for BaseTask""" + + def test_inherits_from_celery_task(self): + """Test BaseTask inherits from Celery Task""" + 
self.assertTrue(issubclass(BaseTask, Task)) + + def test_has_default_retry_settings(self): + """Test BaseTask has default retry settings""" + self.assertEqual(BaseTask.max_retries, 3) + self.assertTrue(BaseTask.retry_backoff) + self.assertEqual(BaseTask.retry_backoff_max, 600) + + def test_acks_late_enabled(self): + """Test acks_late is enabled""" + self.assertTrue(BaseTask.acks_late) + + def test_reject_on_worker_lost(self): + """Test reject_on_worker_lost is enabled""" + self.assertTrue(BaseTask.reject_on_worker_lost) + + +class TransactionalTaskTest(TestCase): + """Tests for TransactionalTask""" + + def test_inherits_from_base_task(self): + """Test TransactionalTask inherits from BaseTask""" + self.assertTrue(issubclass(TransactionalTask, BaseTask)) + + +class IdempotentTaskTest(TestCase): + """Tests for IdempotentTask""" + + def test_inherits_from_base_task(self): + """Test IdempotentTask inherits from BaseTask""" + self.assertTrue(issubclass(IdempotentTask, BaseTask)) + + def test_has_lock_timeout(self): + """Test IdempotentTask has lock_timeout attribute""" + self.assertEqual(IdempotentTask.lock_timeout, 3600) + + +class TimedTaskTest(TestCase): + """Tests for TimedTask""" + + def test_inherits_from_base_task(self): + """Test TimedTask inherits from BaseTask""" + self.assertTrue(issubclass(TimedTask, BaseTask)) + + def test_has_slow_threshold(self): + """Test TimedTask has slow_threshold attribute""" + self.assertEqual(TimedTask.slow_threshold, 60) + + +class PeriodicTaskTest(TestCase): + """Tests for PeriodicTask""" + + def test_inherits_from_timed_task(self): + """Test PeriodicTask inherits from TimedTask""" + self.assertTrue(issubclass(PeriodicTask, TimedTask)) + + def test_max_retries_is_one(self): + """Test max_retries is 1 for periodic tasks""" + self.assertEqual(PeriodicTask.max_retries, 1) + + def test_autoretry_for_is_empty(self): + """Test autoretry_for is empty for periodic tasks""" + self.assertEqual(PeriodicTask.autoretry_for, ()) diff --git 
a/tests/apps/core/test_views.py b/tests/apps/core/test_views.py new file mode 100644 index 0000000..cbf2400 --- /dev/null +++ b/tests/apps/core/test_views.py @@ -0,0 +1,101 @@ +"""Tests for core views (health checks)""" + +from django.urls import reverse +from rest_framework import status +from rest_framework.test import APITestCase + + +class HealthCheckViewTest(APITestCase): + """Tests for HealthCheckView""" + + def test_health_check_url_reverse(self): + """Test reverse URL resolution for health check""" + url = reverse("core:health") + self.assertEqual(url, "/health/") + + def test_health_check_success(self): + """Test health check returns healthy status""" + url = reverse("core:health") + response = self.client.get(url) + + self.assertEqual(response.status_code, status.HTTP_200_OK) + self.assertIn("status", response.data) + self.assertIn("version", response.data) + self.assertIn("checks", response.data) + self.assertIn("database", response.data["checks"]) + + def test_health_check_database_up(self): + """Test health check reports database as up""" + url = reverse("core:health") + response = self.client.get(url) + + self.assertEqual(response.data["checks"]["database"]["status"], "up") + self.assertIn("latency_ms", response.data["checks"]["database"]) + + +class LivenessViewTest(APITestCase): + """Tests for LivenessView""" + + def test_liveness_url_reverse(self): + """Test reverse URL resolution for liveness""" + url = reverse("core:liveness") + self.assertEqual(url, "/health/live/") + + def test_liveness_returns_alive(self): + """Test liveness probe returns alive status""" + url = reverse("core:liveness") + response = self.client.get(url) + + self.assertEqual(response.status_code, status.HTTP_200_OK) + self.assertEqual(response.data["status"], "alive") + + +class ReadinessViewTest(APITestCase): + """Tests for ReadinessView""" + + def test_readiness_url_reverse(self): + """Test reverse URL resolution for readiness""" + url = reverse("core:readiness") + 
self.assertEqual(url, "/health/ready/") + + def test_readiness_returns_ready(self): + """Test readiness probe returns ready when DB is available""" + url = reverse("core:readiness") + response = self.client.get(url) + + self.assertEqual(response.status_code, status.HTTP_200_OK) + self.assertEqual(response.data["status"], "ready") + + +class APIVersioningURLTest(APITestCase): + """Tests for API versioning URL structure""" + + def test_api_v1_user_register_reverse(self): + """Test reverse URL for user registration""" + url = reverse("api_v1:user:register") + self.assertEqual(url, "/api/v1/users/register/") + + def test_api_v1_user_login_reverse(self): + """Test reverse URL for user login""" + url = reverse("api_v1:user:login") + self.assertEqual(url, "/api/v1/users/login/") + + def test_api_v1_user_logout_reverse(self): + """Test reverse URL for user logout""" + url = reverse("api_v1:user:logout") + self.assertEqual(url, "/api/v1/users/logout/") + + def test_api_v1_user_current_user_reverse(self): + """Test reverse URL for current user""" + url = reverse("api_v1:user:current_user") + self.assertEqual(url, "/api/v1/users/me/") + + def test_api_v1_user_token_refresh_reverse(self): + """Test reverse URL for token refresh""" + url = reverse("api_v1:user:token_refresh") + self.assertEqual(url, "/api/v1/users/token/refresh/") + + def test_api_v1_user_password_change_reverse(self): + """Test reverse URL for password change""" + url = reverse("api_v1:user:password_change") + self.assertEqual(url, "/api/v1/users/password/change/") diff --git a/tests/apps/core/test_viewsets.py b/tests/apps/core/test_viewsets.py new file mode 100644 index 0000000..ab122d1 --- /dev/null +++ b/tests/apps/core/test_viewsets.py @@ -0,0 +1,86 @@ +"""Tests for core ViewSets""" + +from apps.core.pagination import StandardPagination +from apps.core.viewsets import ( + BaseViewSet, + BulkMixin, + OwnerViewSet, + ReadOnlyViewSet, +) +from django.test import TestCase +from rest_framework import viewsets 
+from rest_framework.permissions import IsAuthenticated + + +class BaseViewSetTest(TestCase): + """Tests for BaseViewSet""" + + def test_inherits_from_model_viewset(self): + """Test BaseViewSet inherits from ModelViewSet""" + self.assertTrue(issubclass(BaseViewSet, viewsets.ModelViewSet)) + + def test_has_pagination_class(self): + """Test BaseViewSet has pagination_class""" + self.assertEqual(BaseViewSet.pagination_class, StandardPagination) + + def test_has_permission_classes(self): + """Test BaseViewSet has permission_classes""" + self.assertIn(IsAuthenticated, BaseViewSet.permission_classes) + + def test_has_filter_backends(self): + """Test BaseViewSet has filter_backends""" + self.assertTrue(hasattr(BaseViewSet, "filter_backends")) + self.assertIsInstance(BaseViewSet.filter_backends, list) + self.assertTrue(len(BaseViewSet.filter_backends) > 0) + + def test_has_default_ordering(self): + """Test BaseViewSet has default ordering""" + self.assertEqual(BaseViewSet.ordering, ["-created_at"]) + + def test_has_serializer_classes_dict(self): + """Test BaseViewSet has serializer_classes dict""" + self.assertTrue(hasattr(BaseViewSet, "serializer_classes")) + self.assertIsInstance(BaseViewSet.serializer_classes, dict) + + +class ReadOnlyViewSetTest(TestCase): + """Tests for ReadOnlyViewSet""" + + def test_inherits_from_readonly_model_viewset(self): + """Test ReadOnlyViewSet inherits from ReadOnlyModelViewSet""" + self.assertTrue(issubclass(ReadOnlyViewSet, viewsets.ReadOnlyModelViewSet)) + + def test_has_pagination_class(self): + """Test ReadOnlyViewSet has pagination_class""" + self.assertEqual(ReadOnlyViewSet.pagination_class, StandardPagination) + + def test_has_filter_backends(self): + """Test ReadOnlyViewSet has filter_backends""" + self.assertTrue(hasattr(ReadOnlyViewSet, "filter_backends")) + self.assertTrue(len(ReadOnlyViewSet.filter_backends) > 0) + + +class OwnerViewSetTest(TestCase): + """Tests for OwnerViewSet""" + + def test_inherits_from_base_viewset(self): 
+ """Test OwnerViewSet inherits from BaseViewSet""" + self.assertTrue(issubclass(OwnerViewSet, BaseViewSet)) + + def test_has_owner_field(self): + """Test OwnerViewSet has owner_field attribute""" + self.assertEqual(OwnerViewSet.owner_field, "user") + + +class BulkMixinTest(TestCase): + """Tests for BulkMixin""" + + def test_has_bulk_create_method(self): + """Test BulkMixin has bulk_create method""" + self.assertTrue(hasattr(BulkMixin, "bulk_create")) + self.assertTrue(callable(BulkMixin.bulk_create)) + + def test_has_bulk_delete_method(self): + """Test BulkMixin has bulk_delete method""" + self.assertTrue(hasattr(BulkMixin, "bulk_delete")) + self.assertTrue(callable(BulkMixin.bulk_delete)) diff --git a/tests/apps/user/__init__.py b/tests/apps/user/__init__.py new file mode 100644 index 0000000..4474db6 --- /dev/null +++ b/tests/apps/user/__init__.py @@ -0,0 +1,10 @@ +""" +Tests for user application + +This package contains all tests for the user app including: +- Model tests +- Serializer tests +- Service tests +- View tests +- Factory classes for test data generation +""" diff --git a/tests/apps/user/factories.py b/tests/apps/user/factories.py new file mode 100644 index 0000000..4fcd031 --- /dev/null +++ b/tests/apps/user/factories.py @@ -0,0 +1,93 @@ +"""Фабрики для создания тестовых объектов с использованием factory_boy и faker""" + +import factory +from apps.user.models import Profile, User +from faker import Faker + +fake = Faker("ru_RU") + + +class UserFactory(factory.django.DjangoModelFactory): + """Фабрика для создания пользователей""" + + class Meta: + model = User + skip_postgeneration_save = True + + email = factory.LazyAttribute(lambda _: fake.unique.email()) + username = factory.LazyAttribute(lambda _: fake.unique.user_name()) + phone = factory.LazyAttribute( + lambda _: f"+7{fake.numerify('##########')}" + ) + is_verified = False + is_staff = False + is_superuser = False + is_active = True + + @factory.lazy_attribute + def password(self): + return 
"testpass123" + + @classmethod + def _create(cls, model_class, *args, **kwargs): + """Переопределяем создание для корректной установки пароля""" + password = kwargs.pop("password", "testpass123") + obj = super()._create(model_class, *args, **kwargs) + obj.set_password(password) + obj.save() + return obj + + @classmethod + def create_user(cls, **kwargs): + """Создать обычного пользователя (для обратной совместимости)""" + return cls.create(**kwargs) + + @classmethod + def create_superuser(cls, **kwargs): + """Создать суперпользователя""" + defaults = { + "is_staff": True, + "is_superuser": True, + } + defaults.update(kwargs) + return cls.create(**defaults) + + +class ProfileFactory(factory.django.DjangoModelFactory): + """Фабрика для создания профилей""" + + class Meta: + model = Profile + django_get_or_create = ("user",) # Используем get_or_create для избежания дубликатов + + user = factory.SubFactory(UserFactory) + first_name = factory.LazyAttribute(lambda _: fake.first_name()) + last_name = factory.LazyAttribute(lambda _: fake.last_name()) + bio = factory.LazyAttribute(lambda _: fake.text(max_nb_chars=200)) + date_of_birth = factory.LazyAttribute( + lambda _: fake.date_of_birth(minimum_age=18, maximum_age=80) + ) + + @classmethod + def create_profile(cls, user=None, **kwargs): + """Создать профиль (для обратной совместимости)""" + if user is not None: + # Проверяем, существует ли уже профиль (сигнал мог создать) + try: + profile = user.profile + # Обновляем существующий профиль + for key, value in kwargs.items(): + setattr(profile, key, value) + # Заполняем поля faker'ом, если не переданы + if "first_name" not in kwargs: + profile.first_name = fake.first_name() + if "last_name" not in kwargs: + profile.last_name = fake.last_name() + if "bio" not in kwargs: + profile.bio = fake.text(max_nb_chars=200) + profile.save() + return profile + except Profile.DoesNotExist: + pass + kwargs["user"] = user + return cls.create(**kwargs) diff --git 
a/src/apps/user/tests/test_models.py b/tests/apps/user/test_models.py similarity index 76% rename from src/apps/user/tests/test_models.py rename to tests/apps/user/test_models.py index f50e51c..1ae409a 100644 --- a/src/apps/user/tests/test_models.py +++ b/tests/apps/user/test_models.py @@ -1,9 +1,12 @@ """Tests for user models""" from django.test import TestCase +from faker import Faker from .factories import ProfileFactory, UserFactory +fake = Faker("ru_RU") + class UserModelTest(TestCase): """Tests for User model""" @@ -51,16 +54,20 @@ class ProfileModelTest(TestCase): """Tests for Profile model""" def setUp(self): - self.profile = ProfileFactory.create_profile() + # Создаём профиль через фабрику с заполненными данными + self.user = UserFactory.create_user() + self.profile = ProfileFactory.create_profile(user=self.user) def test_profile_creation(self): """Test profile creation""" self.assertIsNotNone(self.profile.user) - self.assertIsInstance(self.profile.first_name, str) - self.assertIsInstance(self.profile.last_name, str) - # Проверяем, что имена начинаются с "Иван" (учитываем UUID суффиксы) - self.assertTrue(self.profile.first_name.startswith("Иван")) - self.assertTrue(self.profile.last_name.startswith("Иванов")) + # Проверяем, что профиль связан с пользователем + self.assertEqual(self.profile.user, self.user) + # Проверяем, что имена заполнены faker'ом + self.assertIsNotNone(self.profile.first_name) + self.assertIsNotNone(self.profile.last_name) + self.assertTrue(len(self.profile.first_name) > 0) + self.assertTrue(len(self.profile.last_name) > 0) def test_profile_str_representation(self): """Test profile string representation""" @@ -104,18 +111,20 @@ class ProfileModelTest(TestCase): def test_profile_full_name_property(self): """Test full_name property""" # Test with both names - self.profile.first_name = "John" - self.profile.last_name = "Doe" - self.assertEqual(self.profile.full_name, "John Doe") + first_name = fake.first_name() + last_name = 
fake.last_name() + self.profile.first_name = first_name + self.profile.last_name = last_name + self.assertEqual(self.profile.full_name, f"{first_name} {last_name}") # Test with only first name self.profile.last_name = "" - self.assertEqual(self.profile.full_name, "John") + self.assertEqual(self.profile.full_name, first_name) # Test with only last name self.profile.first_name = "" - self.profile.last_name = "Doe" - self.assertEqual(self.profile.full_name, "Doe") + self.profile.last_name = last_name + self.assertEqual(self.profile.full_name, last_name) # Test with no names (fallback to username) self.profile.first_name = "" diff --git a/src/apps/user/tests/test_serializers.py b/tests/apps/user/test_serializers.py similarity index 79% rename from src/apps/user/tests/test_serializers.py rename to tests/apps/user/test_serializers.py index 55edae3..e3ada24 100644 --- a/src/apps/user/tests/test_serializers.py +++ b/tests/apps/user/test_serializers.py @@ -1,9 +1,6 @@ """Tests for user serializers""" -from django.contrib.auth import get_user_model -from django.test import TestCase - -from ..serializers import ( +from apps.user.serializers import ( LoginSerializer, PasswordChangeSerializer, ProfileUpdateSerializer, @@ -12,21 +9,27 @@ from ..serializers import ( UserSerializer, UserUpdateSerializer, ) +from django.contrib.auth import get_user_model +from django.test import TestCase +from faker import Faker + from .factories import ProfileFactory, UserFactory User = get_user_model() +fake = Faker("ru_RU") class UserRegistrationSerializerTest(TestCase): """Tests for UserRegistrationSerializer""" def setUp(self): + self.password = fake.password(length=12, special_chars=False) self.user_data = { - "email": "serializer@example.com", - "username": "serializeruser", - "password": "serializerpass123", - "password_confirm": "serializerpass123", - "phone": "+79991234567", + "email": fake.unique.email(), + "username": fake.unique.user_name(), + "password": self.password, + 
"password_confirm": self.password, + "phone": f"+7{fake.numerify('##########')}", } def test_valid_registration_data(self): @@ -37,7 +40,7 @@ class UserRegistrationSerializerTest(TestCase): def test_passwords_do_not_match(self): """Test validation fails when passwords don't match""" data = self.user_data.copy() - data["password_confirm"] = "differentpass" + data["password_confirm"] = fake.password(length=12, special_chars=False) serializer = UserRegistrationSerializer(data=data) @@ -46,9 +49,10 @@ class UserRegistrationSerializerTest(TestCase): def test_short_password(self): """Test validation fails with short password""" + short_password = fake.pystr(min_chars=3, max_chars=5) data = self.user_data.copy() - data["password"] = "short" - data["password_confirm"] = "short" + data["password"] = short_password + data["password_confirm"] = short_password serializer = UserRegistrationSerializer(data=data) @@ -128,7 +132,10 @@ class UserUpdateSerializerTest(TestCase): def test_valid_update_data(self): """Test valid update data""" - update_data = {"username": "newusername", "phone": "+79991112233"} + update_data = { + "username": fake.unique.user_name(), + "phone": f"+7{fake.numerify('##########')}", + } serializer = UserUpdateSerializer(self.user, data=update_data, partial=True) self.assertTrue(serializer.is_valid()) @@ -155,10 +162,10 @@ class ProfileUpdateSerializerTest(TestCase): def test_valid_profile_update_data(self): """Test valid profile update data""" update_data = { - "first_name": "Александр", - "last_name": "Сидоров", - "bio": "Обновленное описание", - "date_of_birth": "1990-01-01", + "first_name": fake.first_name(), + "last_name": fake.last_name(), + "bio": fake.text(max_nb_chars=200), + "date_of_birth": str(fake.date_of_birth(minimum_age=18, maximum_age=80)), } serializer = ProfileUpdateSerializer( @@ -183,7 +190,10 @@ class LoginSerializerTest(TestCase): """Tests for LoginSerializer""" def setUp(self): - self.login_data = {"email": "test@example.com", 
"password": "testpass123"} + self.login_data = { + "email": fake.email(), + "password": fake.password(length=12, special_chars=False), + } def test_valid_login_data(self): """Test valid login data""" @@ -192,14 +202,14 @@ class LoginSerializerTest(TestCase): def test_missing_email(self): """Test validation fails without email""" - data = {"password": "testpass123"} + data = {"password": fake.password(length=12, special_chars=False)} serializer = LoginSerializer(data=data) self.assertFalse(serializer.is_valid()) self.assertIn("email", serializer.errors) def test_missing_password(self): """Test validation fails without password""" - data = {"email": "test@example.com"} + data = {"email": fake.email()} serializer = LoginSerializer(data=data) self.assertFalse(serializer.is_valid()) self.assertIn("password", serializer.errors) @@ -211,8 +221,8 @@ class TokenSerializerTest(TestCase): def test_valid_token_data(self): """Test valid token data""" token_data = { - "access": "access_token_string", - "refresh": "refresh_token_string", + "access": fake.pystr(min_chars=50, max_chars=100), + "refresh": fake.pystr(min_chars=50, max_chars=100), } serializer = TokenSerializer(data=token_data) @@ -220,14 +230,14 @@ class TokenSerializerTest(TestCase): def test_missing_access_token(self): """Test validation fails without access token""" - data = {"refresh": "refresh_token_string"} + data = {"refresh": fake.pystr(min_chars=50, max_chars=100)} serializer = TokenSerializer(data=data) self.assertFalse(serializer.is_valid()) self.assertIn("access", serializer.errors) def test_missing_refresh_token(self): """Test validation fails without refresh token""" - data = {"access": "access_token_string"} + data = {"access": fake.pystr(min_chars=50, max_chars=100)} serializer = TokenSerializer(data=data) self.assertFalse(serializer.is_valid()) self.assertIn("refresh", serializer.errors) @@ -237,10 +247,12 @@ class PasswordChangeSerializerTest(TestCase): """Tests for PasswordChangeSerializer""" def 
setUp(self): + self.old_password = fake.password(length=12, special_chars=False) + self.new_password = fake.password(length=12, special_chars=False) self.password_data = { - "old_password": "oldpass123", - "new_password": "newpass123", - "new_password_confirm": "newpass123", + "old_password": self.old_password, + "new_password": self.new_password, + "new_password_confirm": self.new_password, } def test_valid_password_change_data(self): @@ -251,7 +263,7 @@ class PasswordChangeSerializerTest(TestCase): def test_passwords_do_not_match(self): """Test validation fails when new passwords don't match""" data = self.password_data.copy() - data["new_password_confirm"] = "differentpass" + data["new_password_confirm"] = fake.password(length=12, special_chars=False) serializer = PasswordChangeSerializer(data=data) @@ -260,9 +272,10 @@ class PasswordChangeSerializerTest(TestCase): def test_short_new_password(self): """Test validation fails with short new password""" + short_password = fake.pystr(min_chars=3, max_chars=5) data = self.password_data.copy() - data["new_password"] = "short" - data["new_password_confirm"] = "short" + data["new_password"] = short_password + data["new_password_confirm"] = short_password serializer = PasswordChangeSerializer(data=data) @@ -271,7 +284,8 @@ class PasswordChangeSerializerTest(TestCase): def test_missing_old_password(self): """Test validation fails without old password""" - data = {"new_password": "newpass123", "new_password_confirm": "newpass123"} + new_password = fake.password(length=12, special_chars=False) + data = {"new_password": new_password, "new_password_confirm": new_password} serializer = PasswordChangeSerializer(data=data) self.assertFalse(serializer.is_valid()) self.assertIn("old_password", serializer.errors) diff --git a/src/apps/user/tests/test_services.py b/tests/apps/user/test_services.py similarity index 54% rename from src/apps/user/tests/test_services.py rename to tests/apps/user/test_services.py index 780cc7c..4c1d030 
100644 --- a/src/apps/user/tests/test_services.py +++ b/tests/apps/user/test_services.py @@ -1,14 +1,16 @@ """Tests for user services""" - +from apps.core.exceptions import NotFoundError +from apps.user.services import ProfileService, UserService from django.contrib.auth import get_user_model from django.test import TestCase +from faker import Faker from rest_framework_simplejwt.tokens import RefreshToken -from ..services import ProfileService, UserService from .factories import ProfileFactory, UserFactory User = get_user_model() +fake = Faker("ru_RU") class UserServiceTest(TestCase): @@ -17,9 +19,9 @@ class UserServiceTest(TestCase): def setUp(self): self.user = UserFactory.create_user() self.user_data = { - "email": "service@example.com", - "username": "serviceuser", - "password": "servicepass123", + "email": fake.unique.email(), + "username": fake.unique.user_name(), + "password": fake.password(length=12, special_chars=False), } def test_create_user_success(self): @@ -35,7 +37,9 @@ class UserServiceTest(TestCase): def test_create_user_with_extra_fields(self): """Test user creation with extra fields""" extra_data = self.user_data.copy() - extra_data["phone"] = "+79991234567" + extra_data["email"] = fake.unique.email() + extra_data["username"] = fake.unique.user_name() + extra_data["phone"] = f"+7{fake.numerify('##########')}" extra_data["is_verified"] = True user = UserService.create_user(**extra_data) @@ -49,8 +53,17 @@ class UserServiceTest(TestCase): self.assertEqual(found_user, self.user) def test_get_user_by_email_not_found(self): - """Test getting user by non-existing email""" - found_user = UserService.get_user_by_email("nonexistent@example.com") + """Test getting user by non-existing email raises NotFoundError""" + nonexistent_email = fake.unique.email() + with self.assertRaises(NotFoundError) as context: + UserService.get_user_by_email(nonexistent_email) + + self.assertEqual(context.exception.code, "not_found") + self.assertIn("email", 
context.exception.message) + + def test_get_user_by_email_or_none_not_found(self): + """Test getting user by non-existing email returns None""" + found_user = UserService.get_user_by_email_or_none(fake.unique.email()) self.assertIsNone(found_user) def test_get_user_by_id_found(self): @@ -59,13 +72,27 @@ class UserServiceTest(TestCase): self.assertEqual(found_user, self.user) def test_get_user_by_id_not_found(self): - """Test getting user by non-existing ID""" - found_user = UserService.get_user_by_id(999999) + """Test getting user by non-existing ID raises NotFoundError""" + nonexistent_id = fake.pyint(min_value=900000, max_value=999999) + with self.assertRaises(NotFoundError) as context: + UserService.get_user_by_id(nonexistent_id) + + self.assertEqual(context.exception.code, "not_found") + self.assertIn(str(nonexistent_id), context.exception.message) + + def test_get_user_by_id_or_none_not_found(self): + """Test getting user by non-existing ID returns None""" + found_user = UserService.get_user_by_id_or_none( + fake.pyint(min_value=900000, max_value=999999) + ) self.assertIsNone(found_user) def test_update_user_success(self): """Test successful user update""" - new_data = {"username": "updated_username", "phone": "+79991112233"} + new_data = { + "username": fake.unique.user_name(), + "phone": f"+7{fake.numerify('##########')}", + } updated_user = UserService.update_user(self.user.id, **new_data) @@ -74,22 +101,25 @@ class UserServiceTest(TestCase): self.assertEqual(updated_user.phone, new_data["phone"]) def test_update_user_not_found(self): - """Test updating non-existing user""" - updated_user = UserService.update_user(999999, username="test") - self.assertIsNone(updated_user) + """Test updating non-existing user raises NotFoundError""" + nonexistent_id = fake.pyint(min_value=900000, max_value=999999) + with self.assertRaises(NotFoundError): + UserService.update_user(nonexistent_id, username=fake.user_name()) def test_delete_user_success(self): """Test 
successful user deletion""" user_id = self.user.id - result = UserService.delete_user(user_id) + UserService.delete_user(user_id) - self.assertTrue(result) - self.assertIsNone(UserService.get_user_by_id(user_id)) + # Verify user is deleted + with self.assertRaises(NotFoundError): + UserService.get_user_by_id(user_id) def test_delete_user_not_found(self): - """Test deleting non-existing user""" - result = UserService.delete_user(999999) - self.assertFalse(result) + """Test deleting non-existing user raises NotFoundError""" + nonexistent_id = fake.pyint(min_value=900000, max_value=999999) + with self.assertRaises(NotFoundError): + UserService.delete_user(nonexistent_id) def test_get_tokens_for_user(self): """Test JWT token generation""" @@ -109,16 +139,17 @@ class UserServiceTest(TestCase): self.user.is_verified = False self.user.save() - result = UserService.verify_email(self.user.id) + user = UserService.verify_email(self.user.id) - self.assertTrue(result) + self.assertEqual(user.id, self.user.id) self.user.refresh_from_db() self.assertTrue(self.user.is_verified) def test_verify_email_not_found(self): - """Test email verification for non-existing user""" - result = UserService.verify_email(999999) - self.assertFalse(result) + """Test email verification for non-existing user raises NotFoundError""" + nonexistent_id = fake.pyint(min_value=900000, max_value=999999) + with self.assertRaises(NotFoundError): + UserService.verify_email(nonexistent_id) class ProfileServiceTest(TestCase): @@ -128,10 +159,10 @@ class ProfileServiceTest(TestCase): self.user = UserFactory.create_user() self.profile = ProfileFactory.create_profile(user=self.user) self.profile_data = { - "first_name": "Александр", - "last_name": "Петров", - "bio": "Тестовое описание", - "date_of_birth": "1990-01-01", + "first_name": fake.first_name(), + "last_name": fake.last_name(), + "bio": fake.text(max_nb_chars=200), + "date_of_birth": str(fake.date_of_birth(minimum_age=18, maximum_age=80)), } def 
test_get_profile_by_user_id_found(self): @@ -142,8 +173,17 @@ class ProfileServiceTest(TestCase): self.assertIsNotNone(found_profile.user) def test_get_profile_by_user_id_not_found(self): - """Test getting profile by non-existing user ID""" - found_profile = ProfileService.get_profile_by_user_id(999999) + """Test getting profile by non-existing user ID raises NotFoundError""" + nonexistent_id = fake.pyint(min_value=900000, max_value=999999) + with self.assertRaises(NotFoundError) as context: + ProfileService.get_profile_by_user_id(nonexistent_id) + + self.assertEqual(context.exception.code, "not_found") + + def test_get_profile_by_user_id_or_none_not_found(self): + """Test getting profile by non-existing user ID returns None""" + nonexistent_id = fake.pyint(min_value=900000, max_value=999999) + found_profile = ProfileService.get_profile_by_user_id_or_none(nonexistent_id) self.assertIsNone(found_profile) def test_update_profile_success(self): @@ -158,9 +198,10 @@ class ProfileServiceTest(TestCase): self.assertEqual(updated_profile.bio, self.profile_data["bio"]) def test_update_profile_not_found(self): - """Test updating profile for non-existing user""" - updated_profile = ProfileService.update_profile(999999, first_name="Test") - self.assertIsNone(updated_profile) + """Test updating profile for non-existing user raises NotFoundError""" + nonexistent_id = fake.pyint(min_value=900000, max_value=999999) + with self.assertRaises(NotFoundError): + ProfileService.update_profile(nonexistent_id, first_name=fake.first_name()) def test_get_full_profile_data_success(self): """Test getting full profile data""" @@ -177,6 +218,7 @@ class ProfileServiceTest(TestCase): self.assertEqual(profile_data["is_verified"], self.user.is_verified) def test_get_full_profile_data_not_found(self): - """Test getting full profile data for non-existing user""" - profile_data = ProfileService.get_full_profile_data(999999) - self.assertIsNone(profile_data) + """Test getting full profile data for 
non-existing user raises NotFoundError""" + nonexistent_id = fake.pyint(min_value=900000, max_value=999999) + with self.assertRaises(NotFoundError): + ProfileService.get_full_profile_data(nonexistent_id) diff --git a/src/apps/user/tests/test_views.py b/tests/apps/user/test_views.py similarity index 75% rename from src/apps/user/tests/test_views.py rename to tests/apps/user/test_views.py index 5439b41..4e3aa44 100644 --- a/src/apps/user/tests/test_views.py +++ b/tests/apps/user/test_views.py @@ -1,28 +1,31 @@ """Tests for user DRF views""" +from apps.user.models import Profile +from apps.user.services import UserService from django.contrib.auth import get_user_model from django.urls import reverse +from faker import Faker from rest_framework import status from rest_framework.test import APITestCase -from ..models import Profile -from ..services import UserService from .factories import ProfileFactory, UserFactory User = get_user_model() +fake = Faker("ru_RU") class RegisterViewTest(APITestCase): """Tests for RegisterView""" def setUp(self): - self.register_url = reverse("register") + self.register_url = reverse("api_v1:user:register") + self.password = fake.password(length=12, special_chars=False) self.user_data = { - "email": "test@example.com", - "username": "testuser", - "password": "testpass123", - "password_confirm": "testpass123", - "phone": "+79991234567", + "email": fake.unique.email(), + "username": fake.unique.user_name(), + "password": self.password, + "password_confirm": self.password, + "phone": f"+7{fake.numerify('##########')}", } def test_register_success(self): @@ -41,7 +44,7 @@ class RegisterViewTest(APITestCase): def test_register_passwords_do_not_match(self): """Test registration fails when passwords don't match""" data = self.user_data.copy() - data["password_confirm"] = "differentpass" + data["password_confirm"] = fake.password(length=12, special_chars=False) response = self.client.post(self.register_url, data, format="json") @@ -51,18 +54,23 
@@ class RegisterViewTest(APITestCase): def test_register_duplicate_email(self): """Test registration fails with duplicate email""" # Create existing user - UserFactory.create_user(email="test@example.com") + existing_user = UserFactory.create_user() - response = self.client.post(self.register_url, self.user_data, format="json") + # Use the same email as existing user + data = self.user_data.copy() + data["email"] = existing_user.email + + response = self.client.post(self.register_url, data, format="json") self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST) self.assertIn("email", response.data) def test_register_short_password(self): """Test registration fails with short password""" + short_password = fake.pystr(min_chars=3, max_chars=5) data = self.user_data.copy() - data["password"] = "short" - data["password_confirm"] = "short" + data["password"] = short_password + data["password_confirm"] = short_password response = self.client.post(self.register_url, data, format="json") @@ -74,12 +82,11 @@ class LoginViewTest(APITestCase): """Tests for LoginView""" def setUp(self): - self.login_url = reverse("login") - self.user = UserFactory.create_user() - self.user.set_password("testpass123") - self.user.save() + self.login_url = reverse("api_v1:user:login") + self.password = fake.password(length=12, special_chars=False) + self.user = UserFactory.create_user(password=self.password) - self.login_data = {"email": self.user.email, "password": "testpass123"} + self.login_data = {"email": self.user.email, "password": self.password} def test_login_success(self): """Test successful login""" @@ -92,7 +99,7 @@ class LoginViewTest(APITestCase): def test_login_invalid_credentials(self): """Test login fails with invalid credentials""" data = self.login_data.copy() - data["password"] = "wrongpass" + data["password"] = fake.password(length=12, special_chars=False) response = self.client.post(self.login_url, data, format="json") @@ -101,7 +108,10 @@ class 
LoginViewTest(APITestCase): def test_login_nonexistent_user(self): """Test login fails for nonexistent user""" - data = {"email": "nonexistent@example.com", "password": "testpass123"} + data = { + "email": fake.unique.email(), + "password": fake.password(length=12, special_chars=False), + } response = self.client.post(self.login_url, data, format="json") @@ -114,9 +124,9 @@ class CurrentUserViewTest(APITestCase): def setUp(self): self.user = UserFactory.create_user() ProfileFactory.create_profile(user=self.user) - self.current_user_url = reverse("current_user") + self.current_user_url = reverse("api_v1:user:current_user") self.tokens = UserService.get_tokens_for_user(self.user) - self.client.credentials(HTTP_AUTHORIZATION=f'Bearer {self.tokens["access"]}') + self.client.credentials(HTTP_AUTHORIZATION=f"Bearer {self.tokens['access']}") def test_get_current_user_authenticated(self): """Test getting current user when authenticated""" @@ -140,11 +150,14 @@ class UserUpdateViewTest(APITestCase): def setUp(self): self.user = UserFactory.create_user() - self.update_url = reverse("user_update") + self.update_url = reverse("api_v1:user:user_update") self.tokens = UserService.get_tokens_for_user(self.user) - self.client.credentials(HTTP_AUTHORIZATION=f'Bearer {self.tokens["access"]}') + self.client.credentials(HTTP_AUTHORIZATION=f"Bearer {self.tokens['access']}") - self.update_data = {"username": "updated_username", "phone": "+79991112233"} + self.update_data = { + "username": fake.unique.user_name(), + "phone": f"+7{fake.numerify('##########')}", + } def test_update_user_success(self): """Test successful user update""" @@ -172,14 +185,14 @@ class ProfileDetailViewTest(APITestCase): def setUp(self): self.user = UserFactory.create_user() self.profile = ProfileFactory.create_profile(user=self.user) - self.profile_url = reverse("profile_detail") + self.profile_url = reverse("api_v1:user:profile_detail") self.tokens = UserService.get_tokens_for_user(self.user) - 
self.client.credentials(HTTP_AUTHORIZATION=f'Bearer {self.tokens["access"]}') + self.client.credentials(HTTP_AUTHORIZATION=f"Bearer {self.tokens['access']}") self.update_data = { - "first_name": "John", - "last_name": "Doe", - "bio": "Updated bio", + "first_name": fake.first_name(), + "last_name": fake.last_name(), + "bio": fake.text(max_nb_chars=200), } def test_get_profile_success(self): @@ -217,17 +230,17 @@ class PasswordChangeViewTest(APITestCase): """Tests for PasswordChangeView""" def setUp(self): - self.user = UserFactory.create_user() - self.user.set_password("oldpass123") - self.user.save() - self.password_change_url = reverse("password_change") + self.old_password = fake.password(length=12, special_chars=False) + self.new_password = fake.password(length=12, special_chars=False) + self.user = UserFactory.create_user(password=self.old_password) + self.password_change_url = reverse("api_v1:user:password_change") self.tokens = UserService.get_tokens_for_user(self.user) - self.client.credentials(HTTP_AUTHORIZATION=f'Bearer {self.tokens["access"]}') + self.client.credentials(HTTP_AUTHORIZATION=f"Bearer {self.tokens['access']}") self.password_data = { - "old_password": "oldpass123", - "new_password": "newpass123", - "new_password_confirm": "newpass123", + "old_password": self.old_password, + "new_password": self.new_password, + "new_password_confirm": self.new_password, } def test_change_password_success(self): @@ -241,12 +254,12 @@ class PasswordChangeViewTest(APITestCase): # Verify password was changed self.user.refresh_from_db() - self.assertTrue(self.user.check_password("newpass123")) + self.assertTrue(self.user.check_password(self.new_password)) def test_change_password_wrong_old_password(self): """Test password change fails with wrong old password""" data = self.password_data.copy() - data["old_password"] = "wrongpass" + data["old_password"] = fake.password(length=12, special_chars=False) response = self.client.post(self.password_change_url, data, 
format="json") @@ -256,7 +269,7 @@ class PasswordChangeViewTest(APITestCase): def test_change_password_passwords_do_not_match(self): """Test password change fails when new passwords don't match""" data = self.password_data.copy() - data["new_password_confirm"] = "differentpass" + data["new_password_confirm"] = fake.password(length=12, special_chars=False) response = self.client.post(self.password_change_url, data, format="json") @@ -269,7 +282,7 @@ class TokenRefreshViewTest(APITestCase): def setUp(self): self.user = UserFactory.create_user() - self.refresh_url = reverse("token_refresh") + self.refresh_url = reverse("api_v1:user:token_refresh") self.tokens = UserService.get_tokens_for_user(self.user) def test_refresh_token_success(self): @@ -285,7 +298,7 @@ class TokenRefreshViewTest(APITestCase): def test_refresh_token_invalid(self): """Test token refresh fails with invalid refresh token""" - data = {"refresh": "invalid.token.string"} + data = {"refresh": fake.pystr(min_chars=20, max_chars=50)} response = self.client.post(self.refresh_url, data, format="json") self.assertEqual(response.status_code, status.HTTP_401_UNAUTHORIZED)