commit 1f3eeb9193971a78daf87815c9a92f13e52a8ac2 Author: zsh <3150957306@qq.com> Date: Thu Dec 4 10:04:21 2025 +0800 first commit diff --git a/_tmp_fix.py b/_tmp_fix.py new file mode 100644 index 0000000..3dd2294 --- /dev/null +++ b/_tmp_fix.py @@ -0,0 +1,10 @@ +# -*- coding: utf-8 -*- +from pathlib import Path +path = Path('frontend/app/components/RichEditor.vue') +data = path.read_text(encoding='utf-8') +data = data.replace("import { Table } from '@tiptap/extension-table'","import Table from '@tiptap/extension-table'") +data = data.replace("import { TableRow } from '@tiptap/extension-table-row'","import TableRow from '@tiptap/extension-table-row'") +data = data.replace("import { TableHeader } from '@tiptap/extension-table-header'","import TableHeader from '@tiptap/extension-table-header'") +data = data.replace("import { TableCell } from '@tiptap/extension-table-cell'","import TableCell from '@tiptap/extension-table-cell'") +data = data.replace('请输入正文,支持粘贴图片、截图、链接等�?', '请输入正文,支持粘贴图片、截图、链接等内容') +path.write_text(data, encoding='utf-8') diff --git a/backend/.dockerignore b/backend/.dockerignore new file mode 100644 index 0000000..71a0fe6 --- /dev/null +++ b/backend/.dockerignore @@ -0,0 +1,21 @@ +__pycache__ +*.pyc +*.pyo +*.pyd +.Python +.env* +pip-log.txt +pip-delete-this-directory.txt +.tox +.coverage +.coverage.* +.cache +nosetests.xml +coverage.xml +*,cover +*.log +.git* +tests +scripts +postman +./postgres-data diff --git a/backend/.env.example b/backend/.env.example new file mode 100644 index 0000000..1e94634 --- /dev/null +++ b/backend/.env.example @@ -0,0 +1,20 @@ +# 邮件发信配置 +MAIL_FROM=orjiance@163.com + +SMTP_HOST=smtp.163.com +SMTP_PORT=465 +SMTP_USER=orjiance@163.com +SMTP_PASSWORD=NFZqrTavzBGDQLyQ +SMTP_TLS=true + +# 验证码 +EMAIL_CODE_EXPIRES_MINUTES=60 +# 可选:逗号分隔的场景列表(不写就用默认) +# EMAIL_CODE_SCENES=register,reset,login + +# 逗号分隔的管理员邮箱,登录后自动授予 admin 权限 +# ADMIN_EMAILS=admin@example.com + +SECRET_KEY=secret +DEBUG=True 
+DATABASE_URL=postgresql://postgres:74110ZSH@localhost/aivise diff --git a/backend/.github/assets/logo.png b/backend/.github/assets/logo.png new file mode 100644 index 0000000..5e753f0 Binary files /dev/null and b/backend/.github/assets/logo.png differ diff --git a/backend/.github/dependabot.yml b/backend/.github/dependabot.yml new file mode 100644 index 0000000..668dfc0 --- /dev/null +++ b/backend/.github/dependabot.yml @@ -0,0 +1,20 @@ +version: 2 + +updates: + - package-ecosystem: pip + directory: "/" + schedule: + interval: monthly + time: "12:00" + pull-request-branch-name: + separator: "-" + open-pull-requests-limit: 10 + + - package-ecosystem: "github-actions" + directory: "/" + schedule: + interval: monthly + time: "12:00" + pull-request-branch-name: + separator: "-" + open-pull-requests-limit: 10 diff --git a/backend/.github/workflows/conduit.yml b/backend/.github/workflows/conduit.yml new file mode 100644 index 0000000..5e65fd0 --- /dev/null +++ b/backend/.github/workflows/conduit.yml @@ -0,0 +1,68 @@ +name: API spec + +on: + push: + branches: + - "master" + + pull_request: + branches: + - "*" + +jobs: + api-spec: + name: API spec tests + + runs-on: ubuntu-18.04 + + strategy: + matrix: + python-version: [3.9] + + services: + postgres: + image: postgres:11.5-alpine + env: + POSTGRES_USER: postgres + POSTGRES_PASSWORD: postgres + POSTGRES_DB: postgres + ports: + - 5432:5432 + options: --health-cmd pg_isready --health-interval 10s --health-timeout 5s --health-retries 5 + + steps: + - uses: actions/checkout@v3 + - name: Set up Python + uses: actions/setup-python@v4.2.0 + with: + python-version: ${{ matrix.python-version }} + + - name: Install Poetry + uses: snok/install-poetry@v1 + with: + version: "1.1.12" + virtualenvs-in-project: true + + - name: Set up cache + uses: actions/cache@v3 + id: cache + with: + path: .venv + key: venv-${{ runner.os }}-py-${{ matrix.python-version }}-poetry-${{ hashFiles('poetry.lock') }} + + - name: Ensure cache is healthy + if: 
steps.cache.outputs.cache-hit == 'true' + run: poetry run pip --version >/dev/null 2>&1 || rm -rf .venv + + - name: Install dependencies + run: poetry install --no-interaction + + - name: Run newman and test service + env: + SECRET_KEY: secret_key + DATABASE_URL: postgresql://postgres:postgres@localhost/postgres + run: | + poetry run alembic upgrade head + poetry run uvicorn app.main:app & + APIURL=http://localhost:8000/api ./postman/run-api-tests.sh + poetry run alembic downgrade base diff --git a/backend/.github/workflows/deploy.yml b/backend/.github/workflows/deploy.yml new file mode 100644 index 0000000..abeb63b --- /dev/null +++ b/backend/.github/workflows/deploy.yml @@ -0,0 +1,26 @@ +name: Deploy + +on: + push: + branches: + - master + +env: + IMAGE_NAME: nsidnev/fastapi-realworld-example-app + DOCKER_USER: ${{ secrets.DOCKER_USER }} + DOCKER_PASSWORD: ${{ secrets.DOCKER_PASSWORD }} + +jobs: + build: + name: Build Container + + runs-on: ubuntu-18.04 + + steps: + - uses: actions/checkout@v3 + + - name: Build image and publish to registry + run: | + docker build -t $IMAGE_NAME:latest . 
+ echo $DOCKER_PASSWORD | docker login -u $DOCKER_USER --password-stdin + docker push $IMAGE_NAME:latest diff --git a/backend/.github/workflows/styles.yml b/backend/.github/workflows/styles.yml new file mode 100644 index 0000000..7b96267 --- /dev/null +++ b/backend/.github/workflows/styles.yml @@ -0,0 +1,50 @@ +name: Styles + +on: + push: + branches: + - "master" + + pull_request: + branches: + - "*" + +jobs: + lint: + name: Lint code + + runs-on: ubuntu-18.04 + + strategy: + matrix: + python-version: [3.9] + + steps: + - uses: actions/checkout@v3 + - name: Set up Python + uses: actions/setup-python@v4.2.0 + with: + python-version: ${{ matrix.python-version }} + + - name: Install Poetry + uses: snok/install-poetry@v1 + with: + version: "1.1.12" + virtualenvs-in-project: true + + - name: Set up cache + uses: actions/cache@v3 + id: cache + with: + path: .venv + key: venv-${{ runner.os }}-py-${{ matrix.python-version }}-poetry-${{ hashFiles('poetry.lock') }} + + - name: Ensure cache is healthy + if: steps.cache.outputs.cache-hit == 'true' + run: poetry run pip --version >/dev/null 2>&1 || rm -rf .venv + + - name: Install dependencies + run: poetry install --no-interaction + + - name: Run linters + run: poetry run ./scripts/lint diff --git a/backend/.github/workflows/tests.yml b/backend/.github/workflows/tests.yml new file mode 100644 index 0000000..7179857 --- /dev/null +++ b/backend/.github/workflows/tests.yml @@ -0,0 +1,70 @@ +name: Tests + +on: + push: + branches: + - "master" + + pull_request: + branches: + - "*" + +jobs: + lint: + name: Run tests + + runs-on: ubuntu-18.04 + + strategy: + matrix: + python-version: [3.9] + + services: + postgres: + image: postgres:11.5-alpine + env: + POSTGRES_USER: postgres + POSTGRES_PASSWORD: postgres + POSTGRES_DB: postgres + ports: + - 5432:5432 + options: --health-cmd pg_isready --health-interval 10s --health-timeout 5s --health-retries 5 + + steps: + - uses: actions/checkout@v3 + + - name: Set up Python + uses: 
actions/setup-python@v4.2.0 + with: + python-version: ${{ matrix.python-version }} + + - name: Install Poetry + uses: snok/install-poetry@v1 + with: + version: "1.1.12" + virtualenvs-in-project: true + + - name: Set up cache + uses: actions/cache@v3 + id: cache + with: + path: .venv + key: venv-${{ runner.os }}-py-${{ matrix.python-version }}-poetry-${{ hashFiles('poetry.lock') }} + + - name: Ensure cache is healthy + if: steps.cache.outputs.cache-hit == 'true' + run: poetry run pip --version >/dev/null 2>&1 || rm -rf .venv + + - name: Install dependencies + run: poetry install --no-interaction + + - name: Run tests + env: + SECRET_KEY: secret_key + DATABASE_URL: postgresql://postgres:postgres@localhost/postgres + run: | + poetry run alembic upgrade head + poetry run ./scripts/test + + - name: Upload coverage to Codecov + uses: codecov/codecov-action@v3.1.0 diff --git a/backend/.gitignore b/backend/.gitignore new file mode 100644 index 0000000..ab61e76 --- /dev/null +++ b/backend/.gitignore @@ -0,0 +1,110 @@ +# Byte-compiled / optimized / DLL files +__pycache__/ +*.py[cod] +*$py.class + +# C extensions +*.so + +# Distribution / packaging +.Python +build/ +develop-eggs/ +dist/ +downloads/ +eggs/ +.eggs/ +lib/ +lib64/ +parts/ +sdist/ +var/ +wheels/ +*.egg-info/ +.installed.cfg +*.egg +MANIFEST + +# PyInstaller +# Usually these files are written by a python script from a template +# before PyInstaller builds the exe, so as to inject date/other infos into it. 
+*.manifest +*.spec + +# Installer logs +pip-log.txt +pip-delete-this-directory.txt + +# Unit test / coverage reports +htmlcov/ +.tox/ +.coverage +.coverage.* +.cache +nosetests.xml +coverage.xml +*.cover +.hypothesis/ +.pytest_cache/ + +# Translations +*.mo +*.pot + +# Django stuff: +*.log +local_settings.py +db.sqlite3 + +# Flask stuff: +instance/ +.webassets-cache + +# Scrapy stuff: +.scrapy + +# Sphinx documentation +docs/_build/ + +# PyBuilder +target/ + +# Jupyter Notebook +.ipynb_checkpoints + +# pyenv +.python-version + +# celery beat schedule file +celerybeat-schedule + +# SageMath parsed files +*.sage.py + +# Environments +.env +.venv +env/ +venv/ +ENV/ +env.bak/ +venv.bak/ + +# Spyder project settings +.spyderproject +.spyproject + +# Rope project settings +.ropeproject + +# mkdocs documentation +/site + +# mypy +.mypy_cache/ + +.idea/ +.vscode/ + +# Project +postgres-data diff --git a/backend/Dockerfile b/backend/Dockerfile new file mode 100644 index 0000000..2caa840 --- /dev/null +++ b/backend/Dockerfile @@ -0,0 +1,21 @@ +FROM python:3.9.10-slim + +ENV PYTHONUNBUFFERED 1 + +EXPOSE 8000 +WORKDIR /app + + +RUN apt-get update && \ + apt-get install -y --no-install-recommends netcat && \ + rm -rf /var/lib/apt/lists/* /tmp/* /var/tmp/* + +COPY poetry.lock pyproject.toml ./ +RUN pip install poetry==1.1 && \ + poetry config virtualenvs.in-project true && \ + poetry install --no-dev + +COPY . 
./ + +CMD poetry run alembic upgrade head && \ + poetry run uvicorn --host=0.0.0.0 app.main:app diff --git a/backend/LICENSE b/backend/LICENSE new file mode 100644 index 0000000..c33674b --- /dev/null +++ b/backend/LICENSE @@ -0,0 +1,21 @@ +MIT License + +Copyright (c) 2019 Nik Sidnev + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. \ No newline at end of file diff --git a/backend/README.rst b/backend/README.rst new file mode 100644 index 0000000..b84e137 --- /dev/null +++ b/backend/README.rst @@ -0,0 +1,157 @@ +.. image:: ./.github/assets/logo.png + +| + +.. image:: https://github.com/nsidnev/fastapi-realworld-example-app/workflows/API%20spec/badge.svg + :target: https://github.com/nsidnev/fastapi-realworld-example-app + +.. image:: https://github.com/nsidnev/fastapi-realworld-example-app/workflows/Tests/badge.svg + :target: https://github.com/nsidnev/fastapi-realworld-example-app + +.. 
image:: https://github.com/nsidnev/fastapi-realworld-example-app/workflows/Styles/badge.svg + :target: https://github.com/nsidnev/fastapi-realworld-example-app + +.. image:: https://codecov.io/gh/nsidnev/fastapi-realworld-example-app/branch/master/graph/badge.svg + :target: https://codecov.io/gh/nsidnev/fastapi-realworld-example-app + +.. image:: https://img.shields.io/github/license/Naereen/StrapDown.js.svg + :target: https://github.com/nsidnev/fastapi-realworld-example-app/blob/master/LICENSE + +.. image:: https://img.shields.io/badge/code%20style-black-000000.svg + :target: https://github.com/ambv/black + +.. image:: https://img.shields.io/badge/style-wemake-000000.svg + :target: https://github.com/wemake-services/wemake-python-styleguide + +---------- + +**NOTE**: This repository is not actively maintained because this example is quite complete and does its primary goal - passing Conduit testsuite. + +More modern and relevant examples can be found in other repositories with ``fastapi`` tag on GitHub. + +Quickstart +---------- + +First, run ``PostgreSQL``, set environment variables and create database. 
For example using ``docker``: :: + + export POSTGRES_DB=rwdb POSTGRES_PORT=5432 POSTGRES_USER=postgres POSTGRES_PASSWORD=postgres + docker run --name pgdb --rm -e POSTGRES_USER="$POSTGRES_USER" -e POSTGRES_PASSWORD="$POSTGRES_PASSWORD" -e POSTGRES_DB="$POSTGRES_DB" postgres + export POSTGRES_HOST=$(docker inspect -f '{{range .NetworkSettings.Networks}}{{.IPAddress}}{{end}}' pgdb) + createdb --host=$POSTGRES_HOST --port=$POSTGRES_PORT --username=$POSTGRES_USER $POSTGRES_DB + +Then run the following commands to bootstrap your environment with ``poetry``: :: + + git clone https://github.com/nsidnev/fastapi-realworld-example-app + cd fastapi-realworld-example-app + poetry install + poetry shell + +Then create ``.env`` file (or rename and modify ``.env.example``) in project root and set environment variables for application: :: + + touch .env + echo APP_ENV=dev >> .env + echo DATABASE_URL=postgresql://$POSTGRES_USER:$POSTGRES_PASSWORD@$POSTGRES_HOST:$POSTGRES_PORT/$POSTGRES_DB >> .env + echo SECRET_KEY=$(openssl rand -hex 32) >> .env + +To run the web application in debug use:: + + alembic upgrade head + uvicorn app.main:app --reload + +If you run into the following error in your docker container: + + sqlalchemy.exc.OperationalError: (psycopg2.OperationalError) could not connect to server: No such file or directory + Is the server running locally and accepting + connections on Unix domain socket "/tmp/.s.PGSQL.5432"? + +Ensure the DATABASE_URL variable is set correctly in the `.env` file. +It is most likely caused by POSTGRES_HOST not pointing to its localhost. + + DATABASE_URL=postgresql://postgres:postgres@0.0.0.0:5432/rwdb + + + +Run tests +--------- + +Tests for this project are defined in the ``tests/`` folder. + +Set up environment variable ``DATABASE_URL`` or set up ``database_url`` in ``app/core/settings/test.py`` + +This project uses `pytest +`_ to define tests because it allows you to use the ``assert`` keyword with good formatting for failed assertations. 
+ + +To run all the tests of a project, simply run the ``pytest`` command: :: + + $ pytest + ================================================= test session starts ================================================== + platform linux -- Python 3.8.3, pytest-5.4.2, py-1.8.1, pluggy-0.13.1 + rootdir: /home/some-user/user-projects/fastapi-realworld-example-app, inifile: setup.cfg, testpaths: tests + plugins: env-0.6.2, cov-2.9.0, asyncio-0.12.0 + collected 90 items + + tests/test_api/test_errors/test_422_error.py . [ 1%] + tests/test_api/test_errors/test_error.py . [ 2%] + tests/test_api/test_routes/test_articles.py ................................. [ 38%] + tests/test_api/test_routes/test_authentication.py .. [ 41%] + tests/test_api/test_routes/test_comments.py .... [ 45%] + tests/test_api/test_routes/test_login.py ... [ 48%] + tests/test_api/test_routes/test_profiles.py ............ [ 62%] + tests/test_api/test_routes/test_registration.py ... [ 65%] + tests/test_api/test_routes/test_tags.py .. [ 67%] + tests/test_api/test_routes/test_users.py .................... [ 90%] + tests/test_db/test_queries/test_tables.py ... [ 93%] + tests/test_schemas/test_rw_model.py . [ 94%] + tests/test_services/test_jwt.py ..... [100%] + + ============================================ 90 passed in 70.50s (0:01:10) ============================================= + $ + +If you want to run a specific test, you can do this with `this +`_ pytest feature: :: + + $ pytest tests/test_api/test_routes/test_users.py::test_user_can_not_take_already_used_credentials + +Deployment with Docker +---------------------- + +You must have ``docker`` and ``docker-compose`` tools installed to work with material in this section. +First, create ``.env`` file like in `Quickstart` section or modify ``.env.example``. +``POSTGRES_HOST`` must be specified as `db` or modified in ``docker-compose.yml`` also. 
+Then just run:: + + docker-compose up -d db + docker-compose up -d app + +Application will be available on ``localhost`` in your browser. + +Web routes +---------- + +All routes are available on ``/docs`` or ``/redoc`` paths with Swagger or ReDoc. + + +Project structure +----------------- + +Files related to application are in the ``app`` or ``tests`` directories. +Application parts are: + +:: + + app + ├── api - web related stuff. + │   ├── dependencies - dependencies for routes definition. + │   ├── errors - definition of error handlers. + │   └── routes - web routes. + ├── core - application configuration, startup events, logging. + ├── db - db related stuff. + │   ├── migrations - manually written alembic migrations. + │   └── repositories - all crud stuff. + ├── models - pydantic models for this application. + │   ├── domain - main models that are used almost everywhere. + │   └── schemas - schemas for using in web routes. + ├── resources - strings that are used in web responses. + ├── services - logic that is not just crud related. + └── main.py - FastAPI application creation and configuration. 
diff --git a/backend/alembic.ini b/backend/alembic.ini new file mode 100644 index 0000000..2c43e60 --- /dev/null +++ b/backend/alembic.ini @@ -0,0 +1,36 @@ +[alembic] +script_location = ./app/db/migrations + +[loggers] +keys = root,sqlalchemy,alembic + +[handlers] +keys = console + +[formatters] +keys = generic + +[logger_root] +level = WARN +handlers = console +qualname = + +[logger_sqlalchemy] +level = WARN +handlers = +qualname = sqlalchemy.engine + +[logger_alembic] +level = INFO +handlers = +qualname = alembic + +[handler_console] +class = StreamHandler +args = (sys.stderr,) +level = NOTSET +formatter = generic + +[formatter_generic] +format = %(levelname)-5.5s [%(name)s] %(message)s +datefmt = %H:%M:%S diff --git a/backend/app/__init__.py b/backend/app/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/backend/app/api/__init__.py b/backend/app/api/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/backend/app/api/dependencies/__init__.py b/backend/app/api/dependencies/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/backend/app/api/dependencies/admin.py b/backend/app/api/dependencies/admin.py new file mode 100644 index 0000000..91f3b12 --- /dev/null +++ b/backend/app/api/dependencies/admin.py @@ -0,0 +1,70 @@ +from fastapi import Depends, HTTPException, status +from loguru import logger + +from app.api.dependencies.authentication import get_current_user_authorizer +from app.api.dependencies.database import get_repository +from app.db.repositories.roles import RolesRepository +from app.models.domain.users import User +from app.core.config import get_app_settings + +ADMIN_ROLE_NAME = "admin" + + +async def get_admin_user( + current_user: User = Depends(get_current_user_authorizer()), + roles_repo: RolesRepository = Depends(get_repository(RolesRepository)), +) -> User: + user_id = getattr(current_user, "id", None) or getattr(current_user, "id_", None) + if not user_id: + logger.warning("[AdminAccess] 
missing user_id, deny") + raise HTTPException( + status_code=status.HTTP_403_FORBIDDEN, + detail="Insufficient permissions", + ) + + # 调试日志,观察鉴权上下文 + logger.info( + "[AdminAccess] current_user id={id} email={email} roles={roles}", + id=user_id, + email=getattr(current_user, "email", None), + roles=getattr(current_user, "roles", None), + ) + try: + logger.info( + "[AdminAccess] current_user dump={}", + current_user.model_dump() if hasattr(current_user, "model_dump") else vars(current_user), + ) + except Exception as exc: + logger.warning("[AdminAccess] dump error: {}", exc) + + # 先看用户自身的 roles 是否已包含 admin + roles_from_user = getattr(current_user, "roles", []) or [] + if isinstance(roles_from_user, (list, tuple)) and "admin" in roles_from_user: + logger.info("[AdminAccess] allow via user.roles contains admin") + return current_user + + # 再看是否在信任邮箱名单,避免“还未赋权”时卡死 + settings = get_app_settings() + trusted_emails = { + email.strip().lower() + for email in getattr(settings, "admin_emails", []) or [] + if email + } + email = getattr(current_user, "email", None) + if email and email.lower() in trusted_emails: + logger.info("[AdminAccess] allow via trusted email list") + return current_user + + has_role = await roles_repo.user_has_role( + user_id=user_id, + role_name=ADMIN_ROLE_NAME, + ) + if has_role: + logger.info("[AdminAccess] allow via DB role check") + return current_user + + logger.warning("[AdminAccess] deny id={id} email={email}", id=user_id, email=email) + raise HTTPException( + status_code=status.HTTP_403_FORBIDDEN, + detail="Insufficient permissions", + ) diff --git a/backend/app/api/dependencies/articles.py b/backend/app/api/dependencies/articles.py new file mode 100644 index 0000000..03038f3 --- /dev/null +++ b/backend/app/api/dependencies/articles.py @@ -0,0 +1,67 @@ +from typing import List, Optional + +from fastapi import Depends, HTTPException, Path, Query +from starlette import status + +from app.api.dependencies.authentication import 
get_current_user_authorizer +from app.api.dependencies.database import get_repository +from app.db.errors import EntityDoesNotExist +from app.db.repositories.articles import ArticlesRepository +from app.models.domain.articles import Article +from app.models.domain.users import User +from app.models.schemas.articles import ( + DEFAULT_ARTICLES_LIMIT, + DEFAULT_ARTICLES_OFFSET, + ArticlesFilters, +) +from app.resources import strings +from app.services.articles import check_user_can_modify_article + + +def get_articles_filters( + tag: Optional[str] = None, + tags: Optional[List[str]] = Query(default=None), + author: Optional[str] = None, + favorited: Optional[str] = None, + search: Optional[str] = Query(default=None, description="搜索标题/描述/slug"), + limit: int = Query(DEFAULT_ARTICLES_LIMIT, ge=1), + offset: int = Query(DEFAULT_ARTICLES_OFFSET, ge=0), +) -> ArticlesFilters: + final_tags: Optional[List[str]] = tags + # 兼容旧的 tag 单值参数:若提供则与 tags 合并并去重 + if tag: + final_tags = list({*(final_tags or []), tag}) + return ArticlesFilters( + tag=tag, + tags=final_tags, + author=author, + favorited=favorited, + search=search, + limit=limit, + offset=offset, + ) + + +async def get_article_by_slug_from_path( + slug: str = Path(..., min_length=1), + user: Optional[User] = Depends(get_current_user_authorizer(required=False)), + articles_repo: ArticlesRepository = Depends(get_repository(ArticlesRepository)), +) -> Article: + try: + return await articles_repo.get_article_by_slug(slug=slug, requested_user=user) + except EntityDoesNotExist: + raise HTTPException( + status_code=status.HTTP_404_NOT_FOUND, + detail=strings.ARTICLE_DOES_NOT_EXIST_ERROR, + ) + + +def check_article_modification_permissions( + current_article: Article = Depends(get_article_by_slug_from_path), + user: User = Depends(get_current_user_authorizer()), +) -> None: + if not check_user_can_modify_article(current_article, user): + raise HTTPException( + status_code=status.HTTP_403_FORBIDDEN, + 
detail=strings.USER_IS_NOT_AUTHOR_OF_ARTICLE, + ) diff --git a/backend/app/api/dependencies/authentication.py b/backend/app/api/dependencies/authentication.py new file mode 100644 index 0000000..3ad064b --- /dev/null +++ b/backend/app/api/dependencies/authentication.py @@ -0,0 +1,123 @@ +# noqa:WPS201 +from typing import Callable, Optional + +from fastapi import Depends, HTTPException, Security +from fastapi.security import APIKeyHeader +from starlette import requests, status +from starlette.exceptions import HTTPException as StarletteHTTPException + +from app.api.dependencies.database import get_repository +from app.core.config import get_app_settings +from app.core.settings.app import AppSettings +from app.db.errors import EntityDoesNotExist +from app.db.repositories.users import UsersRepository +from app.models.domain.users import User +from app.resources import strings +from app.services import jwt + +HEADER_KEY = "Authorization" + + +class RWAPIKeyHeader(APIKeyHeader): + async def __call__( # noqa: WPS610 + self, + request: requests.Request, + ) -> Optional[str]: + try: + return await super().__call__(request) + except StarletteHTTPException as original_auth_exc: + raise HTTPException( + status_code=original_auth_exc.status_code, + detail=strings.AUTHENTICATION_REQUIRED, + ) + + +def get_current_user_authorizer(*, required: bool = True) -> Callable: # type: ignore + return _get_current_user if required else _get_current_user_optional + + +def _get_authorization_header_retriever( + *, + required: bool = True, +) -> Callable: # type: ignore + return _get_authorization_header if required else _get_authorization_header_optional + + +def _get_authorization_header( + api_key: str = Security(RWAPIKeyHeader(name=HEADER_KEY)), + settings: AppSettings = Depends(get_app_settings), +) -> str: + try: + token_prefix, token = api_key.split(" ") + except ValueError: + raise HTTPException( + status_code=status.HTTP_403_FORBIDDEN, + detail=strings.WRONG_TOKEN_PREFIX, + ) + if 
token_prefix != settings.jwt_token_prefix: + raise HTTPException( + status_code=status.HTTP_403_FORBIDDEN, + detail=strings.WRONG_TOKEN_PREFIX, + ) + + return token + + +def _get_authorization_header_optional( + authorization: Optional[str] = Security( + RWAPIKeyHeader(name=HEADER_KEY, auto_error=False), + ), + settings: AppSettings = Depends(get_app_settings), +) -> str: + if authorization: + return _get_authorization_header(authorization, settings) + + return "" + + +async def _get_current_user( + users_repo: UsersRepository = Depends(get_repository(UsersRepository)), + token: str = Depends(_get_authorization_header_retriever()), + settings: AppSettings = Depends(get_app_settings), +) -> User: + try: + username = jwt.get_username_from_token( + token, + str(settings.secret_key.get_secret_value()), + ) + except ValueError: + raise HTTPException( + status_code=status.HTTP_403_FORBIDDEN, + detail=strings.MALFORMED_PAYLOAD, + ) + + try: + user = await users_repo.get_user_by_username(username=username) + try: + from loguru import logger # local import to avoid global dependency if not installed + logger.info( + "[Auth] fetched user username={} id/id_={}/{} roles={}", + getattr(user, "username", None), + getattr(user, "id", None), + getattr(user, "id_", None), + getattr(user, "roles", None), + ) + except Exception: + pass + return user + except EntityDoesNotExist: + raise HTTPException( + status_code=status.HTTP_403_FORBIDDEN, + detail=strings.MALFORMED_PAYLOAD, + ) + + +async def _get_current_user_optional( + repo: UsersRepository = Depends(get_repository(UsersRepository)), + token: str = Depends(_get_authorization_header_retriever(required=False)), + settings: AppSettings = Depends(get_app_settings), +) -> Optional[User]: + if token: + return await _get_current_user(repo, token, settings) + + return None diff --git a/backend/app/api/dependencies/comments.py b/backend/app/api/dependencies/comments.py new file mode 100644 index 0000000..c00a077 --- /dev/null +++ 
b/backend/app/api/dependencies/comments.py @@ -0,0 +1,47 @@ +from typing import Optional + +from fastapi import Depends, HTTPException, Path +from starlette import status + +from app.api.dependencies import articles, authentication, database +from app.db.errors import EntityDoesNotExist +from app.db.repositories.comments import CommentsRepository +from app.models.domain.articles import Article +from app.models.domain.comments import Comment +from app.models.domain.users import User +from app.resources import strings +from app.services.comments import check_user_can_modify_comment + + +async def get_comment_by_id_from_path( + comment_id: int = Path(..., ge=1), + article: Article = Depends(articles.get_article_by_slug_from_path), + user: Optional[User] = Depends( + authentication.get_current_user_authorizer(required=False), + ), + comments_repo: CommentsRepository = Depends( + database.get_repository(CommentsRepository), + ), +) -> Comment: + try: + return await comments_repo.get_comment_by_id( + comment_id=comment_id, + article=article, + user=user, + ) + except EntityDoesNotExist: + raise HTTPException( + status_code=status.HTTP_404_NOT_FOUND, + detail=strings.COMMENT_DOES_NOT_EXIST, + ) + + +def check_comment_modification_permissions( + comment: Comment = Depends(get_comment_by_id_from_path), + user: User = Depends(authentication.get_current_user_authorizer()), +) -> None: + if not check_user_can_modify_comment(comment, user): + raise HTTPException( + status_code=status.HTTP_403_FORBIDDEN, + detail=strings.USER_IS_NOT_AUTHOR_OF_ARTICLE, + ) diff --git a/backend/app/api/dependencies/database.py b/backend/app/api/dependencies/database.py new file mode 100644 index 0000000..29c134a --- /dev/null +++ b/backend/app/api/dependencies/database.py @@ -0,0 +1,45 @@ +# app/api/dependencies/database.py +from typing import AsyncIterator, Callable, Type + +from asyncpg import Connection, Pool +from fastapi import Depends +from starlette.requests import Request + +from 
app.db.repositories.base import BaseRepository + + +def _get_db_pool(request: Request) -> Pool: + """ + 从 app.state.pool 取得连接池;若未初始化给出清晰报错。 + """ + pool = getattr(request.app.state, "pool", None) + if pool is None: + raise RuntimeError("Database pool not initialized on app.state.pool") + return pool + + +async def _get_connection_from_pool( + pool: Pool = Depends(_get_db_pool), +) -> AsyncIterator[Connection]: + """ + 私有实现:从连接池借出一个连接,使用后自动归还。 + """ + async with pool.acquire() as conn: + yield conn + + +# ✅ 公共别名:供路由里直接使用 Depends(get_connection) +get_connection = _get_connection_from_pool + + +def get_repository( + repo_type: Type[BaseRepository], +) -> Callable[[Connection], BaseRepository]: + """ + 兼容旧用法:Depends(get_repository(UserRepo)) + 内部依赖 get_connection,因此两种写法都能共存。 + """ + def _get_repo(conn: Connection = Depends(get_connection)) -> BaseRepository: + return repo_type(conn) + + return _get_repo diff --git a/backend/app/api/dependencies/profiles.py b/backend/app/api/dependencies/profiles.py new file mode 100644 index 0000000..db8f9b0 --- /dev/null +++ b/backend/app/api/dependencies/profiles.py @@ -0,0 +1,29 @@ +from typing import Optional + +from fastapi import Depends, HTTPException, Path +from starlette.status import HTTP_404_NOT_FOUND + +from app.api.dependencies.authentication import get_current_user_authorizer +from app.api.dependencies.database import get_repository +from app.db.errors import EntityDoesNotExist +from app.db.repositories.profiles import ProfilesRepository +from app.models.domain.profiles import Profile +from app.models.domain.users import User +from app.resources import strings + + +async def get_profile_by_username_from_path( + username: str = Path(..., min_length=1), + user: Optional[User] = Depends(get_current_user_authorizer(required=False)), + profiles_repo: ProfilesRepository = Depends(get_repository(ProfilesRepository)), +) -> Profile: + try: + return await profiles_repo.get_profile_by_username( + username=username, + 
requested_user=user, + ) + except EntityDoesNotExist: + raise HTTPException( + status_code=HTTP_404_NOT_FOUND, + detail=strings.USER_DOES_NOT_EXIST_ERROR, + ) diff --git a/backend/app/api/errors/__init__.py b/backend/app/api/errors/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/backend/app/api/errors/http_error.py b/backend/app/api/errors/http_error.py new file mode 100644 index 0000000..c503229 --- /dev/null +++ b/backend/app/api/errors/http_error.py @@ -0,0 +1,7 @@ +from fastapi import HTTPException +from starlette.requests import Request +from starlette.responses import JSONResponse + + +async def http_error_handler(_: Request, exc: HTTPException) -> JSONResponse: + return JSONResponse({"errors": [exc.detail]}, status_code=exc.status_code) diff --git a/backend/app/api/errors/validation_error.py b/backend/app/api/errors/validation_error.py new file mode 100644 index 0000000..a85730c --- /dev/null +++ b/backend/app/api/errors/validation_error.py @@ -0,0 +1,28 @@ +from typing import Union + +from fastapi.exceptions import RequestValidationError +from fastapi.openapi.constants import REF_PREFIX +from fastapi.openapi.utils import validation_error_response_definition +from pydantic import ValidationError +from starlette.requests import Request +from starlette.responses import JSONResponse +from starlette.status import HTTP_422_UNPROCESSABLE_ENTITY + + +async def http422_error_handler( + _: Request, + exc: Union[RequestValidationError, ValidationError], +) -> JSONResponse: + return JSONResponse( + {"errors": exc.errors()}, + status_code=HTTP_422_UNPROCESSABLE_ENTITY, + ) + + +validation_error_response_definition["properties"] = { + "errors": { + "title": "Errors", + "type": "array", + "items": {"$ref": "{0}ValidationError".format(REF_PREFIX)}, + }, +} diff --git a/backend/app/api/routes/__init__.py b/backend/app/api/routes/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/backend/app/api/routes/admin.py 
# --- app/api/routes/admin.py -------------------------------------------------
from typing import List, Optional

from fastapi import APIRouter, Body, Depends, HTTPException, Query, status

from app.api.dependencies.admin import get_admin_user
from app.api.dependencies.database import get_repository
from app.db.repositories.admin import AdminRepository
from app.db.repositories.articles import ArticlesRepository
from app.db.repositories.home_featured import HomeFeaturedRepository
from app.db.repositories.menu_slots import DEFAULT_MENU_SLOTS, MenuSlotsRepository
from app.db.repositories.roles import RolesRepository
from app.db.repositories.users import UsersRepository
from app.models.domain.users import User
from app.models.schemas.admin import (
    AdminHomeFeaturedUpdate,
    AdminMenuSlot,
    AdminMenuSlotListResponse,
    AdminMenuSlotResponse,
    AdminMenuSlotUpdate,
    AdminDashboardStats,
    AdminUserCreate,
    AdminUserListResponse,
    AdminUserResponse,
    AdminUserUpdate,
)
from app.models.schemas.articles import (
    ArticleForResponse,
    ArticleInResponse,
    ArticleInUpdate,
    ListOfArticlesInResponse,
)
from app.models.schemas.roles import (
    ListOfRolesInResponse,
    RoleInCreate,
    RoleInResponse,
    RoleInUpdate,
)
from app.services.articles import get_slug_for_article

router = APIRouter(prefix="/admin", tags=["admin"])

DEFAULT_MENU_SLOT_KEYS = {slot["slot_key"] for slot in DEFAULT_MENU_SLOTS}


def _default_slot_label(slot_key: str) -> str:
    """Return the built-in label for *slot_key*, or the key itself as fallback.

    Extracted because the same lookup was duplicated in list_menu_slots and
    update_menu_slot.
    """
    return next(
        (slot["label"] for slot in DEFAULT_MENU_SLOTS if slot["slot_key"] == slot_key),
        slot_key,
    )


def _slot_from_row(row) -> AdminMenuSlot:
    """Build an AdminMenuSlot from a repository row, filling label/tags defaults."""
    return AdminMenuSlot(
        slot_key=row["slot_key"],
        label=row["label"] or _default_slot_label(row["slot_key"]),
        tags=row["tags"] or [],
        created_at=row.get("created_at"),
        updated_at=row.get("updated_at"),
    )


@router.get("/dashboard", response_model=AdminDashboardStats, name="admin:dashboard")
async def get_dashboard_stats(
    _: User = Depends(get_admin_user),
    admin_repo: AdminRepository = Depends(get_repository(AdminRepository)),
) -> AdminDashboardStats:
    """Aggregate counters for the admin dashboard (admin-only)."""
    return await admin_repo.get_dashboard_stats()


@router.get("/users", response_model=AdminUserListResponse, name="admin:list-users")
async def list_admin_users(
    _: User = Depends(get_admin_user),
    search: Optional[str] = Query(default=None, description="Search by username/email"),
    role_id: Optional[int] = Query(default=None),
    limit: int = Query(default=20, ge=1, le=100),
    offset: int = Query(default=0, ge=0),
    admin_repo: AdminRepository = Depends(get_repository(AdminRepository)),
) -> AdminUserListResponse:
    """Paginated user listing with optional search / role filtering."""
    users, total = await admin_repo.list_users(
        search=search,
        role_id=role_id,
        limit=limit,
        offset=offset,
    )
    return AdminUserListResponse(users=users, total=total)


@router.post(
    "/users",
    response_model=AdminUserResponse,
    status_code=status.HTTP_201_CREATED,
    name="admin:create-user",
)
async def create_admin_user(
    _: User = Depends(get_admin_user),
    payload: AdminUserCreate = Body(..., embed=True, alias="user"),
    users_repo: UsersRepository = Depends(get_repository(UsersRepository)),
    roles_repo: RolesRepository = Depends(get_repository(RolesRepository)),
    admin_repo: AdminRepository = Depends(get_repository(AdminRepository)),
) -> AdminUserResponse:
    """Create a user, optionally setting profile fields and role assignments."""
    user = await users_repo.create_user(
        username=payload.username,
        email=payload.email,
        password=payload.password,
    )
    # Optional profile info supplied at creation time.
    if payload.bio or payload.image:
        user = await users_repo.update_user_by_id(
            user_id=user.id,
            bio=payload.bio,
            image=payload.image,
        )
    if payload.role_ids:
        await roles_repo.set_roles_for_user(
            user_id=user.id,
            role_ids=payload.role_ids,
        )
    summary = await admin_repo.get_user_summary(user.id)
    if not summary:
        # Use the status constant for consistency with the other routes
        # (was a bare literal 500).
        raise HTTPException(
            status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
            detail="Failed to load created user",
        )
    return AdminUserResponse(user=summary)


@router.put(
    "/users/{user_id}",
    response_model=AdminUserResponse,
    name="admin:update-user",
)
async def update_admin_user(
    _: User = Depends(get_admin_user),
    user_id: int = 0,
    payload: AdminUserUpdate = Body(..., embed=True, alias="user"),
    users_repo: UsersRepository = Depends(get_repository(UsersRepository)),
    roles_repo: RolesRepository = Depends(get_repository(RolesRepository)),
    admin_repo: AdminRepository = Depends(get_repository(AdminRepository)),
) -> AdminUserResponse:
    """Update account / profile fields; role_ids=None means "leave roles alone"."""
    await users_repo.update_user_by_id(
        user_id=user_id,
        username=payload.username,
        email=payload.email,
        password=payload.password,
        bio=payload.bio,
        image=payload.image,
    )
    if payload.role_ids is not None:
        await roles_repo.set_roles_for_user(
            user_id=user_id,
            role_ids=payload.role_ids,
        )
    summary = await admin_repo.get_user_summary(user_id)
    if not summary:
        raise HTTPException(status_code=status.HTTP_404_NOT_FOUND, detail="User not found")
    return AdminUserResponse(user=summary)


@router.delete(
    "/users/{user_id}",
    status_code=status.HTTP_204_NO_CONTENT,
    name="admin:delete-user",
)
async def delete_admin_user(
    _: User = Depends(get_admin_user),
    user_id: int = 0,
    users_repo: UsersRepository = Depends(get_repository(UsersRepository)),
) -> None:
    """Hard-delete the user row (no soft-delete visible at this layer)."""
    await users_repo.delete_user_by_id(user_id=user_id)


@router.get("/roles", response_model=ListOfRolesInResponse, name="admin:list-roles")
async def list_roles(
    _: User = Depends(get_admin_user),
    roles_repo: RolesRepository = Depends(get_repository(RolesRepository)),
) -> ListOfRolesInResponse:
    """List every role definition."""
    roles = await roles_repo.list_roles()
    return ListOfRolesInResponse(roles=roles)


@router.post(
    "/roles",
    response_model=RoleInResponse,
    status_code=status.HTTP_201_CREATED,
    name="admin:create-role",
)
async def create_role(
    _: User = Depends(get_admin_user),
    payload: RoleInCreate = Body(..., embed=True, alias="role"),
    roles_repo: RolesRepository = Depends(get_repository(RolesRepository)),
) -> RoleInResponse:
    """Create a role; a missing description defaults to the empty string."""
    role = await roles_repo.create_role(
        name=payload.name,
        description=payload.description or "",
        permissions=payload.permissions,
    )
    return RoleInResponse(role=role)


@router.put(
    "/roles/{role_id}",
    response_model=RoleInResponse,
    name="admin:update-role",
)
async def update_role(
    _: User = Depends(get_admin_user),
    role_id: int = 0,
    payload: RoleInUpdate = Body(..., embed=True, alias="role"),
    roles_repo: RolesRepository = Depends(get_repository(RolesRepository)),
) -> RoleInResponse:
    """Update name/description/permissions of an existing role."""
    role = await roles_repo.update_role(
        role_id=role_id,
        name=payload.name,
        description=payload.description,
        permissions=payload.permissions,
    )
    return RoleInResponse(role=role)


@router.delete(
    "/roles/{role_id}",
    status_code=status.HTTP_204_NO_CONTENT,
    name="admin:delete-role",
)
async def delete_role(
    _: User = Depends(get_admin_user),
    role_id: int = 0,
    roles_repo: RolesRepository = Depends(get_repository(RolesRepository)),
) -> None:
    """Delete a role definition."""
    await roles_repo.delete_role(role_id=role_id)


@router.get(
    "/menu-slots",
    response_model=AdminMenuSlotListResponse,
    name="admin:list-menu-slots",
)
async def list_menu_slots(
    _: User = Depends(get_admin_user),
    menu_slots_repo: MenuSlotsRepository = Depends(get_repository(MenuSlotsRepository)),
) -> AdminMenuSlotListResponse:
    """List all configured menu slots, falling back to built-in labels."""
    rows = await menu_slots_repo.list_slots()
    slots: List[AdminMenuSlot] = [_slot_from_row(row) for row in rows]
    return AdminMenuSlotListResponse(slots=slots)


@router.put(
    "/menu-slots/{slot_key}",
    response_model=AdminMenuSlotResponse,
    name="admin:update-menu-slot",
)
async def update_menu_slot(
    _: User = Depends(get_admin_user),
    slot_key: str = "",
    payload: AdminMenuSlotUpdate = Body(..., embed=True, alias="slot"),
    menu_slots_repo: MenuSlotsRepository = Depends(get_repository(MenuSlotsRepository)),
) -> AdminMenuSlotResponse:
    """Upsert label/tags for one of the built-in menu slots (others rejected)."""
    if slot_key not in DEFAULT_MENU_SLOT_KEYS:
        raise HTTPException(
            status_code=status.HTTP_400_BAD_REQUEST,
            detail="Invalid menu slot",
        )

    row = await menu_slots_repo.upsert_slot_tags(
        slot_key=slot_key,
        tags=payload.tags,
        label=payload.label,
    )
    return AdminMenuSlotResponse(slot=_slot_from_row(row))


@router.get(
    "/home-featured-articles",
    response_model=ListOfArticlesInResponse,
    name="admin:list-home-featured-articles",
)
async def list_home_featured_articles_admin(
    _: User = Depends(get_admin_user),
    home_repo: HomeFeaturedRepository = Depends(get_repository(HomeFeaturedRepository)),
    articles_repo: ArticlesRepository = Depends(get_repository(ArticlesRepository)),
) -> ListOfArticlesInResponse:
    """Return the currently configured home-featured articles."""
    slugs = await home_repo.list_slugs()
    articles = await articles_repo.list_articles_by_slugs(
        slugs=slugs,
        requested_user=None,
    )
    articles_for_response = [
        ArticleForResponse.from_orm(article)
        for article in articles
    ]
    return ListOfArticlesInResponse(
        articles=articles_for_response,
        articles_count=len(articles_for_response),
    )


@router.put(
    "/home-featured-articles",
    response_model=ListOfArticlesInResponse,
    name="admin:save-home-featured-articles",
)
async def save_home_featured_articles_admin(
    _: User = Depends(get_admin_user),
    payload: AdminHomeFeaturedUpdate = Body(...),
    home_repo: HomeFeaturedRepository = Depends(get_repository(HomeFeaturedRepository)),
    articles_repo: ArticlesRepository = Depends(get_repository(ArticlesRepository)),
) -> ListOfArticlesInResponse:
    """Replace the home-featured list: dedupe, cap at 10, verify all slugs exist."""
    input_slugs = [item.slug.strip() for item in payload.articles if item.slug]
    if not input_slugs:
        await home_repo.save_slugs(slugs=[])
        return ListOfArticlesInResponse(articles=[], articles_count=0)

    # Dedupe while preserving order, then cap at 10 entries.
    slugs: List[str] = []
    for slug in input_slugs:
        if slug and slug not in slugs:
            slugs.append(slug)
    slugs = slugs[:10]

    articles = await articles_repo.list_articles_by_slugs(
        slugs=slugs,
        requested_user=None,
    )
    found_slugs = {article.slug for article in articles}
    missing = [slug for slug in slugs if slug not in found_slugs]
    if missing:
        raise HTTPException(
            status_code=status.HTTP_404_NOT_FOUND,
            detail=f"Articles not found: {', '.join(missing)}",
        )

    await home_repo.save_slugs(slugs=slugs)

    articles_for_response = [
        ArticleForResponse.from_orm(article)
        for article in articles
    ]
    return ListOfArticlesInResponse(
        articles=articles_for_response,
        articles_count=len(articles_for_response),
    )


@router.get(
    "/articles",
    response_model=ListOfArticlesInResponse,
    name="admin:list-articles",
)
async def list_articles_admin(
    _: User = Depends(get_admin_user),
    search: Optional[str] = Query(default=None),
    author: Optional[str] = Query(default=None),
    limit: int = Query(default=20, ge=1, le=200),
    offset: int = Query(default=0, ge=0),
    articles_repo: ArticlesRepository = Depends(get_repository(ArticlesRepository)),
) -> ListOfArticlesInResponse:
    """Paginated article listing for the admin backend (articles_count = total)."""
    articles, total = await articles_repo.list_articles_for_admin(
        search=search,
        author=author,
        limit=limit,
        offset=offset,
    )
    articles_for_response = [
        ArticleForResponse.from_orm(article)
        for article in articles
    ]
    return ListOfArticlesInResponse(articles=articles_for_response, articles_count=total)


@router.put(
    "/articles/{slug}",
    response_model=ArticleInResponse,
    name="admin:update-article",
)
async def update_article_admin(
    _: User = Depends(get_admin_user),
    slug: str = "",
    article_update: ArticleInUpdate = Body(..., embed=True, alias="article"),
    articles_repo: ArticlesRepository = Depends(get_repository(ArticlesRepository)),
) -> ArticleInResponse:
    """Admin article update; re-slugs on title change, tracks explicit cover field."""
    current_article = await articles_repo.get_article_by_slug(
        slug=slug,
        requested_user=None,
    )
    new_slug = get_slug_for_article(article_update.title) if article_update.title else None
    # Distinguish "cover omitted" from "cover explicitly set to None".
    cover_provided = "cover" in article_update.__fields_set__

    article = await articles_repo.admin_update_article(
        article=current_article,
        slug=new_slug,
        title=article_update.title,
        body=article_update.body,
        description=article_update.description,
        cover=article_update.cover,
        is_top=article_update.is_top,
        is_featured=article_update.is_featured,
        sort_weight=article_update.sort_weight,
        cover_provided=cover_provided,
    )
    return ArticleInResponse(article=ArticleForResponse.from_orm(article))


@router.delete(
    "/articles/{slug}",
    status_code=status.HTTP_204_NO_CONTENT,
    name="admin:delete-article",
)
async def delete_article_admin(
    _: User = Depends(get_admin_user),
    slug: str = "",
    articles_repo: ArticlesRepository = Depends(get_repository(ArticlesRepository)),
) -> None:
    """Admin deletion bypassing the normal author-permission check."""
    article = await articles_repo.get_article_by_slug(slug=slug, requested_user=None)
    await articles_repo.admin_delete_article(article=article)


# --- app/api/routes/api.py ---------------------------------------------------
from fastapi import APIRouter

from app.api.routes import (
    admin,
    authentication,
    comments,
    home_featured,
    profiles,
    tags,
    users,
    password_reset,
    uploads,
)
from app.api.routes.articles import api as articles

router = APIRouter()

# authentication /auth
router.include_router(authentication.router, tags=["authentication"], prefix="/auth")

# password reset /auth/password
router.include_router(password_reset.router, prefix="/auth/password")

# current user /user
router.include_router(users.router, tags=["users"], prefix="/user")

# profiles /profiles/{username}
router.include_router(profiles.router, tags=["profiles"], prefix="/profiles")

# articles /articles/**
router.include_router(articles.router, tags=["articles"])
# --- app/api/routes/api.py (tail) --------------------------------------------

# comments /articles/{slug}/comments/**
router.include_router(
    comments.router,
    tags=["comments"],
    prefix="/articles/{slug}/comments",
)

# tags /tags
router.include_router(tags.router, tags=["tags"], prefix="/tags")

# upload image POST /upload-image
router.include_router(uploads.router)

# home featured /home-featured-articles
router.include_router(home_featured.router)

# admin backend /admin/**
router.include_router(admin.router)


# --- app/api/routes/articles/api.py ------------------------------------------
from fastapi import APIRouter

from app.api.routes.articles import articles_common, articles_resource

router = APIRouter()

router.include_router(articles_common.router, prefix="/articles")
router.include_router(articles_resource.router, prefix="/articles")


# --- app/api/routes/articles/articles_common.py ------------------------------
from fastapi import APIRouter, Depends, HTTPException, Query
from starlette import status

from app.api.dependencies.articles import get_article_by_slug_from_path
from app.api.dependencies.authentication import get_current_user_authorizer
from app.api.dependencies.database import get_repository
from app.db.repositories.articles import ArticlesRepository
from app.models.domain.articles import Article
from app.models.domain.users import User
from app.models.schemas.articles import (
    DEFAULT_ARTICLES_LIMIT,
    DEFAULT_ARTICLES_OFFSET,
    ArticleForResponse,
    ArticleInResponse,
    ListOfArticlesInResponse,
)
from app.resources import strings

router = APIRouter()


@router.get(
    "/feed",
    response_model=ListOfArticlesInResponse,
    name="articles:get-user-feed-articles",
)
async def get_articles_for_user_feed(
    limit: int = Query(DEFAULT_ARTICLES_LIMIT, ge=1),
    offset: int = Query(DEFAULT_ARTICLES_OFFSET, ge=0),
    user: User = Depends(get_current_user_authorizer()),
    articles_repo: ArticlesRepository = Depends(get_repository(ArticlesRepository)),
) -> ListOfArticlesInResponse:
    """Personal feed (authentication required)."""
    feed = await articles_repo.get_articles_for_user_feed(
        user=user,
        limit=limit,
        offset=offset,
    )
    body = [ArticleForResponse(**item.dict()) for item in feed]
    return ListOfArticlesInResponse(
        articles=body,
        articles_count=len(feed),
    )


@router.post(
    "/{slug}/favorite",
    response_model=ArticleInResponse,
    name="articles:mark-article-favorite",
)
async def mark_article_as_favorite(
    article: Article = Depends(get_article_by_slug_from_path),
    user: User = Depends(get_current_user_authorizer()),
    articles_repo: ArticlesRepository = Depends(get_repository(ArticlesRepository)),
) -> ArticleInResponse:
    """Favorite an article; a second favorite attempt is a 400."""
    if not article.favorited:
        await articles_repo.add_article_into_favorites(article=article, user=user)

        updated = article.copy(
            update={
                "favorited": True,
                "favorites_count": article.favorites_count + 1,
            },
        )
        return ArticleInResponse(article=ArticleForResponse.from_orm(updated))

    raise HTTPException(
        status_code=status.HTTP_400_BAD_REQUEST,
        detail=strings.ARTICLE_IS_ALREADY_FAVORITED,
    )


@router.delete(
    "/{slug}/favorite",
    response_model=ArticleInResponse,
    name="articles:unmark-article-favorite",
)
async def remove_article_from_favorites(
    article: Article = Depends(get_article_by_slug_from_path),
    user: User = Depends(get_current_user_authorizer()),
    articles_repo: ArticlesRepository = Depends(get_repository(ArticlesRepository)),
) -> ArticleInResponse:
    """Unfavorite an article; unfavoriting a non-favorite is a 400."""
    if article.favorited:
        await articles_repo.remove_article_from_favorites(article=article, user=user)

        updated = article.copy(
            update={
                "favorited": False,
                "favorites_count": article.favorites_count - 1,
            },
        )
        return ArticleInResponse(article=ArticleForResponse.from_orm(updated))

    raise HTTPException(
        status_code=status.HTTP_400_BAD_REQUEST,
        detail=strings.ARTICLE_IS_NOT_FAVORITED,
    )


# --- app/api/routes/articles/articles_resource.py (module prelude) -----------
from typing import Optional

from fastapi import APIRouter, Body, Depends, HTTPException, Query, Response
from starlette import status

from app.api.dependencies.articles import (
    check_article_modification_permissions,
    get_article_by_slug_from_path,
    get_articles_filters,
)
from app.api.dependencies.authentication import get_current_user_authorizer
from app.api.dependencies.database import get_repository
from app.db.repositories.articles import ArticlesRepository
from app.db.repositories.menu_slots import DEFAULT_MENU_SLOTS, MenuSlotsRepository
from app.models.domain.articles import Article
from app.models.domain.users import User
from app.models.schemas.articles import (
    DEFAULT_ARTICLES_LIMIT,
    DEFAULT_ARTICLES_OFFSET,
    ArticleForResponse,
    ArticleInCreate,
    ArticleInResponse,
    ArticleInUpdate,
    ArticlesFilters,
    ListOfArticlesInResponse,
)
from app.resources import strings
from app.services.articles import check_article_exists, get_slug_for_article

router = APIRouter()

DEFAULT_MENU_SLOT_KEYS = {slot["slot_key"] for slot in DEFAULT_MENU_SLOTS}
@router.get(
    "",
    response_model=ListOfArticlesInResponse,
    name="articles:list-articles",
)
async def list_articles(
    articles_filters: ArticlesFilters = Depends(get_articles_filters),
    # Optional user: anonymous or bad token is fine, requested_user is just None.
    user: Optional[User] = Depends(get_current_user_authorizer(required=False)),
    articles_repo: ArticlesRepository = Depends(get_repository(ArticlesRepository)),
) -> ListOfArticlesInResponse:
    """Public article listing with tag/author/favorited/search filters."""
    matched = await articles_repo.filter_articles(
        tag=articles_filters.tag,
        tags=articles_filters.tags,
        author=articles_filters.author,
        favorited=articles_filters.favorited,
        search=articles_filters.search,
        limit=articles_filters.limit,
        offset=articles_filters.offset,
        requested_user=user,
    )
    payload = [ArticleForResponse.from_orm(item) for item in matched]
    return ListOfArticlesInResponse(
        articles=payload,
        articles_count=len(matched),
    )


@router.get(
    "/menu/{slot_key}",
    response_model=ListOfArticlesInResponse,
    name="articles:list-by-menu-slot",
)
async def list_articles_by_menu_slot(
    slot_key: str,
    limit: int = Query(DEFAULT_ARTICLES_LIMIT, ge=1, le=200),
    offset: int = Query(DEFAULT_ARTICLES_OFFSET, ge=0),
    mode: str = Query("and", description="tag match mode: and/or"),
    user: Optional[User] = Depends(get_current_user_authorizer(required=False)),
    menu_slots_repo: MenuSlotsRepository = Depends(get_repository(MenuSlotsRepository)),
    articles_repo: ArticlesRepository = Depends(get_repository(ArticlesRepository)),
) -> ListOfArticlesInResponse:
    """Articles for a navigation menu slot.

    Unknown slot keys 404; known-but-unconfigured slots are lazily created
    with their built-in label and an empty tag list.
    """
    slot = await menu_slots_repo.get_slot(slot_key)
    if not slot and slot_key not in DEFAULT_MENU_SLOT_KEYS:
        raise HTTPException(
            status_code=status.HTTP_404_NOT_FOUND,
            detail="Menu slot not found",
        )
    if not slot:
        fallback_label = next(
            (s["label"] for s in DEFAULT_MENU_SLOTS if s["slot_key"] == slot_key),
            slot_key,
        )
        slot = await menu_slots_repo.upsert_slot_tags(
            slot_key=slot_key,
            tags=[],
            label=fallback_label,
        )

    slot_tags = slot["tags"] or []
    matched = await articles_repo.filter_articles(
        tags=slot_tags,
        limit=limit,
        offset=offset,
        requested_user=user,
        tag_mode=mode,
    )
    # If strict AND produced nothing for a tagged slot, degrade to OR so the
    # front page is never completely empty.
    if mode == "and" and slot_tags and not matched:
        matched = await articles_repo.filter_articles(
            tags=slot_tags,
            limit=limit,
            offset=offset,
            requested_user=user,
            tag_mode="or",
        )
    payload = [ArticleForResponse.from_orm(item) for item in matched]
    return ListOfArticlesInResponse(
        articles=payload,
        articles_count=len(matched),
    )


@router.post(
    "",
    status_code=status.HTTP_201_CREATED,
    response_model=ArticleInResponse,
    name="articles:create-article",
)
async def create_new_article(
    article_create: ArticleInCreate = Body(..., embed=True, alias="article"),
    # Authentication is mandatory here.
    user: User = Depends(get_current_user_authorizer()),
    articles_repo: ArticlesRepository = Depends(get_repository(ArticlesRepository)),
) -> ArticleInResponse:
    """Create an article; duplicate slugs (i.e. duplicate titles) are rejected."""
    slug = get_slug_for_article(article_create.title)
    if await check_article_exists(articles_repo, slug):
        raise HTTPException(
            status_code=status.HTTP_400_BAD_REQUEST,
            detail=strings.ARTICLE_ALREADY_EXISTS,
        )

    created = await articles_repo.create_article(
        slug=slug,
        title=article_create.title,
        description=article_create.description,
        body=article_create.body,
        author=user,
        tags=article_create.tags,
        cover=article_create.cover,  # cover image is supported
    )
    return ArticleInResponse(article=ArticleForResponse.from_orm(created))


@router.get(
    "/{slug}",
    response_model=ArticleInResponse,
    name="articles:get-article",
)
async def retrieve_article_by_slug(
    # Deliberately NOT using get_article_by_slug_from_path, which would force auth.
    slug: str,
    # Optional user: enables favorited/etc. personalisation without blocking
    # anonymous readers.
    user: Optional[User] = Depends(get_current_user_authorizer(required=False)),
    articles_repo: ArticlesRepository = Depends(get_repository(ArticlesRepository)),
) -> ArticleInResponse:
    """Public article detail.

    Anonymous / missing / invalid token -> user is None, article still served.
    A valid token lets the repo compute per-user fields such as ``favorited``.
    Each request bumps the view counter and returns the fresh count.
    """
    article = await articles_repo.get_article_by_slug(
        slug=slug,
        requested_user=user,
    )
    article.views = await articles_repo.increment_article_views(slug=slug)
    return ArticleInResponse(article=ArticleForResponse.from_orm(article))


@router.put(
    "/{slug}",
    response_model=ArticleInResponse,
    name="articles:update-article",
    dependencies=[Depends(check_article_modification_permissions)],
)
async def update_article_by_slug(
    article_update: ArticleInUpdate = Body(..., embed=True, alias="article"),
    current_article: Article = Depends(get_article_by_slug_from_path),
    articles_repo: ArticlesRepository = Depends(get_repository(ArticlesRepository)),
) -> ArticleInResponse:
    """Author update; re-slugs on title change, distinguishes explicit cover=None."""
    new_slug = get_slug_for_article(article_update.title) if article_update.title else None

    # Was the cover field explicitly present in this request body?
    cover_provided = "cover" in article_update.__fields_set__

    updated = await articles_repo.update_article(
        article=current_article,
        slug=new_slug,
        title=article_update.title,
        body=article_update.body,
        description=article_update.description,
        cover=article_update.cover,
        cover_provided=cover_provided,
    )
    return ArticleInResponse(article=ArticleForResponse.from_orm(updated))


@router.delete(
    "/{slug}",
    status_code=status.HTTP_204_NO_CONTENT,
    name="articles:delete-article",
    dependencies=[Depends(check_article_modification_permissions)],
    response_class=Response,
)
async def delete_article_by_slug(
    article: Article = Depends(get_article_by_slug_from_path),
    articles_repo: ArticlesRepository = Depends(get_repository(ArticlesRepository)),
) -> None:
    """Delete an article after the permission dependency has passed."""
    await articles_repo.delete_article(article=article)
@@ +# app/api/routes/authentication.py +from __future__ import annotations + +from typing import Optional, Any, TYPE_CHECKING +from datetime import datetime, timedelta + +from fastapi import APIRouter, Body, Depends, HTTPException, Request, Response +from starlette.status import HTTP_201_CREATED, HTTP_400_BAD_REQUEST, HTTP_401_UNAUTHORIZED + +from app.api.dependencies.database import get_repository +from app.core.config import get_app_settings +from app.core.settings.app import AppSettings +from app.db.errors import EntityDoesNotExist +from app.db.repositories.users import UsersRepository + +# 条件导入:运行期可能没有 email_codes 仓库 +try: + from app.db.repositories.email_codes import EmailCodesRepository # type: ignore + HAS_EMAIL_CODES_REPO = True +except Exception: # pragma: no cover + EmailCodesRepository = None # type: ignore + HAS_EMAIL_CODES_REPO = False + +# 仅用于类型检查(让 Pylance/pyright 认识名字,但运行期不导入) +if TYPE_CHECKING: # pragma: no cover + from app.db.repositories.email_codes import EmailCodesRepository as _EmailCodesRepositoryT # noqa: F401 + +from app.models.schemas.users import ( + UserInLogin, + UserInResponse, + UserWithToken, + RegisterWithEmailIn, +) +from app.models.schemas.email_code import EmailCodeSendIn, EmailCodeSendOut +from app.resources import strings +from app.services import jwt +from app.services.mailer import send_email +from app.services.authentication import ( + check_email_is_taken, + assert_passwords_match, + make_unique_username, +) + +router = APIRouter() + +# ================= Cookie 工具(最小改造,无需新增文件) ================= +REFRESH_COOKIE_NAME = "refresh_token" + +def set_refresh_cookie(resp: Response, token: str, *, max_age_days: int = 30) -> None: + """ + 仅通过 HttpOnly Cookie 下发 refresh。 + - SameSite=Lax:避免跨站表单滥用 + - Secure=True:生产环境建议始终为 True;如本地纯 HTTP 开发可按需改为 False + - Path 设为 /api/auth,缩小作用域 + """ + resp.set_cookie( + key=REFRESH_COOKIE_NAME, + value=token, + max_age=max_age_days * 24 * 3600, + httponly=True, + secure=True, # 如需在本地 http 调试,可改为 
False + samesite="lax", + path="/api/auth", + ) + +def clear_refresh_cookie(resp: Response) -> None: + resp.delete_cookie( + key=REFRESH_COOKIE_NAME, + path="/api/auth", + httponly=True, + secure=True, + samesite="lax", + ) + +# 为了兼容“可选的验证码仓库”,构造一个可交给 Depends 的工厂 +def _provide_optional_email_codes_repo(): + if HAS_EMAIL_CODES_REPO: + return get_repository(EmailCodesRepository) # type: ignore[name-defined] + + async def _none(): + return None + + return _none + + +# ========= 发送邮箱验证码 ========= +@router.post( + "/email-code", + response_model=EmailCodeSendOut, + name="auth:email-code", +) +async def send_email_code( + payload: EmailCodeSendIn = Body(...), + settings: AppSettings = Depends(get_app_settings), + email_codes_repo: Optional[Any] = Depends(_provide_optional_email_codes_repo()), +) -> EmailCodeSendOut: + """ + 发送邮箱验证码并写入数据库(若仓库存在)。 + """ + # 1) 生成验证码(6 位数字) + rnd = __import__("random").randint(0, 999999) + code = f"{rnd:06d}" + + # 2) 过期时间 + expires_at = datetime.utcnow() + timedelta(minutes=settings.email_code_expires_minutes) + + # 3) 记录到数据库(可选) + if email_codes_repo is not None: + await email_codes_repo.create_code( # type: ignore[attr-defined] + email=payload.email, + code=code, + scene=payload.scene, + expires_at=expires_at, + ) + + # 4) 发邮件 + subject = f"【AI平台】{payload.scene} 验证码:{code}" + html = f""" +
+

您好!

+

您正在进行 {payload.scene} 操作,本次验证码为:

+

{code}

+

有效期:{settings.email_code_expires_minutes} 分钟;请勿泄露给他人。

+
+ """ + send_email(payload.email, subject, html) + return EmailCodeSendOut(ok=True) + + +# ========= 登录 ========= +@router.post( + "/login", + response_model=UserInResponse, + response_model_exclude_none=True, + name="auth:login", +) +async def login( + response: Response, + user_login: UserInLogin = Body(..., embed=True, alias="user"), + users_repo: UsersRepository = Depends(get_repository(UsersRepository)), + settings: AppSettings = Depends(get_app_settings), +) -> UserInResponse: + """邮箱 + 密码登录(签发 Access & Set-Cookie Refresh)""" + wrong_login_error = HTTPException( + status_code=HTTP_400_BAD_REQUEST, + detail=strings.INCORRECT_LOGIN_INPUT, + ) + + try: + user = await users_repo.get_user_by_email(email=user_login.email) + except EntityDoesNotExist as existence_error: + raise wrong_login_error from existence_error + + if not user.check_password(user_login.password): + raise wrong_login_error + + secret = str(settings.secret_key.get_secret_value()) + + # Access(15m) + Refresh(30d) + access = jwt.create_access_token_for_user(user, secret) + refresh = jwt.create_refresh_token_for_user(user, secret) + + # 仅通过 HttpOnly Cookie 下发 refresh + set_refresh_cookie(response, refresh, max_age_days=jwt.REFRESH_TOKEN_EXPIRE_DAYS) + + return UserInResponse( + user=UserWithToken( + username=user.username, + email=user.email, + bio=user.bio, + image=user.image, + token=access, # 仍然在 body 返回 access,保持前端兼容 + email_verified=getattr(user, "email_verified", False), + roles=getattr(user, "roles", []), + ), + ) + + +# ========= 注册 ========= +@router.post( + "", + status_code=HTTP_201_CREATED, + response_model=UserInResponse, + response_model_exclude_none=True, + name="auth:register", +) +async def register( + response: Response, + payload: RegisterWithEmailIn = Body(..., embed=True, alias="user"), + users_repo: UsersRepository = Depends(get_repository(UsersRepository)), + settings: AppSettings = Depends(get_app_settings), + email_codes_repo: Optional[Any] = 
Depends(_provide_optional_email_codes_repo()), +) -> UserInResponse: + """ + 注册流程: + 1) 校验两次密码一致 + 2) 校验邮箱未被占用 + 3) 校验验证码(若存在验证码仓库) + 4) 生成唯一用户名 + 5) 创建用户 + 6) 如仓库提供 set_email_verified,则置为 True + 7) 签发 Access & Set-Cookie Refresh + """ + # 1) 两次密码一致 + try: + assert_passwords_match(payload.password, payload.confirm_password) + except ValueError: + raise HTTPException( + status_code=HTTP_400_BAD_REQUEST, + detail="Passwords do not match", + ) + + # 2) 邮箱是否占用 + if await check_email_is_taken(users_repo, payload.email): + raise HTTPException( + status_code=HTTP_400_BAD_REQUEST, + detail=strings.EMAIL_TAKEN, + ) + + # 3) 校验验证码 + if email_codes_repo is not None: + ok = await email_codes_repo.verify_and_consume( # type: ignore[attr-defined] + email=payload.email, + code=payload.code, + scene="register", + ) + if not ok: + raise HTTPException( + status_code=HTTP_400_BAD_REQUEST, + detail="Invalid or expired verification code", + ) + + # 4) 生成唯一用户名 + username = await make_unique_username(users_repo, payload.email) + + # 5) 创建用户 + user = await users_repo.create_user( + username=username, + email=payload.email, + password=payload.password, + ) + + # 6) 若仓库支持置已验证,则更新并回读 + if hasattr(users_repo, "set_email_verified"): + try: + await users_repo.set_email_verified(email=payload.email, verified=True) # type: ignore[attr-defined] + user = await users_repo.get_user_by_email(email=payload.email) + except Exception: + pass # 不阻塞主流程 + + # 7) 签发 Access & Refresh(并下发 Cookie) + secret = str(settings.secret_key.get_secret_value()) + access = jwt.create_access_token_for_user(user, secret) + refresh = jwt.create_refresh_token_for_user(user, secret) + set_refresh_cookie(response, refresh, max_age_days=jwt.REFRESH_TOKEN_EXPIRE_DAYS) + + return UserInResponse( + user=UserWithToken( + username=user.username, + email=user.email, + bio=user.bio, + image=user.image, + token=access, + email_verified=getattr(user, "email_verified", True), + roles=getattr(user, "roles", []), + ), + ) + + +# ========= 
刷新 Access(仅 Cookie 取 refresh)========= +@router.post( + "/refresh", + name="auth:refresh", +) +async def refresh_access_token( + request: Request, + response: Response, + users_repo: UsersRepository = Depends(get_repository(UsersRepository)), + settings: AppSettings = Depends(get_app_settings), +) -> dict: + """ + 从 HttpOnly Cookie 读取 refresh,校验后签发新的 access,并重置 refresh Cookie。 + 最小改造版本:refresh 不轮换(如需轮换/重放检测,请走“增表方案”)。 + """ + refresh = request.cookies.get(REFRESH_COOKIE_NAME) + if not refresh: + raise HTTPException(status_code=HTTP_401_UNAUTHORIZED, detail="Missing refresh token") + + secret = str(settings.secret_key.get_secret_value()) + try: + username = jwt.get_username_from_token(refresh, secret, expected_subject=jwt.JWT_SUBJECT_REFRESH) + except ValueError: + raise HTTPException(status_code=HTTP_401_UNAUTHORIZED, detail="Invalid refresh token") + + # 取用户(优先按 username) + try: + # 大多数 RealWorld 模板都有该方法 + user = await users_repo.get_user_by_username(username=username) # type: ignore[attr-defined] + except Exception: + # 若没有 get_user_by_username,则退回按 email 查 + try: + user = await users_repo.get_user_by_email(email=username) + except Exception as e: + raise HTTPException(status_code=HTTP_401_UNAUTHORIZED, detail="User not found") from e + + # 签发新 access;最小改造——同一个 refresh 继续使用(不轮换) + access = jwt.create_access_token_for_user(user, secret) + # 也可选择重置 refresh 的过期时间(同值覆盖),这里直接重设 Cookie: + set_refresh_cookie(response, refresh, max_age_days=jwt.REFRESH_TOKEN_EXPIRE_DAYS) + + return {"token": access, "expires_in": jwt.ACCESS_TOKEN_EXPIRE_MINUTES * 60} + + +# ========= 登出(清 Cookie;前端清本地 access)========= +@router.post( + "/logout", + name="auth:logout", +) +async def logout(response: Response) -> dict: + clear_refresh_cookie(response) + return {"ok": True} diff --git a/backend/app/api/routes/comments.py b/backend/app/api/routes/comments.py new file mode 100644 index 0000000..dfe6fc6 --- /dev/null +++ b/backend/app/api/routes/comments.py @@ -0,0 +1,71 @@ +from typing import 
Optional + +from fastapi import APIRouter, Body, Depends, Response +from starlette import status + +from app.api.dependencies.articles import get_article_by_slug_from_path +from app.api.dependencies.authentication import get_current_user_authorizer +from app.api.dependencies.comments import ( + check_comment_modification_permissions, + get_comment_by_id_from_path, +) +from app.api.dependencies.database import get_repository +from app.db.repositories.comments import CommentsRepository +from app.models.domain.articles import Article +from app.models.domain.comments import Comment +from app.models.domain.users import User +from app.models.schemas.comments import ( + CommentInCreate, + CommentInResponse, + ListOfCommentsInResponse, +) + +router = APIRouter() + + +@router.get( + "", + response_model=ListOfCommentsInResponse, + name="comments:get-comments-for-article", +) +async def list_comments_for_article( + article: Article = Depends(get_article_by_slug_from_path), + user: Optional[User] = Depends(get_current_user_authorizer(required=False)), + comments_repo: CommentsRepository = Depends(get_repository(CommentsRepository)), +) -> ListOfCommentsInResponse: + comments = await comments_repo.get_comments_for_article(article=article, user=user) + return ListOfCommentsInResponse(comments=comments) + + +@router.post( + "", + status_code=status.HTTP_201_CREATED, + response_model=CommentInResponse, + name="comments:create-comment-for-article", +) +async def create_comment_for_article( + comment_create: CommentInCreate = Body(..., embed=True, alias="comment"), + article: Article = Depends(get_article_by_slug_from_path), + user: User = Depends(get_current_user_authorizer()), + comments_repo: CommentsRepository = Depends(get_repository(CommentsRepository)), +) -> CommentInResponse: + comment = await comments_repo.create_comment_for_article( + body=comment_create.body, + article=article, + user=user, + ) + return CommentInResponse(comment=comment) + + +@router.delete( + 
"/{comment_id}", + status_code=status.HTTP_204_NO_CONTENT, + name="comments:delete-comment-from-article", + dependencies=[Depends(check_comment_modification_permissions)], + response_class=Response, +) +async def delete_comment_from_article( + comment: Comment = Depends(get_comment_by_id_from_path), + comments_repo: CommentsRepository = Depends(get_repository(CommentsRepository)), +) -> None: + await comments_repo.delete_comment(comment=comment) diff --git a/backend/app/api/routes/home_featured.py b/backend/app/api/routes/home_featured.py new file mode 100644 index 0000000..e5338f2 --- /dev/null +++ b/backend/app/api/routes/home_featured.py @@ -0,0 +1,37 @@ +from typing import Optional + +from fastapi import APIRouter, Depends + +from app.api.dependencies.authentication import get_current_user_authorizer +from app.api.dependencies.database import get_repository +from app.db.repositories.articles import ArticlesRepository +from app.db.repositories.home_featured import HomeFeaturedRepository +from app.models.domain.users import User +from app.models.schemas.articles import ArticleForResponse, ListOfArticlesInResponse + +router = APIRouter() + + +@router.get( + "/home-featured-articles", + response_model=ListOfArticlesInResponse, + name="home-featured:list", +) +async def list_home_featured_articles( + user: Optional[User] = Depends(get_current_user_authorizer(required=False)), + home_repo: HomeFeaturedRepository = Depends(get_repository(HomeFeaturedRepository)), + articles_repo: ArticlesRepository = Depends(get_repository(ArticlesRepository)), +) -> ListOfArticlesInResponse: + slugs = await home_repo.list_slugs() + articles = await articles_repo.list_articles_by_slugs( + slugs=slugs, + requested_user=user, + ) + articles_for_response = [ + ArticleForResponse.from_orm(article) + for article in articles + ] + return ListOfArticlesInResponse( + articles=articles_for_response, + articles_count=len(articles_for_response), + ) diff --git 
a/backend/app/api/routes/password_reset.py b/backend/app/api/routes/password_reset.py new file mode 100644 index 0000000..d4c2645 --- /dev/null +++ b/backend/app/api/routes/password_reset.py @@ -0,0 +1,47 @@ +from fastapi import APIRouter, Depends, Request +from pydantic import BaseModel, EmailStr, Field +from asyncpg import Connection + +from app.api.dependencies.database import get_connection +from app.db.repositories.users import UsersRepository +from app.services.password_reset import send_reset_code_by_email, reset_password_with_code + +# ❌ 不要再写 prefix,这里只负责声明相对路径 +router = APIRouter(tags=["auth-password"]) + +class PasswordForgotIn(BaseModel): + email: EmailStr + +@router.post("/forgot") +async def forgot_password( + payload: PasswordForgotIn, + request: Request, + conn: Connection = Depends(get_connection), +): + users_repo = UsersRepository(conn) + await send_reset_code_by_email(request, conn, users_repo, payload.email) + return {"ok": True} + +class PasswordResetIn(BaseModel): + email: EmailStr + code: str = Field(min_length=4, max_length=12) + password: str = Field(min_length=6) + confirm_password: str = Field(min_length=6) + +@router.post("/reset") +async def reset_password( + payload: PasswordResetIn, + conn: Connection = Depends(get_connection), +): + if payload.password != payload.confirm_password: + return {"ok": False, "detail": "两次输入的密码不一致"} + + users_repo = UsersRepository(conn) + await reset_password_with_code( + conn, + users_repo, + email=payload.email, + code=payload.code, + new_password=payload.password, + ) + return {"ok": True} diff --git a/backend/app/api/routes/profiles.py b/backend/app/api/routes/profiles.py new file mode 100644 index 0000000..6ec1bf0 --- /dev/null +++ b/backend/app/api/routes/profiles.py @@ -0,0 +1,84 @@ +from fastapi import APIRouter, Depends, HTTPException +from starlette.status import HTTP_400_BAD_REQUEST + +from app.api.dependencies.authentication import get_current_user_authorizer +from 
app.api.dependencies.database import get_repository +from app.api.dependencies.profiles import get_profile_by_username_from_path +from app.db.repositories.profiles import ProfilesRepository +from app.models.domain.profiles import Profile +from app.models.domain.users import User +from app.models.schemas.profiles import ProfileInResponse +from app.resources import strings + +router = APIRouter() + + +@router.get( + "/{username}", + response_model=ProfileInResponse, + name="profiles:get-profile", +) +async def retrieve_profile_by_username( + profile: Profile = Depends(get_profile_by_username_from_path), +) -> ProfileInResponse: + return ProfileInResponse(profile=profile) + + +@router.post( + "/{username}/follow", + response_model=ProfileInResponse, + name="profiles:follow-user", +) +async def follow_for_user( + profile: Profile = Depends(get_profile_by_username_from_path), + user: User = Depends(get_current_user_authorizer()), + profiles_repo: ProfilesRepository = Depends(get_repository(ProfilesRepository)), +) -> ProfileInResponse: + if user.username == profile.username: + raise HTTPException( + status_code=HTTP_400_BAD_REQUEST, + detail=strings.UNABLE_TO_FOLLOW_YOURSELF, + ) + + if profile.following: + raise HTTPException( + status_code=HTTP_400_BAD_REQUEST, + detail=strings.USER_IS_ALREADY_FOLLOWED, + ) + + await profiles_repo.add_user_into_followers( + target_user=profile, + requested_user=user, + ) + + return ProfileInResponse(profile=profile.copy(update={"following": True})) + + +@router.delete( + "/{username}/follow", + response_model=ProfileInResponse, + name="profiles:unsubscribe-from-user", +) +async def unsubscribe_from_user( + profile: Profile = Depends(get_profile_by_username_from_path), + user: User = Depends(get_current_user_authorizer()), + profiles_repo: ProfilesRepository = Depends(get_repository(ProfilesRepository)), +) -> ProfileInResponse: + if user.username == profile.username: + raise HTTPException( + status_code=HTTP_400_BAD_REQUEST, + 
detail=strings.UNABLE_TO_UNSUBSCRIBE_FROM_YOURSELF, + ) + + if not profile.following: + raise HTTPException( + status_code=HTTP_400_BAD_REQUEST, + detail=strings.USER_IS_NOT_FOLLOWED, + ) + + await profiles_repo.remove_user_from_followers( + target_user=profile, + requested_user=user, + ) + + return ProfileInResponse(profile=profile.copy(update={"following": False})) diff --git a/backend/app/api/routes/tags.py b/backend/app/api/routes/tags.py new file mode 100644 index 0000000..4706187 --- /dev/null +++ b/backend/app/api/routes/tags.py @@ -0,0 +1,15 @@ +from fastapi import APIRouter, Depends + +from app.api.dependencies.database import get_repository +from app.db.repositories.tags import TagsRepository +from app.models.schemas.tags import TagsInList + +router = APIRouter() + + +@router.get("", response_model=TagsInList, name="tags:get-all") +async def get_all_tags( + tags_repo: TagsRepository = Depends(get_repository(TagsRepository)), +) -> TagsInList: + tags = await tags_repo.get_all_tags() + return TagsInList(tags=tags) diff --git a/backend/app/api/routes/uploads.py b/backend/app/api/routes/uploads.py new file mode 100644 index 0000000..ce94544 --- /dev/null +++ b/backend/app/api/routes/uploads.py @@ -0,0 +1,35 @@ +# app/api/routes/uploads.py +from fastapi import APIRouter, UploadFile, File, HTTPException, Request +from uuid import uuid4 +from pathlib import Path + +router = APIRouter(tags=["uploads"]) + +# 保存目录:项目根目录下 static/uploads +UPLOAD_DIR = Path("static/uploads") +UPLOAD_DIR.mkdir(parents=True, exist_ok=True) + + +@router.post("/upload-image") +async def upload_image( + request: Request, + file: UploadFile = File(...), +): + # 只允许图片 + if not file.content_type.startswith("image/"): + raise HTTPException(status_code=400, detail="只支持图片上传") + + # 生成文件名 + ext = (file.filename or "").rsplit(".", 1)[-1].lower() or "png" + name = f"{uuid4().hex}.{ext}" + save_path = UPLOAD_DIR / name + + # 保存文件 + content = await file.read() + save_path.write_bytes(content) + + # 
拼出完整 URL,确保在 3000 端口页面里也能访问到 8000 的静态资源 + base = str(request.base_url).rstrip("/") # e.g. "http://127.0.0.1:8000" + url = f"{base}/static/uploads/{name}" + + return {"url": url} diff --git a/backend/app/api/routes/users.py b/backend/app/api/routes/users.py new file mode 100644 index 0000000..40d0268 --- /dev/null +++ b/backend/app/api/routes/users.py @@ -0,0 +1,82 @@ +# app\api\routes\users.py +from fastapi import APIRouter, Body, Depends, HTTPException +from starlette.status import HTTP_400_BAD_REQUEST + +from app.api.dependencies.authentication import get_current_user_authorizer +from app.api.dependencies.database import get_repository +from app.core.config import get_app_settings +from app.core.settings.app import AppSettings +from app.db.repositories.users import UsersRepository +from app.models.domain.users import User +from app.models.schemas.users import UserInResponse, UserInUpdate, UserWithToken +from app.resources import strings +from app.services import jwt +from app.services.authentication import check_email_is_taken, check_username_is_taken + +router = APIRouter() + + +@router.get("", response_model=UserInResponse, name="users:get-current-user") +async def retrieve_current_user( + user: User = Depends(get_current_user_authorizer()), + settings: AppSettings = Depends(get_app_settings), +) -> UserInResponse: + token = jwt.create_access_token_for_user( + user, + str(settings.secret_key.get_secret_value()), + ) + return UserInResponse( + user=UserWithToken( + username=user.username, + email=user.email, + bio=user.bio, + image=user.image, + phone=getattr(user, "phone", None), + user_type=getattr(user, "user_type", None), + company_name=getattr(user, "company_name", None), + token=token, + roles=getattr(user, "roles", []), + ), + ) + + +@router.put("", response_model=UserInResponse, name="users:update-current-user") +async def update_current_user( + user_update: UserInUpdate = Body(..., embed=True, alias="user"), + current_user: User = 
Depends(get_current_user_authorizer()), + users_repo: UsersRepository = Depends(get_repository(UsersRepository)), + settings: AppSettings = Depends(get_app_settings), +) -> UserInResponse: + if user_update.username and user_update.username != current_user.username: + if await check_username_is_taken(users_repo, user_update.username): + raise HTTPException( + status_code=HTTP_400_BAD_REQUEST, + detail=strings.USERNAME_TAKEN, + ) + + if user_update.email and user_update.email != current_user.email: + if await check_email_is_taken(users_repo, user_update.email): + raise HTTPException( + status_code=HTTP_400_BAD_REQUEST, + detail=strings.EMAIL_TAKEN, + ) + + user = await users_repo.update_user(user=current_user, **user_update.dict()) + + token = jwt.create_access_token_for_user( + user, + str(settings.secret_key.get_secret_value()), + ) + return UserInResponse( + user=UserWithToken( + username=user.username, + email=user.email, + bio=user.bio, + image=user.image, + phone=getattr(user, "phone", None), + user_type=getattr(user, "user_type", None), + company_name=getattr(user, "company_name", None), + token=token, + roles=getattr(user, "roles", []), + ), + ) diff --git a/backend/app/core/__init__.py b/backend/app/core/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/backend/app/core/config.py b/backend/app/core/config.py new file mode 100644 index 0000000..87f58a8 --- /dev/null +++ b/backend/app/core/config.py @@ -0,0 +1,21 @@ +from functools import lru_cache +from typing import Dict, Type + +from app.core.settings.app import AppSettings +from app.core.settings.base import AppEnvTypes, BaseAppSettings +from app.core.settings.development import DevAppSettings +from app.core.settings.production import ProdAppSettings +from app.core.settings.test import TestAppSettings + +environments: Dict[AppEnvTypes, Type[AppSettings]] = { + AppEnvTypes.dev: DevAppSettings, + AppEnvTypes.prod: ProdAppSettings, + AppEnvTypes.test: TestAppSettings, +} + + +@lru_cache 
+def get_app_settings() -> AppSettings: + app_env = BaseAppSettings().app_env + config = environments[app_env] + return config() diff --git a/backend/app/core/events.py b/backend/app/core/events.py new file mode 100644 index 0000000..3e82ee3 --- /dev/null +++ b/backend/app/core/events.py @@ -0,0 +1,25 @@ +from typing import Callable + +from fastapi import FastAPI +from loguru import logger + +from app.core.settings.app import AppSettings +from app.db.events import close_db_connection, connect_to_db + + +def create_start_app_handler( + app: FastAPI, + settings: AppSettings, +) -> Callable: # type: ignore + async def start_app() -> None: + await connect_to_db(app, settings) + + return start_app + + +def create_stop_app_handler(app: FastAPI) -> Callable: # type: ignore + @logger.catch + async def stop_app() -> None: + await close_db_connection(app) + + return stop_app diff --git a/backend/app/core/logging.py b/backend/app/core/logging.py new file mode 100644 index 0000000..10ceda7 --- /dev/null +++ b/backend/app/core/logging.py @@ -0,0 +1,25 @@ +import logging +from types import FrameType +from typing import cast + +from loguru import logger + + +class InterceptHandler(logging.Handler): + def emit(self, record: logging.LogRecord) -> None: # pragma: no cover + # Get corresponding Loguru level if it exists + try: + level = logger.level(record.levelname).name + except ValueError: + level = str(record.levelno) + + # Find caller from where originated the logged message + frame, depth = logging.currentframe(), 2 + while frame.f_code.co_filename == logging.__file__: # noqa: WPS609 + frame = cast(FrameType, frame.f_back) + depth += 1 + + logger.opt(depth=depth, exception=record.exc_info).log( + level, + record.getMessage(), + ) diff --git a/backend/app/core/settings/__init__.py b/backend/app/core/settings/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/backend/app/core/settings/app.py b/backend/app/core/settings/app.py new file mode 100644 index 
0000000..e9495ef --- /dev/null +++ b/backend/app/core/settings/app.py @@ -0,0 +1,77 @@ +# app/core/settings/app.py +import logging +import sys +from typing import Any, Dict, List, Tuple, Optional + +from loguru import logger +from pydantic import PostgresDsn, SecretStr, EmailStr + +from app.core.logging import InterceptHandler +from app.core.settings.base import BaseAppSettings + + +class AppSettings(BaseAppSettings): + # ===== 基本 FastAPI 设置 ===== + debug: bool = False + docs_url: str = "/docs" + openapi_prefix: str = "" + openapi_url: str = "/openapi.json" + redoc_url: str = "/redoc" + title: str = "FastAPI example application" + version: str = "0.0.0" + + # ===== 数据库 ===== + database_url: PostgresDsn + max_connection_count: int = 10 + min_connection_count: int = 10 + + # ===== 安全/JWT ===== + secret_key: SecretStr + api_prefix: str = "/api" + jwt_token_prefix: str = "Token" + + # ===== CORS/Host ===== + allowed_hosts: List[str] = ["*"] + + # ===== 日志 ===== + logging_level: int = logging.INFO + loggers: Tuple[str, str] = ("uvicorn.asgi", "uvicorn.access") + + # ===== 邮件/验证码(新增配置) ===== + mail_from: EmailStr = "no-reply@example.com" + + # SMTP 基础(兼容 Py3.9:使用 Optional[...]) + smtp_host: str = "localhost" + smtp_port: int = 25 + smtp_user: Optional[SecretStr] = None + smtp_password: Optional[SecretStr] = None + smtp_tls: bool = False # True 时将尝试 STARTTLS + + # 验证码配置 + email_code_expires_minutes: int = 10 + email_code_scenes: Tuple[str, ...] 
= ("register", "reset", "login") + + # ===== 管理员自动信任的邮箱(逗号分隔,登录后自动授予 admin �?===== + admin_emails: List[str] = [] + + class Config: + validate_assignment = True + + @property + def fastapi_kwargs(self) -> Dict[str, Any]: + return { + "debug": self.debug, + "docs_url": self.docs_url, + "openapi_prefix": self.openapi_prefix, + "openapi_url": self.openapi_url, + "redoc_url": self.redoc_url, + "title": self.title, + "version": self.version, + } + + def configure_logging(self) -> None: + logging.getLogger().handlers = [InterceptHandler()] + for logger_name in self.loggers: + logging_logger = logging.getLogger(logger_name) + logging_logger.handlers = [InterceptHandler(level=self.logging_level)] + logger.configure(handlers=[{"sink": sys.stderr, "level": self.logging_level}]) diff --git a/backend/app/core/settings/base.py b/backend/app/core/settings/base.py new file mode 100644 index 0000000..0397cbb --- /dev/null +++ b/backend/app/core/settings/base.py @@ -0,0 +1,16 @@ +from enum import Enum + +from pydantic import BaseSettings + + +class AppEnvTypes(Enum): + prod: str = "prod" + dev: str = "dev" + test: str = "test" + + +class BaseAppSettings(BaseSettings): + app_env: AppEnvTypes = AppEnvTypes.prod + + class Config: + env_file = ".env" diff --git a/backend/app/core/settings/development.py b/backend/app/core/settings/development.py new file mode 100644 index 0000000..7eabc86 --- /dev/null +++ b/backend/app/core/settings/development.py @@ -0,0 +1,14 @@ +# app/core/settings/development.py +import logging + +from app.core.settings.app import AppSettings + + +class DevAppSettings(AppSettings): + debug: bool = True + title: str = "Dev FastAPI example application" + logging_level: int = logging.DEBUG + + class Config(AppSettings.Config): + # 开发环境读取 .env + env_file = ".env" diff --git a/backend/app/core/settings/production.py b/backend/app/core/settings/production.py new file mode 100644 index 0000000..49329f5 --- /dev/null +++ b/backend/app/core/settings/production.py @@ -0,0 
+1,8 @@ +# app/core/settings/production.py +from app.core.settings.app import AppSettings + + +class ProdAppSettings(AppSettings): + class Config(AppSettings.Config): + # 生产环境读取 prod.env + env_file = "prod.env" diff --git a/backend/app/core/settings/test.py b/backend/app/core/settings/test.py new file mode 100644 index 0000000..bea3c53 --- /dev/null +++ b/backend/app/core/settings/test.py @@ -0,0 +1,19 @@ +import logging + +from pydantic import PostgresDsn, SecretStr + +from app.core.settings.app import AppSettings + + +class TestAppSettings(AppSettings): + debug: bool = True + + title: str = "Test FastAPI example application" + + secret_key: SecretStr = SecretStr("test_secret") + + database_url: PostgresDsn + max_connection_count: int = 5 + min_connection_count: int = 5 + + logging_level: int = logging.DEBUG diff --git a/backend/app/db/__init__.py b/backend/app/db/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/backend/app/db/errors.py b/backend/app/db/errors.py new file mode 100644 index 0000000..bb3ef66 --- /dev/null +++ b/backend/app/db/errors.py @@ -0,0 +1,2 @@ +class EntityDoesNotExist(Exception): + """Raised when entity was not found in database.""" diff --git a/backend/app/db/events.py b/backend/app/db/events.py new file mode 100644 index 0000000..5fa7f73 --- /dev/null +++ b/backend/app/db/events.py @@ -0,0 +1,25 @@ +import asyncpg +from fastapi import FastAPI +from loguru import logger + +from app.core.settings.app import AppSettings + + +async def connect_to_db(app: FastAPI, settings: AppSettings) -> None: + logger.info("Connecting to PostgreSQL") + + app.state.pool = await asyncpg.create_pool( + str(settings.database_url), + min_size=settings.min_connection_count, + max_size=settings.max_connection_count, + ) + + logger.info("Connection established") + + +async def close_db_connection(app: FastAPI) -> None: + logger.info("Closing connection to database") + + await app.state.pool.close() + + logger.info("Connection closed") diff --git 
a/backend/app/db/migrations/env.py b/backend/app/db/migrations/env.py new file mode 100644 index 0000000..f3129f3 --- /dev/null +++ b/backend/app/db/migrations/env.py @@ -0,0 +1,38 @@ +import pathlib +import sys +from logging.config import fileConfig + +from alembic import context +from sqlalchemy import engine_from_config, pool + +sys.path.append(str(pathlib.Path(__file__).resolve().parents[3])) + +from app.core.config import get_app_settings # isort:skip + +SETTINGS = get_app_settings() +DATABASE_URL = SETTINGS.database_url + +config = context.config + +fileConfig(config.config_file_name) # type: ignore + +target_metadata = None + +config.set_main_option("sqlalchemy.url", str(DATABASE_URL)) + + +def run_migrations_online() -> None: + connectable = engine_from_config( + config.get_section(config.config_ini_section), + prefix="sqlalchemy.", + poolclass=pool.NullPool, + ) + + with connectable.connect() as connection: + context.configure(connection=connection, target_metadata=target_metadata) + + with context.begin_transaction(): + context.run_migrations() + + +run_migrations_online() diff --git a/backend/app/db/migrations/script.py.mako b/backend/app/db/migrations/script.py.mako new file mode 100644 index 0000000..3217cf0 --- /dev/null +++ b/backend/app/db/migrations/script.py.mako @@ -0,0 +1,23 @@ +"""${message} + +Revision ID: ${up_revision} +Revises: ${down_revision | comma,n} +Create Date: ${create_date} + +""" +from alembic import op +import sqlalchemy as sa +${imports if imports else ""} + +revision = ${repr(up_revision)} +down_revision = ${repr(down_revision)} +branch_labels = ${repr(branch_labels)} +depends_on = ${repr(depends_on)} + + +def upgrade() -> None: + ${upgrades if upgrades else "pass"} + + +def downgrade() -> None: + ${downgrades if downgrades else "pass"} diff --git a/backend/app/db/migrations/versions/20251121_add_article_views.py b/backend/app/db/migrations/versions/20251121_add_article_views.py new file mode 100644 index 0000000..8b3b79a --- 
/dev/null +++ b/backend/app/db/migrations/versions/20251121_add_article_views.py @@ -0,0 +1,26 @@ +"""add article views column + +Revision ID: add_article_views +Revises: fdf8821871d7 +Create Date: 2025-11-21 +""" + +import sqlalchemy as sa +from alembic import op + +revision = "add_article_views" +down_revision = "fdf8821871d7" +branch_labels = None +depends_on = None + + +def upgrade() -> None: + op.add_column( + "articles", + sa.Column("views", sa.Integer(), nullable=False, server_default="0"), + ) + op.alter_column("articles", "views", server_default=None) + + +def downgrade() -> None: + op.drop_column("articles", "views") diff --git a/backend/app/db/migrations/versions/20251122_add_roles_tables.py b/backend/app/db/migrations/versions/20251122_add_roles_tables.py new file mode 100644 index 0000000..7617822 --- /dev/null +++ b/backend/app/db/migrations/versions/20251122_add_roles_tables.py @@ -0,0 +1,120 @@ +"""add roles and user_roles tables + +Revision ID: add_roles_tables +Revises: add_article_views +Create Date: 2025-11-21 +""" + +from typing import Tuple + +import sqlalchemy as sa +from alembic import op +from sqlalchemy import func, text +from sqlalchemy.dialects.postgresql import JSONB + +revision = "add_roles_tables" +down_revision = "add_article_views" +branch_labels = None +depends_on = None + + +def timestamps() -> Tuple[sa.Column, sa.Column]: + return ( + sa.Column( + "created_at", + sa.TIMESTAMP(timezone=True), + nullable=False, + server_default=func.now(), + ), + sa.Column( + "updated_at", + sa.TIMESTAMP(timezone=True), + nullable=False, + server_default=func.now(), + onupdate=func.current_timestamp(), + ), + ) + + +def upgrade() -> None: + op.create_table( + "roles", + sa.Column("id", sa.Integer, primary_key=True), + sa.Column("name", sa.String(length=64), nullable=False, unique=True), + sa.Column("description", sa.Text, nullable=False, server_default=""), + sa.Column( + "permissions", + JSONB, + nullable=False, + 
server_default=text("'[]'::jsonb"), + ), + *timestamps(), + ) + op.execute( + """ + CREATE TRIGGER update_role_modtime + BEFORE UPDATE + ON roles + FOR EACH ROW + EXECUTE PROCEDURE update_updated_at_column(); + """, + ) + + op.create_table( + "user_roles", + sa.Column( + "user_id", + sa.Integer, + sa.ForeignKey("users.id", ondelete="CASCADE"), + nullable=False, + ), + sa.Column( + "role_id", + sa.Integer, + sa.ForeignKey("roles.id", ondelete="CASCADE"), + nullable=False, + ), + sa.Column( + "assigned_at", + sa.TIMESTAMP(timezone=True), + nullable=False, + server_default=func.now(), + ), + ) + op.create_primary_key( + "pk_user_roles", + "user_roles", + ["user_id", "role_id"], + ) + op.create_index("ix_user_roles_role_id", "user_roles", ["role_id"]) + + op.execute( + """ + INSERT INTO roles (name, description, permissions) + VALUES ('admin', 'System administrator with full privileges', '["*"]') + ON CONFLICT (name) DO NOTHING; + """, + ) + op.execute( + """ + DO $$ + DECLARE + admin_role_id INTEGER; + first_user_id INTEGER; + BEGIN + SELECT id INTO admin_role_id FROM roles WHERE name = 'admin'; + SELECT id INTO first_user_id FROM users ORDER BY id ASC LIMIT 1; + IF admin_role_id IS NOT NULL AND first_user_id IS NOT NULL THEN + INSERT INTO user_roles (user_id, role_id) + VALUES (first_user_id, admin_role_id) + ON CONFLICT DO NOTHING; + END IF; + END $$; + """, + ) + + +def downgrade() -> None: + op.drop_index("ix_user_roles_role_id", table_name="user_roles") + op.drop_table("user_roles") + op.drop_table("roles") diff --git a/backend/app/db/migrations/versions/fdf8821871d7_main_tables.py b/backend/app/db/migrations/versions/fdf8821871d7_main_tables.py new file mode 100644 index 0000000..526d360 --- /dev/null +++ b/backend/app/db/migrations/versions/fdf8821871d7_main_tables.py @@ -0,0 +1,216 @@ +"""main tables + +Revision ID: fdf8821871d7 +Revises: +Create Date: 2019-09-22 01:36:44.791880 + +""" +from typing import Tuple + +import sqlalchemy as sa +from alembic import 
op +from sqlalchemy import func + +revision = "fdf8821871d7" +down_revision = None +branch_labels = None +depends_on = None + + +def create_updated_at_trigger() -> None: + op.execute( + """ + CREATE FUNCTION update_updated_at_column() + RETURNS TRIGGER AS + $$ + BEGIN + NEW.updated_at = now(); + RETURN NEW; + END; + $$ language 'plpgsql'; + """ + ) + + +def timestamps() -> Tuple[sa.Column, sa.Column]: + return ( + sa.Column( + "created_at", + sa.TIMESTAMP(timezone=True), + nullable=False, + server_default=func.now(), + ), + sa.Column( + "updated_at", + sa.TIMESTAMP(timezone=True), + nullable=False, + server_default=func.now(), + onupdate=func.current_timestamp(), + ), + ) + + +def create_users_table() -> None: + op.create_table( + "users", + sa.Column("id", sa.Integer, primary_key=True), + sa.Column("username", sa.Text, unique=True, nullable=False, index=True), + sa.Column("email", sa.Text, unique=True, nullable=False, index=True), + sa.Column("salt", sa.Text, nullable=False), + sa.Column("hashed_password", sa.Text), + sa.Column("bio", sa.Text, nullable=False, server_default=""), + sa.Column("image", sa.Text), + *timestamps(), + ) + op.execute( + """ + CREATE TRIGGER update_user_modtime + BEFORE UPDATE + ON users + FOR EACH ROW + EXECUTE PROCEDURE update_updated_at_column(); + """ + ) + + +def create_followers_to_followings_table() -> None: + op.create_table( + "followers_to_followings", + sa.Column( + "follower_id", + sa.Integer, + sa.ForeignKey("users.id", ondelete="CASCADE"), + nullable=False, + ), + sa.Column( + "following_id", + sa.Integer, + sa.ForeignKey("users.id", ondelete="CASCADE"), + nullable=False, + ), + ) + op.create_primary_key( + "pk_followers_to_followings", + "followers_to_followings", + ["follower_id", "following_id"], + ) + + +def create_articles_table() -> None: + op.create_table( + "articles", + sa.Column("id", sa.Integer, primary_key=True), + sa.Column("slug", sa.Text, unique=True, nullable=False, index=True), + sa.Column("title", sa.Text, 
nullable=False), + sa.Column("description", sa.Text, nullable=False), + sa.Column("body", sa.Text, nullable=False), + sa.Column( + "author_id", sa.Integer, sa.ForeignKey("users.id", ondelete="SET NULL") + ), + *timestamps(), + ) + op.execute( + """ + CREATE TRIGGER update_article_modtime + BEFORE UPDATE + ON articles + FOR EACH ROW + EXECUTE PROCEDURE update_updated_at_column(); + """ + ) + + +def create_tags_table() -> None: + op.create_table("tags", sa.Column("tag", sa.Text, primary_key=True)) + + +def create_articles_to_tags_table() -> None: + op.create_table( + "articles_to_tags", + sa.Column( + "article_id", + sa.Integer, + sa.ForeignKey("articles.id", ondelete="CASCADE"), + nullable=False, + ), + sa.Column( + "tag", + sa.Text, + sa.ForeignKey("tags.tag", ondelete="CASCADE"), + nullable=False, + ), + ) + op.create_primary_key( + "pk_articles_to_tags", "articles_to_tags", ["article_id", "tag"] + ) + + +def create_favorites_table() -> None: + op.create_table( + "favorites", + sa.Column( + "user_id", + sa.Integer, + sa.ForeignKey("users.id", ondelete="CASCADE"), + nullable=False, + ), + sa.Column( + "article_id", + sa.Integer, + sa.ForeignKey("articles.id", ondelete="CASCADE"), + nullable=False, + ), + ) + op.create_primary_key("pk_favorites", "favorites", ["user_id", "article_id"]) + + +def create_commentaries_table() -> None: + op.create_table( + "commentaries", + sa.Column("id", sa.Integer, primary_key=True), + sa.Column("body", sa.Text, nullable=False), + sa.Column( + "author_id", + sa.Integer, + sa.ForeignKey("users.id", ondelete="CASCADE"), + nullable=False, + ), + sa.Column( + "article_id", + sa.Integer, + sa.ForeignKey("articles.id", ondelete="CASCADE"), + nullable=False, + ), + *timestamps(), + ) + op.execute( + """ + CREATE TRIGGER update_comment_modtime + BEFORE UPDATE + ON commentaries + FOR EACH ROW + EXECUTE PROCEDURE update_updated_at_column(); + """ + ) + + +def upgrade() -> None: + create_updated_at_trigger() + create_users_table() + 
create_followers_to_followings_table() + create_articles_table() + create_tags_table() + create_articles_to_tags_table() + create_favorites_table() + create_commentaries_table() + + +def downgrade() -> None: + op.drop_table("commentaries") + op.drop_table("favorites") + op.drop_table("articles_to_tags") + op.drop_table("tags") + op.drop_table("articles") + op.drop_table("followers_to_followings") + op.drop_table("users") + op.execute("DROP FUNCTION update_updated_at_column") diff --git a/backend/app/db/queries/__init__.py b/backend/app/db/queries/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/backend/app/db/queries/queries.py b/backend/app/db/queries/queries.py new file mode 100644 index 0000000..043b72d --- /dev/null +++ b/backend/app/db/queries/queries.py @@ -0,0 +1,16 @@ +# app/db/queries/queries.py +import pathlib +import aiosql + +_SQL_DIR = pathlib.Path(__file__).parent / "sql" + +def _load_all_sql_text_utf8() -> str: + # 统一用 UTF-8 读取 sql 目录下所有 .sql 文件(按文件名排序) + parts: list[str] = [] + for p in sorted(_SQL_DIR.glob("*.sql")): + parts.append(p.read_text(encoding="utf-8")) + parts.append("\n") + return "".join(parts) + +# 用 from_str,而不是 from_path(from_path 会按系统默认编码读取) +queries = aiosql.from_str(_load_all_sql_text_utf8(), driver_adapter="asyncpg") diff --git a/backend/app/db/queries/queries.pyi b/backend/app/db/queries/queries.pyi new file mode 100644 index 0000000..a6186ad --- /dev/null +++ b/backend/app/db/queries/queries.pyi @@ -0,0 +1,140 @@ +"""Typings for queries generated by aiosql""" + +from typing import Dict, Optional, Sequence + +from asyncpg import Connection, Record + +class TagsQueriesMixin: + async def get_all_tags(self, conn: Connection) -> Record: ... + async def create_new_tags( + self, conn: Connection, tags: Sequence[Dict[str, str]] + ) -> None: ... + +class UsersQueriesMixin: + async def get_user_by_email(self, conn: Connection, *, email: str) -> Record: ... 
+ async def get_user_by_username( + self, conn: Connection, *, username: str + ) -> Record: ... + async def get_user_by_id(self, conn: Connection, *, id: int) -> Record: ... + async def create_new_user( + self, + conn: Connection, + *, + username: str, + email: str, + salt: str, + hashed_password: str + ) -> Record: ... + async def update_user_by_username( + self, + conn: Connection, + *, + username: str, + new_username: str, + new_email: str, + new_salt: str, + new_password: str, + new_bio: Optional[str], + new_image: Optional[str], + new_phone: Optional[str], + new_user_type: Optional[str], + new_company_name: Optional[str] + ) -> Record: ... + async def admin_update_user_by_id( + self, + conn: Connection, + *, + id: int, + new_username: Optional[str], + new_email: Optional[str], + new_salt: Optional[str], + new_password: Optional[str], + new_bio: Optional[str], + new_image: Optional[str], + new_phone: Optional[str], + new_user_type: Optional[str], + new_company_name: Optional[str] + ) -> Record: ... + +class ProfilesQueriesMixin: + async def is_user_following_for_another( + self, conn: Connection, *, follower_username: str, following_username: str + ) -> Record: ... + async def subscribe_user_to_another( + self, conn: Connection, *, follower_username: str, following_username: str + ) -> None: ... + async def unsubscribe_user_from_another( + self, conn: Connection, *, follower_username: str, following_username: str + ) -> None: ... + +class CommentsQueriesMixin: + async def get_comments_for_article_by_slug( + self, conn: Connection, *, slug: str + ) -> Record: ... + async def get_comment_by_id_and_slug( + self, conn: Connection, *, comment_id: int, article_slug: str + ) -> Record: ... + async def create_new_comment( + self, conn: Connection, *, body: str, article_slug: str, author_username: str + ) -> Record: ... + async def delete_comment_by_id( + self, conn: Connection, *, comment_id: int, author_username: str + ) -> None: ... 
+ +class ArticlesQueriesMixin: + async def add_article_to_favorites( + self, conn: Connection, *, username: str, slug: str + ) -> None: ... + async def remove_article_from_favorites( + self, conn: Connection, *, username: str, slug: str + ) -> None: ... + async def is_article_in_favorites( + self, conn: Connection, *, username: str, slug: str + ) -> Record: ... + async def get_favorites_count_for_article( + self, conn: Connection, *, slug: str + ) -> Record: ... + async def get_tags_for_article_by_slug( + self, conn: Connection, *, slug: str + ) -> Record: ... + async def get_article_by_slug(self, conn: Connection, *, slug: str) -> Record: ... + async def create_new_article( + self, + conn: Connection, + *, + slug: str, + title: str, + description: str, + body: str, + author_username: str + ) -> Record: ... + async def add_tags_to_article( + self, conn: Connection, tags_slugs: Sequence[Dict[str, str]] + ) -> None: ... + async def update_article( + self, + conn: Connection, + *, + slug: str, + author_username: str, + new_slug: str, + new_title: str, + new_body: str, + new_description: str + ) -> Record: ... + async def delete_article( + self, conn: Connection, *, slug: str, author_username: str + ) -> None: ... + async def get_articles_for_feed( + self, conn: Connection, *, follower_username: str, limit: int, offset: int + ) -> Record: ... + +class Queries( + TagsQueriesMixin, + UsersQueriesMixin, + ProfilesQueriesMixin, + CommentsQueriesMixin, + ArticlesQueriesMixin, +): ... + +queries: Queries diff --git a/backend/app/db/queries/sql/articles.sql b/backend/app/db/queries/sql/articles.sql new file mode 100644 index 0000000..3814f71 --- /dev/null +++ b/backend/app/db/queries/sql/articles.sql @@ -0,0 +1,116 @@ +-- name: add-article-to-favorites! +INSERT INTO favorites (user_id, article_id) +VALUES ((SELECT id FROM users WHERE username = :username), + (SELECT id FROM articles WHERE slug = :slug)) +ON CONFLICT DO NOTHING; + + +-- name: remove-article-from-favorites! 
+DELETE +FROM favorites +WHERE user_id = (SELECT id FROM users WHERE username = :username) + AND article_id = (SELECT id FROM articles WHERE slug = :slug); + + +-- name: is-article-in-favorites^ +SELECT CASE WHEN count(user_id) > 0 THEN TRUE ELSE FALSE END AS favorited +FROM favorites +WHERE user_id = (SELECT id FROM users WHERE username = :username) + AND article_id = (SELECT id FROM articles WHERE slug = :slug); + + +-- name: get-favorites-count-for-article^ +SELECT count(*) as favorites_count +FROM favorites +WHERE article_id = (SELECT id FROM articles WHERE slug = :slug); + + +-- name: get-tags-for-article-by-slug +SELECT t.tag +FROM tags t + INNER JOIN articles_to_tags att ON + t.tag = att.tag + AND + att.article_id = (SELECT id FROM articles WHERE slug = :slug); + + +-- name: get-article-by-slug^ +SELECT id, + slug, + title, + description, + body, + created_at, + updated_at, + (SELECT username FROM users WHERE id = author_id) AS author_username +FROM articles +WHERE slug = :slug +LIMIT 1; + + +-- name: create-new-article 使用 ^ +-- name: create_new_article^ +INSERT INTO articles ( + slug, + title, + description, + body, + cover, + author_id, + views +) VALUES ( + :slug, + :title, + :description, + :body, + :cover, + (SELECT id FROM users WHERE username = :author_username), + 0 +) +RETURNING + articles.id, + articles.slug, + articles.title, + articles.description, + articles.body, + articles.cover, + articles.views, + articles.is_top, + articles.is_featured, + articles.sort_weight, + articles.created_at, + articles.updated_at, + (SELECT username FROM users WHERE id = articles.author_id) AS author_username; + +-- 更新文章(包含 cover) +-- 单行(返回 updated_at) -> ^ +-- name: update_article^ +UPDATE articles +SET + slug = COALESCE(:new_slug, slug), + title = COALESCE(:new_title, title), + body = COALESCE(:new_body, body), + description = COALESCE(:new_description, description), + cover = :new_cover +WHERE + slug = :slug + AND author_id = (SELECT id FROM users WHERE username 
= :author_username) +RETURNING updated_at; + +-- 删除文章 +-- 执行型,无返回 -> ! +-- name: delete_article! +DELETE FROM articles +WHERE + slug = :slug + AND author_id = (SELECT id FROM users WHERE username = :author_username); + +-- 根据 slug 获取单篇文章(带 cover) +-- 单行返回 -> ^ +-- name: get_article_by_slug^ +SELECT + a.id, + a.slug, + a.title, + a.description, + a.body, + a.cover, + a.views, + a.is_top, + a.is_featured, + a.sort_weight, + a.created_at, + a.updated_at, + u.username AS author_username +FROM articles AS a +JOIN users AS u ON u.id = a.author_id +WHERE a.slug = :slug; + +-- Feed / 列表文章(带 cover) +-- 多行结果 -> 不能用 ^ +-- name: get_articles_for_feed +SELECT + a.id, + a.slug, + a.title, + a.description, + a.body, + a.cover, + a.views, + a.is_top, + a.is_featured, + a.sort_weight, + a.created_at, + a.updated_at, + u.username AS author_username +FROM articles AS a +JOIN users AS u + ON u.id = a.author_id +JOIN followers_to_followings AS f + ON f.following_id = u.id +WHERE f.follower_id = ( + SELECT id FROM users WHERE username = :follower_username +) +ORDER BY a.is_top DESC, a.sort_weight DESC, a.created_at DESC +LIMIT :limit OFFSET :offset; + +-- ====================================================================== +-- Tags 相关 +-- ====================================================================== + +-- 给文章添加标签 +-- 执行型 -> ! +-- name: add_tags_to_article! 
+INSERT INTO articles_to_tags (article_id, tag) +SELECT a.id, :tag +FROM articles a +WHERE a.slug = :slug; + +-- 获取文章的所有标签 +-- name: get_tags_for_article_by_slug +SELECT t.tag +FROM articles_to_tags t +JOIN articles a ON a.id = t.article_id +WHERE a.slug = :slug +ORDER BY t.tag; + +-- ====================================================================== +-- Favorites 相关 +-- ====================================================================== + +-- 统计收藏数 +-- 单值 -> ^ +-- name: get_favorites_count_for_article^ +SELECT COUNT(*)::int AS favorites_count +FROM favorites f +JOIN articles a ON a.id = f.article_id +WHERE a.slug = :slug; + +-- 是否已收藏 +-- 单值布尔 -> ^ +-- name: is_article_in_favorites^ +SELECT EXISTS ( + SELECT 1 + FROM favorites f + JOIN articles a ON a.id = f.article_id + JOIN users u ON u.id = f.user_id + WHERE a.slug = :slug + AND u.username = :username +) AS favorited; + +-- 加入收藏 +-- 执行型 -> ! +-- name: add_article_to_favorites! +INSERT INTO favorites (user_id, article_id) +SELECT + (SELECT id FROM users WHERE username = :username), + (SELECT id FROM articles WHERE slug = :slug) +ON CONFLICT DO NOTHING; + +-- 取消收藏 +-- 执行型 -> ! +-- name: remove_article_from_favorites! 
+DELETE FROM favorites +WHERE user_id = (SELECT id FROM users WHERE username = :username) + AND article_id = (SELECT id FROM articles WHERE slug = :slug); + +-- ====================================================================== +-- Views 相关 +-- ====================================================================== + +-- 访问量 +1,返回最新值 +-- name: increment_article_views^ +UPDATE articles +SET views = views + 1 +WHERE slug = :slug +RETURNING views; diff --git a/backend/app/db/queries/sql/roles.sql b/backend/app/db/queries/sql/roles.sql new file mode 100644 index 0000000..ecc7665 --- /dev/null +++ b/backend/app/db/queries/sql/roles.sql @@ -0,0 +1,86 @@ +-- name: list-roles +SELECT id, + name, + description, + permissions, + created_at, + updated_at +FROM roles +ORDER BY name; + + +-- name: get-role-by-id^ +SELECT id, + name, + description, + permissions, + created_at, + updated_at +FROM roles +WHERE id = :role_id +LIMIT 1; + + +-- name: create-role^ +INSERT INTO roles (name, description, permissions) +VALUES (:name, :description, :permissions) +RETURNING id, + name, + description, + permissions, + created_at, + updated_at; + + +-- name: update-role^ +UPDATE roles +SET name = COALESCE(:name, name), + description = COALESCE(:description, description), + permissions = COALESCE(:permissions, permissions) +WHERE id = :role_id +RETURNING id, + name, + description, + permissions, + created_at, + updated_at; + + +-- name: delete-role! +DELETE FROM roles +WHERE id = :role_id; + + +-- name: get-roles-for-user +SELECT r.id, + r.name, + r.description, + r.permissions, + r.created_at, + r.updated_at +FROM roles r +JOIN user_roles ur ON ur.role_id = r.id +WHERE ur.user_id = :user_id +ORDER BY r.name; + + +-- name: assign-role-to-user! +INSERT INTO user_roles (user_id, role_id) +VALUES (:user_id, :role_id) +ON CONFLICT DO NOTHING; + + +-- name: revoke-role-from-user! 
+DELETE FROM user_roles +WHERE user_id = :user_id + AND role_id = :role_id; + + +-- name: user-has-role^ +SELECT EXISTS ( + SELECT 1 + FROM user_roles ur + JOIN roles r ON r.id = ur.role_id + WHERE ur.user_id = :user_id + AND r.name = :role_name +) AS has_role; diff --git a/backend/app/db/queries/sql/tags.sql b/backend/app/db/queries/sql/tags.sql new file mode 100644 index 0000000..c862a75 --- /dev/null +++ b/backend/app/db/queries/sql/tags.sql @@ -0,0 +1,9 @@ +-- name: get-all-tags +SELECT tag +FROM tags; + + +-- name: create-new-tags*! +INSERT INTO tags (tag) +VALUES (:tag) +ON CONFLICT DO NOTHING; diff --git a/backend/app/db/queries/sql/users.sql b/backend/app/db/queries/sql/users.sql new file mode 100644 index 0000000..a43021f --- /dev/null +++ b/backend/app/db/queries/sql/users.sql @@ -0,0 +1,119 @@ +-- name: get-user-by-email^ +SELECT u.id, + u.username, + u.email, + u.salt, + u.hashed_password, + u.bio, + u.image, + u.phone, + u.user_type, + u.company_name, + u.created_at, + u.updated_at, + COALESCE(array_agg(DISTINCT r.name) FILTER (WHERE r.name IS NOT NULL), '{}') AS roles +FROM users u +LEFT JOIN user_roles ur ON ur.user_id = u.id +LEFT JOIN roles r ON r.id = ur.role_id +WHERE u.email = :email +GROUP BY u.id +LIMIT 1; + + +-- name: get-user-by-username^ +SELECT u.id, + u.username, + u.email, + u.salt, + u.hashed_password, + u.bio, + u.image, + u.phone, + u.user_type, + u.company_name, + u.created_at, + u.updated_at, + COALESCE(array_agg(DISTINCT r.name) FILTER (WHERE r.name IS NOT NULL), '{}') AS roles +FROM users u +LEFT JOIN user_roles ur ON ur.user_id = u.id +LEFT JOIN roles r ON r.id = ur.role_id +WHERE u.username = :username +GROUP BY u.id +LIMIT 1; + + +-- name: create-new-user None: + super().__init__("${0}".format(count)) + + +class TypedTable(Table): + __table__ = "" + + def __init__( + self, + name: Optional[str] = None, + schema: Optional[str] = None, + alias: Optional[str] = None, + query_cls: Optional[Query] = None, + ) -> None: + if name is 
None: + if self.__table__: + name = self.__table__ + else: + name = self.__class__.__name__ + + super().__init__(name, schema, alias, query_cls) + + +class Users(TypedTable): + __table__ = "users" + + id: int + username: str + + +class Articles(TypedTable): + __table__ = "articles" + + id: int + slug: str + title: str + description: str + body: str + author_id: int + created_at: datetime + updated_at: datetime + + +class Tags(TypedTable): + __table__ = "tags" + + tag: str + + +class ArticlesToTags(TypedTable): + __table__ = "articles_to_tags" + + article_id: int + tag: str + + +class Favorites(TypedTable): + __table__ = "favorites" + + article_id: int + user_id: int + + +users = Users() +articles = Articles() +tags = Tags() +articles_to_tags = ArticlesToTags() +favorites = Favorites() diff --git a/backend/app/db/repositories/__init__.py b/backend/app/db/repositories/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/backend/app/db/repositories/admin.py b/backend/app/db/repositories/admin.py new file mode 100644 index 0000000..c3a1807 --- /dev/null +++ b/backend/app/db/repositories/admin.py @@ -0,0 +1,161 @@ +from __future__ import annotations + +import json +from typing import List, Optional, Tuple + +from app.db.repositories.base import BaseRepository +from app.models.schemas.admin import AdminRoleLite, AdminUserSummary, AdminDashboardStats + + +class AdminRepository(BaseRepository): + def _normalize_roles(self, payload) -> List[AdminRoleLite]: + roles_payload = payload or [] + if isinstance(roles_payload, str): + try: + roles_payload = json.loads(roles_payload) + except ValueError: + roles_payload = [] + return [ + AdminRoleLite(**role) + for role in roles_payload + if role + ] + + def _record_to_user(self, record) -> AdminUserSummary: + roles = self._normalize_roles(record.get("roles")) + return AdminUserSummary( + id=record["id"], + username=record["username"], + email=record["email"], + bio=record.get("bio"), + image=record.get("image"), + 
roles=roles, + created_at=record["created_at"], + updated_at=record["updated_at"], + ) + + def _build_user_filters( + self, + search: Optional[str], + role_id: Optional[int], + ) -> Tuple[str, List[object]]: + clauses: List[str] = [] + params: List[object] = [] + + if search: + placeholder = f"${len(params) + 1}" + params.append(f"%{search}%") + clauses.append( + f"(u.username ILIKE {placeholder} OR u.email ILIKE {placeholder})", + ) + + if role_id: + placeholder = f"${len(params) + 1}" + params.append(role_id) + clauses.append( + f"EXISTS (SELECT 1 FROM user_roles ur WHERE ur.user_id = u.id AND ur.role_id = {placeholder})", + ) + + if not clauses: + return "", params + return "WHERE " + " AND ".join(clauses), params + + async def list_users( + self, + *, + search: Optional[str], + role_id: Optional[int], + limit: int, + offset: int, + ) -> Tuple[List[AdminUserSummary], int]: + where_sql, params = self._build_user_filters(search, role_id) + base_params = list(params) + + count_sql = f"SELECT COUNT(*) FROM users u {where_sql}" + total = await self.connection.fetchval(count_sql, *base_params) + + list_params = list(base_params) + list_params.extend([limit, offset]) + list_sql = f""" + SELECT + u.id, + u.username, + u.email, + u.bio, + u.image, + u.created_at, + u.updated_at, + COALESCE( + jsonb_agg( + DISTINCT jsonb_build_object( + 'id', r.id, + 'name', r.name, + 'description', r.description, + 'permissions', r.permissions + ) + ) FILTER (WHERE r.id IS NOT NULL), + '[]'::jsonb + ) AS roles + FROM users u + LEFT JOIN user_roles ur ON ur.user_id = u.id + LEFT JOIN roles r ON r.id = ur.role_id + {where_sql} + GROUP BY u.id + ORDER BY u.created_at DESC + LIMIT ${len(base_params) + 1} + OFFSET ${len(base_params) + 2} + """ + rows = await self.connection.fetch(list_sql, *list_params) + return [self._record_to_user(row) for row in rows], int(total or 0) + + async def get_user_summary(self, user_id: int) -> Optional[AdminUserSummary]: + sql = """ + SELECT + u.id, + 
u.username, + u.email, + u.bio, + u.image, + u.created_at, + u.updated_at, + COALESCE( + jsonb_agg( + DISTINCT jsonb_build_object( + 'id', r.id, + 'name', r.name, + 'description', r.description, + 'permissions', r.permissions + ) + ) FILTER (WHERE r.id IS NOT NULL), + '[]'::jsonb + ) AS roles + FROM users u + LEFT JOIN user_roles ur ON ur.user_id = u.id + LEFT JOIN roles r ON r.id = ur.role_id + WHERE u.id = $1 + GROUP BY u.id + """ + record = await self.connection.fetchrow(sql, user_id) + if not record: + return None + return self._record_to_user(record) + + async def get_dashboard_stats(self) -> AdminDashboardStats: + users_count = await self.connection.fetchval("SELECT COUNT(*) FROM users") + roles_count = await self.connection.fetchval("SELECT COUNT(*) FROM roles") + + articles_count = await self.connection.fetchval("SELECT COUNT(*) FROM articles") + total_views = await self.connection.fetchval( + "SELECT COALESCE(SUM(views), 0) FROM articles", + ) + published_today = await self.connection.fetchval( + "SELECT COUNT(*) FROM articles WHERE created_at >= (NOW() - INTERVAL '1 day')", + ) + + return AdminDashboardStats( + users=int(users_count or 0), + roles=int(roles_count or 0), + articles=int(articles_count or 0), + total_views=int(total_views or 0), + published_today=int(published_today or 0), + ) diff --git a/backend/app/db/repositories/articles.py b/backend/app/db/repositories/articles.py new file mode 100644 index 0000000..ab2ff54 --- /dev/null +++ b/backend/app/db/repositories/articles.py @@ -0,0 +1,624 @@ +# app/db/repositories/articles.py +from typing import List, Optional, Sequence, Tuple + +from asyncpg import Connection, Record +from pathlib import Path + +from app.db.errors import EntityDoesNotExist +from app.db.queries.queries import queries +from app.db.repositories.base import BaseRepository +from app.db.repositories.profiles import ProfilesRepository +from app.db.repositories.tags import TagsRepository +from app.models.domain.articles import 
Article +from app.models.domain.users import User + +AUTHOR_USERNAME_ALIAS = "author_username" +SLUG_ALIAS = "slug" + + +class ArticlesRepository(BaseRepository): # noqa: WPS214 + def __init__(self, conn: Connection) -> None: + super().__init__(conn) + self._profiles_repo = ProfilesRepository(conn) + self._tags_repo = TagsRepository(conn) + + # ===== 内部工具 ===== + + async def _ensure_article_flag_columns(self) -> None: + """ + 给 articles 表补充置顶/推荐/权重字段,兼容旧库。 + 多次执行使用 IF NOT EXISTS,不会抛错。 + """ + await self.connection.execute( + """ + ALTER TABLE articles + ADD COLUMN IF NOT EXISTS is_top BOOLEAN NOT NULL DEFAULT FALSE, + ADD COLUMN IF NOT EXISTS is_featured BOOLEAN NOT NULL DEFAULT FALSE, + ADD COLUMN IF NOT EXISTS sort_weight INT NOT NULL DEFAULT 0; + """, + ) + + def _try_delete_cover_file(self, cover: Optional[str]) -> None: + """ + 清空 cover 时顺带删除 static/uploads 下的旧封面文件。 + """ + if not cover: + return + + try: + path_str = cover.lstrip("/") + if not path_str.startswith("static/uploads/"): + return + + p = Path(path_str) + if not p.is_absolute(): + p = Path(".") / p + + if p.is_file(): + p.unlink() + except Exception: + # 可以按需加日志,这里静默 + pass + + # ===== CRUD ===== + + async def create_article( # noqa: WPS211 + self, + *, + slug: str, + title: str, + description: str, + body: str, + author: User, + tags: Optional[Sequence[str]] = None, + cover: Optional[str] = None, + ) -> Article: + await self._ensure_article_flag_columns() + + async with self.connection.transaction(): + article_row = await queries.create_new_article( + self.connection, + slug=slug, + title=title, + description=description, + body=body, + author_username=author.username, + cover=cover, + ) + + if tags: + await self._tags_repo.create_tags_that_dont_exist(tags=tags) + await self._link_article_with_tags(slug=slug, tags=tags) + + return await self._get_article_from_db_record( + article_row=article_row, + slug=slug, + author_username=article_row[AUTHOR_USERNAME_ALIAS], + requested_user=author, + ) + + 
async def update_article( # noqa: WPS211 + self, + *, + article: Article, + slug: Optional[str] = None, + title: Optional[str] = None, + body: Optional[str] = None, + description: Optional[str] = None, + cover: Optional[str] = None, + cover_provided: bool = False, + ) -> Article: + """ + cover_provided: + - True 表示本次请求体里包含 cover 字段(可能是字符串/""/null) + - False 表示前端没动 cover,保持不变 + """ + await self._ensure_article_flag_columns() + + updated_article = article.copy(deep=True) + updated_article.slug = slug or updated_article.slug + updated_article.title = title or article.title + updated_article.body = body or article.body + updated_article.description = description or article.description + + old_cover = article.cover + + if cover_provided: + # 约定:None / "" 视为清空封面 + updated_article.cover = cover or None + + async with self.connection.transaction(): + updated_row = await queries.update_article( + self.connection, + slug=article.slug, + author_username=article.author.username, + new_slug=updated_article.slug, + new_title=updated_article.title, + new_body=updated_article.body, + new_description=updated_article.description, + new_cover=updated_article.cover, + ) + updated_article.updated_at = updated_row["updated_at"] + + # 如果这次真的更新了 cover,并且旧值存在且发生变化,则尝试删除旧文件 + if cover_provided and old_cover and old_cover != updated_article.cover: + self._try_delete_cover_file(old_cover) + + return updated_article + + async def delete_article(self, *, article: Article) -> None: + await self._ensure_article_flag_columns() + + async with self.connection.transaction(): + await queries.delete_article( + self.connection, + slug=article.slug, + author_username=article.author.username, + ) + + if article.cover: + self._try_delete_cover_file(article.cover) + + async def filter_articles( # noqa: WPS211 + self, + *, + tag: Optional[str] = None, + tags: Optional[Sequence[str]] = None, + author: Optional[str] = None, + favorited: Optional[str] = None, + search: Optional[str] = None, + limit: int = 20, + 
offset: int = 0, + requested_user: Optional[User] = None, + tag_mode: str = "and", + ) -> List[Article]: + await self._ensure_article_flag_columns() + + tag_list: List[str] = [] + if tags: + tag_list.extend([t.strip() for t in tags if str(t).strip()]) + if tag: + tag_list.append(tag.strip()) + # 去重,保留顺序 + seen = set() + tag_list = [t for t in tag_list if not (t in seen or seen.add(t))] + tag_mode = (tag_mode or "and").lower() + if tag_mode not in ("and", "or"): + tag_mode = "and" + + params: List[object] = [] + joins: List[str] = ["LEFT JOIN users u ON u.id = a.author_id"] + where_clauses: List[str] = [] + having_clause = "" + + if author: + params.append(author) + where_clauses.append(f"u.username = ${len(params)}") + + if favorited: + params.append(favorited) + joins.append( + f"""JOIN favorites f + ON f.article_id = a.id + AND f.user_id = (SELECT id FROM users WHERE username = ${len(params)})""", + ) + + if tag_list: + params.append(tag_list) + joins.append( + f"JOIN articles_to_tags att ON att.article_id = a.id AND att.tag = ANY(${len(params)})", + ) + # AND 逻辑:命中全部 tag + if tag_mode == "and": + having_clause = f"HAVING COUNT(DISTINCT att.tag) >= {len(tag_list)}" + + if search: + params.append(f"%{search}%") + where_clauses.append( + f"(a.title ILIKE ${len(params)} OR a.description ILIKE ${len(params)} OR a.slug ILIKE ${len(params)})", + ) + + where_sql = f"WHERE {' AND '.join(where_clauses)}" if where_clauses else "" + + limit_idx = len(params) + 1 + offset_idx = len(params) + 2 + params.extend([limit, offset]) + + group_cols = ", ".join( + [ + "a.id", + "a.slug", + "a.title", + "a.description", + "a.body", + "a.cover", + "a.views", + "a.created_at", + "a.updated_at", + "a.is_top", + "a.is_featured", + "a.sort_weight", + "u.username", + ], + ) + + sql = f""" + SELECT + a.id, + a.slug, + a.title, + a.description, + a.body, + a.cover, + a.views, + a.created_at, + a.updated_at, + a.is_top, + a.is_featured, + a.sort_weight, + u.username AS {AUTHOR_USERNAME_ALIAS} 
+ FROM articles a + {' '.join(joins)} + {where_sql} + GROUP BY {group_cols} + {having_clause} + ORDER BY a.is_top DESC, a.sort_weight DESC, a.updated_at DESC, a.created_at DESC + LIMIT ${limit_idx} + OFFSET ${offset_idx} + """ + + articles_rows = await self.connection.fetch(sql, *params) + + return [ + await self._get_article_from_db_record( + article_row=article_row, + slug=article_row[SLUG_ALIAS], + author_username=article_row[AUTHOR_USERNAME_ALIAS], + requested_user=requested_user, + ) + for article_row in articles_rows + ] + + async def list_articles_by_slugs( + self, + *, + slugs: Sequence[str], + requested_user: Optional[User] = None, + ) -> List[Article]: + """ + 按给定顺序批量获取文章;缺失的 slug 会被忽略。 + """ + if not slugs: + return [] + await self._ensure_article_flag_columns() + unique_slugs: List[str] = [] + for slug in slugs: + if slug not in unique_slugs: + unique_slugs.append(slug) + + rows = await self.connection.fetch( + f""" + SELECT + a.id, + a.slug, + a.title, + a.description, + a.body, + a.cover, + a.views, + a.is_top, + a.is_featured, + a.sort_weight, + a.created_at, + a.updated_at, + u.username AS {AUTHOR_USERNAME_ALIAS} + FROM articles a + LEFT JOIN users u ON u.id = a.author_id + WHERE a.slug = ANY($1::text[]) + ORDER BY array_position($1::text[], a.slug) + """, + unique_slugs, + ) + + articles: List[Article] = [] + for row in rows: + articles.append( + await self._get_article_from_db_record( + article_row=row, + slug=row[SLUG_ALIAS], + author_username=row[AUTHOR_USERNAME_ALIAS], + requested_user=requested_user, + ), + ) + return articles + + async def get_articles_for_user_feed( + self, + *, + user: User, + limit: int = 20, + offset: int = 0, + ) -> List[Article]: + await self._ensure_article_flag_columns() + + articles_rows = await queries.get_articles_for_feed( + self.connection, + follower_username=user.username, + limit=limit, + offset=offset, + ) + return [ + await self._get_article_from_db_record( + article_row=article_row, + 
slug=article_row[SLUG_ALIAS], + author_username=article_row[AUTHOR_USERNAME_ALIAS], + requested_user=user, + ) + for article_row in articles_rows + ] + + async def get_article_by_slug( + self, + *, + slug: str, + requested_user: Optional[User] = None, + ) -> Article: + await self._ensure_article_flag_columns() + + article_row = await queries.get_article_by_slug(self.connection, slug=slug) + if article_row: + return await self._get_article_from_db_record( + article_row=article_row, + slug=article_row[SLUG_ALIAS], + author_username=article_row[AUTHOR_USERNAME_ALIAS], + requested_user=requested_user, + ) + + raise EntityDoesNotExist(f"article with slug {slug} does not exist") + + async def get_tags_for_article_by_slug(self, *, slug: str) -> List[str]: + tag_rows = await queries.get_tags_for_article_by_slug( + self.connection, + slug=slug, + ) + return [row["tag"] for row in tag_rows] + + async def get_favorites_count_for_article_by_slug(self, *, slug: str) -> int: + return ( + await queries.get_favorites_count_for_article(self.connection, slug=slug) + )["favorites_count"] + + async def is_article_favorited_by_user(self, *, slug: str, user: User) -> bool: + return ( + await queries.is_article_in_favorites( + self.connection, + username=user.username, + slug=slug, + ) + )["favorited"] + + async def add_article_into_favorites(self, *, article: Article, user: User) -> None: + await queries.add_article_to_favorites( + self.connection, + username=user.username, + slug=article.slug, + ) + + async def remove_article_from_favorites( + self, + *, + article: Article, + user: User, + ) -> None: + await queries.remove_article_from_favorites( + self.connection, + username=user.username, + slug=article.slug, + ) + + async def _get_article_from_db_record( + self, + *, + article_row: Record, + slug: str, + author_username: str, + requested_user: Optional[User], + ) -> Article: + cover = article_row.get("cover") if "cover" in article_row else None + views = article_row.get("views", 0) 
+ is_top = bool(article_row.get("is_top", False)) + is_featured = bool(article_row.get("is_featured", False)) + sort_weight = int(article_row.get("sort_weight", 0) or 0) + + return Article( + id_=article_row["id"], + slug=slug, + title=article_row["title"], + description=article_row["description"], + body=article_row["body"], + cover=cover, + is_top=is_top, + is_featured=is_featured, + sort_weight=sort_weight, + views=views, + author=await self._profiles_repo.get_profile_by_username( + username=author_username, + requested_user=requested_user, + ), + tags=await self.get_tags_for_article_by_slug(slug=slug), + favorites_count=await self.get_favorites_count_for_article_by_slug( + slug=slug, + ), + favorited=await self.is_article_favorited_by_user( + slug=slug, + user=requested_user, + ) + if requested_user + else False, + created_at=article_row["created_at"], + updated_at=article_row["updated_at"], + ) + + async def increment_article_views(self, *, slug: str) -> int: + result = await queries.increment_article_views(self.connection, slug=slug) + return result["views"] + + async def _link_article_with_tags(self, *, slug: str, tags: Sequence[str]) -> None: + """ + 把 tag 列表绑定到文章。 + """ + for tag in tags: + await queries.add_tags_to_article( + self.connection, + slug=slug, + tag=tag, + ) + + async def list_articles_for_admin( + self, + *, + search: Optional[str] = None, + author: Optional[str] = None, + limit: int = 20, + offset: int = 0, + ) -> Tuple[List[Article], int]: + await self._ensure_article_flag_columns() + + clauses: List[str] = [] + params: List[object] = [] + + if author: + placeholder = f"${len(params) + 1}" + params.append(author) + clauses.append(f"u.username = {placeholder}") + + if search: + placeholder = f"${len(params) + 1}" + params.append(f"%{search}%") + clauses.append( + f"(a.title ILIKE {placeholder} OR a.slug ILIKE {placeholder} OR a.description ILIKE {placeholder})", + ) + + where_sql = "" + if clauses: + where_sql = "WHERE " + " AND 
".join(clauses) + + count_sql = f""" + SELECT COUNT(*) + FROM articles a + LEFT JOIN users u ON u.id = a.author_id + {where_sql} + """ + total = await self.connection.fetchval(count_sql, *params) + + list_params = list(params) + list_params.extend([limit, offset]) + list_sql = f""" + SELECT + a.id, + a.slug, + a.title, + a.description, + a.body, + a.cover, + a.views, + a.created_at, + a.updated_at, + a.is_top, + a.is_featured, + a.sort_weight, + u.username AS {AUTHOR_USERNAME_ALIAS} + FROM articles a + LEFT JOIN users u ON u.id = a.author_id + {where_sql} + ORDER BY a.is_top DESC, a.sort_weight DESC, a.updated_at DESC, a.created_at DESC + LIMIT ${len(params) + 1} + OFFSET ${len(params) + 2} + """ + rows = await self.connection.fetch(list_sql, *list_params) + articles = [ + await self._get_article_from_db_record( + article_row=row, + slug=row[SLUG_ALIAS], + author_username=row[AUTHOR_USERNAME_ALIAS], + requested_user=None, + ) + for row in rows + ] + return articles, int(total or 0) + + async def admin_update_article( + self, + *, + article: Article, + slug: Optional[str] = None, + title: Optional[str] = None, + body: Optional[str] = None, + description: Optional[str] = None, + cover: Optional[str] = None, + is_top: Optional[bool] = None, + is_featured: Optional[bool] = None, + sort_weight: Optional[int] = None, + cover_provided: bool = False, + ) -> Article: + await self._ensure_article_flag_columns() + + updated_article = article.copy(deep=True) + updated_article.slug = slug or updated_article.slug + updated_article.title = title or article.title + updated_article.body = body or article.body + updated_article.description = description or article.description + + if is_top is not None: + updated_article.is_top = is_top + if is_featured is not None: + updated_article.is_featured = is_featured + if sort_weight is not None: + updated_article.sort_weight = sort_weight + + old_cover = article.cover + if cover_provided: + updated_article.cover = cover or None + + async 
with self.connection.transaction(): + updated_row = await self.connection.fetchrow( + """ + UPDATE articles + SET slug = COALESCE($2, slug), + title = COALESCE($3, title), + body = COALESCE($4, body), + description = COALESCE($5, description), + cover = $6, + is_top = COALESCE($7, is_top), + is_featured = COALESCE($8, is_featured), + sort_weight = COALESCE($9, sort_weight) + WHERE id = $1 + RETURNING updated_at + """, + article.id_, + updated_article.slug, + updated_article.title, + updated_article.body, + updated_article.description, + updated_article.cover, + updated_article.is_top, + updated_article.is_featured, + updated_article.sort_weight, + ) + updated_article.updated_at = updated_row["updated_at"] + + if cover_provided and old_cover and old_cover != updated_article.cover: + self._try_delete_cover_file(old_cover) + + return updated_article + + async def admin_delete_article(self, *, article: Article) -> None: + await self._ensure_article_flag_columns() + + async with self.connection.transaction(): + await self.connection.execute( + "DELETE FROM articles WHERE id = $1", + article.id_, + ) + if article.cover: + self._try_delete_cover_file(article.cover) diff --git a/backend/app/db/repositories/base.py b/backend/app/db/repositories/base.py new file mode 100644 index 0000000..8f8a5c3 --- /dev/null +++ b/backend/app/db/repositories/base.py @@ -0,0 +1,10 @@ +from asyncpg.connection import Connection + + +class BaseRepository: + def __init__(self, conn: Connection) -> None: + self._conn = conn + + @property + def connection(self) -> Connection: + return self._conn diff --git a/backend/app/db/repositories/comments.py b/backend/app/db/repositories/comments.py new file mode 100644 index 0000000..7c21911 --- /dev/null +++ b/backend/app/db/repositories/comments.py @@ -0,0 +1,103 @@ +from typing import List, Optional + +from asyncpg import Connection, Record + +from app.db.errors import EntityDoesNotExist +from app.db.queries.queries import queries +from 
app.db.repositories.base import BaseRepository +from app.db.repositories.profiles import ProfilesRepository +from app.models.domain.articles import Article +from app.models.domain.comments import Comment +from app.models.domain.users import User + + +class CommentsRepository(BaseRepository): + def __init__(self, conn: Connection) -> None: + super().__init__(conn) + self._profiles_repo = ProfilesRepository(conn) + + async def get_comment_by_id( + self, + *, + comment_id: int, + article: Article, + user: Optional[User] = None, + ) -> Comment: + comment_row = await queries.get_comment_by_id_and_slug( + self.connection, + comment_id=comment_id, + article_slug=article.slug, + ) + if comment_row: + return await self._get_comment_from_db_record( + comment_row=comment_row, + author_username=comment_row["author_username"], + requested_user=user, + ) + + raise EntityDoesNotExist( + "comment with id {0} does not exist".format(comment_id), + ) + + async def get_comments_for_article( + self, + *, + article: Article, + user: Optional[User] = None, + ) -> List[Comment]: + comments_rows = await queries.get_comments_for_article_by_slug( + self.connection, + slug=article.slug, + ) + return [ + await self._get_comment_from_db_record( + comment_row=comment_row, + author_username=comment_row["author_username"], + requested_user=user, + ) + for comment_row in comments_rows + ] + + async def create_comment_for_article( + self, + *, + body: str, + article: Article, + user: User, + ) -> Comment: + comment_row = await queries.create_new_comment( + self.connection, + body=body, + article_slug=article.slug, + author_username=user.username, + ) + return await self._get_comment_from_db_record( + comment_row=comment_row, + author_username=comment_row["author_username"], + requested_user=user, + ) + + async def delete_comment(self, *, comment: Comment) -> None: + await queries.delete_comment_by_id( + self.connection, + comment_id=comment.id_, + author_username=comment.author.username, + ) + + 
async def _get_comment_from_db_record( + self, + *, + comment_row: Record, + author_username: str, + requested_user: Optional[User], + ) -> Comment: + return Comment( + id_=comment_row["id"], + body=comment_row["body"], + author=await self._profiles_repo.get_profile_by_username( + username=author_username, + requested_user=requested_user, + ), + created_at=comment_row["created_at"], + updated_at=comment_row["updated_at"], + ) diff --git a/backend/app/db/repositories/email_codes.py b/backend/app/db/repositories/email_codes.py new file mode 100644 index 0000000..3cf2fd1 --- /dev/null +++ b/backend/app/db/repositories/email_codes.py @@ -0,0 +1,68 @@ +# app/services/mailer.py +import smtplib +import ssl +from email.message import EmailMessage +from typing import Optional + +from loguru import logger +from app.core.config import get_app_settings + + +def _build_message(*, from_email: str, to_email: str, subject: str, html: str) -> EmailMessage: + msg = EmailMessage() + msg["From"] = from_email + msg["To"] = to_email + msg["Subject"] = subject + msg.set_content("Your email client does not support HTML.") + msg.add_alternative(html, subtype="html") + return msg + + +def send_email(to_email: str, subject: str, html: str) -> bool: + """ + 同步发送;成功返回 True,失败返回 False,并打印详细日志。 + - 端口 465:使用 SMTP_SSL + - 其他端口:使用 SMTP + (可选)STARTTLS + """ + s = get_app_settings() + from_email = str(s.mail_from) + smtp_host = s.smtp_host + smtp_port = int(s.smtp_port) + smtp_user: Optional[str] = s.smtp_user.get_secret_value() if s.smtp_user else None + smtp_pass: Optional[str] = s.smtp_password.get_secret_value() if s.smtp_password else None + + msg = _build_message(from_email=from_email, to_email=to_email, subject=subject, html=html) + + logger.info( + "SMTP send start → host={} port={} tls={} from={} to={}", + smtp_host, smtp_port, s.smtp_tls, from_email, to_email, + ) + + try: + if smtp_port == 465: + context = ssl.create_default_context() + with smtplib.SMTP_SSL(smtp_host, smtp_port, 
context=context, timeout=20) as server: + if smtp_user and smtp_pass: + server.login(smtp_user, smtp_pass) + server.send_message(msg) + else: + with smtplib.SMTP(smtp_host, smtp_port, timeout=20) as server: + server.ehlo() + if s.smtp_tls: + context = ssl.create_default_context() + server.starttls(context=context) + server.ehlo() + if smtp_user and smtp_pass: + server.login(smtp_user, smtp_pass) + server.send_message(msg) + + logger.info("SMTP send OK to {}", to_email) + return True + + except smtplib.SMTPResponseException as e: + # 能拿到服务端 code/resp 的错误 + logger.error("SMTPResponseException: code={} msg={}", getattr(e, "smtp_code", None), getattr(e, "smtp_error", None)) + return False + except Exception as e: + logger.exception("SMTP send failed: {}", e) + return False diff --git a/backend/app/db/repositories/home_featured.py b/backend/app/db/repositories/home_featured.py new file mode 100644 index 0000000..0b3ad29 --- /dev/null +++ b/backend/app/db/repositories/home_featured.py @@ -0,0 +1,82 @@ +# app/db/repositories/home_featured.py +from typing import List, Sequence + +from asyncpg import Connection + +from app.db.repositories.base import BaseRepository + + +class HomeFeaturedRepository(BaseRepository): + """ + 维护首页推送文章的排序列表(最多 10 条)。 + 仅存 slug + sort_order,真正返回文章数据时再通过 ArticlesRepository 拉取。 + """ + + def __init__(self, conn: Connection) -> None: + super().__init__(conn) + + async def _ensure_table(self) -> None: + await self.connection.execute( + """ + CREATE TABLE IF NOT EXISTS home_featured_articles ( + slug TEXT PRIMARY KEY, + sort_order INT NOT NULL DEFAULT 0, + created_at TIMESTAMPTZ NOT NULL DEFAULT NOW(), + updated_at TIMESTAMPTZ NOT NULL DEFAULT NOW() + ); + """, + ) + + async def list_slugs(self, *, limit: int = 10) -> List[str]: + await self._ensure_table() + rows = await self.connection.fetch( + """ + SELECT slug + FROM home_featured_articles + ORDER BY sort_order ASC, updated_at DESC, created_at DESC + LIMIT $1; + """, + limit, + ) + return 
[row["slug"] for row in rows] + + async def save_slugs(self, *, slugs: Sequence[str], limit: int = 10) -> List[str]: + """ + 保存首页推送顺序,自动去重并截断到 limit。 + 返回最终生效的 slug 顺序。 + """ + await self._ensure_table() + clean_slugs: List[str] = [] + for slug in slugs: + normalized = str(slug).strip() + if not normalized: + continue + if normalized not in clean_slugs: + clean_slugs.append(normalized) + clean_slugs = clean_slugs[:limit] + + async with self.connection.transaction(): + if clean_slugs: + await self.connection.execute( + """ + DELETE FROM home_featured_articles + WHERE slug <> ALL($1::text[]); + """, + clean_slugs, + ) + for idx, slug in enumerate(clean_slugs): + await self.connection.execute( + """ + INSERT INTO home_featured_articles (slug, sort_order) + VALUES ($1, $2) + ON CONFLICT (slug) DO UPDATE + SET sort_order = EXCLUDED.sort_order, + updated_at = NOW(); + """, + slug, + idx, + ) + else: + await self.connection.execute("DELETE FROM home_featured_articles;") + + return clean_slugs diff --git a/backend/app/db/repositories/menu_slots.py b/backend/app/db/repositories/menu_slots.py new file mode 100644 index 0000000..1df92e7 --- /dev/null +++ b/backend/app/db/repositories/menu_slots.py @@ -0,0 +1,90 @@ +# app/db/repositories/menu_slots.py +from typing import List, Optional, Sequence + +from asyncpg import Connection, Record + +from app.db.repositories.base import BaseRepository + +DEFAULT_MENU_SLOTS = [ + {"slot_key": "news", "label": "资讯广场"}, + {"slot_key": "tutorial", "label": "使用教程"}, + {"slot_key": "community", "label": "社区"}, +] + + +class MenuSlotsRepository(BaseRepository): + def __init__(self, conn: Connection) -> None: + super().__init__(conn) + + async def _ensure_table(self) -> None: + await self.connection.execute( + """ + CREATE TABLE IF NOT EXISTS menu_slots ( + slot_key TEXT PRIMARY KEY, + label TEXT NOT NULL, + tags TEXT[] NOT NULL DEFAULT '{}'::text[], + created_at TIMESTAMPTZ NOT NULL DEFAULT NOW(), + updated_at TIMESTAMPTZ NOT NULL DEFAULT NOW() 
+ ); + """, + ) + + async def _ensure_default_slots(self) -> None: + await self._ensure_table() + for slot in DEFAULT_MENU_SLOTS: + await self.connection.execute( + """ + INSERT INTO menu_slots (slot_key, label) + VALUES ($1, $2) + ON CONFLICT (slot_key) DO NOTHING; + """, + slot["slot_key"], + slot["label"], + ) + + async def list_slots(self) -> List[Record]: + await self._ensure_default_slots() + rows = await self.connection.fetch( + """ + SELECT slot_key, label, tags, created_at, updated_at + FROM menu_slots + ORDER BY slot_key ASC; + """, + ) + return list(rows) + + async def get_slot(self, slot_key: str) -> Optional[Record]: + await self._ensure_default_slots() + return await self.connection.fetchrow( + """ + SELECT slot_key, label, tags, created_at, updated_at + FROM menu_slots + WHERE slot_key = $1 + LIMIT 1; + """, + slot_key, + ) + + async def upsert_slot_tags( + self, + *, + slot_key: str, + tags: Sequence[str], + label: Optional[str] = None, + ) -> Record: + await self._ensure_default_slots() + clean_tags = [t.strip() for t in tags if str(t).strip()] + return await self.connection.fetchrow( + """ + INSERT INTO menu_slots (slot_key, label, tags) + VALUES ($1, COALESCE($2, $1), $3::text[]) + ON CONFLICT (slot_key) DO UPDATE + SET tags = EXCLUDED.tags, + label = COALESCE(EXCLUDED.label, menu_slots.label), + updated_at = NOW() + RETURNING slot_key, label, tags, created_at, updated_at; + """, + slot_key, + label or slot_key, + clean_tags, + ) diff --git a/backend/app/db/repositories/password_reset.py b/backend/app/db/repositories/password_reset.py new file mode 100644 index 0000000..95c342b --- /dev/null +++ b/backend/app/db/repositories/password_reset.py @@ -0,0 +1,61 @@ +# app/db/repositories/password_reset.py +import hashlib +from typing import Optional, Dict, Any +from datetime import datetime, timedelta, timezone + +from asyncpg import Connection +from app.db.queries.queries import queries + + +class PasswordResetRepository: + def __init__(self, conn: 
Connection) -> None: + self.connection = conn + + @staticmethod + def _hash(token: str) -> str: + return hashlib.sha256(token.encode("utf-8")).hexdigest() + + async def create( + self, + *, + user_id: int, + token: str, + ttl_minutes: int, + request_ip: Optional[str], + user_agent: Optional[str], + ) -> Dict[str, Any]: + """ + 创建一次性重置令牌(仅存 token 的哈希)。 + 返回数据库返回的行(dict/record 兼容为 Dict[str, Any])。 + """ + return await queries.create_password_reset_token( + self.connection, + user_id=user_id, + token_hash=self._hash(token), + expires_at=datetime.now(timezone.utc) + timedelta(minutes=ttl_minutes), + request_ip=request_ip, + user_agent=user_agent, + ) + + async def get_valid(self, *, token: str) -> Optional[Dict[str, Any]]: + """ + 根据明文 token 查找并校验是否可用: + - 存在 + - 未使用 + - 未过期 + 返回行 dict;无效则返回 None。 + """ + row = await queries.get_password_reset_token_by_hash( + self.connection, token_hash=self._hash(token) + ) + if not row: + return None + if row["used_at"] is not None: + return None + if row["expires_at"] <= datetime.now(timezone.utc): + return None + return row + + async def mark_used(self, *, token_id: int) -> None: + """将重置令牌标记为已使用。""" + await queries.mark_password_reset_token_used(self.connection, id=token_id) diff --git a/backend/app/db/repositories/profiles.py b/backend/app/db/repositories/profiles.py new file mode 100644 index 0000000..20d43a0 --- /dev/null +++ b/backend/app/db/repositories/profiles.py @@ -0,0 +1,74 @@ +from typing import Optional, Union + +from asyncpg import Connection + +from app.db.queries.queries import queries +from app.db.repositories.base import BaseRepository +from app.db.repositories.users import UsersRepository +from app.models.domain.profiles import Profile +from app.models.domain.users import User + +UserLike = Union[User, Profile] + + +class ProfilesRepository(BaseRepository): + def __init__(self, conn: Connection): + super().__init__(conn) + self._users_repo = UsersRepository(conn) + + async def get_profile_by_username( + self, + 
*, + username: str, + requested_user: Optional[UserLike], + ) -> Profile: + user = await self._users_repo.get_user_by_username(username=username) + + profile = Profile(username=user.username, bio=user.bio, image=user.image) + if requested_user: + profile.following = await self.is_user_following_for_another_user( + target_user=user, + requested_user=requested_user, + ) + + return profile + + async def is_user_following_for_another_user( + self, + *, + target_user: UserLike, + requested_user: UserLike, + ) -> bool: + return ( + await queries.is_user_following_for_another( + self.connection, + follower_username=requested_user.username, + following_username=target_user.username, + ) + )["is_following"] + + async def add_user_into_followers( + self, + *, + target_user: UserLike, + requested_user: UserLike, + ) -> None: + async with self.connection.transaction(): + await queries.subscribe_user_to_another( + self.connection, + follower_username=requested_user.username, + following_username=target_user.username, + ) + + async def remove_user_from_followers( + self, + *, + target_user: UserLike, + requested_user: UserLike, + ) -> None: + async with self.connection.transaction(): + await queries.unsubscribe_user_from_another( + self.connection, + follower_username=requested_user.username, + following_username=target_user.username, + ) diff --git a/backend/app/db/repositories/roles.py b/backend/app/db/repositories/roles.py new file mode 100644 index 0000000..2182cc9 --- /dev/null +++ b/backend/app/db/repositories/roles.py @@ -0,0 +1,143 @@ +from __future__ import annotations + +import json +from typing import Iterable, List, Optional + +from app.db.errors import EntityDoesNotExist +from app.db.queries.queries import queries +from app.db.repositories.base import BaseRepository +from app.models.domain.roles import Role + + +class RolesRepository(BaseRepository): + def _convert_role_row(self, row) -> dict: + permissions = row.get("permissions") if row else [] + if 
isinstance(permissions, str): + try: + permissions = json.loads(permissions) + except ValueError: + permissions = [] + permissions = permissions or [] + return { + **row, + "permissions": permissions, + } + + async def list_roles(self) -> List[Role]: + rows = await queries.list_roles(self.connection) + return [Role(**self._convert_role_row(row)) for row in rows] + + async def get_role_by_id(self, role_id: int) -> Role: + row = await queries.get_role_by_id(self.connection, role_id=role_id) + if not row: + raise EntityDoesNotExist(f"role {role_id} does not exist") + return Role(**self._convert_role_row(row)) + + async def get_role_by_name(self, *, name: str) -> Optional[Role]: + row = await self.connection.fetchrow( + """ + SELECT id, name, description, permissions, created_at, updated_at + FROM roles + WHERE name = $1 + """, + name, + ) + if not row: + return None + return Role(**self._convert_role_row(dict(row))) + + async def create_role( + self, + *, + name: str, + description: Optional[str] = "", + permissions: Optional[Iterable[str]] = None, + ) -> Role: + row = await queries.create_role( + self.connection, + name=name, + description=description or "", + permissions=list(permissions or []), + ) + return Role(**self._convert_role_row(row)) + + async def update_role( + self, + *, + role_id: int, + name: Optional[str] = None, + description: Optional[str] = None, + permissions: Optional[Iterable[str]] = None, + ) -> Role: + row = await queries.update_role( + self.connection, + role_id=role_id, + name=name, + description=description, + permissions=list(permissions) if permissions is not None else None, + ) + if not row: + raise EntityDoesNotExist(f"role {role_id} does not exist") + return Role(**self._convert_role_row(row)) + + async def ensure_role( + self, + *, + name: str, + description: Optional[str] = "", + permissions: Optional[Iterable[str]] = None, + ) -> Role: + existing = await self.get_role_by_name(name=name) + if existing: + return existing + return 
await self.create_role( + name=name, + description=description or "", + permissions=permissions or [], + ) + + async def delete_role(self, *, role_id: int) -> None: + await queries.delete_role(self.connection, role_id=role_id) + + async def get_roles_for_user(self, *, user_id: int) -> List[Role]: + rows = await queries.get_roles_for_user(self.connection, user_id=user_id) + return [Role(**self._convert_role_row(row)) for row in rows] + + async def get_role_names_for_user(self, *, user_id: int) -> List[str]: + return [role.name for role in await self.get_roles_for_user(user_id=user_id)] + + async def assign_role_to_user(self, *, user_id: int, role_id: int) -> None: + await queries.assign_role_to_user( + self.connection, + user_id=user_id, + role_id=role_id, + ) + + async def revoke_role_from_user(self, *, user_id: int, role_id: int) -> None: + await queries.revoke_role_from_user( + self.connection, + user_id=user_id, + role_id=role_id, + ) + + async def set_roles_for_user(self, *, user_id: int, role_ids: Iterable[int]) -> None: + role_ids = list(dict.fromkeys(role_ids)) + async with self.connection.transaction(): + await self.connection.execute( + "DELETE FROM user_roles WHERE user_id = $1", + user_id, + ) + for role_id in role_ids: + await queries.assign_role_to_user( + self.connection, + user_id=user_id, + role_id=role_id, + ) + + async def user_has_role(self, *, user_id: int, role_name: str) -> bool: + row = await queries.user_has_role( + self.connection, + user_id=user_id, + role_name=role_name, + ) + return bool(row and row.get("has_role")) diff --git a/backend/app/db/repositories/tags.py b/backend/app/db/repositories/tags.py new file mode 100644 index 0000000..5734992 --- /dev/null +++ b/backend/app/db/repositories/tags.py @@ -0,0 +1,13 @@ +from typing import List, Sequence + +from app.db.queries.queries import queries +from app.db.repositories.base import BaseRepository + + +class TagsRepository(BaseRepository): + async def get_all_tags(self) -> List[str]: + 
tags_row = await queries.get_all_tags(self.connection) + return [tag[0] for tag in tags_row] + + async def create_tags_that_dont_exist(self, *, tags: Sequence[str]) -> None: + await queries.create_new_tags(self.connection, [{"tag": tag} for tag in tags]) diff --git a/backend/app/db/repositories/users.py b/backend/app/db/repositories/users.py new file mode 100644 index 0000000..a9dc266 --- /dev/null +++ b/backend/app/db/repositories/users.py @@ -0,0 +1,186 @@ +from typing import Optional + +from loguru import logger + +from app.db.errors import EntityDoesNotExist +from app.db.queries.queries import queries +from app.db.repositories.base import BaseRepository +from app.db.repositories.roles import RolesRepository +from app.models.domain.users import User, UserInDB +from app.core.config import get_app_settings + + +class UsersRepository(BaseRepository): + """ + User repository with helpers for both public auth flows and admin features. + """ + + def __init__(self, conn) -> None: + super().__init__(conn) + self._roles_repo = RolesRepository(conn) + + async def _attach_roles(self, user: Optional[UserInDB]) -> Optional[UserInDB]: + if user and getattr(user, "id", None): + if not user.roles: + # 兜底从 user_roles/roles 联查,确保 roles 填充 + rows = await self.connection.fetch( + """ + SELECT r.name + FROM user_roles ur + JOIN roles r ON r.id = ur.role_id + WHERE ur.user_id = $1 + ORDER BY r.name + """, + user.id, + ) + user.roles = [row["name"] for row in rows] + return user + + async def get_user_by_email_optional(self, *, email: str) -> Optional[UserInDB]: + user_row = await queries.get_user_by_email(self.connection, email=email) + if not user_row: + return None + return await self._attach_roles(UserInDB(**user_row)) + + async def get_user_id_by_email(self, *, email: str) -> Optional[int]: + user_id = await self.connection.fetchval( + "SELECT id FROM users WHERE email = $1", + email, + ) + return int(user_id) if user_id is not None else None + + async def get_user_by_id(self, *, 
id_: int) -> UserInDB: + user_row = await queries.get_user_by_id(self.connection, id=id_) + if not user_row: + raise EntityDoesNotExist(f"user with id={id_} does not exist") + return await self._attach_roles(UserInDB(**user_row)) + + async def get_user_by_email(self, *, email: str) -> UserInDB: + user_row = await queries.get_user_by_email(self.connection, email=email) + if not user_row: + raise EntityDoesNotExist(f"user with email {email} does not exist") + return await self._attach_roles(UserInDB(**user_row)) + + async def get_user_by_username(self, *, username: str) -> UserInDB: + user_row = await queries.get_user_by_username( + self.connection, + username=username, + ) + if not user_row: + raise EntityDoesNotExist(f"user with username {username} does not exist") + return await self._attach_roles(UserInDB(**user_row)) + + async def create_user( + self, + *, + username: str, + email: str, + password: str, + ) -> UserInDB: + user = UserInDB(username=username, email=email) + user.change_password(password) + + async with self.connection.transaction(): + user_row = await queries.create_new_user( + self.connection, + username=user.username, + email=user.email, + salt=user.salt, + hashed_password=user.hashed_password, + ) + + created = user.copy(update=dict(user_row)) + created.roles = [] + return created + + async def update_user( # noqa: WPS211 + self, + *, + user: User, + username: Optional[str] = None, + email: Optional[str] = None, + password: Optional[str] = None, + bio: Optional[str] = None, + image: Optional[str] = None, + phone: Optional[str] = None, + user_type: Optional[str] = None, + company_name: Optional[str] = None, + ) -> UserInDB: + user_in_db = await self.get_user_by_username(username=user.username) + + user_in_db.username = username or user_in_db.username + user_in_db.email = email or user_in_db.email + user_in_db.bio = bio if bio is not None else user_in_db.bio + user_in_db.image = image if image is not None else user_in_db.image + user_in_db.phone = 
phone if phone is not None else user_in_db.phone + user_in_db.user_type = user_type if user_type is not None else user_in_db.user_type + user_in_db.company_name = company_name if company_name is not None else user_in_db.company_name + if password: + user_in_db.change_password(password) + + async with self.connection.transaction(): + user_in_db.updated_at = await queries.update_user_by_username( + self.connection, + username=user.username, + new_username=user_in_db.username, + new_email=user_in_db.email, + new_salt=user_in_db.salt, + new_password=user_in_db.hashed_password, + new_bio=user_in_db.bio, + new_image=user_in_db.image, + new_phone=user_in_db.phone, + new_user_type=user_in_db.user_type, + new_company_name=user_in_db.company_name, + ) + + return await self._attach_roles(user_in_db) + + async def set_email_verified(self, *, email: str, verified: bool = True) -> None: + await queries.set_user_email_verified( + self.connection, + email=email, + verified=verified, + ) + + async def update_user_by_id( # noqa: WPS211 + self, + *, + user_id: int, + username: Optional[str] = None, + email: Optional[str] = None, + password: Optional[str] = None, + bio: Optional[str] = None, + image: Optional[str] = None, + phone: Optional[str] = None, + user_type: Optional[str] = None, + company_name: Optional[str] = None, + ) -> UserInDB: + user_in_db = await self.get_user_by_id(id_=user_id) + + user_in_db.username = username or user_in_db.username + user_in_db.email = email or user_in_db.email + user_in_db.bio = bio if bio is not None else user_in_db.bio + user_in_db.image = image if image is not None else user_in_db.image + user_in_db.phone = phone if phone is not None else user_in_db.phone + user_in_db.user_type = user_type if user_type is not None else user_in_db.user_type + user_in_db.company_name = company_name if company_name is not None else user_in_db.company_name + if password: + user_in_db.change_password(password) + + updated_row = await queries.admin_update_user_by_id( 
+ self.connection, + id=user_id, + new_username=user_in_db.username, + new_email=user_in_db.email, + new_salt=user_in_db.salt, + new_password=user_in_db.hashed_password, + new_bio=user_in_db.bio, + new_image=user_in_db.image, + new_phone=user_in_db.phone, + new_user_type=user_in_db.user_type, + new_company_name=user_in_db.company_name, + ) + return await self._attach_roles(UserInDB(**updated_row)) + + async def delete_user_by_id(self, *, user_id: int) -> None: + await queries.admin_delete_user(self.connection, id=user_id) diff --git a/backend/app/main.py b/backend/app/main.py new file mode 100644 index 0000000..6ec9d52 --- /dev/null +++ b/backend/app/main.py @@ -0,0 +1,58 @@ +# app/main.py (或你当前这个文件名) + +from fastapi import FastAPI +from fastapi.exceptions import RequestValidationError +from fastapi.staticfiles import StaticFiles # ✅ 新增:静态文件 +from starlette.exceptions import HTTPException +from starlette.middleware.cors import CORSMiddleware + +from app.api.errors.http_error import http_error_handler +from app.api.errors.validation_error import http422_error_handler +from app.api.routes.api import router as api_router +from app.core.config import get_app_settings +from app.core.events import create_start_app_handler, create_stop_app_handler + + +def get_application() -> FastAPI: + settings = get_app_settings() + + settings.configure_logging() + + application = FastAPI(**settings.fastapi_kwargs) + + application.add_middleware( + CORSMiddleware, + allow_origins=["*"], + allow_origin_regex=".*", + allow_credentials=True, + allow_methods=["*"], + allow_headers=["*"], + expose_headers=["*"], + ) + + application.add_event_handler( + "startup", + create_start_app_handler(application, settings), + ) + application.add_event_handler( + "shutdown", + create_stop_app_handler(application), + ) + + application.add_exception_handler(HTTPException, http_error_handler) + application.add_exception_handler(RequestValidationError, http422_error_handler) + + # 所有业务 API 挂在 /api 
前缀下(上传接口也在这里:/api/upload-image) + application.include_router(api_router, prefix=settings.api_prefix) + + # ✅ 静态资源:让 /static/... 可直接访问(封面、正文图片等) + application.mount( + "/static", + StaticFiles(directory="static"), + name="static", + ) + + return application + + +app = get_application() diff --git a/backend/app/models/__init__.py b/backend/app/models/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/backend/app/models/common.py b/backend/app/models/common.py new file mode 100644 index 0000000..fdc515b --- /dev/null +++ b/backend/app/models/common.py @@ -0,0 +1,19 @@ +import datetime + +from pydantic import BaseModel, Field, validator + + +class DateTimeModelMixin(BaseModel): + created_at: datetime.datetime = None # type: ignore + updated_at: datetime.datetime = None # type: ignore + + @validator("created_at", "updated_at", pre=True) + def default_datetime( + cls, # noqa: N805 + value: datetime.datetime, # noqa: WPS110 + ) -> datetime.datetime: + return value or datetime.datetime.now() + + +class IDModelMixin(BaseModel): + id_: int = Field(0, alias="id") diff --git a/backend/app/models/domain/__init__.py b/backend/app/models/domain/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/backend/app/models/domain/articles.py b/backend/app/models/domain/articles.py new file mode 100644 index 0000000..b8e16d6 --- /dev/null +++ b/backend/app/models/domain/articles.py @@ -0,0 +1,27 @@ +# app/models/domain/articles.py +from typing import List, Optional + +from app.models.common import DateTimeModelMixin, IDModelMixin +from app.models.domain.profiles import Profile +from app.models.domain.rwmodel import RWModel + + +class Article(IDModelMixin, DateTimeModelMixin, RWModel): + slug: str + title: str + description: str + body: str + + # 封面(可选,不影响老数据) + cover: Optional[str] = None + + # 置顶 / 推荐 / 权重(camelCase 输出) + is_top: bool = False + is_featured: bool = False + sort_weight: int = 0 + + tags: List[str] + author: Profile + favorited: bool + 
favorites_count: int + views: int = 0 diff --git a/backend/app/models/domain/comments.py b/backend/app/models/domain/comments.py new file mode 100644 index 0000000..ce5fc6a --- /dev/null +++ b/backend/app/models/domain/comments.py @@ -0,0 +1,8 @@ +from app.models.common import DateTimeModelMixin, IDModelMixin +from app.models.domain.profiles import Profile +from app.models.domain.rwmodel import RWModel + + +class Comment(IDModelMixin, DateTimeModelMixin, RWModel): + body: str + author: Profile diff --git a/backend/app/models/domain/profiles.py b/backend/app/models/domain/profiles.py new file mode 100644 index 0000000..b1e6ac0 --- /dev/null +++ b/backend/app/models/domain/profiles.py @@ -0,0 +1,10 @@ +from typing import Optional + +from app.models.domain.rwmodel import RWModel + + +class Profile(RWModel): + username: str + bio: str = "" + image: Optional[str] = None + following: bool = False diff --git a/backend/app/models/domain/roles.py b/backend/app/models/domain/roles.py new file mode 100644 index 0000000..5fdba9f --- /dev/null +++ b/backend/app/models/domain/roles.py @@ -0,0 +1,10 @@ +from typing import List + +from app.models.common import DateTimeModelMixin, IDModelMixin +from app.models.domain.rwmodel import RWModel + + +class Role(IDModelMixin, DateTimeModelMixin, RWModel): + name: str + description: str = "" + permissions: List[str] = [] diff --git a/backend/app/models/domain/rwmodel.py b/backend/app/models/domain/rwmodel.py new file mode 100644 index 0000000..1c34f3b --- /dev/null +++ b/backend/app/models/domain/rwmodel.py @@ -0,0 +1,21 @@ +import datetime + +from pydantic import BaseConfig, BaseModel + + +def convert_datetime_to_realworld(dt: datetime.datetime) -> str: + return dt.replace(tzinfo=datetime.timezone.utc).isoformat().replace("+00:00", "Z") + + +def convert_field_to_camel_case(string: str) -> str: + return "".join( + word if index == 0 else word.capitalize() + for index, word in enumerate(string.split("_")) + ) + + +class RWModel(BaseModel): 
+ class Config(BaseConfig): + allow_population_by_field_name = True + json_encoders = {datetime.datetime: convert_datetime_to_realworld} + alias_generator = convert_field_to_camel_case diff --git a/backend/app/models/domain/users.py b/backend/app/models/domain/users.py new file mode 100644 index 0000000..ffbdaf7 --- /dev/null +++ b/backend/app/models/domain/users.py @@ -0,0 +1,39 @@ +# app/models/domain/users.py +from typing import List, Optional + +from pydantic import Field + +from app.models.common import DateTimeModelMixin, IDModelMixin +from app.models.domain.rwmodel import RWModel +from app.services import security + + +class User(RWModel): + """ + 公开用户信息(会被用于文章作者、Profile 等场景)。 + 新增 email_verified 字段,便于前端展示与登录后逻辑判断。 + """ + username: str + email: str + bio: str = "" + image: Optional[str] = None + phone: Optional[str] = None + user_type: Optional[str] = None + company_name: Optional[str] = None + email_verified: bool = False + roles: List[str] = Field(default_factory=list) + + +class UserInDB(IDModelMixin, DateTimeModelMixin, User): + """ + 数据库存储模型(私有字段) + """ + salt: str = "" + hashed_password: str = "" + + def check_password(self, password: str) -> bool: + return security.verify_password(self.salt + password, self.hashed_password) + + def change_password(self, password: str) -> None: + self.salt = security.generate_salt() + self.hashed_password = security.get_password_hash(self.salt + password) diff --git a/backend/app/models/schemas/__init__.py b/backend/app/models/schemas/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/backend/app/models/schemas/admin.py b/backend/app/models/schemas/admin.py new file mode 100644 index 0000000..aa86bbd --- /dev/null +++ b/backend/app/models/schemas/admin.py @@ -0,0 +1,88 @@ +from datetime import datetime +from typing import List, Optional + +from pydantic import EmailStr, Field + +from app.models.schemas.rwschema import RWSchema + + +class AdminRoleLite(RWSchema): + id: int + name: str + description: 
str = "" + permissions: List[str] = Field(default_factory=list) + + +class AdminUserSummary(RWSchema): + id: int + username: str + email: EmailStr + bio: Optional[str] = None + image: Optional[str] = None + roles: List[AdminRoleLite] = Field(default_factory=list) + created_at: datetime + updated_at: datetime + + +class AdminUserCreate(RWSchema): + username: str + email: EmailStr + password: str = Field(min_length=6, max_length=64) + bio: Optional[str] = None + image: Optional[str] = None + role_ids: List[int] = Field(default_factory=list) + + +class AdminUserUpdate(RWSchema): + username: Optional[str] = None + email: Optional[EmailStr] = None + password: Optional[str] = Field(default=None, min_length=6, max_length=64) + bio: Optional[str] = None + image: Optional[str] = None + role_ids: Optional[List[int]] = None + + +class AdminUserResponse(RWSchema): + user: AdminUserSummary + + +class AdminUserListResponse(RWSchema): + users: List[AdminUserSummary] + total: int + + +class AdminDashboardStats(RWSchema): + users: int + roles: int + articles: int + total_views: int + published_today: int + + +class AdminMenuSlot(RWSchema): + slot_key: str = Field(..., alias="slotKey") + label: str + tags: List[str] = Field(default_factory=list) + created_at: Optional[datetime] = None + updated_at: Optional[datetime] = None + + +class AdminMenuSlotUpdate(RWSchema): + tags: List[str] = Field(default_factory=list) + label: Optional[str] = None + + +class AdminMenuSlotResponse(RWSchema): + slot: AdminMenuSlot + + +class AdminMenuSlotListResponse(RWSchema): + slots: List[AdminMenuSlot] = Field(default_factory=list) + + +class AdminHomeFeaturedItem(RWSchema): + slug: str + + +class AdminHomeFeaturedUpdate(RWSchema): + articles: List[AdminHomeFeaturedItem] = Field(default_factory=list) diff --git a/backend/app/models/schemas/articles.py b/backend/app/models/schemas/articles.py new file mode 100644 index 0000000..7e43375 --- /dev/null +++ b/backend/app/models/schemas/articles.py @@ -0,0 
+1,75 @@ +from typing import List, Optional + +from pydantic import BaseModel, Field + +from app.models.domain.articles import Article +from app.models.schemas.rwschema import RWSchema + +DEFAULT_ARTICLES_LIMIT = 20 +DEFAULT_ARTICLES_OFFSET = 0 + + +class ArticleForResponse(RWSchema, Article): + """ + 返回给前端的文章结构: + - 继承 Article(包含 cover、tags、author 等) + - tags 字段通过 alias 暴露为 tagList,兼容前端 + """ + tags: List[str] = Field(..., alias="tagList") + + +class ArticleInResponse(RWSchema): + article: ArticleForResponse + + +class ArticleInCreate(RWSchema): + """ + 创建文章时请求体: + { + "article": { + "title": "...", + "description": "...", + "body": "...", + "tagList": ["..."], + "cover": "可选封面URL" + } + } + """ + title: str + description: str + body: str + tags: List[str] = Field([], alias="tagList") + cover: Optional[str] = None + + +class ArticleInUpdate(RWSchema): + """ + 更新文章时请求体(全部可选): + - 不传的字段不改 + - cover: + - 不传:不改 + - 传 null / "":清空封面(配合 repo 的 cover_provided 使用) + - 传字符串:更新为新封面 + """ + title: Optional[str] = None + description: Optional[str] = None + body: Optional[str] = None + cover: Optional[str] = None + is_top: Optional[bool] = None + is_featured: Optional[bool] = None + sort_weight: Optional[int] = None + + +class ListOfArticlesInResponse(RWSchema): + articles: List[ArticleForResponse] + articles_count: int + + +class ArticlesFilters(BaseModel): + tag: Optional[str] = None + tags: Optional[List[str]] = None + author: Optional[str] = None + favorited: Optional[str] = None + search: Optional[str] = None + limit: int = Field(DEFAULT_ARTICLES_LIMIT, ge=1) + offset: int = Field(DEFAULT_ARTICLES_OFFSET, ge=0) diff --git a/backend/app/models/schemas/comments.py b/backend/app/models/schemas/comments.py new file mode 100644 index 0000000..e230697 --- /dev/null +++ b/backend/app/models/schemas/comments.py @@ -0,0 +1,16 @@ +from typing import List + +from app.models.domain.comments import Comment +from app.models.schemas.rwschema import RWSchema + + +class 
ListOfCommentsInResponse(RWSchema): + comments: List[Comment] + + +class CommentInResponse(RWSchema): + comment: Comment + + +class CommentInCreate(RWSchema): + body: str diff --git a/backend/app/models/schemas/email_code.py b/backend/app/models/schemas/email_code.py new file mode 100644 index 0000000..6da46ee --- /dev/null +++ b/backend/app/models/schemas/email_code.py @@ -0,0 +1,18 @@ +# app/models/schemas/email_code.py +from enum import Enum +from pydantic import BaseModel, EmailStr + + +class EmailScene(str, Enum): + register = "register" + reset = "reset" + login = "login" + + +class EmailCodeSendIn(BaseModel): + email: EmailStr + scene: EmailScene = EmailScene.register + + +class EmailCodeSendOut(BaseModel): + ok: bool = True diff --git a/backend/app/models/schemas/jwt.py b/backend/app/models/schemas/jwt.py new file mode 100644 index 0000000..56d1fa3 --- /dev/null +++ b/backend/app/models/schemas/jwt.py @@ -0,0 +1,12 @@ +from datetime import datetime + +from pydantic import BaseModel + + +class JWTMeta(BaseModel): + exp: datetime + sub: str + + +class JWTUser(BaseModel): + username: str diff --git a/backend/app/models/schemas/profiles.py b/backend/app/models/schemas/profiles.py new file mode 100644 index 0000000..5662dfc --- /dev/null +++ b/backend/app/models/schemas/profiles.py @@ -0,0 +1,7 @@ +from pydantic import BaseModel + +from app.models.domain.profiles import Profile + + +class ProfileInResponse(BaseModel): + profile: Profile diff --git a/backend/app/models/schemas/roles.py b/backend/app/models/schemas/roles.py new file mode 100644 index 0000000..c3efebb --- /dev/null +++ b/backend/app/models/schemas/roles.py @@ -0,0 +1,30 @@ +from typing import List, Optional + +from pydantic import Field + +from app.models.domain.roles import Role +from app.models.schemas.rwschema import RWSchema + + +class RoleBase(RWSchema): + name: str = Field(..., min_length=2, max_length=64) + description: Optional[str] = "" + permissions: List[str] = 
Field(default_factory=list) + + +class RoleInCreate(RoleBase): + pass + + +class RoleInUpdate(RWSchema): + name: Optional[str] = Field(default=None, min_length=2, max_length=64) + description: Optional[str] = None + permissions: Optional[List[str]] = None + + +class RoleInResponse(RWSchema): + role: Role + + +class ListOfRolesInResponse(RWSchema): + roles: List[Role] diff --git a/backend/app/models/schemas/rwschema.py b/backend/app/models/schemas/rwschema.py new file mode 100644 index 0000000..018ad4b --- /dev/null +++ b/backend/app/models/schemas/rwschema.py @@ -0,0 +1,6 @@ +from app.models.domain.rwmodel import RWModel + + +class RWSchema(RWModel): + class Config(RWModel.Config): + orm_mode = True diff --git a/backend/app/models/schemas/tags.py b/backend/app/models/schemas/tags.py new file mode 100644 index 0000000..e9655fb --- /dev/null +++ b/backend/app/models/schemas/tags.py @@ -0,0 +1,7 @@ +from typing import List + +from pydantic import BaseModel + + +class TagsInList(BaseModel): + tags: List[str] diff --git a/backend/app/models/schemas/users.py b/backend/app/models/schemas/users.py new file mode 100644 index 0000000..0c8d181 --- /dev/null +++ b/backend/app/models/schemas/users.py @@ -0,0 +1,73 @@ +# app/models/schemas/users.py +from typing import Optional + +from pydantic import BaseModel, EmailStr, Field + +from app.models.domain.users import User +from app.models.schemas.rwschema import RWSchema + + +# =============================== +# 旧有登录/创建(兼容保留) +# =============================== +class UserInLogin(RWSchema): + email: EmailStr + password: str + + +class UserInCreate(UserInLogin): + username: str + + +class UserInUpdate(BaseModel): + username: Optional[str] = None + email: Optional[EmailStr] = None + password: Optional[str] = None + bio: Optional[str] = None + image: Optional[str] = None + phone: Optional[str] = None + user_type: Optional[str] = None + company_name: Optional[str] = None + + +# =============================== +# 新增:邮箱验证码注册/登录/Token +# 
=============================== +class SendEmailCodeIn(RWSchema): + """发送邮箱验证码""" + email: EmailStr + # 可选场景:register/reset/login,不传默认 register + scene: Optional[str] = "register" + + +class RegisterWithEmailIn(RWSchema): + """邮箱注册:邮箱 + 密码 + 确认密码 + 验证码""" + email: EmailStr + password: str = Field(min_length=6, max_length=64) + confirm_password: str = Field(min_length=6, max_length=64) + code: str = Field(min_length=4, max_length=8) + + +class LoginWithPasswordIn(RWSchema): + """邮箱 + 密码登录""" + email: EmailStr + password: str + + +class TokenOut(RWSchema): + token: str + token_type: str = "Token" + + +# =============================== +# 响应模型(带 email_verified) +# =============================== +class UserWithToken(User): + token: str + phone: Optional[str] = None + user_type: Optional[str] = None + company_name: Optional[str] = None + + +class UserInResponse(RWSchema): + user: UserWithToken diff --git a/backend/app/resources/__init__.py b/backend/app/resources/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/backend/app/resources/strings.py b/backend/app/resources/strings.py new file mode 100644 index 0000000..a137dc2 --- /dev/null +++ b/backend/app/resources/strings.py @@ -0,0 +1,25 @@ +# API messages + +USER_DOES_NOT_EXIST_ERROR = "user does not exist" +ARTICLE_DOES_NOT_EXIST_ERROR = "article does not exist" +ARTICLE_ALREADY_EXISTS = "article already exists" +USER_IS_NOT_AUTHOR_OF_ARTICLE = "you are not an author of this article" + +INCORRECT_LOGIN_INPUT = "incorrect email or password" +USERNAME_TAKEN = "user with this username already exists" +EMAIL_TAKEN = "user with this email already exists" + +UNABLE_TO_FOLLOW_YOURSELF = "user can not follow him self" +UNABLE_TO_UNSUBSCRIBE_FROM_YOURSELF = "user can not unsubscribe from him self" +USER_IS_NOT_FOLLOWED = "you don't follow this user" +USER_IS_ALREADY_FOLLOWED = "you follow this user already" + +WRONG_TOKEN_PREFIX = "unsupported authorization type" # noqa: S105 +MALFORMED_PAYLOAD = "could not 
validate credentials" + +ARTICLE_IS_ALREADY_FAVORITED = "you are already marked this articles as favorite" +ARTICLE_IS_NOT_FAVORITED = "article is not favorited" + +COMMENT_DOES_NOT_EXIST = "comment does not exist" + +AUTHENTICATION_REQUIRED = "authentication required" diff --git a/backend/app/services/__init__.py b/backend/app/services/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/backend/app/services/articles.py b/backend/app/services/articles.py new file mode 100644 index 0000000..fd6a34d --- /dev/null +++ b/backend/app/services/articles.py @@ -0,0 +1,24 @@ +# app\services\articles.py +from slugify import slugify + +from app.db.errors import EntityDoesNotExist +from app.db.repositories.articles import ArticlesRepository +from app.models.domain.articles import Article +from app.models.domain.users import User + + +async def check_article_exists(articles_repo: ArticlesRepository, slug: str) -> bool: + try: + await articles_repo.get_article_by_slug(slug=slug) + except EntityDoesNotExist: + return False + + return True + + +def get_slug_for_article(title: str) -> str: + return slugify(title) + + +def check_user_can_modify_article(article: Article, user: User) -> bool: + return article.author.username == user.username diff --git a/backend/app/services/authentication.py b/backend/app/services/authentication.py new file mode 100644 index 0000000..403fd24 --- /dev/null +++ b/backend/app/services/authentication.py @@ -0,0 +1,52 @@ +# app/services/authentication.py +from app.db.errors import EntityDoesNotExist +from app.db.repositories.users import UsersRepository + + +async def check_username_is_taken(repo: UsersRepository, username: str) -> bool: + """ + 返回 True 表示用户名已被占用 + """ + try: + await repo.get_user_by_username(username=username) + except EntityDoesNotExist: + return False + return True + + +async def check_email_is_taken(repo: UsersRepository, email: str) -> bool: + """ + 返回 True 表示邮箱已被占用 + """ + try: + await 
repo.get_user_by_email(email=email) + except EntityDoesNotExist: + return False + return True + + +def assert_passwords_match(password: str, confirm_password: str) -> None: + """ + 两次密码一致性校验,不一致抛 ValueError + """ + if password != confirm_password: + raise ValueError("Passwords do not match") + + +async def make_unique_username(repo: UsersRepository, email: str) -> str: + """ + 由邮箱前缀自动生成唯一 username,例如: + - 先用 local-part: foo + - 若被占用,则 foo1、foo2…直到找到可用 + """ + base = (email.split("@", 1)[0] or "user").strip().lower() + # 兜底:避免空前缀 + if not base: + base = "user" + + candidate = base + suffix = 0 + while await check_username_is_taken(repo, candidate): + suffix += 1 + candidate = f"{base}{suffix}" + return candidate diff --git a/backend/app/services/comments.py b/backend/app/services/comments.py new file mode 100644 index 0000000..33f220e --- /dev/null +++ b/backend/app/services/comments.py @@ -0,0 +1,6 @@ +from app.models.domain.comments import Comment +from app.models.domain.users import User + + +def check_user_can_modify_comment(comment: Comment, user: User) -> bool: + return comment.author.username == user.username diff --git a/backend/app/services/jwt.py b/backend/app/services/jwt.py new file mode 100644 index 0000000..95a79bd --- /dev/null +++ b/backend/app/services/jwt.py @@ -0,0 +1,116 @@ +from datetime import datetime, timedelta +from typing import Dict, Optional, Literal + +import jwt +from pydantic import ValidationError + +from app.models.domain.users import User +from app.models.schemas.jwt import JWTMeta, JWTUser + +# === 配置 === +ALGORITHM = "HS256" + +# 统一区分两类 token 的 subject +JWT_SUBJECT_ACCESS = "access" +JWT_SUBJECT_REFRESH = "refresh" + +# 有效期(按你的新方案) +ACCESS_TOKEN_EXPIRE_MINUTES = 15 # 15 分钟 +REFRESH_TOKEN_EXPIRE_DAYS = 30 # 30 天 + + +def _create_jwt_token( + *, + jwt_content: Dict[str, str], + secret_key: str, + expires_delta: timedelta, + subject: Literal["access", "refresh"], +) -> str: + """ + 生成 JWT:在 payload 中注入 exp / sub,并用指定算法签名。 + 
jwt_content 通常来自 Pydantic 模型(例如 JWTUser(username=...)) + """ + to_encode = jwt_content.copy() + expire = datetime.utcnow() + expires_delta + to_encode.update(JWTMeta(exp=expire, sub=subject).dict()) + return jwt.encode(to_encode, secret_key, algorithm=ALGORITHM) + + +# ========== Access Token(给前端放到 Authorization 里用) ========== +def create_access_token_for_user(user: User, secret_key: str) -> str: + """ + 签发 Access Token(有效期 15 分钟;sub=access) + """ + return _create_jwt_token( + jwt_content=JWTUser(username=user.username).dict(), + secret_key=secret_key, + expires_delta=timedelta(minutes=ACCESS_TOKEN_EXPIRE_MINUTES), + subject=JWT_SUBJECT_ACCESS, + ) + + +# ========== Refresh Token(仅通过 HttpOnly Cookie 下发/使用) ========== +def create_refresh_token_for_user(user: User, secret_key: str) -> str: + """ + 签发 Refresh Token(有效期 30 天;sub=refresh) + 说明:最小改造版本使用 JWT 作为 refresh;若要更安全可改为随机串并服务端存哈希。 + """ + return _create_jwt_token( + jwt_content=JWTUser(username=user.username).dict(), + secret_key=secret_key, + expires_delta=timedelta(days=REFRESH_TOKEN_EXPIRE_DAYS), + subject=JWT_SUBJECT_REFRESH, + ) + + +# ========== 解码与校验工具 ========== +def _decode_token(token: str, secret_key: str) -> Dict: + """ + 解码并返回原始 payload;失败时抛 ValueError。 + """ + try: + return jwt.decode(token, secret_key, algorithms=[ALGORITHM]) + except jwt.PyJWTError as decode_error: + raise ValueError("unable to decode JWT token") from decode_error + + +def get_username_from_token( + token: str, + secret_key: str, + expected_subject: Literal["access", "refresh"] = JWT_SUBJECT_ACCESS, +) -> str: + """ + 解析 token 并返回用户名;同时校验 sub 是否符合预期(默认 access)。 + - 用于受保护接口:expected_subject='access' + - 用于刷新流程:expected_subject='refresh' + """ + try: + payload = _decode_token(token, secret_key) + # 主动校验 sub,避免把 refresh 当成 access 用 + sub = payload.get("sub") + if sub != expected_subject: + raise ValueError(f"invalid token subject: expected '{expected_subject}', got '{sub}'") + + # 用 Pydantic 做字段校验/提取 + return 
JWTUser(**payload).username + except ValidationError as validation_error: + raise ValueError("malformed payload in token") from validation_error + + +# ========== 兼容旧用法的别名(如果你项目其他地方直接调用了它) ========== +def create_jwt_token( + *, + jwt_content: Dict[str, str], + secret_key: str, + expires_delta: timedelta, +) -> str: + """ + 兼容旧签发函数:默认当作 Access Token 使用(sub=access)。 + 建议新代码直接使用 create_access_token_for_user / create_refresh_token_for_user。 + """ + return _create_jwt_token( + jwt_content=jwt_content, + secret_key=secret_key, + expires_delta=expires_delta, + subject=JWT_SUBJECT_ACCESS, + ) diff --git a/backend/app/services/mailer.py b/backend/app/services/mailer.py new file mode 100644 index 0000000..e6db7f8 --- /dev/null +++ b/backend/app/services/mailer.py @@ -0,0 +1,56 @@ +# app/services/mailer.py +import smtplib +import ssl +from email.message import EmailMessage + +from app.core.config import get_app_settings + + +def send_email(to_email: str, subject: str, html: str) -> None: + """ + 发送邮件: + - 端口为 465 时使用 SSL 直连(SMTP_SSL) + - 其他端口:按配置 smtp_tls 决定是否 STARTTLS + """ + s = get_app_settings() + + host = s.smtp_host + port = int(s.smtp_port) + use_starttls = bool(s.smtp_tls) + + # SecretStr / Optional 兼容处理 + smtp_user = s.smtp_user.get_secret_value() if getattr(s, "smtp_user", None) else None + smtp_pass = s.smtp_password.get_secret_value() if getattr(s, "smtp_password", None) else None + + msg = EmailMessage() + msg["From"] = str(s.mail_from) # EmailStr -> str + msg["To"] = to_email + msg["Subject"] = subject + # 纯文本 + HTML(多部件) + msg.set_content("Your mail client does not support HTML.") + msg.add_alternative(html, subtype="html") + + ctx = ssl.create_default_context() + + # === 建立连接 === + if port == 465: + server = smtplib.SMTP_SSL(host, port, context=ctx, timeout=20) + else: + server = smtplib.SMTP(host, port, timeout=20) + server.ehlo() + if use_starttls: + server.starttls(context=ctx) + server.ehlo() + + try: + # 如配置了用户名/密码,则登录 + if smtp_user and smtp_pass: + 
server.login(smtp_user, smtp_pass) + + server.send_message(msg) + finally: + try: + server.quit() + except Exception: + # 连接已断开也无妨,忽略 + pass diff --git a/backend/app/services/password_reset.py b/backend/app/services/password_reset.py new file mode 100644 index 0000000..ecd0412 --- /dev/null +++ b/backend/app/services/password_reset.py @@ -0,0 +1,218 @@ +# app/services/password_reset.py +import os +import hashlib +import secrets +from datetime import datetime, timedelta, timezone + +from fastapi import HTTPException, Request +from asyncpg import Connection + +from app.db.repositories.users import UsersRepository +from app.db.queries.queries import queries # aiosql 生成的 Queries 对象 +from app.services import security # ✅ 使用项目原有 passlib 封装 +from app.db.errors import EntityDoesNotExist # 用于兜底 try/except + +# 业务常量 +RESET_SCENE = "reset" +RESET_PURPOSE = "reset" +CODE_TTL_MINUTES = 30 # 验证码有效期(分钟) + + +# ===== 小工具 ===== +def _sha256_hex(s: str) -> str: + return hashlib.sha256(s.encode("utf-8")).hexdigest() + + +def _email_html(code: str) -> str: + return f""" +
+<div style="max-width:480px;margin:0 auto;padding:24px;font-family:Arial,Helvetica,sans-serif;color:#333">
+  <h2 style="margin:0 0 16px">重置你的密码</h2>
+  <p style="margin:0 0 8px">
+    你的验证码({CODE_TTL_MINUTES} 分钟内有效):
+  </p>
+  <p style="font-size:28px;font-weight:bold;letter-spacing:6px;margin:16px 0">
+    {code}
+  </p>
+  <p style="color:#999;font-size:12px;margin:16px 0 0">
+    若非本人操作请忽略此邮件。
+  </p>
+</div>
+    """
+ code_hash=code_hash, + expires_at=expires_at, + request_ip=request_ip, + user_agent=user_agent, + ) + + # === 发送邮件 === + try: + # 如果你已有统一邮件服务,可直接调用;没有则打印在开发日志 + from app.services.mailer import send_email # 可选 + # 你的 send_email 若是异步函数,这里 await;若是同步也能正常抛异常被捕获 + maybe_coro = send_email( + to_email=email, + subject="重置密码验证码", + html=_email_html(code), + ) + if hasattr(maybe_coro, "__await__"): + await maybe_coro # 兼容 async 版本 + except Exception: + print(f"[DEV] reset code for {email}: {code} (expires in {CODE_TTL_MINUTES}m)") + + +async def reset_password_with_code( + conn: Connection, + users_repo: UsersRepository, + *, + email: str, + code: str, + new_password: str, +) -> None: + """ + 校验验证码 -> 修改密码 -> 标记验证码已使用 -> 清理历史 + """ + code_hash = _sha256_hex(code.strip()) + + # 1) 校验验证码(只接受未使用且未过期) + rec = await queries.get_valid_email_code( + conn, + email=email, + scene=RESET_SCENE, + purpose=RESET_PURPOSE, + code_hash=code_hash, + ) + rec = _first_row(rec) + if rec is None: + raise HTTPException(status_code=400, detail="验证码无效或已过期") + + # 2) 查用户(安全获取,避免抛异常 & 防枚举) + user = await _get_user_by_email_optional(users_repo, email=email) + if user is None: + # 与验证码错误同样提示,避免暴露邮箱存在性 + raise HTTPException(status_code=400, detail="验证码无效或已过期") + + # 3) 生成新 salt / hash —— ✅ 使用项目原有 passlib 方案 + # 关键点:和登录校验保持一致,对 (salt + plain_password) 做 passlib 哈希 + new_salt = os.urandom(16).hex() + new_hashed = security.get_password_hash(new_salt + new_password) + + # 4) 优先用 id 更新;若没有 id(历史坑),则回退用 email 更新 + updated = None + try: + user_id = getattr(user, "id", None) + if user_id: + updated = await queries.update_user_password_by_id( + conn, + id=user_id, + new_salt=new_salt, + new_password=new_hashed, # ✅ passlib 生成的带前缀哈希 + ) + else: + updated = await queries.update_user_password_by_email( + conn, + email=email, + new_salt=new_salt, + new_password=new_hashed, + ) + except Exception: + # 极端情况下,id 更新失败也再补 email 更新,确保不中断 + updated = await queries.update_user_password_by_email( + conn, + email=email, + 
new_salt=new_salt, + new_password=new_hashed, + ) + + # aiosql 有时会返回 list,若是空 list 视为失败 + if isinstance(updated, list) and not updated: + raise HTTPException(status_code=500, detail="密码更新失败") + + # 5) 标记验证码已用 & 清理 + rec_id = _get_key(rec, "id") + if rec_id is not None: + await queries.mark_email_code_used(conn, id=rec_id) + else: + print("[WARN] Could not resolve email_code.id to mark consumed.") + + await queries.delete_expired_email_codes(conn) diff --git a/backend/app/services/security.py b/backend/app/services/security.py new file mode 100644 index 0000000..08c523b --- /dev/null +++ b/backend/app/services/security.py @@ -0,0 +1,16 @@ +import bcrypt +from passlib.context import CryptContext + +pwd_context = CryptContext(schemes=["bcrypt"], deprecated="auto") + + +def generate_salt() -> str: + return bcrypt.gensalt().decode() + + +def verify_password(plain_password: str, hashed_password: str) -> bool: + return pwd_context.verify(plain_password, hashed_password) + + +def get_password_hash(password: str) -> str: + return pwd_context.hash(password) diff --git a/backend/docker-compose.yml b/backend/docker-compose.yml new file mode 100644 index 0000000..cb35ad1 --- /dev/null +++ b/backend/docker-compose.yml @@ -0,0 +1,22 @@ +version: '3' + +services: + app: + build: . 
+ restart: on-failure + ports: + - "8000:8000" + environment: + DATABASE_URL: "postgresql://postgres:postgres@db/postgres" + env_file: + - .env + depends_on: + - db + db: + image: postgres:11.5-alpine + ports: + - "5432:5432" + env_file: + - .env + volumes: + - ./postgres-data:/var/lib/postgresql/data:cached diff --git a/backend/openapi.json b/backend/openapi.json new file mode 100644 index 0000000..f45035e --- /dev/null +++ b/backend/openapi.json @@ -0,0 +1,1799 @@ +{ + "openapi": "3.0.2", + "info": { + "title": "FastAPI example application", + "version": "0.0.0" + }, + "paths": { + "/api/auth/email-code": { + "post": { + "tags": [ + "authentication" + ], + "summary": "Auth:Email-Code", + "description": "发送邮箱验证码并写入数据库(若仓库存在)。", + "operationId": "auth_email_code_api_auth_email_code_post", + "requestBody": { + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/EmailCodeSendIn" + } + } + }, + "required": true + }, + "responses": { + "200": { + "description": "Successful Response", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/EmailCodeSendOut" + } + } + } + }, + "422": { + "description": "Validation Error", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/HTTPValidationError" + } + } + } + } + } + } + }, + "/api/auth/login": { + "post": { + "tags": [ + "authentication" + ], + "summary": "Auth:Login", + "description": "邮箱 + 密码登录(签发 Access & Set-Cookie Refresh)", + "operationId": "auth_login_api_auth_login_post", + "requestBody": { + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/Body_auth_login_api_auth_login_post" + } + } + }, + "required": true + }, + "responses": { + "200": { + "description": "Successful Response", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/UserInResponse" + } + } + } + }, + "422": { + "description": "Validation Error", + "content": { + "application/json": { + 
"schema": { + "$ref": "#/components/schemas/HTTPValidationError" + } + } + } + } + } + } + }, + "/api/auth": { + "post": { + "tags": [ + "authentication" + ], + "summary": "Auth:Register", + "description": "注册流程:\n1) 校验两次密码一致\n2) 校验邮箱未被占用\n3) 校验验证码(若存在验证码仓库)\n4) 生成唯一用户名\n5) 创建用户\n6) 如仓库提供 set_email_verified,则置为 True\n7) 签发 Access & Set-Cookie Refresh", + "operationId": "auth_register_api_auth_post", + "requestBody": { + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/Body_auth_register_api_auth_post" + } + } + }, + "required": true + }, + "responses": { + "201": { + "description": "Successful Response", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/UserInResponse" + } + } + } + }, + "422": { + "description": "Validation Error", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/HTTPValidationError" + } + } + } + } + } + } + }, + "/api/auth/refresh": { + "post": { + "tags": [ + "authentication" + ], + "summary": "Auth:Refresh", + "description": "从 HttpOnly Cookie 读取 refresh,校验后签发新的 access,并重置 refresh Cookie。\n最小改造版本:refresh 不轮换(如需轮换/重放检测,请走“增表方案”)。", + "operationId": "auth_refresh_api_auth_refresh_post", + "responses": { + "200": { + "description": "Successful Response", + "content": { + "application/json": { + "schema": { + + } + } + } + } + } + } + }, + "/api/auth/logout": { + "post": { + "tags": [ + "authentication" + ], + "summary": "Auth:Logout", + "operationId": "auth_logout_api_auth_logout_post", + "responses": { + "200": { + "description": "Successful Response", + "content": { + "application/json": { + "schema": { + + } + } + } + } + } + } + }, + "/api/auth/password/forgot": { + "post": { + "tags": [ + "auth-password" + ], + "summary": "Forgot Password", + "operationId": "forgot_password_api_auth_password_forgot_post", + "requestBody": { + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/PasswordForgotIn" + } + } + }, 
+ "required": true + }, + "responses": { + "200": { + "description": "Successful Response", + "content": { + "application/json": { + "schema": { + + } + } + } + }, + "422": { + "description": "Validation Error", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/HTTPValidationError" + } + } + } + } + } + } + }, + "/api/auth/password/reset": { + "post": { + "tags": [ + "auth-password" + ], + "summary": "Reset Password", + "operationId": "reset_password_api_auth_password_reset_post", + "requestBody": { + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/PasswordResetIn" + } + } + }, + "required": true + }, + "responses": { + "200": { + "description": "Successful Response", + "content": { + "application/json": { + "schema": { + + } + } + } + }, + "422": { + "description": "Validation Error", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/HTTPValidationError" + } + } + } + } + } + } + }, + "/api/user": { + "get": { + "tags": [ + "users" + ], + "summary": "Users:Get-Current-User", + "operationId": "users_get_current_user_api_user_get", + "responses": { + "200": { + "description": "Successful Response", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/UserInResponse" + } + } + } + } + }, + "security": [ + { + "RWAPIKeyHeader": [] + } + ] + }, + "put": { + "tags": [ + "users" + ], + "summary": "Users:Update-Current-User", + "operationId": "users_update_current_user_api_user_put", + "requestBody": { + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/Body_users_update_current_user_api_user_put" + } + } + }, + "required": true + }, + "responses": { + "200": { + "description": "Successful Response", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/UserInResponse" + } + } + } + }, + "422": { + "description": "Validation Error", + "content": { + "application/json": { + 
"schema": { + "$ref": "#/components/schemas/HTTPValidationError" + } + } + } + } + }, + "security": [ + { + "RWAPIKeyHeader": [] + } + ] + } + }, + "/api/profiles/{username}": { + "get": { + "tags": [ + "profiles" + ], + "summary": "Profiles:Get-Profile", + "operationId": "profiles_get_profile_api_profiles__username__get", + "parameters": [ + { + "required": true, + "schema": { + "title": "Username", + "minLength": 1, + "type": "string" + }, + "name": "username", + "in": "path" + } + ], + "responses": { + "200": { + "description": "Successful Response", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ProfileInResponse" + } + } + } + }, + "422": { + "description": "Validation Error", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/HTTPValidationError" + } + } + } + } + }, + "security": [ + { + "RWAPIKeyHeader": [] + } + ] + } + }, + "/api/profiles/{username}/follow": { + "post": { + "tags": [ + "profiles" + ], + "summary": "Profiles:Follow-User", + "operationId": "profiles_follow_user_api_profiles__username__follow_post", + "parameters": [ + { + "required": true, + "schema": { + "title": "Username", + "minLength": 1, + "type": "string" + }, + "name": "username", + "in": "path" + } + ], + "responses": { + "200": { + "description": "Successful Response", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ProfileInResponse" + } + } + } + }, + "422": { + "description": "Validation Error", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/HTTPValidationError" + } + } + } + } + }, + "security": [ + { + "RWAPIKeyHeader": [] + }, + { + "RWAPIKeyHeader": [] + } + ] + }, + "delete": { + "tags": [ + "profiles" + ], + "summary": "Profiles:Unsubscribe-From-User", + "operationId": "profiles_unsubscribe_from_user_api_profiles__username__follow_delete", + "parameters": [ + { + "required": true, + "schema": { + "title": "Username", + 
"minLength": 1, + "type": "string" + }, + "name": "username", + "in": "path" + } + ], + "responses": { + "200": { + "description": "Successful Response", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ProfileInResponse" + } + } + } + }, + "422": { + "description": "Validation Error", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/HTTPValidationError" + } + } + } + } + }, + "security": [ + { + "RWAPIKeyHeader": [] + }, + { + "RWAPIKeyHeader": [] + } + ] + } + }, + "/api/articles/feed": { + "get": { + "tags": [ + "articles" + ], + "summary": "Articles:Get-User-Feed-Articles", + "operationId": "articles_get_user_feed_articles_api_articles_feed_get", + "parameters": [ + { + "required": false, + "schema": { + "title": "Limit", + "minimum": 1, + "type": "integer", + "default": 20 + }, + "name": "limit", + "in": "query" + }, + { + "required": false, + "schema": { + "title": "Offset", + "minimum": 0, + "type": "integer", + "default": 0 + }, + "name": "offset", + "in": "query" + } + ], + "responses": { + "200": { + "description": "Successful Response", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ListOfArticlesInResponse" + } + } + } + }, + "422": { + "description": "Validation Error", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/HTTPValidationError" + } + } + } + } + }, + "security": [ + { + "RWAPIKeyHeader": [] + } + ] + } + }, + "/api/articles/{slug}/favorite": { + "post": { + "tags": [ + "articles" + ], + "summary": "Articles:Mark-Article-Favorite", + "operationId": "articles_mark_article_favorite_api_articles__slug__favorite_post", + "parameters": [ + { + "required": true, + "schema": { + "title": "Slug", + "minLength": 1, + "type": "string" + }, + "name": "slug", + "in": "path" + } + ], + "responses": { + "200": { + "description": "Successful Response", + "content": { + "application/json": { + "schema": { + "$ref": 
"#/components/schemas/ArticleInResponse" + } + } + } + }, + "422": { + "description": "Validation Error", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/HTTPValidationError" + } + } + } + } + }, + "security": [ + { + "RWAPIKeyHeader": [] + }, + { + "RWAPIKeyHeader": [] + } + ] + }, + "delete": { + "tags": [ + "articles" + ], + "summary": "Articles:Unmark-Article-Favorite", + "operationId": "articles_unmark_article_favorite_api_articles__slug__favorite_delete", + "parameters": [ + { + "required": true, + "schema": { + "title": "Slug", + "minLength": 1, + "type": "string" + }, + "name": "slug", + "in": "path" + } + ], + "responses": { + "200": { + "description": "Successful Response", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ArticleInResponse" + } + } + } + }, + "422": { + "description": "Validation Error", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/HTTPValidationError" + } + } + } + } + }, + "security": [ + { + "RWAPIKeyHeader": [] + }, + { + "RWAPIKeyHeader": [] + } + ] + } + }, + "/api/articles": { + "get": { + "tags": [ + "articles" + ], + "summary": "Articles:List-Articles", + "operationId": "articles_list_articles_api_articles_get", + "parameters": [ + { + "required": false, + "schema": { + "title": "Tag", + "type": "string" + }, + "name": "tag", + "in": "query" + }, + { + "required": false, + "schema": { + "title": "Author", + "type": "string" + }, + "name": "author", + "in": "query" + }, + { + "required": false, + "schema": { + "title": "Favorited", + "type": "string" + }, + "name": "favorited", + "in": "query" + }, + { + "required": false, + "schema": { + "title": "Limit", + "minimum": 1, + "type": "integer", + "default": 20 + }, + "name": "limit", + "in": "query" + }, + { + "required": false, + "schema": { + "title": "Offset", + "minimum": 0, + "type": "integer", + "default": 0 + }, + "name": "offset", + "in": "query" + } + ], + 
"responses": { + "200": { + "description": "Successful Response", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ListOfArticlesInResponse" + } + } + } + }, + "422": { + "description": "Validation Error", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/HTTPValidationError" + } + } + } + } + }, + "security": [ + { + "RWAPIKeyHeader": [] + } + ] + }, + "post": { + "tags": [ + "articles" + ], + "summary": "Articles:Create-Article", + "operationId": "articles_create_article_api_articles_post", + "requestBody": { + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/Body_articles_create_article_api_articles_post" + } + } + }, + "required": true + }, + "responses": { + "201": { + "description": "Successful Response", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ArticleInResponse" + } + } + } + }, + "422": { + "description": "Validation Error", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/HTTPValidationError" + } + } + } + } + }, + "security": [ + { + "RWAPIKeyHeader": [] + } + ] + } + }, + "/api/articles/{slug}": { + "get": { + "tags": [ + "articles" + ], + "summary": "Articles:Get-Article", + "description": "文章详情:对所有人开放访问。\n- 未登录 / token 缺失 / token 无效 -\u003E user 为 None,正常返回文章。\n- 已登录且 token 有效 -\u003E user 有值,可用于 favorited 等字段计算。", + "operationId": "articles_get_article_api_articles__slug__get", + "parameters": [ + { + "required": true, + "schema": { + "title": "Slug", + "type": "string" + }, + "name": "slug", + "in": "path" + } + ], + "responses": { + "200": { + "description": "Successful Response", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ArticleInResponse" + } + } + } + }, + "422": { + "description": "Validation Error", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/HTTPValidationError" + } + } + } + } 
+ }, + "security": [ + { + "RWAPIKeyHeader": [] + } + ] + }, + "put": { + "tags": [ + "articles" + ], + "summary": "Articles:Update-Article", + "operationId": "articles_update_article_api_articles__slug__put", + "parameters": [ + { + "required": true, + "schema": { + "title": "Slug", + "minLength": 1, + "type": "string" + }, + "name": "slug", + "in": "path" + } + ], + "requestBody": { + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/Body_articles_update_article_api_articles__slug__put" + } + } + }, + "required": true + }, + "responses": { + "200": { + "description": "Successful Response", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ArticleInResponse" + } + } + } + }, + "422": { + "description": "Validation Error", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/HTTPValidationError" + } + } + } + } + }, + "security": [ + { + "RWAPIKeyHeader": [] + }, + { + "RWAPIKeyHeader": [] + } + ] + }, + "delete": { + "tags": [ + "articles" + ], + "summary": "Articles:Delete-Article", + "operationId": "articles_delete_article_api_articles__slug__delete", + "parameters": [ + { + "required": true, + "schema": { + "title": "Slug", + "minLength": 1, + "type": "string" + }, + "name": "slug", + "in": "path" + } + ], + "responses": { + "204": { + "description": "Successful Response" + }, + "422": { + "description": "Validation Error", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/HTTPValidationError" + } + } + } + } + }, + "security": [ + { + "RWAPIKeyHeader": [] + }, + { + "RWAPIKeyHeader": [] + } + ] + } + }, + "/api/articles/{slug}/comments": { + "get": { + "tags": [ + "comments" + ], + "summary": "Comments:Get-Comments-For-Article", + "operationId": "comments_get_comments_for_article_api_articles__slug__comments_get", + "parameters": [ + { + "required": true, + "schema": { + "title": "Slug", + "minLength": 1, + "type": "string" + 
}, + "name": "slug", + "in": "path" + } + ], + "responses": { + "200": { + "description": "Successful Response", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ListOfCommentsInResponse" + } + } + } + }, + "422": { + "description": "Validation Error", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/HTTPValidationError" + } + } + } + } + }, + "security": [ + { + "RWAPIKeyHeader": [] + } + ] + }, + "post": { + "tags": [ + "comments" + ], + "summary": "Comments:Create-Comment-For-Article", + "operationId": "comments_create_comment_for_article_api_articles__slug__comments_post", + "parameters": [ + { + "required": true, + "schema": { + "title": "Slug", + "minLength": 1, + "type": "string" + }, + "name": "slug", + "in": "path" + } + ], + "requestBody": { + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/Body_comments_create_comment_for_article_api_articles__slug__comments_post" + } + } + }, + "required": true + }, + "responses": { + "201": { + "description": "Successful Response", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/CommentInResponse" + } + } + } + }, + "422": { + "description": "Validation Error", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/HTTPValidationError" + } + } + } + } + }, + "security": [ + { + "RWAPIKeyHeader": [] + }, + { + "RWAPIKeyHeader": [] + } + ] + } + }, + "/api/articles/{slug}/comments/{comment_id}": { + "delete": { + "tags": [ + "comments" + ], + "summary": "Comments:Delete-Comment-From-Article", + "operationId": "comments_delete_comment_from_article_api_articles__slug__comments__comment_id__delete", + "parameters": [ + { + "required": true, + "schema": { + "title": "Comment Id", + "minimum": 1, + "type": "integer" + }, + "name": "comment_id", + "in": "path" + }, + { + "required": true, + "schema": { + "title": "Slug", + "minLength": 1, + "type": 
"string" + }, + "name": "slug", + "in": "path" + } + ], + "responses": { + "204": { + "description": "Successful Response" + }, + "422": { + "description": "Validation Error", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/HTTPValidationError" + } + } + } + } + }, + "security": [ + { + "RWAPIKeyHeader": [] + }, + { + "RWAPIKeyHeader": [] + } + ] + } + }, + "/api/tags": { + "get": { + "tags": [ + "tags" + ], + "summary": "Tags:Get-All", + "operationId": "tags_get_all_api_tags_get", + "responses": { + "200": { + "description": "Successful Response", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/TagsInList" + } + } + } + } + } + } + }, + "/api/upload-image": { + "post": { + "tags": [ + "uploads" + ], + "summary": "Upload Image", + "operationId": "upload_image_api_upload_image_post", + "requestBody": { + "content": { + "multipart/form-data": { + "schema": { + "$ref": "#/components/schemas/Body_upload_image_api_upload_image_post" + } + } + }, + "required": true + }, + "responses": { + "200": { + "description": "Successful Response", + "content": { + "application/json": { + "schema": { + + } + } + } + }, + "422": { + "description": "Validation Error", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/HTTPValidationError" + } + } + } + } + } + } + } + }, + "components": { + "schemas": { + "ArticleForResponse": { + "title": "ArticleForResponse", + "required": [ + "slug", + "title", + "description", + "body", + "tagList", + "author", + "favorited", + "favoritesCount" + ], + "type": "object", + "properties": { + "createdAt": { + "title": "Createdat", + "type": "string", + "format": "date-time" + }, + "updatedAt": { + "title": "Updatedat", + "type": "string", + "format": "date-time" + }, + "id": { + "title": "Id", + "type": "integer", + "default": 0 + }, + "slug": { + "title": "Slug", + "type": "string" + }, + "title": { + "title": "Title", + "type": "string" + }, + 
"description": { + "title": "Description", + "type": "string" + }, + "body": { + "title": "Body", + "type": "string" + }, + "cover": { + "title": "Cover", + "type": "string" + }, + "tagList": { + "title": "Taglist", + "type": "array", + "items": { + "type": "string" + } + }, + "author": { + "$ref": "#/components/schemas/Profile" + }, + "favorited": { + "title": "Favorited", + "type": "boolean" + }, + "favoritesCount": { + "title": "Favoritescount", + "type": "integer" + } + }, + "description": "返回给前端的文章结构:\n- 继承 Article(包含 cover、tags、author 等)\n- 其中 tags 字段通过 alias 暴露为 tagList,兼容现有前端" + }, + "ArticleInCreate": { + "title": "ArticleInCreate", + "required": [ + "title", + "description", + "body" + ], + "type": "object", + "properties": { + "title": { + "title": "Title", + "type": "string" + }, + "description": { + "title": "Description", + "type": "string" + }, + "body": { + "title": "Body", + "type": "string" + }, + "tagList": { + "title": "Taglist", + "type": "array", + "items": { + "type": "string" + }, + "default": [] + }, + "cover": { + "title": "Cover", + "type": "string" + } + }, + "description": "创建文章时请求体:\n{\n \"article\": {\n \"title\": \"...\",\n \"description\": \"...\",\n \"body\": \"...\",\n \"tagList\": [\"...\"],\n \"cover\": \"可选封面URL\"\n }\n}" + }, + "ArticleInResponse": { + "title": "ArticleInResponse", + "required": [ + "article" + ], + "type": "object", + "properties": { + "article": { + "$ref": "#/components/schemas/ArticleForResponse" + } + } + }, + "ArticleInUpdate": { + "title": "ArticleInUpdate", + "type": "object", + "properties": { + "title": { + "title": "Title", + "type": "string" + }, + "description": { + "title": "Description", + "type": "string" + }, + "body": { + "title": "Body", + "type": "string" + }, + "cover": { + "title": "Cover", + "type": "string" + } + }, + "description": "更新文章时请求体(全部可选):\n- 不传的字段不改\n- cover:\n - 不传:不改\n - 传 null / \"\":清空封面(配合 repo 里 cover_provided 使用)\n - 传字符串:更新为新封面" + }, + 
"Body_articles_create_article_api_articles_post": { + "title": "Body_articles_create_article_api_articles_post", + "required": [ + "article" + ], + "type": "object", + "properties": { + "article": { + "$ref": "#/components/schemas/ArticleInCreate" + } + } + }, + "Body_articles_update_article_api_articles__slug__put": { + "title": "Body_articles_update_article_api_articles__slug__put", + "required": [ + "article" + ], + "type": "object", + "properties": { + "article": { + "$ref": "#/components/schemas/ArticleInUpdate" + } + } + }, + "Body_auth_login_api_auth_login_post": { + "title": "Body_auth_login_api_auth_login_post", + "required": [ + "user" + ], + "type": "object", + "properties": { + "user": { + "$ref": "#/components/schemas/UserInLogin" + } + } + }, + "Body_auth_register_api_auth_post": { + "title": "Body_auth_register_api_auth_post", + "required": [ + "user" + ], + "type": "object", + "properties": { + "user": { + "$ref": "#/components/schemas/RegisterWithEmailIn" + } + } + }, + "Body_comments_create_comment_for_article_api_articles__slug__comments_post": { + "title": "Body_comments_create_comment_for_article_api_articles__slug__comments_post", + "required": [ + "comment" + ], + "type": "object", + "properties": { + "comment": { + "$ref": "#/components/schemas/CommentInCreate" + } + } + }, + "Body_upload_image_api_upload_image_post": { + "title": "Body_upload_image_api_upload_image_post", + "required": [ + "file" + ], + "type": "object", + "properties": { + "file": { + "title": "File", + "type": "string", + "format": "binary" + } + } + }, + "Body_users_update_current_user_api_user_put": { + "title": "Body_users_update_current_user_api_user_put", + "required": [ + "user" + ], + "type": "object", + "properties": { + "user": { + "$ref": "#/components/schemas/UserInUpdate" + } + } + }, + "Comment": { + "title": "Comment", + "required": [ + "body", + "author" + ], + "type": "object", + "properties": { + "createdAt": { + "title": "Createdat", + "type": "string", 
+ "format": "date-time" + }, + "updatedAt": { + "title": "Updatedat", + "type": "string", + "format": "date-time" + }, + "id": { + "title": "Id", + "type": "integer", + "default": 0 + }, + "body": { + "title": "Body", + "type": "string" + }, + "author": { + "$ref": "#/components/schemas/Profile" + } + } + }, + "CommentInCreate": { + "title": "CommentInCreate", + "required": [ + "body" + ], + "type": "object", + "properties": { + "body": { + "title": "Body", + "type": "string" + } + } + }, + "CommentInResponse": { + "title": "CommentInResponse", + "required": [ + "comment" + ], + "type": "object", + "properties": { + "comment": { + "$ref": "#/components/schemas/Comment" + } + } + }, + "EmailCodeSendIn": { + "title": "EmailCodeSendIn", + "required": [ + "email" + ], + "type": "object", + "properties": { + "email": { + "title": "Email", + "type": "string", + "format": "email" + }, + "scene": { + "allOf": [ + { + "$ref": "#/components/schemas/EmailScene" + } + ], + "default": "register" + } + } + }, + "EmailCodeSendOut": { + "title": "EmailCodeSendOut", + "type": "object", + "properties": { + "ok": { + "title": "Ok", + "type": "boolean", + "default": true + } + } + }, + "EmailScene": { + "title": "EmailScene", + "enum": [ + "register", + "reset", + "login" + ], + "type": "string", + "description": "An enumeration." 
+ }, + "HTTPValidationError": { + "title": "HTTPValidationError", + "type": "object", + "properties": { + "errors": { + "title": "Errors", + "type": "array", + "items": { + "$ref": "#/components/schemas/ValidationError" + } + } + } + }, + "ListOfArticlesInResponse": { + "title": "ListOfArticlesInResponse", + "required": [ + "articles", + "articlesCount" + ], + "type": "object", + "properties": { + "articles": { + "title": "Articles", + "type": "array", + "items": { + "$ref": "#/components/schemas/ArticleForResponse" + } + }, + "articlesCount": { + "title": "Articlescount", + "type": "integer" + } + } + }, + "ListOfCommentsInResponse": { + "title": "ListOfCommentsInResponse", + "required": [ + "comments" + ], + "type": "object", + "properties": { + "comments": { + "title": "Comments", + "type": "array", + "items": { + "$ref": "#/components/schemas/Comment" + } + } + } + }, + "PasswordForgotIn": { + "title": "PasswordForgotIn", + "required": [ + "email" + ], + "type": "object", + "properties": { + "email": { + "title": "Email", + "type": "string", + "format": "email" + } + } + }, + "PasswordResetIn": { + "title": "PasswordResetIn", + "required": [ + "email", + "code", + "password", + "confirm_password" + ], + "type": "object", + "properties": { + "email": { + "title": "Email", + "type": "string", + "format": "email" + }, + "code": { + "title": "Code", + "maxLength": 12, + "minLength": 4, + "type": "string" + }, + "password": { + "title": "Password", + "minLength": 6, + "type": "string" + }, + "confirm_password": { + "title": "Confirm Password", + "minLength": 6, + "type": "string" + } + } + }, + "Profile": { + "title": "Profile", + "required": [ + "username" + ], + "type": "object", + "properties": { + "username": { + "title": "Username", + "type": "string" + }, + "bio": { + "title": "Bio", + "type": "string", + "default": "" + }, + "image": { + "title": "Image", + "type": "string" + }, + "following": { + "title": "Following", + "type": "boolean", + "default": false 
+ } + } + }, + "ProfileInResponse": { + "title": "ProfileInResponse", + "required": [ + "profile" + ], + "type": "object", + "properties": { + "profile": { + "$ref": "#/components/schemas/Profile" + } + } + }, + "RegisterWithEmailIn": { + "title": "RegisterWithEmailIn", + "required": [ + "email", + "password", + "confirmPassword", + "code" + ], + "type": "object", + "properties": { + "email": { + "title": "Email", + "type": "string", + "format": "email" + }, + "password": { + "title": "Password", + "maxLength": 64, + "minLength": 6, + "type": "string" + }, + "confirmPassword": { + "title": "Confirmpassword", + "maxLength": 64, + "minLength": 6, + "type": "string" + }, + "code": { + "title": "Code", + "maxLength": 8, + "minLength": 4, + "type": "string" + } + }, + "description": "邮箱注册:邮箱 + 密码 + 确认密码 + 验证码" + }, + "TagsInList": { + "title": "TagsInList", + "required": [ + "tags" + ], + "type": "object", + "properties": { + "tags": { + "title": "Tags", + "type": "array", + "items": { + "type": "string" + } + } + } + }, + "UserInLogin": { + "title": "UserInLogin", + "required": [ + "email", + "password" + ], + "type": "object", + "properties": { + "email": { + "title": "Email", + "type": "string", + "format": "email" + }, + "password": { + "title": "Password", + "type": "string" + } + } + }, + "UserInResponse": { + "title": "UserInResponse", + "required": [ + "user" + ], + "type": "object", + "properties": { + "user": { + "$ref": "#/components/schemas/UserWithToken" + } + } + }, + "UserInUpdate": { + "title": "UserInUpdate", + "type": "object", + "properties": { + "username": { + "title": "Username", + "type": "string" + }, + "email": { + "title": "Email", + "type": "string", + "format": "email" + }, + "password": { + "title": "Password", + "type": "string" + }, + "bio": { + "title": "Bio", + "type": "string" + }, + "image": { + "title": "Image", + "maxLength": 2083, + "minLength": 1, + "type": "string", + "format": "uri" + } + } + }, + "UserWithToken": { + "title": 
"UserWithToken", + "required": [ + "username", + "email", + "token" + ], + "type": "object", + "properties": { + "username": { + "title": "Username", + "type": "string" + }, + "email": { + "title": "Email", + "type": "string" + }, + "bio": { + "title": "Bio", + "type": "string", + "default": "" + }, + "image": { + "title": "Image", + "type": "string" + }, + "emailVerified": { + "title": "Emailverified", + "type": "boolean", + "default": false + }, + "token": { + "title": "Token", + "type": "string" + } + }, + "description": "公开用户信息(会被用于文章作者、Profile 等场景)。\n新增 email_verified 字段,便于前端展示与登录后逻辑判断。" + }, + "ValidationError": { + "title": "ValidationError", + "required": [ + "loc", + "msg", + "type" + ], + "type": "object", + "properties": { + "loc": { + "title": "Location", + "type": "array", + "items": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "integer" + } + ] + } + }, + "msg": { + "title": "Message", + "type": "string" + }, + "type": { + "title": "Error Type", + "type": "string" + } + } + } + }, + "securitySchemes": { + "RWAPIKeyHeader": { + "type": "apiKey", + "in": "header", + "name": "Authorization" + } + } + } +} \ No newline at end of file diff --git a/backend/poetry.lock b/backend/poetry.lock new file mode 100644 index 0000000..b02ba83 --- /dev/null +++ b/backend/poetry.lock @@ -0,0 +1,1970 @@ +# This file is automatically @generated by Poetry 1.7.1 and should not be changed by hand. + +[[package]] +name = "aiosql" +version = "6.2" +description = "Simple SQL in Python" +optional = false +python-versions = ">=3.7" +files = [ + {file = "aiosql-6.2-py3-none-any.whl", hash = "sha256:b4b2b9a3ca129aa0fb44fa28d67d817bf4ac6c6fd663bfd15ed7e2c36562869d"}, + {file = "aiosql-6.2.tar.gz", hash = "sha256:644b488c17143c0897084db158977b44e1801805329bed8a7e0d34d5ac4aa0ef"}, +] + +[[package]] +name = "alembic" +version = "1.8.1" +description = "A database migration tool for SQLAlchemy." 
+optional = false +python-versions = ">=3.7" +files = [ + {file = "alembic-1.8.1-py3-none-any.whl", hash = "sha256:0a024d7f2de88d738d7395ff866997314c837be6104e90c5724350313dee4da4"}, + {file = "alembic-1.8.1.tar.gz", hash = "sha256:cd0b5e45b14b706426b833f06369b9a6d5ee03f826ec3238723ce8caaf6e5ffa"}, +] + +[package.dependencies] +Mako = "*" +SQLAlchemy = ">=1.3.0" + +[package.extras] +tz = ["python-dateutil"] + +[[package]] +name = "anyio" +version = "3.5.0" +description = "High level compatibility layer for multiple asynchronous event loop implementations" +optional = false +python-versions = ">=3.6.2" +files = [ + {file = "anyio-3.5.0-py3-none-any.whl", hash = "sha256:b5fa16c5ff93fa1046f2eeb5bbff2dad4d3514d6cda61d02816dba34fa8c3c2e"}, + {file = "anyio-3.5.0.tar.gz", hash = "sha256:a0aeffe2fb1fdf374a8e4b471444f0f3ac4fb9f5a5b542b48824475e0042a5a6"}, +] + +[package.dependencies] +idna = ">=2.8" +sniffio = ">=1.1" + +[package.extras] +doc = ["packaging", "sphinx-autodoc-typehints (>=1.2.0)", "sphinx-rtd-theme"] +test = ["contextlib2", "coverage[toml] (>=4.5)", "hypothesis (>=4.0)", "mock (>=4)", "pytest (>=6.0)", "pytest-mock (>=3.6.1)", "trustme", "uvloop (<0.15)", "uvloop (>=0.15)"] +trio = ["trio (>=0.16)"] + +[[package]] +name = "asgi-lifespan" +version = "1.0.1" +description = "Programmatic startup/shutdown of ASGI apps." 
+optional = false +python-versions = ">=3.6" +files = [ + {file = "asgi-lifespan-1.0.1.tar.gz", hash = "sha256:9a33e7da2073c4764bc79bd6136501d6c42f60e3d2168ba71235e84122eadb7f"}, + {file = "asgi_lifespan-1.0.1-py3-none-any.whl", hash = "sha256:9ea969dc5eb5cf08e52c08dce6f61afcadd28112e72d81c972b1d8eb8691ab53"}, +] + +[package.dependencies] +sniffio = "*" + +[[package]] +name = "astor" +version = "0.8.1" +description = "Read/rewrite/write Python ASTs" +optional = false +python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,>=2.7" +files = [ + {file = "astor-0.8.1-py2.py3-none-any.whl", hash = "sha256:070a54e890cefb5b3739d19f30f5a5ec840ffc9c50ffa7d23cc9fc1a38ebbfc5"}, + {file = "astor-0.8.1.tar.gz", hash = "sha256:6a6effda93f4e1ce9f618779b2dd1d9d84f1e32812c23a29b3fff6fd7f63fa5e"}, +] + +[[package]] +name = "asyncpg" +version = "0.26.0" +description = "An asyncio PostgreSQL driver" +optional = false +python-versions = ">=3.6.0" +files = [ + {file = "asyncpg-0.26.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:2ed3880b3aec8bda90548218fe0914d251d641f798382eda39a17abfc4910af0"}, + {file = "asyncpg-0.26.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e5bd99ee7a00e87df97b804f178f31086e88c8106aca9703b1d7be5078999e68"}, + {file = "asyncpg-0.26.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:868a71704262834065ca7113d80b1f679609e2df77d837747e3d92150dd5a39b"}, + {file = "asyncpg-0.26.0-cp310-cp310-win32.whl", hash = "sha256:838e4acd72da370ad07243898e886e93d3c0c9413f4444d600ba60a5cc206014"}, + {file = "asyncpg-0.26.0-cp310-cp310-win_amd64.whl", hash = "sha256:a254d09a3a989cc1839ba2c34448b879cdd017b528a0cda142c92fbb6c13d957"}, + {file = "asyncpg-0.26.0-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:3ecbe8ed3af4c739addbfbd78f7752866cce2c4e9cc3f953556e4960349ae360"}, + {file = "asyncpg-0.26.0-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f3ce7d8c0ab4639bbf872439eba86ef62dd030b245ad0e17c8c675d93d7a6b2d"}, + 
{file = "asyncpg-0.26.0-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:7129bd809990fd119e8b2b9982e80be7712bb6041cd082be3e415e60e5e2e98f"}, + {file = "asyncpg-0.26.0-cp36-cp36m-win32.whl", hash = "sha256:03f44926fa7ff7ccd59e98f05c7e227e9de15332a7da5bbcef3654bf468ee597"}, + {file = "asyncpg-0.26.0-cp36-cp36m-win_amd64.whl", hash = "sha256:b1f7b173af649b85126429e11a628d01a5b75973d2a55d64dba19ad8f0e9f904"}, + {file = "asyncpg-0.26.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:efe056fd22fc6ed5c1ab353b6510808409566daac4e6f105e2043797f17b8dad"}, + {file = "asyncpg-0.26.0-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d96cf93e01df9fb03cef5f62346587805e6c0ca6f654c23b8d35315bdc69af59"}, + {file = "asyncpg-0.26.0-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:235205b60d4d014921f7b1cdca0e19669a9a8978f7606b3eb8237ca95f8e716e"}, + {file = "asyncpg-0.26.0-cp37-cp37m-win32.whl", hash = "sha256:0de408626cfc811ef04f372debfcdd5e4ab5aeb358f2ff14d1bdc246ed6272b5"}, + {file = "asyncpg-0.26.0-cp37-cp37m-win_amd64.whl", hash = "sha256:f92d501bf213b16fabad4fbb0061398d2bceae30ddc228e7314c28dcc6641b79"}, + {file = "asyncpg-0.26.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:9acb22a7b6bcca0d80982dce3d67f267d43e960544fb5dd934fd3abe20c48014"}, + {file = "asyncpg-0.26.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e550d8185f2c4725c1e8d3c555fe668b41bd092143012ddcc5343889e1c2a13d"}, + {file = "asyncpg-0.26.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:050e339694f8c5d9aebcf326ca26f6622ef23963a6a3a4f97aeefc743954afd5"}, + {file = "asyncpg-0.26.0-cp38-cp38-win32.whl", hash = "sha256:b0c3f39ebfac06848ba3f1e280cb1fada7cc1229538e3dad3146e8d1f9deb92a"}, + {file = "asyncpg-0.26.0-cp38-cp38-win_amd64.whl", hash = "sha256:49fc7220334cc31d14866a0b77a575d6a5945c0fa3bb67f17304e8b838e2a02b"}, + {file = "asyncpg-0.26.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = 
"sha256:d156e53b329e187e2dbfca8c28c999210045c45ef22a200b50de9b9e520c2694"}, + {file = "asyncpg-0.26.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4b4051012ca75defa9a1dc6b78185ca58cdc3a247187eb76a6bcf55dfaa2fad4"}, + {file = "asyncpg-0.26.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:6d60f15a0ac18c54a6ca6507c28599c06e2e87a0901e7b548f15243d71905b18"}, + {file = "asyncpg-0.26.0-cp39-cp39-win32.whl", hash = "sha256:ede1a3a2c377fe12a3930f4b4dd5340e8b32929541d5db027a21816852723438"}, + {file = "asyncpg-0.26.0-cp39-cp39-win_amd64.whl", hash = "sha256:8e1e79f0253cbd51fc43c4d0ce8804e46ee71f6c173fdc75606662ad18756b52"}, + {file = "asyncpg-0.26.0.tar.gz", hash = "sha256:77e684a24fee17ba3e487ca982d0259ed17bae1af68006f4cf284b23ba20ea2c"}, +] + +[package.extras] +dev = ["Cython (>=0.29.24,<0.30.0)", "Sphinx (>=4.1.2,<4.2.0)", "flake8 (>=3.9.2,<3.10.0)", "pycodestyle (>=2.7.0,<2.8.0)", "pytest (>=6.0)", "sphinx-rtd-theme (>=0.5.2,<0.6.0)", "sphinxcontrib-asyncio (>=0.3.0,<0.4.0)", "uvloop (>=0.15.3)"] +docs = ["Sphinx (>=4.1.2,<4.2.0)", "sphinx-rtd-theme (>=0.5.2,<0.6.0)", "sphinxcontrib-asyncio (>=0.3.0,<0.4.0)"] +test = ["flake8 (>=3.9.2,<3.10.0)", "pycodestyle (>=2.7.0,<2.8.0)", "uvloop (>=0.15.3)"] + +[[package]] +name = "atomicwrites" +version = "1.4.0" +description = "Atomic file writes." 
+optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" +files = [ + {file = "atomicwrites-1.4.0-py2.py3-none-any.whl", hash = "sha256:6d1784dea7c0c8d4a5172b6c620f40b6e4cbfdf96d783691f2e1302a7b88e197"}, + {file = "atomicwrites-1.4.0.tar.gz", hash = "sha256:ae70396ad1a434f9c7046fd2dd196fc04b12f9e91ffb859164193be8b6168a7a"}, +] + +[[package]] +name = "attrs" +version = "21.4.0" +description = "Classes Without Boilerplate" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" +files = [ + {file = "attrs-21.4.0-py2.py3-none-any.whl", hash = "sha256:2d27e3784d7a565d36ab851fe94887c5eccd6a463168875832a1be79c82828b4"}, + {file = "attrs-21.4.0.tar.gz", hash = "sha256:626ba8234211db98e869df76230a137c4c40a12d72445c45d5f5b716f076e2fd"}, +] + +[package.extras] +dev = ["cloudpickle", "coverage[toml] (>=5.0.2)", "furo", "hypothesis", "mypy", "pre-commit", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins", "six", "sphinx", "sphinx-notfound-page", "zope.interface"] +docs = ["furo", "sphinx", "sphinx-notfound-page", "zope.interface"] +tests = ["cloudpickle", "coverage[toml] (>=5.0.2)", "hypothesis", "mypy", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins", "six", "zope.interface"] +tests-no-zope = ["cloudpickle", "coverage[toml] (>=5.0.2)", "hypothesis", "mypy", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins", "six"] + +[[package]] +name = "autoflake" +version = "1.4" +description = "Removes unused imports and unused variables" +optional = false +python-versions = "*" +files = [ + {file = "autoflake-1.4.tar.gz", hash = "sha256:61a353012cff6ab94ca062823d1fb2f692c4acda51c76ff83a8d77915fba51ea"}, +] + +[package.dependencies] +pyflakes = ">=1.1.0" + +[[package]] +name = "bandit" +version = "1.7.2" +description = "Security oriented static analyser for python code." 
+optional = false +python-versions = ">=3.7" +files = [ + {file = "bandit-1.7.2-py3-none-any.whl", hash = "sha256:e20402cadfd126d85b68ed4c8862959663c8c372dbbb1fca8f8e2c9f55a067ec"}, + {file = "bandit-1.7.2.tar.gz", hash = "sha256:6d11adea0214a43813887bfe71a377b5a9955e4c826c8ffd341b494e3ab25260"}, +] + +[package.dependencies] +colorama = {version = ">=0.3.9", markers = "platform_system == \"Windows\""} +GitPython = ">=1.0.1" +PyYAML = ">=5.3.1" +stevedore = ">=1.20.0" + +[package.extras] +test = ["beautifulsoup4 (>=4.8.0)", "coverage (>=4.5.4)", "fixtures (>=3.0.0)", "flake8 (>=4.0.0)", "pylint (==1.9.4)", "stestr (>=2.5.0)", "testscenarios (>=0.5.0)", "testtools (>=2.3.0)", "toml"] +toml = ["toml"] +yaml = ["PyYAML"] + +[[package]] +name = "bcrypt" +version = "3.2.0" +description = "Modern password hashing for your software and your servers" +optional = false +python-versions = ">=3.6" +files = [ + {file = "bcrypt-3.2.0-cp36-abi3-macosx_10_10_universal2.whl", hash = "sha256:b589229207630484aefe5899122fb938a5b017b0f4349f769b8c13e78d99a8fd"}, + {file = "bcrypt-3.2.0-cp36-abi3-macosx_10_9_x86_64.whl", hash = "sha256:c95d4cbebffafcdd28bd28bb4e25b31c50f6da605c81ffd9ad8a3d1b2ab7b1b6"}, + {file = "bcrypt-3.2.0-cp36-abi3-manylinux1_x86_64.whl", hash = "sha256:63d4e3ff96188e5898779b6057878fecf3f11cfe6ec3b313ea09955d587ec7a7"}, + {file = "bcrypt-3.2.0-cp36-abi3-manylinux2010_x86_64.whl", hash = "sha256:cd1ea2ff3038509ea95f687256c46b79f5fc382ad0aa3664d200047546d511d1"}, + {file = "bcrypt-3.2.0-cp36-abi3-manylinux2014_aarch64.whl", hash = "sha256:cdcdcb3972027f83fe24a48b1e90ea4b584d35f1cc279d76de6fc4b13376239d"}, + {file = "bcrypt-3.2.0-cp36-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:a0584a92329210fcd75eb8a3250c5a941633f8bfaf2a18f81009b097732839b7"}, + {file = "bcrypt-3.2.0-cp36-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:56e5da069a76470679f312a7d3d23deb3ac4519991a0361abc11da837087b61d"}, + {file = 
"bcrypt-3.2.0-cp36-abi3-win32.whl", hash = "sha256:a67fb841b35c28a59cebed05fbd3e80eea26e6d75851f0574a9273c80f3e9b55"}, + {file = "bcrypt-3.2.0-cp36-abi3-win_amd64.whl", hash = "sha256:81fec756feff5b6818ea7ab031205e1d323d8943d237303baca2c5f9c7846f34"}, + {file = "bcrypt-3.2.0.tar.gz", hash = "sha256:5b93c1726e50a93a033c36e5ca7fdcd29a5c7395af50a6892f5d9e7c6cfbfb29"}, +] + +[package.dependencies] +cffi = ">=1.1" +six = ">=1.4.1" + +[package.extras] +tests = ["pytest (>=3.2.1,!=3.3.0)"] +typecheck = ["mypy"] + +[[package]] +name = "black" +version = "22.6.0" +description = "The uncompromising code formatter." +optional = false +python-versions = ">=3.6.2" +files = [ + {file = "black-22.6.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:f586c26118bc6e714ec58c09df0157fe2d9ee195c764f630eb0d8e7ccce72e69"}, + {file = "black-22.6.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:b270a168d69edb8b7ed32c193ef10fd27844e5c60852039599f9184460ce0807"}, + {file = "black-22.6.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:6797f58943fceb1c461fb572edbe828d811e719c24e03375fd25170ada53825e"}, + {file = "black-22.6.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c85928b9d5f83b23cee7d0efcb310172412fbf7cb9d9ce963bd67fd141781def"}, + {file = "black-22.6.0-cp310-cp310-win_amd64.whl", hash = "sha256:f6fe02afde060bbeef044af7996f335fbe90b039ccf3f5eb8f16df8b20f77666"}, + {file = "black-22.6.0-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:cfaf3895a9634e882bf9d2363fed5af8888802d670f58b279b0bece00e9a872d"}, + {file = "black-22.6.0-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:94783f636bca89f11eb5d50437e8e17fbc6a929a628d82304c80fa9cd945f256"}, + {file = "black-22.6.0-cp36-cp36m-win_amd64.whl", hash = "sha256:2ea29072e954a4d55a2ff58971b83365eba5d3d357352a07a7a4df0d95f51c78"}, + {file = "black-22.6.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:e439798f819d49ba1c0bd9664427a05aab79bfba777a6db94fd4e56fae0cb849"}, + 
{file = "black-22.6.0-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:187d96c5e713f441a5829e77120c269b6514418f4513a390b0499b0987f2ff1c"}, + {file = "black-22.6.0-cp37-cp37m-win_amd64.whl", hash = "sha256:074458dc2f6e0d3dab7928d4417bb6957bb834434516f21514138437accdbe90"}, + {file = "black-22.6.0-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:a218d7e5856f91d20f04e931b6f16d15356db1c846ee55f01bac297a705ca24f"}, + {file = "black-22.6.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:568ac3c465b1c8b34b61cd7a4e349e93f91abf0f9371eda1cf87194663ab684e"}, + {file = "black-22.6.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:6c1734ab264b8f7929cef8ae5f900b85d579e6cbfde09d7387da8f04771b51c6"}, + {file = "black-22.6.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c9a3ac16efe9ec7d7381ddebcc022119794872abce99475345c5a61aa18c45ad"}, + {file = "black-22.6.0-cp38-cp38-win_amd64.whl", hash = "sha256:b9fd45787ba8aa3f5e0a0a98920c1012c884622c6c920dbe98dbd05bc7c70fbf"}, + {file = "black-22.6.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:7ba9be198ecca5031cd78745780d65a3f75a34b2ff9be5837045dce55db83d1c"}, + {file = "black-22.6.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:a3db5b6409b96d9bd543323b23ef32a1a2b06416d525d27e0f67e74f1446c8f2"}, + {file = "black-22.6.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:560558527e52ce8afba936fcce93a7411ab40c7d5fe8c2463e279e843c0328ee"}, + {file = "black-22.6.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b154e6bbde1e79ea3260c4b40c0b7b3109ffcdf7bc4ebf8859169a6af72cd70b"}, + {file = "black-22.6.0-cp39-cp39-win_amd64.whl", hash = "sha256:4af5bc0e1f96be5ae9bd7aaec219c901a94d6caa2484c21983d043371c733fc4"}, + {file = "black-22.6.0-py3-none-any.whl", hash = "sha256:ac609cf8ef5e7115ddd07d85d988d074ed00e10fbc3445aee393e70164a2219c"}, + {file = "black-22.6.0.tar.gz", hash = "sha256:6c6d39e28aed379aec40da1c65434c77d75e65bb59a1e1c283de545fb4e7c6c9"}, +] 
+ +[package.dependencies] +click = ">=8.0.0" +mypy-extensions = ">=0.4.3" +pathspec = ">=0.9.0" +platformdirs = ">=2" +tomli = {version = ">=1.1.0", markers = "python_full_version < \"3.11.0a7\""} +typing-extensions = {version = ">=3.10.0.0", markers = "python_version < \"3.10\""} + +[package.extras] +colorama = ["colorama (>=0.4.3)"] +d = ["aiohttp (>=3.7.4)"] +jupyter = ["ipython (>=7.8.0)", "tokenize-rt (>=3.2.0)"] +uvloop = ["uvloop (>=0.15.2)"] + +[[package]] +name = "certifi" +version = "2021.10.8" +description = "Python package for providing Mozilla's CA Bundle." +optional = false +python-versions = "*" +files = [ + {file = "certifi-2021.10.8-py2.py3-none-any.whl", hash = "sha256:d62a0163eb4c2344ac042ab2bdf75399a71a2d8c7d47eac2e2ee91b9d6339569"}, + {file = "certifi-2021.10.8.tar.gz", hash = "sha256:78884e7c1d4b00ce3cea67b44566851c4343c120abd683433ce934a68ea58872"}, +] + +[[package]] +name = "cffi" +version = "1.15.0" +description = "Foreign Function Interface for Python calling C code." 
+optional = false +python-versions = "*" +files = [ + {file = "cffi-1.15.0-cp27-cp27m-macosx_10_9_x86_64.whl", hash = "sha256:c2502a1a03b6312837279c8c1bd3ebedf6c12c4228ddbad40912d671ccc8a962"}, + {file = "cffi-1.15.0-cp27-cp27m-manylinux1_i686.whl", hash = "sha256:23cfe892bd5dd8941608f93348c0737e369e51c100d03718f108bf1add7bd6d0"}, + {file = "cffi-1.15.0-cp27-cp27m-manylinux1_x86_64.whl", hash = "sha256:41d45de54cd277a7878919867c0f08b0cf817605e4eb94093e7516505d3c8d14"}, + {file = "cffi-1.15.0-cp27-cp27m-win32.whl", hash = "sha256:4a306fa632e8f0928956a41fa8e1d6243c71e7eb59ffbd165fc0b41e316b2474"}, + {file = "cffi-1.15.0-cp27-cp27m-win_amd64.whl", hash = "sha256:e7022a66d9b55e93e1a845d8c9eba2a1bebd4966cd8bfc25d9cd07d515b33fa6"}, + {file = "cffi-1.15.0-cp27-cp27mu-manylinux1_i686.whl", hash = "sha256:14cd121ea63ecdae71efa69c15c5543a4b5fbcd0bbe2aad864baca0063cecf27"}, + {file = "cffi-1.15.0-cp27-cp27mu-manylinux1_x86_64.whl", hash = "sha256:d4d692a89c5cf08a8557fdeb329b82e7bf609aadfaed6c0d79f5a449a3c7c023"}, + {file = "cffi-1.15.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:0104fb5ae2391d46a4cb082abdd5c69ea4eab79d8d44eaaf79f1b1fd806ee4c2"}, + {file = "cffi-1.15.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:91ec59c33514b7c7559a6acda53bbfe1b283949c34fe7440bcf917f96ac0723e"}, + {file = "cffi-1.15.0-cp310-cp310-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:f5c7150ad32ba43a07c4479f40241756145a1f03b43480e058cfd862bf5041c7"}, + {file = "cffi-1.15.0-cp310-cp310-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:00c878c90cb53ccfaae6b8bc18ad05d2036553e6d9d1d9dbcf323bbe83854ca3"}, + {file = "cffi-1.15.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:abb9a20a72ac4e0fdb50dae135ba5e77880518e742077ced47eb1499e29a443c"}, + {file = "cffi-1.15.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a5263e363c27b653a90078143adb3d076c1a748ec9ecc78ea2fb916f9b861962"}, + {file = 
"cffi-1.15.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f54a64f8b0c8ff0b64d18aa76675262e1700f3995182267998c31ae974fbc382"}, + {file = "cffi-1.15.0-cp310-cp310-win32.whl", hash = "sha256:c21c9e3896c23007803a875460fb786118f0cdd4434359577ea25eb556e34c55"}, + {file = "cffi-1.15.0-cp310-cp310-win_amd64.whl", hash = "sha256:5e069f72d497312b24fcc02073d70cb989045d1c91cbd53979366077959933e0"}, + {file = "cffi-1.15.0-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:64d4ec9f448dfe041705426000cc13e34e6e5bb13736e9fd62e34a0b0c41566e"}, + {file = "cffi-1.15.0-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2756c88cbb94231c7a147402476be2c4df2f6078099a6f4a480d239a8817ae39"}, + {file = "cffi-1.15.0-cp36-cp36m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3b96a311ac60a3f6be21d2572e46ce67f09abcf4d09344c49274eb9e0bf345fc"}, + {file = "cffi-1.15.0-cp36-cp36m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:75e4024375654472cc27e91cbe9eaa08567f7fbdf822638be2814ce059f58032"}, + {file = "cffi-1.15.0-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:59888172256cac5629e60e72e86598027aca6bf01fa2465bdb676d37636573e8"}, + {file = "cffi-1.15.0-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:27c219baf94952ae9d50ec19651a687b826792055353d07648a5695413e0c605"}, + {file = "cffi-1.15.0-cp36-cp36m-win32.whl", hash = "sha256:4958391dbd6249d7ad855b9ca88fae690783a6be9e86df65865058ed81fc860e"}, + {file = "cffi-1.15.0-cp36-cp36m-win_amd64.whl", hash = "sha256:f6f824dc3bce0edab5f427efcfb1d63ee75b6fcb7282900ccaf925be84efb0fc"}, + {file = "cffi-1.15.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:06c48159c1abed75c2e721b1715c379fa3200c7784271b3c46df01383b593636"}, + {file = "cffi-1.15.0-cp37-cp37m-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:c2051981a968d7de9dd2d7b87bcb9c939c74a34626a6e2f8181455dd49ed69e4"}, + {file = 
"cffi-1.15.0-cp37-cp37m-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:fd8a250edc26254fe5b33be00402e6d287f562b6a5b2152dec302fa15bb3e997"}, + {file = "cffi-1.15.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:91d77d2a782be4274da750752bb1650a97bfd8f291022b379bb8e01c66b4e96b"}, + {file = "cffi-1.15.0-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:45db3a33139e9c8f7c09234b5784a5e33d31fd6907800b316decad50af323ff2"}, + {file = "cffi-1.15.0-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:263cc3d821c4ab2213cbe8cd8b355a7f72a8324577dc865ef98487c1aeee2bc7"}, + {file = "cffi-1.15.0-cp37-cp37m-win32.whl", hash = "sha256:17771976e82e9f94976180f76468546834d22a7cc404b17c22df2a2c81db0c66"}, + {file = "cffi-1.15.0-cp37-cp37m-win_amd64.whl", hash = "sha256:3415c89f9204ee60cd09b235810be700e993e343a408693e80ce7f6a40108029"}, + {file = "cffi-1.15.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:4238e6dab5d6a8ba812de994bbb0a79bddbdf80994e4ce802b6f6f3142fcc880"}, + {file = "cffi-1.15.0-cp38-cp38-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:0808014eb713677ec1292301ea4c81ad277b6cdf2fdd90fd540af98c0b101d20"}, + {file = "cffi-1.15.0-cp38-cp38-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:57e9ac9ccc3101fac9d6014fba037473e4358ef4e89f8e181f8951a2c0162024"}, + {file = "cffi-1.15.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8b6c2ea03845c9f501ed1313e78de148cd3f6cad741a75d43a29b43da27f2e1e"}, + {file = "cffi-1.15.0-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:10dffb601ccfb65262a27233ac273d552ddc4d8ae1bf93b21c94b8511bffe728"}, + {file = "cffi-1.15.0-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:786902fb9ba7433aae840e0ed609f45c7bcd4e225ebb9c753aa39725bb3e6ad6"}, + {file = "cffi-1.15.0-cp38-cp38-win32.whl", hash = 
"sha256:da5db4e883f1ce37f55c667e5c0de439df76ac4cb55964655906306918e7363c"}, + {file = "cffi-1.15.0-cp38-cp38-win_amd64.whl", hash = "sha256:181dee03b1170ff1969489acf1c26533710231c58f95534e3edac87fff06c443"}, + {file = "cffi-1.15.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:45e8636704eacc432a206ac7345a5d3d2c62d95a507ec70d62f23cd91770482a"}, + {file = "cffi-1.15.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:31fb708d9d7c3f49a60f04cf5b119aeefe5644daba1cd2a0fe389b674fd1de37"}, + {file = "cffi-1.15.0-cp39-cp39-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:6dc2737a3674b3e344847c8686cf29e500584ccad76204efea14f451d4cc669a"}, + {file = "cffi-1.15.0-cp39-cp39-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:74fdfdbfdc48d3f47148976f49fab3251e550a8720bebc99bf1483f5bfb5db3e"}, + {file = "cffi-1.15.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ffaa5c925128e29efbde7301d8ecaf35c8c60ffbcd6a1ffd3a552177c8e5e796"}, + {file = "cffi-1.15.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3f7d084648d77af029acb79a0ff49a0ad7e9d09057a9bf46596dac9514dc07df"}, + {file = "cffi-1.15.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:ef1f279350da2c586a69d32fc8733092fd32cc8ac95139a00377841f59a3f8d8"}, + {file = "cffi-1.15.0-cp39-cp39-win32.whl", hash = "sha256:2a23af14f408d53d5e6cd4e3d9a24ff9e05906ad574822a10563efcef137979a"}, + {file = "cffi-1.15.0-cp39-cp39-win_amd64.whl", hash = "sha256:3773c4d81e6e818df2efbc7dd77325ca0dcb688116050fb2b3011218eda36139"}, + {file = "cffi-1.15.0.tar.gz", hash = "sha256:920f0d66a896c2d99f0adbb391f990a84091179542c205fa53ce5787aff87954"}, +] + +[package.dependencies] +pycparser = "*" + +[[package]] +name = "click" +version = "8.0.3" +description = "Composable command line interface toolkit" +optional = false +python-versions = ">=3.6" +files = [ + {file = "click-8.0.3-py3-none-any.whl", hash = 
"sha256:353f466495adaeb40b6b5f592f9f91cb22372351c84caeb068132442a4518ef3"}, + {file = "click-8.0.3.tar.gz", hash = "sha256:410e932b050f5eed773c4cda94de75971c89cdb3155a72a0831139a79e5ecb5b"}, +] + +[package.dependencies] +colorama = {version = "*", markers = "platform_system == \"Windows\""} + +[[package]] +name = "colorama" +version = "0.4.4" +description = "Cross-platform colored terminal text." +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" +files = [ + {file = "colorama-0.4.4-py2.py3-none-any.whl", hash = "sha256:9f47eda37229f68eee03b24b9748937c7dc3868f906e8ba69fbcbdd3bc5dc3e2"}, + {file = "colorama-0.4.4.tar.gz", hash = "sha256:5941b2b48a20143d2267e95b1c2a7603ce057ee39fd88e7329b0c292aa16869b"}, +] + +[[package]] +name = "coverage" +version = "6.4.4" +description = "Code coverage measurement for Python" +optional = false +python-versions = ">=3.7" +files = [ + {file = "coverage-6.4.4-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:e7b4da9bafad21ea45a714d3ea6f3e1679099e420c8741c74905b92ee9bfa7cc"}, + {file = "coverage-6.4.4-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:fde17bc42e0716c94bf19d92e4c9f5a00c5feb401f5bc01101fdf2a8b7cacf60"}, + {file = "coverage-6.4.4-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cdbb0d89923c80dbd435b9cf8bba0ff55585a3cdb28cbec65f376c041472c60d"}, + {file = "coverage-6.4.4-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:67f9346aeebea54e845d29b487eb38ec95f2ecf3558a3cffb26ee3f0dcc3e760"}, + {file = "coverage-6.4.4-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:42c499c14efd858b98c4e03595bf914089b98400d30789511577aa44607a1b74"}, + {file = "coverage-6.4.4-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:c35cca192ba700979d20ac43024a82b9b32a60da2f983bec6c0f5b84aead635c"}, + {file = 
"coverage-6.4.4-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:9cc4f107009bca5a81caef2fca843dbec4215c05e917a59dec0c8db5cff1d2aa"}, + {file = "coverage-6.4.4-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:5f444627b3664b80d078c05fe6a850dd711beeb90d26731f11d492dcbadb6973"}, + {file = "coverage-6.4.4-cp310-cp310-win32.whl", hash = "sha256:66e6df3ac4659a435677d8cd40e8eb1ac7219345d27c41145991ee9bf4b806a0"}, + {file = "coverage-6.4.4-cp310-cp310-win_amd64.whl", hash = "sha256:35ef1f8d8a7a275aa7410d2f2c60fa6443f4a64fae9be671ec0696a68525b875"}, + {file = "coverage-6.4.4-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:c1328d0c2f194ffda30a45f11058c02410e679456276bfa0bbe0b0ee87225fac"}, + {file = "coverage-6.4.4-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:61b993f3998ee384935ee423c3d40894e93277f12482f6e777642a0141f55782"}, + {file = "coverage-6.4.4-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d5dd4b8e9cd0deb60e6fcc7b0647cbc1da6c33b9e786f9c79721fd303994832f"}, + {file = "coverage-6.4.4-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7026f5afe0d1a933685d8f2169d7c2d2e624f6255fb584ca99ccca8c0e966fd7"}, + {file = "coverage-6.4.4-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:9c7b9b498eb0c0d48b4c2abc0e10c2d78912203f972e0e63e3c9dc21f15abdaa"}, + {file = "coverage-6.4.4-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:ee2b2fb6eb4ace35805f434e0f6409444e1466a47f620d1d5763a22600f0f892"}, + {file = "coverage-6.4.4-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:ab066f5ab67059d1f1000b5e1aa8bbd75b6ed1fc0014559aea41a9eb66fc2ce0"}, + {file = "coverage-6.4.4-cp311-cp311-win32.whl", hash = "sha256:9d6e1f3185cbfd3d91ac77ea065d85d5215d3dfa45b191d14ddfcd952fa53796"}, + {file = "coverage-6.4.4-cp311-cp311-win_amd64.whl", hash = "sha256:e3d3c4cc38b2882f9a15bafd30aec079582b819bec1b8afdbde8f7797008108a"}, + 
{file = "coverage-6.4.4-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:a095aa0a996ea08b10580908e88fbaf81ecf798e923bbe64fb98d1807db3d68a"}, + {file = "coverage-6.4.4-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ef6f44409ab02e202b31a05dd6666797f9de2aa2b4b3534e9d450e42dea5e817"}, + {file = "coverage-6.4.4-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4b7101938584d67e6f45f0015b60e24a95bf8dea19836b1709a80342e01b472f"}, + {file = "coverage-6.4.4-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:14a32ec68d721c3d714d9b105c7acf8e0f8a4f4734c811eda75ff3718570b5e3"}, + {file = "coverage-6.4.4-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:6a864733b22d3081749450466ac80698fe39c91cb6849b2ef8752fd7482011f3"}, + {file = "coverage-6.4.4-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:08002f9251f51afdcc5e3adf5d5d66bb490ae893d9e21359b085f0e03390a820"}, + {file = "coverage-6.4.4-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:a3b2752de32c455f2521a51bd3ffb53c5b3ae92736afde67ce83477f5c1dd928"}, + {file = "coverage-6.4.4-cp37-cp37m-win32.whl", hash = "sha256:f855b39e4f75abd0dfbcf74a82e84ae3fc260d523fcb3532786bcbbcb158322c"}, + {file = "coverage-6.4.4-cp37-cp37m-win_amd64.whl", hash = "sha256:ee6ae6bbcac0786807295e9687169fba80cb0617852b2fa118a99667e8e6815d"}, + {file = "coverage-6.4.4-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:564cd0f5b5470094df06fab676c6d77547abfdcb09b6c29c8a97c41ad03b103c"}, + {file = "coverage-6.4.4-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:cbbb0e4cd8ddcd5ef47641cfac97d8473ab6b132dd9a46bacb18872828031685"}, + {file = "coverage-6.4.4-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6113e4df2fa73b80f77663445be6d567913fb3b82a86ceb64e44ae0e4b695de1"}, + {file = 
"coverage-6.4.4-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:8d032bfc562a52318ae05047a6eb801ff31ccee172dc0d2504614e911d8fa83e"}, + {file = "coverage-6.4.4-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e431e305a1f3126477abe9a184624a85308da8edf8486a863601d58419d26ffa"}, + {file = "coverage-6.4.4-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:cf2afe83a53f77aec067033199797832617890e15bed42f4a1a93ea24794ae3e"}, + {file = "coverage-6.4.4-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:783bc7c4ee524039ca13b6d9b4186a67f8e63d91342c713e88c1865a38d0892a"}, + {file = "coverage-6.4.4-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:ff934ced84054b9018665ca3967fc48e1ac99e811f6cc99ea65978e1d384454b"}, + {file = "coverage-6.4.4-cp38-cp38-win32.whl", hash = "sha256:e1fabd473566fce2cf18ea41171d92814e4ef1495e04471786cbc943b89a3781"}, + {file = "coverage-6.4.4-cp38-cp38-win_amd64.whl", hash = "sha256:4179502f210ebed3ccfe2f78bf8e2d59e50b297b598b100d6c6e3341053066a2"}, + {file = "coverage-6.4.4-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:98c0b9e9b572893cdb0a00e66cf961a238f8d870d4e1dc8e679eb8bdc2eb1b86"}, + {file = "coverage-6.4.4-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:fc600f6ec19b273da1d85817eda339fb46ce9eef3e89f220055d8696e0a06908"}, + {file = "coverage-6.4.4-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7a98d6bf6d4ca5c07a600c7b4e0c5350cd483c85c736c522b786be90ea5bac4f"}, + {file = "coverage-6.4.4-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:01778769097dbd705a24e221f42be885c544bb91251747a8a3efdec6eb4788f2"}, + {file = "coverage-6.4.4-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dfa0b97eb904255e2ab24166071b27408f1f69c8fbda58e9c0972804851e0558"}, + {file = 
"coverage-6.4.4-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:fcbe3d9a53e013f8ab88734d7e517eb2cd06b7e689bedf22c0eb68db5e4a0a19"}, + {file = "coverage-6.4.4-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:15e38d853ee224e92ccc9a851457fb1e1f12d7a5df5ae44544ce7863691c7a0d"}, + {file = "coverage-6.4.4-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:6913dddee2deff8ab2512639c5168c3e80b3ebb0f818fed22048ee46f735351a"}, + {file = "coverage-6.4.4-cp39-cp39-win32.whl", hash = "sha256:354df19fefd03b9a13132fa6643527ef7905712109d9c1c1903f2133d3a4e145"}, + {file = "coverage-6.4.4-cp39-cp39-win_amd64.whl", hash = "sha256:1238b08f3576201ebf41f7c20bf59baa0d05da941b123c6656e42cdb668e9827"}, + {file = "coverage-6.4.4-pp36.pp37.pp38-none-any.whl", hash = "sha256:f67cf9f406cf0d2f08a3515ce2db5b82625a7257f88aad87904674def6ddaec1"}, + {file = "coverage-6.4.4.tar.gz", hash = "sha256:e16c45b726acb780e1e6f88b286d3c10b3914ab03438f32117c4aa52d7f30d58"}, +] + +[package.dependencies] +tomli = {version = "*", optional = true, markers = "python_full_version <= \"3.11.0a6\" and extra == \"toml\""} + +[package.extras] +toml = ["tomli"] + +[[package]] +name = "darglint" +version = "1.8.1" +description = "A utility for ensuring Google-style docstrings stay up to date with the source code." +optional = false +python-versions = ">=3.6,<4.0" +files = [ + {file = "darglint-1.8.1-py3-none-any.whl", hash = "sha256:5ae11c259c17b0701618a20c3da343a3eb98b3bc4b5a83d31cdd94f5ebdced8d"}, + {file = "darglint-1.8.1.tar.gz", hash = "sha256:080d5106df149b199822e7ee7deb9c012b49891538f14a11be681044f0bb20da"}, +] + +[[package]] +name = "databases" +version = "0.6.1" +description = "Async database support for Python." 
+optional = false +python-versions = ">=3.7" +files = [ + {file = "databases-0.6.1-py3-none-any.whl", hash = "sha256:47fae85d82d8227049f08b154019913c3ad2f6057ceb0b5ebb36703be6f5666b"}, + {file = "databases-0.6.1.tar.gz", hash = "sha256:0a69c6983a27e10a5b75ffa094486f1febadd9d5a8db016e69b8c2f6a354dc30"}, +] + +[package.dependencies] +sqlalchemy = ">=1.4,<1.5" + +[package.extras] +aiomysql = ["aiomysql"] +aiopg = ["aiopg"] +aiosqlite = ["aiosqlite"] +asyncmy = ["asyncmy"] +asyncpg = ["asyncpg"] +mysql = ["aiomysql"] +postgresql = ["asyncpg"] +sqlite = ["aiosqlite"] + +[[package]] +name = "dnspython" +version = "2.2.0" +description = "DNS toolkit" +optional = false +python-versions = ">=3.6,<4.0" +files = [ + {file = "dnspython-2.2.0-py3-none-any.whl", hash = "sha256:081649da27ced5e75709a1ee542136eaba9842a0fe4c03da4fb0a3d3ed1f3c44"}, + {file = "dnspython-2.2.0.tar.gz", hash = "sha256:e79351e032d0b606b98d38a4b0e6e2275b31a5b85c873e587cc11b73aca026d6"}, +] + +[package.extras] +curio = ["curio (>=1.2,<2.0)", "sniffio (>=1.1,<2.0)"] +dnssec = ["cryptography (>=2.6,<37.0)"] +doh = ["h2 (>=4.1.0)", "httpx (>=0.21.1)", "requests (>=2.23.0,<3.0.0)", "requests-toolbelt (>=0.9.1,<0.10.0)"] +idna = ["idna (>=2.1,<4.0)"] +trio = ["trio (>=0.14,<0.20)"] +wmi = ["wmi (>=1.5.1,<2.0.0)"] + +[[package]] +name = "docutils" +version = "0.18.1" +description = "Docutils -- Python Documentation Utilities" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" +files = [ + {file = "docutils-0.18.1-py2.py3-none-any.whl", hash = "sha256:23010f129180089fbcd3bc08cfefccb3b890b0050e1ca00c867036e9d161b98c"}, + {file = "docutils-0.18.1.tar.gz", hash = "sha256:679987caf361a7539d76e584cbeddc311e3aee937877c87346f31debc63e9d06"}, +] + +[[package]] +name = "email-validator" +version = "1.1.3" +description = "A robust email syntax and deliverability validation library for Python 2.x/3.x." 
+optional = false +python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,>=2.7" +files = [ + {file = "email_validator-1.1.3-py2.py3-none-any.whl", hash = "sha256:5675c8ceb7106a37e40e2698a57c056756bf3f272cfa8682a4f87ebd95d8440b"}, + {file = "email_validator-1.1.3.tar.gz", hash = "sha256:aa237a65f6f4da067119b7df3f13e89c25c051327b2b5b66dc075f33d62480d7"}, +] + +[package.dependencies] +dnspython = ">=1.15.0" +idna = ">=2.0.0" + +[[package]] +name = "eradicate" +version = "2.0.0" +description = "Removes commented-out code." +optional = false +python-versions = "*" +files = [ + {file = "eradicate-2.0.0.tar.gz", hash = "sha256:27434596f2c5314cc9b31410c93d8f7e8885747399773cd088d3adea647a60c8"}, +] + +[[package]] +name = "execnet" +version = "1.9.0" +description = "execnet: rapid multi-Python deployment" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" +files = [ + {file = "execnet-1.9.0-py2.py3-none-any.whl", hash = "sha256:a295f7cc774947aac58dde7fdc85f4aa00c42adf5d8f5468fc630c1acf30a142"}, + {file = "execnet-1.9.0.tar.gz", hash = "sha256:8f694f3ba9cc92cab508b152dcfe322153975c29bda272e2fd7f3f00f36e47c5"}, +] + +[package.extras] +testing = ["pre-commit"] + +[[package]] +name = "fastapi" +version = "0.79.1" +description = "FastAPI framework, high performance, easy to learn, fast to code, ready for production" +optional = false +python-versions = ">=3.6.1" +files = [ + {file = "fastapi-0.79.1-py3-none-any.whl", hash = "sha256:3c584179c64e265749e88221c860520fc512ea37e253282dab378cc503dfd7fd"}, + {file = "fastapi-0.79.1.tar.gz", hash = "sha256:006862dec0f0f5683ac21fb0864af2ff12a931e7ba18920f28cc8eceed51896b"}, +] + +[package.dependencies] +pydantic = ">=1.6.2,<1.7 || >1.7,<1.7.1 || >1.7.1,<1.7.2 || >1.7.2,<1.7.3 || >1.7.3,<1.8 || >1.8,<1.8.1 || >1.8.1,<2.0.0" +starlette = "0.19.1" + +[package.extras] +all = ["email_validator (>=1.1.1,<2.0.0)", "itsdangerous (>=1.1.0,<3.0.0)", "jinja2 (>=2.11.2,<4.0.0)", "orjson (>=3.2.1,<4.0.0)", 
"python-multipart (>=0.0.5,<0.0.6)", "pyyaml (>=5.3.1,<7.0.0)", "requests (>=2.24.0,<3.0.0)", "ujson (>=4.0.1,!=4.0.2,!=4.1.0,!=4.2.0,!=4.3.0,!=5.0.0,!=5.1.0,<6.0.0)", "uvicorn[standard] (>=0.12.0,<0.18.0)"] +dev = ["autoflake (>=1.4.0,<2.0.0)", "flake8 (>=3.8.3,<4.0.0)", "passlib[bcrypt] (>=1.7.2,<2.0.0)", "pre-commit (>=2.17.0,<3.0.0)", "python-jose[cryptography] (>=3.3.0,<4.0.0)", "uvicorn[standard] (>=0.12.0,<0.18.0)"] +doc = ["mdx-include (>=1.4.1,<2.0.0)", "mkdocs (>=1.1.2,<2.0.0)", "mkdocs-markdownextradata-plugin (>=0.1.7,<0.3.0)", "mkdocs-material (>=8.1.4,<9.0.0)", "pyyaml (>=5.3.1,<7.0.0)", "typer (>=0.4.1,<0.5.0)"] +test = ["anyio[trio] (>=3.2.1,<4.0.0)", "black (==22.3.0)", "databases[sqlite] (>=0.3.2,<0.6.0)", "email_validator (>=1.1.1,<2.0.0)", "flake8 (>=3.8.3,<4.0.0)", "flask (>=1.1.2,<3.0.0)", "httpx (>=0.14.0,<0.19.0)", "isort (>=5.0.6,<6.0.0)", "mypy (==0.910)", "orjson (>=3.2.1,<4.0.0)", "peewee (>=3.13.3,<4.0.0)", "pytest (>=6.2.4,<7.0.0)", "pytest-cov (>=2.12.0,<4.0.0)", "python-multipart (>=0.0.5,<0.0.6)", "requests (>=2.24.0,<3.0.0)", "sqlalchemy (>=1.3.18,<1.5.0)", "types-dataclasses (==0.6.5)", "types-orjson (==3.6.2)", "types-ujson (==4.2.1)", "ujson (>=4.0.1,!=4.0.2,!=4.1.0,!=4.2.0,!=4.3.0,!=5.0.0,!=5.1.0,<6.0.0)"] + +[[package]] +name = "flake8" +version = "3.9.2" +description = "the modular source code checker: pep8 pyflakes and co" +optional = false +python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,>=2.7" +files = [ + {file = "flake8-3.9.2-py2.py3-none-any.whl", hash = "sha256:bf8fd333346d844f616e8d47905ef3a3384edae6b4e9beb0c5101e25e3110907"}, + {file = "flake8-3.9.2.tar.gz", hash = "sha256:07528381786f2a6237b061f6e96610a4167b226cb926e2aa2b6b1d78057c576b"}, +] + +[package.dependencies] +mccabe = ">=0.6.0,<0.7.0" +pycodestyle = ">=2.7.0,<2.8.0" +pyflakes = ">=2.3.0,<2.4.0" + +[[package]] +name = "flake8-bandit" +version = "2.1.2" +description = "Automated security testing with bandit and flake8." 
+optional = false +python-versions = "*" +files = [ + {file = "flake8_bandit-2.1.2.tar.gz", hash = "sha256:687fc8da2e4a239b206af2e54a90093572a60d0954f3054e23690739b0b0de3b"}, +] + +[package.dependencies] +bandit = "*" +flake8 = "*" +flake8-polyfill = "*" +pycodestyle = "*" + +[[package]] +name = "flake8-broken-line" +version = "0.3.0" +description = "Flake8 plugin to forbid backslashes for line breaks" +optional = false +python-versions = ">=3.6,<4.0" +files = [ + {file = "flake8-broken-line-0.3.0.tar.gz", hash = "sha256:f74e052833324a9e5f0055032f7ccc54b23faabafe5a26241c2f977e70b10b50"}, + {file = "flake8_broken_line-0.3.0-py3-none-any.whl", hash = "sha256:611f79c7f27118e7e5d3dc098ef7681c40aeadf23783700c5dbee840d2baf3af"}, +] + +[package.dependencies] +flake8 = ">=3.5,<4.0" + +[[package]] +name = "flake8-bugbear" +version = "21.11.29" +description = "A plugin for flake8 finding likely bugs and design problems in your program. Contains warnings that don't belong in pyflakes and pycodestyle." +optional = false +python-versions = ">=3.6" +files = [ + {file = "flake8-bugbear-21.11.29.tar.gz", hash = "sha256:8b04cb2fafc6a78e1a9d873bd3988e4282f7959bb6b0d7c1ae648ec09b937a7b"}, + {file = "flake8_bugbear-21.11.29-py36.py37.py38-none-any.whl", hash = "sha256:179e41ddae5de5e3c20d1f61736feeb234e70958fbb56ab3c28a67739c8e9a82"}, +] + +[package.dependencies] +attrs = ">=19.2.0" +flake8 = ">=3.0.0" + +[package.extras] +dev = ["coverage", "hypothesis", "hypothesmith (>=0.2)", "pre-commit"] + +[[package]] +name = "flake8-commas" +version = "2.1.0" +description = "Flake8 lint for trailing commas." 
+optional = false +python-versions = "*" +files = [ + {file = "flake8-commas-2.1.0.tar.gz", hash = "sha256:940441ab8ee544df564ae3b3f49f20462d75d5c7cac2463e0b27436e2050f263"}, + {file = "flake8_commas-2.1.0-py2.py3-none-any.whl", hash = "sha256:ebb96c31e01d0ef1d0685a21f3f0e2f8153a0381430e748bf0bbbb5d5b453d54"}, +] + +[package.dependencies] +flake8 = ">=2" + +[[package]] +name = "flake8-comprehensions" +version = "3.8.0" +description = "A flake8 plugin to help you write better list/set/dict comprehensions." +optional = false +python-versions = ">=3.7" +files = [ + {file = "flake8-comprehensions-3.8.0.tar.gz", hash = "sha256:8e108707637b1d13734f38e03435984f6b7854fa6b5a4e34f93e69534be8e521"}, + {file = "flake8_comprehensions-3.8.0-py3-none-any.whl", hash = "sha256:9406314803abe1193c064544ab14fdc43c58424c0882f6ff8a581eb73fc9bb58"}, +] + +[package.dependencies] +flake8 = ">=3.0,<3.2.0 || >3.2.0" + +[[package]] +name = "flake8-debugger" +version = "4.0.0" +description = "ipdb/pdb statement checker plugin for flake8" +optional = false +python-versions = ">=3.6" +files = [ + {file = "flake8-debugger-4.0.0.tar.gz", hash = "sha256:e43dc777f7db1481db473210101ec2df2bd39a45b149d7218a618e954177eda6"}, + {file = "flake8_debugger-4.0.0-py3-none-any.whl", hash = "sha256:82e64faa72e18d1bdd0000407502ebb8ecffa7bc027c62b9d4110ce27c091032"}, +] + +[package.dependencies] +flake8 = ">=3.0" +pycodestyle = "*" +six = "*" + +[[package]] +name = "flake8-docstrings" +version = "1.6.0" +description = "Extension for flake8 which uses pydocstyle to check docstrings" +optional = false +python-versions = "*" +files = [ + {file = "flake8-docstrings-1.6.0.tar.gz", hash = "sha256:9fe7c6a306064af8e62a055c2f61e9eb1da55f84bb39caef2b84ce53708ac34b"}, + {file = "flake8_docstrings-1.6.0-py2.py3-none-any.whl", hash = "sha256:99cac583d6c7e32dd28bbfbef120a7c0d1b6dde4adb5a9fd441c4227a6534bde"}, +] + +[package.dependencies] +flake8 = ">=3" +pydocstyle = ">=2.1" + +[[package]] +name = "flake8-eradicate" +version = 
"1.2.0" +description = "Flake8 plugin to find commented out code" +optional = false +python-versions = ">=3.6,<4.0" +files = [ + {file = "flake8-eradicate-1.2.0.tar.gz", hash = "sha256:acaa1b6839ff00d284b805c432fdfa6047262bd15a5504ec945797e87b4de1fa"}, + {file = "flake8_eradicate-1.2.0-py3-none-any.whl", hash = "sha256:51dc660d0c1c1ed93af0f813540bbbf72ab2d3466c14e3f3bac371c618b6042f"}, +] + +[package.dependencies] +attrs = "*" +eradicate = ">=2.0,<3.0" +flake8 = ">=3.5,<5" + +[[package]] +name = "flake8-fixme" +version = "1.1.1" +description = "Check for FIXME, TODO and other temporary developer notes. Plugin for flake8." +optional = false +python-versions = "*" +files = [ + {file = "flake8-fixme-1.1.1.tar.gz", hash = "sha256:50cade07d27a4c30d4f12351478df87339e67640c83041b664724bda6d16f33a"}, + {file = "flake8_fixme-1.1.1-py2.py3-none-any.whl", hash = "sha256:226a6f2ef916730899f29ac140bed5d4a17e5aba79f00a0e3ae1eff1997cb1ac"}, +] + +[[package]] +name = "flake8-isort" +version = "4.1.1" +description = "flake8 plugin that integrates isort ." 
+optional = false +python-versions = "*" +files = [ + {file = "flake8-isort-4.1.1.tar.gz", hash = "sha256:d814304ab70e6e58859bc5c3e221e2e6e71c958e7005239202fee19c24f82717"}, + {file = "flake8_isort-4.1.1-py3-none-any.whl", hash = "sha256:c4e8b6dcb7be9b71a02e6e5d4196cefcef0f3447be51e82730fb336fff164949"}, +] + +[package.dependencies] +flake8 = ">=3.2.1,<5" +isort = ">=4.3.5,<6" +testfixtures = ">=6.8.0,<7" + +[package.extras] +test = ["pytest-cov"] + +[[package]] +name = "flake8-polyfill" +version = "1.0.2" +description = "Polyfill package for Flake8 plugins" +optional = false +python-versions = "*" +files = [ + {file = "flake8-polyfill-1.0.2.tar.gz", hash = "sha256:e44b087597f6da52ec6393a709e7108b2905317d0c0b744cdca6208e670d8eda"}, + {file = "flake8_polyfill-1.0.2-py2.py3-none-any.whl", hash = "sha256:12be6a34ee3ab795b19ca73505e7b55826d5f6ad7230d31b18e106400169b9e9"}, +] + +[package.dependencies] +flake8 = "*" + +[[package]] +name = "flake8-quotes" +version = "3.3.1" +description = "Flake8 lint for quotes." 
+optional = false +python-versions = "*" +files = [ + {file = "flake8-quotes-3.3.1.tar.gz", hash = "sha256:633adca6fb8a08131536af0d750b44d6985b9aba46f498871e21588c3e6f525a"}, +] + +[package.dependencies] +flake8 = "*" + +[[package]] +name = "flake8-rst-docstrings" +version = "0.2.5" +description = "Python docstring reStructuredText (RST) validator" +optional = false +python-versions = ">=3.6" +files = [ + {file = "flake8-rst-docstrings-0.2.5.tar.gz", hash = "sha256:4fe93f997dea45d9d3c8bd220f12f0b6c359948fb943b5b48021a3f927edd816"}, + {file = "flake8_rst_docstrings-0.2.5-py3-none-any.whl", hash = "sha256:b99d9041b769b857efe45a448dc8c71b1bb311f9cacbdac5de82f96498105082"}, +] + +[package.dependencies] +flake8 = ">=3.0.0" +pygments = "*" +restructuredtext-lint = "*" + +[[package]] +name = "flake8-string-format" +version = "0.3.0" +description = "string format checker, plugin for flake8" +optional = false +python-versions = "*" +files = [ + {file = "flake8-string-format-0.3.0.tar.gz", hash = "sha256:65f3da786a1461ef77fca3780b314edb2853c377f2e35069723348c8917deaa2"}, + {file = "flake8_string_format-0.3.0-py2.py3-none-any.whl", hash = "sha256:812ff431f10576a74c89be4e85b8e075a705be39bc40c4b4278b5b13e2afa9af"}, +] + +[package.dependencies] +flake8 = "*" + +[[package]] +name = "gitdb" +version = "4.0.9" +description = "Git Object Database" +optional = false +python-versions = ">=3.6" +files = [ + {file = "gitdb-4.0.9-py3-none-any.whl", hash = "sha256:8033ad4e853066ba6ca92050b9df2f89301b8fc8bf7e9324d412a63f8bf1a8fd"}, + {file = "gitdb-4.0.9.tar.gz", hash = "sha256:bac2fd45c0a1c9cf619e63a90d62bdc63892ef92387424b855792a6cabe789aa"}, +] + +[package.dependencies] +smmap = ">=3.0.1,<6" + +[[package]] +name = "gitpython" +version = "3.1.26" +description = "GitPython is a python library used to interact with Git repositories" +optional = false +python-versions = ">=3.7" +files = [ + {file = "GitPython-3.1.26-py3-none-any.whl", hash = 
"sha256:26ac35c212d1f7b16036361ca5cff3ec66e11753a0d677fb6c48fa4e1a9dd8d6"}, + {file = "GitPython-3.1.26.tar.gz", hash = "sha256:fc8868f63a2e6d268fb25f481995ba185a85a66fcad126f039323ff6635669ee"}, +] + +[package.dependencies] +gitdb = ">=4.0.1,<5" + +[[package]] +name = "greenlet" +version = "1.1.2" +description = "Lightweight in-process concurrent programming" +optional = false +python-versions = ">=2.7,!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*" +files = [ + {file = "greenlet-1.1.2-cp27-cp27m-macosx_10_14_x86_64.whl", hash = "sha256:58df5c2a0e293bf665a51f8a100d3e9956febfbf1d9aaf8c0677cf70218910c6"}, + {file = "greenlet-1.1.2-cp27-cp27m-manylinux1_x86_64.whl", hash = "sha256:aec52725173bd3a7b56fe91bc56eccb26fbdff1386ef123abb63c84c5b43b63a"}, + {file = "greenlet-1.1.2-cp27-cp27m-manylinux2010_x86_64.whl", hash = "sha256:833e1551925ed51e6b44c800e71e77dacd7e49181fdc9ac9a0bf3714d515785d"}, + {file = "greenlet-1.1.2-cp27-cp27m-win32.whl", hash = "sha256:aa5b467f15e78b82257319aebc78dd2915e4c1436c3c0d1ad6f53e47ba6e2713"}, + {file = "greenlet-1.1.2-cp27-cp27m-win_amd64.whl", hash = "sha256:40b951f601af999a8bf2ce8c71e8aaa4e8c6f78ff8afae7b808aae2dc50d4c40"}, + {file = "greenlet-1.1.2-cp27-cp27mu-manylinux1_x86_64.whl", hash = "sha256:95e69877983ea39b7303570fa6760f81a3eec23d0e3ab2021b7144b94d06202d"}, + {file = "greenlet-1.1.2-cp27-cp27mu-manylinux2010_x86_64.whl", hash = "sha256:356b3576ad078c89a6107caa9c50cc14e98e3a6c4874a37c3e0273e4baf33de8"}, + {file = "greenlet-1.1.2-cp310-cp310-macosx_10_14_x86_64.whl", hash = "sha256:8639cadfda96737427330a094476d4c7a56ac03de7265622fcf4cfe57c8ae18d"}, + {file = "greenlet-1.1.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:97e5306482182170ade15c4b0d8386ded995a07d7cc2ca8f27958d34d6736497"}, + {file = "greenlet-1.1.2-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e6a36bb9474218c7a5b27ae476035497a6990e21d04c279884eb10d9b290f1b1"}, + {file = 
"greenlet-1.1.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:abb7a75ed8b968f3061327c433a0fbd17b729947b400747c334a9c29a9af6c58"}, + {file = "greenlet-1.1.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:b336501a05e13b616ef81ce329c0e09ac5ed8c732d9ba7e3e983fcc1a9e86965"}, + {file = "greenlet-1.1.2-cp310-cp310-win_amd64.whl", hash = "sha256:14d4f3cd4e8b524ae9b8aa567858beed70c392fdec26dbdb0a8a418392e71708"}, + {file = "greenlet-1.1.2-cp35-cp35m-macosx_10_14_x86_64.whl", hash = "sha256:17ff94e7a83aa8671a25bf5b59326ec26da379ace2ebc4411d690d80a7fbcf23"}, + {file = "greenlet-1.1.2-cp35-cp35m-manylinux1_x86_64.whl", hash = "sha256:9f3cba480d3deb69f6ee2c1825060177a22c7826431458c697df88e6aeb3caee"}, + {file = "greenlet-1.1.2-cp35-cp35m-manylinux2010_x86_64.whl", hash = "sha256:fa877ca7f6b48054f847b61d6fa7bed5cebb663ebc55e018fda12db09dcc664c"}, + {file = "greenlet-1.1.2-cp35-cp35m-win32.whl", hash = "sha256:7cbd7574ce8e138bda9df4efc6bf2ab8572c9aff640d8ecfece1b006b68da963"}, + {file = "greenlet-1.1.2-cp35-cp35m-win_amd64.whl", hash = "sha256:903bbd302a2378f984aef528f76d4c9b1748f318fe1294961c072bdc7f2ffa3e"}, + {file = "greenlet-1.1.2-cp36-cp36m-macosx_10_14_x86_64.whl", hash = "sha256:049fe7579230e44daef03a259faa24511d10ebfa44f69411d99e6a184fe68073"}, + {file = "greenlet-1.1.2-cp36-cp36m-manylinux1_x86_64.whl", hash = "sha256:dd0b1e9e891f69e7675ba5c92e28b90eaa045f6ab134ffe70b52e948aa175b3c"}, + {file = "greenlet-1.1.2-cp36-cp36m-manylinux2010_x86_64.whl", hash = "sha256:7418b6bfc7fe3331541b84bb2141c9baf1ec7132a7ecd9f375912eca810e714e"}, + {file = "greenlet-1.1.2-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f9d29ca8a77117315101425ec7ec2a47a22ccf59f5593378fc4077ac5b754fce"}, + {file = "greenlet-1.1.2-cp36-cp36m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:21915eb821a6b3d9d8eefdaf57d6c345b970ad722f856cd71739493ce003ad08"}, + {file = 
"greenlet-1.1.2-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:eff9d20417ff9dcb0d25e2defc2574d10b491bf2e693b4e491914738b7908168"}, + {file = "greenlet-1.1.2-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:b8c008de9d0daba7b6666aa5bbfdc23dcd78cafc33997c9b7741ff6353bafb7f"}, + {file = "greenlet-1.1.2-cp36-cp36m-win32.whl", hash = "sha256:32ca72bbc673adbcfecb935bb3fb1b74e663d10a4b241aaa2f5a75fe1d1f90aa"}, + {file = "greenlet-1.1.2-cp36-cp36m-win_amd64.whl", hash = "sha256:f0214eb2a23b85528310dad848ad2ac58e735612929c8072f6093f3585fd342d"}, + {file = "greenlet-1.1.2-cp37-cp37m-macosx_10_14_x86_64.whl", hash = "sha256:b92e29e58bef6d9cfd340c72b04d74c4b4e9f70c9fa7c78b674d1fec18896dc4"}, + {file = "greenlet-1.1.2-cp37-cp37m-manylinux1_x86_64.whl", hash = "sha256:fdcec0b8399108577ec290f55551d926d9a1fa6cad45882093a7a07ac5ec147b"}, + {file = "greenlet-1.1.2-cp37-cp37m-manylinux2010_x86_64.whl", hash = "sha256:93f81b134a165cc17123626ab8da2e30c0455441d4ab5576eed73a64c025b25c"}, + {file = "greenlet-1.1.2-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1e12bdc622676ce47ae9abbf455c189e442afdde8818d9da983085df6312e7a1"}, + {file = "greenlet-1.1.2-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:8c790abda465726cfb8bb08bd4ca9a5d0a7bd77c7ac1ca1b839ad823b948ea28"}, + {file = "greenlet-1.1.2-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f276df9830dba7a333544bd41070e8175762a7ac20350786b322b714b0e654f5"}, + {file = "greenlet-1.1.2-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:8c5d5b35f789a030ebb95bff352f1d27a93d81069f2adb3182d99882e095cefe"}, + {file = "greenlet-1.1.2-cp37-cp37m-win32.whl", hash = "sha256:64e6175c2e53195278d7388c454e0b30997573f3f4bd63697f88d855f7a6a1fc"}, + {file = "greenlet-1.1.2-cp37-cp37m-win_amd64.whl", hash = "sha256:b11548073a2213d950c3f671aa88e6f83cda6e2fb97a8b6317b1b5b33d850e06"}, + {file = "greenlet-1.1.2-cp38-cp38-macosx_10_14_x86_64.whl", 
hash = "sha256:9633b3034d3d901f0a46b7939f8c4d64427dfba6bbc5a36b1a67364cf148a1b0"}, + {file = "greenlet-1.1.2-cp38-cp38-manylinux1_x86_64.whl", hash = "sha256:eb6ea6da4c787111adf40f697b4e58732ee0942b5d3bd8f435277643329ba627"}, + {file = "greenlet-1.1.2-cp38-cp38-manylinux2010_x86_64.whl", hash = "sha256:f3acda1924472472ddd60c29e5b9db0cec629fbe3c5c5accb74d6d6d14773478"}, + {file = "greenlet-1.1.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e859fcb4cbe93504ea18008d1df98dee4f7766db66c435e4882ab35cf70cac43"}, + {file = "greenlet-1.1.2-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:00e44c8afdbe5467e4f7b5851be223be68adb4272f44696ee71fe46b7036a711"}, + {file = "greenlet-1.1.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ec8c433b3ab0419100bd45b47c9c8551248a5aee30ca5e9d399a0b57ac04651b"}, + {file = "greenlet-1.1.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:2bde6792f313f4e918caabc46532aa64aa27a0db05d75b20edfc5c6f46479de2"}, + {file = "greenlet-1.1.2-cp38-cp38-win32.whl", hash = "sha256:288c6a76705dc54fba69fbcb59904ae4ad768b4c768839b8ca5fdadec6dd8cfd"}, + {file = "greenlet-1.1.2-cp38-cp38-win_amd64.whl", hash = "sha256:8d2f1fb53a421b410751887eb4ff21386d119ef9cde3797bf5e7ed49fb51a3b3"}, + {file = "greenlet-1.1.2-cp39-cp39-macosx_10_14_x86_64.whl", hash = "sha256:166eac03e48784a6a6e0e5f041cfebb1ab400b394db188c48b3a84737f505b67"}, + {file = "greenlet-1.1.2-cp39-cp39-manylinux1_x86_64.whl", hash = "sha256:572e1787d1460da79590bf44304abbc0a2da944ea64ec549188fa84d89bba7ab"}, + {file = "greenlet-1.1.2-cp39-cp39-manylinux2010_x86_64.whl", hash = "sha256:be5f425ff1f5f4b3c1e33ad64ab994eed12fc284a6ea71c5243fd564502ecbe5"}, + {file = "greenlet-1.1.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b1692f7d6bc45e3200844be0dba153612103db241691088626a33ff1f24a0d88"}, + {file = "greenlet-1.1.2-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = 
"sha256:7227b47e73dedaa513cdebb98469705ef0d66eb5a1250144468e9c3097d6b59b"}, + {file = "greenlet-1.1.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7ff61ff178250f9bb3cd89752df0f1dd0e27316a8bd1465351652b1b4a4cdfd3"}, + {file = "greenlet-1.1.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:0051c6f1f27cb756ffc0ffbac7d2cd48cb0362ac1736871399a739b2885134d3"}, + {file = "greenlet-1.1.2-cp39-cp39-win32.whl", hash = "sha256:f70a9e237bb792c7cc7e44c531fd48f5897961701cdaa06cf22fc14965c496cf"}, + {file = "greenlet-1.1.2-cp39-cp39-win_amd64.whl", hash = "sha256:013d61294b6cd8fe3242932c1c5e36e5d1db2c8afb58606c5a67efce62c1f5fd"}, + {file = "greenlet-1.1.2.tar.gz", hash = "sha256:e30f5ea4ae2346e62cedde8794a56858a67b878dd79f7df76a0767e356b1744a"}, +] + +[package.extras] +docs = ["Sphinx"] + +[[package]] +name = "h11" +version = "0.12.0" +description = "A pure-Python, bring-your-own-I/O implementation of HTTP/1.1" +optional = false +python-versions = ">=3.6" +files = [ + {file = "h11-0.12.0-py3-none-any.whl", hash = "sha256:36a3cb8c0a032f56e2da7084577878a035d3b61d104230d4bd49c0c6b555a9c6"}, + {file = "h11-0.12.0.tar.gz", hash = "sha256:47222cb6067e4a307d535814917cd98fd0a57b6788ce715755fa2b6c28b56042"}, +] + +[[package]] +name = "httpcore" +version = "0.15.0" +description = "A minimal low-level HTTP client." +optional = false +python-versions = ">=3.7" +files = [ + {file = "httpcore-0.15.0-py3-none-any.whl", hash = "sha256:1105b8b73c025f23ff7c36468e4432226cbb959176eab66864b8e31c4ee27fa6"}, + {file = "httpcore-0.15.0.tar.gz", hash = "sha256:18b68ab86a3ccf3e7dc0f43598eaddcf472b602aba29f9aa6ab85fe2ada3980b"}, +] + +[package.dependencies] +anyio = "==3.*" +certifi = "*" +h11 = ">=0.11,<0.13" +sniffio = "==1.*" + +[package.extras] +http2 = ["h2 (>=3,<5)"] +socks = ["socksio (==1.*)"] + +[[package]] +name = "httpx" +version = "0.23.0" +description = "The next generation HTTP client." 
+optional = false +python-versions = ">=3.7" +files = [ + {file = "httpx-0.23.0-py3-none-any.whl", hash = "sha256:42974f577483e1e932c3cdc3cd2303e883cbfba17fe228b0f63589764d7b9c4b"}, + {file = "httpx-0.23.0.tar.gz", hash = "sha256:f28eac771ec9eb4866d3fb4ab65abd42d38c424739e80c08d8d20570de60b0ef"}, +] + +[package.dependencies] +certifi = "*" +httpcore = ">=0.15.0,<0.16.0" +rfc3986 = {version = ">=1.3,<2", extras = ["idna2008"]} +sniffio = "*" + +[package.extras] +brotli = ["brotli", "brotlicffi"] +cli = ["click (==8.*)", "pygments (==2.*)", "rich (>=10,<13)"] +http2 = ["h2 (>=3,<5)"] +socks = ["socksio (==1.*)"] + +[[package]] +name = "idna" +version = "3.3" +description = "Internationalized Domain Names in Applications (IDNA)" +optional = false +python-versions = ">=3.5" +files = [ + {file = "idna-3.3-py3-none-any.whl", hash = "sha256:84d9dd047ffa80596e0f246e2eab0b391788b0503584e8945f2368256d2735ff"}, + {file = "idna-3.3.tar.gz", hash = "sha256:9d643ff0a55b762d5cdb124b8eaa99c66322e2157b69160bc32796e824360e6d"}, +] + +[[package]] +name = "iniconfig" +version = "1.1.1" +description = "iniconfig: brain-dead simple config-ini parsing" +optional = false +python-versions = "*" +files = [ + {file = "iniconfig-1.1.1-py2.py3-none-any.whl", hash = "sha256:011e24c64b7f47f6ebd835bb12a743f2fbe9a26d4cecaa7f53bc4f35ee9da8b3"}, + {file = "iniconfig-1.1.1.tar.gz", hash = "sha256:bc3af051d7d14b2ee5ef9969666def0cd1a000e121eaea580d4a313df4b37f32"}, +] + +[[package]] +name = "isort" +version = "5.10.1" +description = "A Python utility / library to sort Python imports." 
+optional = false +python-versions = ">=3.6.1,<4.0" +files = [ + {file = "isort-5.10.1-py3-none-any.whl", hash = "sha256:6f62d78e2f89b4500b080fe3a81690850cd254227f27f75c3a0c491a1f351ba7"}, + {file = "isort-5.10.1.tar.gz", hash = "sha256:e8443a5e7a020e9d7f97f1d7d9cd17c88bcb3bc7e218bf9cf5095fe550be2951"}, +] + +[package.extras] +colors = ["colorama (>=0.4.3,<0.5.0)"] +pipfile-deprecated-finder = ["pipreqs", "requirementslib"] +plugins = ["setuptools"] +requirements-deprecated-finder = ["pip-api", "pipreqs"] + +[[package]] +name = "loguru" +version = "0.6.0" +description = "Python logging made (stupidly) simple" +optional = false +python-versions = ">=3.5" +files = [ + {file = "loguru-0.6.0-py3-none-any.whl", hash = "sha256:4e2414d534a2ab57573365b3e6d0234dfb1d84b68b7f3b948e6fb743860a77c3"}, + {file = "loguru-0.6.0.tar.gz", hash = "sha256:066bd06758d0a513e9836fd9c6b5a75bfb3fd36841f4b996bc60b547a309d41c"}, +] + +[package.dependencies] +colorama = {version = ">=0.3.4", markers = "sys_platform == \"win32\""} +win32-setctime = {version = ">=1.0.0", markers = "sys_platform == \"win32\""} + +[package.extras] +dev = ["Sphinx (>=4.1.1)", "black (>=19.10b0)", "colorama (>=0.3.4)", "docutils (==0.16)", "flake8 (>=3.7.7)", "isort (>=5.1.1)", "pytest (>=4.6.2)", "pytest-cov (>=2.7.1)", "sphinx-autobuild (>=0.7.1)", "sphinx-rtd-theme (>=0.4.3)", "tox (>=3.9.0)"] + +[[package]] +name = "mako" +version = "1.1.6" +description = "A super-fast templating language that borrows the best ideas from the existing templating languages." 
+optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" +files = [ + {file = "Mako-1.1.6-py2.py3-none-any.whl", hash = "sha256:afaf8e515d075b22fad7d7b8b30e4a1c90624ff2f3733a06ec125f5a5f043a57"}, + {file = "Mako-1.1.6.tar.gz", hash = "sha256:4e9e345a41924a954251b95b4b28e14a301145b544901332e658907a7464b6b2"}, +] + +[package.dependencies] +MarkupSafe = ">=0.9.2" + +[package.extras] +babel = ["Babel"] +lingua = ["lingua"] + +[[package]] +name = "markupsafe" +version = "2.0.1" +description = "Safely add untrusted strings to HTML/XML markup." +optional = false +python-versions = ">=3.6" +files = [ + {file = "MarkupSafe-2.0.1-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:d8446c54dc28c01e5a2dbac5a25f071f6653e6e40f3a8818e8b45d790fe6ef53"}, + {file = "MarkupSafe-2.0.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:36bc903cbb393720fad60fc28c10de6acf10dc6cc883f3e24ee4012371399a38"}, + {file = "MarkupSafe-2.0.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2d7d807855b419fc2ed3e631034685db6079889a1f01d5d9dac950f764da3dad"}, + {file = "MarkupSafe-2.0.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:add36cb2dbb8b736611303cd3bfcee00afd96471b09cda130da3581cbdc56a6d"}, + {file = "MarkupSafe-2.0.1-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:168cd0a3642de83558a5153c8bd34f175a9a6e7f6dc6384b9655d2697312a646"}, + {file = "MarkupSafe-2.0.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:4dc8f9fb58f7364b63fd9f85013b780ef83c11857ae79f2feda41e270468dd9b"}, + {file = "MarkupSafe-2.0.1-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:20dca64a3ef2d6e4d5d615a3fd418ad3bde77a47ec8a23d984a12b5b4c74491a"}, + {file = "MarkupSafe-2.0.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:cdfba22ea2f0029c9261a4bd07e830a8da012291fbe44dc794e488b6c9bb353a"}, + {file = 
"MarkupSafe-2.0.1-cp310-cp310-win32.whl", hash = "sha256:99df47edb6bda1249d3e80fdabb1dab8c08ef3975f69aed437cb69d0a5de1e28"}, + {file = "MarkupSafe-2.0.1-cp310-cp310-win_amd64.whl", hash = "sha256:e0f138900af21926a02425cf736db95be9f4af72ba1bb21453432a07f6082134"}, + {file = "MarkupSafe-2.0.1-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:f9081981fe268bd86831e5c75f7de206ef275defcb82bc70740ae6dc507aee51"}, + {file = "MarkupSafe-2.0.1-cp36-cp36m-manylinux1_i686.whl", hash = "sha256:0955295dd5eec6cb6cc2fe1698f4c6d84af2e92de33fbcac4111913cd100a6ff"}, + {file = "MarkupSafe-2.0.1-cp36-cp36m-manylinux1_x86_64.whl", hash = "sha256:0446679737af14f45767963a1a9ef7620189912317d095f2d9ffa183a4d25d2b"}, + {file = "MarkupSafe-2.0.1-cp36-cp36m-manylinux2010_i686.whl", hash = "sha256:f826e31d18b516f653fe296d967d700fddad5901ae07c622bb3705955e1faa94"}, + {file = "MarkupSafe-2.0.1-cp36-cp36m-manylinux2010_x86_64.whl", hash = "sha256:fa130dd50c57d53368c9d59395cb5526eda596d3ffe36666cd81a44d56e48872"}, + {file = "MarkupSafe-2.0.1-cp36-cp36m-manylinux2014_aarch64.whl", hash = "sha256:905fec760bd2fa1388bb5b489ee8ee5f7291d692638ea5f67982d968366bef9f"}, + {file = "MarkupSafe-2.0.1-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bf5d821ffabf0ef3533c39c518f3357b171a1651c1ff6827325e4489b0e46c3c"}, + {file = "MarkupSafe-2.0.1-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:0d4b31cc67ab36e3392bbf3862cfbadac3db12bdd8b02a2731f509ed5b829724"}, + {file = "MarkupSafe-2.0.1-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:baa1a4e8f868845af802979fcdbf0bb11f94f1cb7ced4c4b8a351bb60d108145"}, + {file = "MarkupSafe-2.0.1-cp36-cp36m-musllinux_1_1_aarch64.whl", hash = "sha256:deb993cacb280823246a026e3b2d81c493c53de6acfd5e6bfe31ab3402bb37dd"}, + {file = "MarkupSafe-2.0.1-cp36-cp36m-musllinux_1_1_i686.whl", hash = 
"sha256:63f3268ba69ace99cab4e3e3b5840b03340efed0948ab8f78d2fd87ee5442a4f"}, + {file = "MarkupSafe-2.0.1-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:8d206346619592c6200148b01a2142798c989edcb9c896f9ac9722a99d4e77e6"}, + {file = "MarkupSafe-2.0.1-cp36-cp36m-win32.whl", hash = "sha256:6c4ca60fa24e85fe25b912b01e62cb969d69a23a5d5867682dd3e80b5b02581d"}, + {file = "MarkupSafe-2.0.1-cp36-cp36m-win_amd64.whl", hash = "sha256:b2f4bf27480f5e5e8ce285a8c8fd176c0b03e93dcc6646477d4630e83440c6a9"}, + {file = "MarkupSafe-2.0.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:0717a7390a68be14b8c793ba258e075c6f4ca819f15edfc2a3a027c823718567"}, + {file = "MarkupSafe-2.0.1-cp37-cp37m-manylinux1_i686.whl", hash = "sha256:6557b31b5e2c9ddf0de32a691f2312a32f77cd7681d8af66c2692efdbef84c18"}, + {file = "MarkupSafe-2.0.1-cp37-cp37m-manylinux1_x86_64.whl", hash = "sha256:49e3ceeabbfb9d66c3aef5af3a60cc43b85c33df25ce03d0031a608b0a8b2e3f"}, + {file = "MarkupSafe-2.0.1-cp37-cp37m-manylinux2010_i686.whl", hash = "sha256:d7f9850398e85aba693bb640262d3611788b1f29a79f0c93c565694658f4071f"}, + {file = "MarkupSafe-2.0.1-cp37-cp37m-manylinux2010_x86_64.whl", hash = "sha256:6a7fae0dd14cf60ad5ff42baa2e95727c3d81ded453457771d02b7d2b3f9c0c2"}, + {file = "MarkupSafe-2.0.1-cp37-cp37m-manylinux2014_aarch64.whl", hash = "sha256:b7f2d075102dc8c794cbde1947378051c4e5180d52d276987b8d28a3bd58c17d"}, + {file = "MarkupSafe-2.0.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e9936f0b261d4df76ad22f8fee3ae83b60d7c3e871292cd42f40b81b70afae85"}, + {file = "MarkupSafe-2.0.1-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:2a7d351cbd8cfeb19ca00de495e224dea7e7d919659c2841bbb7f420ad03e2d6"}, + {file = "MarkupSafe-2.0.1-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:60bf42e36abfaf9aff1f50f52644b336d4f0a3fd6d8a60ca0d054ac9f713a864"}, + {file = 
"MarkupSafe-2.0.1-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:d6c7ebd4e944c85e2c3421e612a7057a2f48d478d79e61800d81468a8d842207"}, + {file = "MarkupSafe-2.0.1-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:f0567c4dc99f264f49fe27da5f735f414c4e7e7dd850cfd8e69f0862d7c74ea9"}, + {file = "MarkupSafe-2.0.1-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:89c687013cb1cd489a0f0ac24febe8c7a666e6e221b783e53ac50ebf68e45d86"}, + {file = "MarkupSafe-2.0.1-cp37-cp37m-win32.whl", hash = "sha256:a30e67a65b53ea0a5e62fe23682cfe22712e01f453b95233b25502f7c61cb415"}, + {file = "MarkupSafe-2.0.1-cp37-cp37m-win_amd64.whl", hash = "sha256:611d1ad9a4288cf3e3c16014564df047fe08410e628f89805e475368bd304914"}, + {file = "MarkupSafe-2.0.1-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:5bb28c636d87e840583ee3adeb78172efc47c8b26127267f54a9c0ec251d41a9"}, + {file = "MarkupSafe-2.0.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:be98f628055368795d818ebf93da628541e10b75b41c559fdf36d104c5787066"}, + {file = "MarkupSafe-2.0.1-cp38-cp38-manylinux1_i686.whl", hash = "sha256:1d609f577dc6e1aa17d746f8bd3c31aa4d258f4070d61b2aa5c4166c1539de35"}, + {file = "MarkupSafe-2.0.1-cp38-cp38-manylinux1_x86_64.whl", hash = "sha256:7d91275b0245b1da4d4cfa07e0faedd5b0812efc15b702576d103293e252af1b"}, + {file = "MarkupSafe-2.0.1-cp38-cp38-manylinux2010_i686.whl", hash = "sha256:01a9b8ea66f1658938f65b93a85ebe8bc016e6769611be228d797c9d998dd298"}, + {file = "MarkupSafe-2.0.1-cp38-cp38-manylinux2010_x86_64.whl", hash = "sha256:47ab1e7b91c098ab893b828deafa1203de86d0bc6ab587b160f78fe6c4011f75"}, + {file = "MarkupSafe-2.0.1-cp38-cp38-manylinux2014_aarch64.whl", hash = "sha256:97383d78eb34da7e1fa37dd273c20ad4320929af65d156e35a5e2d89566d9dfb"}, + {file = "MarkupSafe-2.0.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6fcf051089389abe060c9cd7caa212c707e58153afa2c649f00346ce6d260f1b"}, + {file = 
"MarkupSafe-2.0.1-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:5855f8438a7d1d458206a2466bf82b0f104a3724bf96a1c781ab731e4201731a"}, + {file = "MarkupSafe-2.0.1-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:3dd007d54ee88b46be476e293f48c85048603f5f516008bee124ddd891398ed6"}, + {file = "MarkupSafe-2.0.1-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:aca6377c0cb8a8253e493c6b451565ac77e98c2951c45f913e0b52facdcff83f"}, + {file = "MarkupSafe-2.0.1-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:04635854b943835a6ea959e948d19dcd311762c5c0c6e1f0e16ee57022669194"}, + {file = "MarkupSafe-2.0.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:6300b8454aa6930a24b9618fbb54b5a68135092bc666f7b06901f897fa5c2fee"}, + {file = "MarkupSafe-2.0.1-cp38-cp38-win32.whl", hash = "sha256:023cb26ec21ece8dc3907c0e8320058b2e0cb3c55cf9564da612bc325bed5e64"}, + {file = "MarkupSafe-2.0.1-cp38-cp38-win_amd64.whl", hash = "sha256:984d76483eb32f1bcb536dc27e4ad56bba4baa70be32fa87152832cdd9db0833"}, + {file = "MarkupSafe-2.0.1-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:2ef54abee730b502252bcdf31b10dacb0a416229b72c18b19e24a4509f273d26"}, + {file = "MarkupSafe-2.0.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:3c112550557578c26af18a1ccc9e090bfe03832ae994343cfdacd287db6a6ae7"}, + {file = "MarkupSafe-2.0.1-cp39-cp39-manylinux1_i686.whl", hash = "sha256:53edb4da6925ad13c07b6d26c2a852bd81e364f95301c66e930ab2aef5b5ddd8"}, + {file = "MarkupSafe-2.0.1-cp39-cp39-manylinux1_x86_64.whl", hash = "sha256:f5653a225f31e113b152e56f154ccbe59eeb1c7487b39b9d9f9cdb58e6c79dc5"}, + {file = "MarkupSafe-2.0.1-cp39-cp39-manylinux2010_i686.whl", hash = "sha256:4efca8f86c54b22348a5467704e3fec767b2db12fc39c6d963168ab1d3fc9135"}, + {file = "MarkupSafe-2.0.1-cp39-cp39-manylinux2010_x86_64.whl", hash = "sha256:ab3ef638ace319fa26553db0624c4699e31a28bb2a835c5faca8f8acf6a5a902"}, + {file 
= "MarkupSafe-2.0.1-cp39-cp39-manylinux2014_aarch64.whl", hash = "sha256:f8ba0e8349a38d3001fae7eadded3f6606f0da5d748ee53cc1dab1d6527b9509"}, + {file = "MarkupSafe-2.0.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c47adbc92fc1bb2b3274c4b3a43ae0e4573d9fbff4f54cd484555edbf030baf1"}, + {file = "MarkupSafe-2.0.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:37205cac2a79194e3750b0af2a5720d95f786a55ce7df90c3af697bfa100eaac"}, + {file = "MarkupSafe-2.0.1-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:1f2ade76b9903f39aa442b4aadd2177decb66525062db244b35d71d0ee8599b6"}, + {file = "MarkupSafe-2.0.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:4296f2b1ce8c86a6aea78613c34bb1a672ea0e3de9c6ba08a960efe0b0a09047"}, + {file = "MarkupSafe-2.0.1-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:9f02365d4e99430a12647f09b6cc8bab61a6564363f313126f775eb4f6ef798e"}, + {file = "MarkupSafe-2.0.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:5b6d930f030f8ed98e3e6c98ffa0652bdb82601e7a016ec2ab5d7ff23baa78d1"}, + {file = "MarkupSafe-2.0.1-cp39-cp39-win32.whl", hash = "sha256:10f82115e21dc0dfec9ab5c0223652f7197feb168c940f3ef61563fc2d6beb74"}, + {file = "MarkupSafe-2.0.1-cp39-cp39-win_amd64.whl", hash = "sha256:693ce3f9e70a6cf7d2fb9e6c9d8b204b6b39897a2c4a1aa65728d5ac97dcc1d8"}, + {file = "MarkupSafe-2.0.1.tar.gz", hash = "sha256:594c67807fb16238b30c44bdf74f36c02cdf22d1c8cda91ef8a0ed8dabf5620a"}, +] + +[[package]] +name = "mccabe" +version = "0.6.1" +description = "McCabe checker, plugin for flake8" +optional = false +python-versions = "*" +files = [ + {file = "mccabe-0.6.1-py2.py3-none-any.whl", hash = "sha256:ab8a6258860da4b6677da4bd2fe5dc2c659cff31b3ee4f7f5d64e79735b80d42"}, + {file = "mccabe-0.6.1.tar.gz", hash = "sha256:dd8d182285a0fe56bace7f45b5e7d1a6ebcbf524e8f3bd87eb0f125271b8831f"}, +] + +[[package]] +name = "mypy" 
+version = "0.971" +description = "Optional static typing for Python" +optional = false +python-versions = ">=3.6" +files = [ + {file = "mypy-0.971-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:f2899a3cbd394da157194f913a931edfd4be5f274a88041c9dc2d9cdcb1c315c"}, + {file = "mypy-0.971-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:98e02d56ebe93981c41211c05adb630d1d26c14195d04d95e49cd97dbc046dc5"}, + {file = "mypy-0.971-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:19830b7dba7d5356d3e26e2427a2ec91c994cd92d983142cbd025ebe81d69cf3"}, + {file = "mypy-0.971-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:02ef476f6dcb86e6f502ae39a16b93285fef97e7f1ff22932b657d1ef1f28655"}, + {file = "mypy-0.971-cp310-cp310-win_amd64.whl", hash = "sha256:25c5750ba5609a0c7550b73a33deb314ecfb559c350bb050b655505e8aed4103"}, + {file = "mypy-0.971-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:d3348e7eb2eea2472db611486846742d5d52d1290576de99d59edeb7cd4a42ca"}, + {file = "mypy-0.971-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:3fa7a477b9900be9b7dd4bab30a12759e5abe9586574ceb944bc29cddf8f0417"}, + {file = "mypy-0.971-cp36-cp36m-win_amd64.whl", hash = "sha256:2ad53cf9c3adc43cf3bea0a7d01a2f2e86db9fe7596dfecb4496a5dda63cbb09"}, + {file = "mypy-0.971-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:855048b6feb6dfe09d3353466004490b1872887150c5bb5caad7838b57328cc8"}, + {file = "mypy-0.971-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:23488a14a83bca6e54402c2e6435467a4138785df93ec85aeff64c6170077fb0"}, + {file = "mypy-0.971-cp37-cp37m-win_amd64.whl", hash = "sha256:4b21e5b1a70dfb972490035128f305c39bc4bc253f34e96a4adf9127cf943eb2"}, + {file = "mypy-0.971-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:9796a2ba7b4b538649caa5cecd398d873f4022ed2333ffde58eaf604c4d2cb27"}, + {file 
= "mypy-0.971-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:5a361d92635ad4ada1b1b2d3630fc2f53f2127d51cf2def9db83cba32e47c856"}, + {file = "mypy-0.971-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:b793b899f7cf563b1e7044a5c97361196b938e92f0a4343a5d27966a53d2ec71"}, + {file = "mypy-0.971-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:d1ea5d12c8e2d266b5fb8c7a5d2e9c0219fedfeb493b7ed60cd350322384ac27"}, + {file = "mypy-0.971-cp38-cp38-win_amd64.whl", hash = "sha256:23c7ff43fff4b0df93a186581885c8512bc50fc4d4910e0f838e35d6bb6b5e58"}, + {file = "mypy-0.971-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:1f7656b69974a6933e987ee8ffb951d836272d6c0f81d727f1d0e2696074d9e6"}, + {file = "mypy-0.971-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:d2022bfadb7a5c2ef410d6a7c9763188afdb7f3533f22a0a32be10d571ee4bbe"}, + {file = "mypy-0.971-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:ef943c72a786b0f8d90fd76e9b39ce81fb7171172daf84bf43eaf937e9f220a9"}, + {file = "mypy-0.971-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:d744f72eb39f69312bc6c2abf8ff6656973120e2eb3f3ec4f758ed47e414a4bf"}, + {file = "mypy-0.971-cp39-cp39-win_amd64.whl", hash = "sha256:77a514ea15d3007d33a9e2157b0ba9c267496acf12a7f2b9b9f8446337aac5b0"}, + {file = "mypy-0.971-py3-none-any.whl", hash = "sha256:0d054ef16b071149917085f51f89555a576e2618d5d9dd70bd6eea6410af3ac9"}, + {file = "mypy-0.971.tar.gz", hash = "sha256:40b0f21484238269ae6a57200c807d80debc6459d444c0489a102d7c6a75fa56"}, +] + +[package.dependencies] +mypy-extensions = ">=0.4.3" +tomli = {version = ">=1.1.0", markers = "python_version < \"3.11\""} +typing-extensions = ">=3.10" + +[package.extras] +dmypy = ["psutil (>=4.0)"] +python2 = ["typed-ast (>=1.4.0,<2)"] +reports = ["lxml"] + +[[package]] +name = "mypy-extensions" +version = "0.4.3" +description = "Experimental type system extensions for programs checked 
with the mypy typechecker." +optional = false +python-versions = "*" +files = [ + {file = "mypy_extensions-0.4.3-py2.py3-none-any.whl", hash = "sha256:090fedd75945a69ae91ce1303b5824f428daf5a028d2f6ab8a299250a846f15d"}, + {file = "mypy_extensions-0.4.3.tar.gz", hash = "sha256:2d82818f5bb3e369420cb3c4060a7970edba416647068eb4c5343488a6c604a8"}, +] + +[[package]] +name = "packaging" +version = "21.3" +description = "Core utilities for Python packages" +optional = false +python-versions = ">=3.6" +files = [ + {file = "packaging-21.3-py3-none-any.whl", hash = "sha256:ef103e05f519cdc783ae24ea4e2e0f508a9c99b2d4969652eed6a2e1ea5bd522"}, + {file = "packaging-21.3.tar.gz", hash = "sha256:dd47c42927d89ab911e606518907cc2d3a1f38bbd026385970643f9c5b8ecfeb"}, +] + +[package.dependencies] +pyparsing = ">=2.0.2,<3.0.5 || >3.0.5" + +[[package]] +name = "passlib" +version = "1.7.4" +description = "comprehensive password hashing framework supporting over 30 schemes" +optional = false +python-versions = "*" +files = [ + {file = "passlib-1.7.4-py2.py3-none-any.whl", hash = "sha256:aa6bca462b8d8bda89c70b382f0c298a20b5560af6cbfa2dce410c0a2fb669f1"}, + {file = "passlib-1.7.4.tar.gz", hash = "sha256:defd50f72b65c5402ab2c573830a6978e5f202ad0d984793c8dde2c4152ebe04"}, +] + +[package.dependencies] +bcrypt = {version = ">=3.1.0", optional = true, markers = "extra == \"bcrypt\""} + +[package.extras] +argon2 = ["argon2-cffi (>=18.2.0)"] +bcrypt = ["bcrypt (>=3.1.0)"] +build-docs = ["cloud-sptheme (>=1.10.1)", "sphinx (>=1.6)", "sphinxcontrib-fulltoc (>=1.2.0)"] +totp = ["cryptography"] + +[[package]] +name = "pathspec" +version = "0.9.0" +description = "Utility library for gitignore style pattern matching of file paths." 
+optional = false +python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,>=2.7" +files = [ + {file = "pathspec-0.9.0-py2.py3-none-any.whl", hash = "sha256:7d15c4ddb0b5c802d161efc417ec1a2558ea2653c2e8ad9c19098201dc1c993a"}, + {file = "pathspec-0.9.0.tar.gz", hash = "sha256:e564499435a2673d586f6b2130bb5b95f04a3ba06f81b8f895b651a3c76aabb1"}, +] + +[[package]] +name = "pbr" +version = "5.8.1" +description = "Python Build Reasonableness" +optional = false +python-versions = ">=2.6" +files = [ + {file = "pbr-5.8.1-py2.py3-none-any.whl", hash = "sha256:27108648368782d07bbf1cb468ad2e2eeef29086affd14087a6d04b7de8af4ec"}, + {file = "pbr-5.8.1.tar.gz", hash = "sha256:66bc5a34912f408bb3925bf21231cb6f59206267b7f63f3503ef865c1a292e25"}, +] + +[[package]] +name = "pep8-naming" +version = "0.11.1" +description = "Check PEP-8 naming conventions, plugin for flake8" +optional = false +python-versions = "*" +files = [ + {file = "pep8-naming-0.11.1.tar.gz", hash = "sha256:a1dd47dd243adfe8a83616e27cf03164960b507530f155db94e10b36a6cd6724"}, + {file = "pep8_naming-0.11.1-py2.py3-none-any.whl", hash = "sha256:f43bfe3eea7e0d73e8b5d07d6407ab47f2476ccaeff6937c84275cd30b016738"}, +] + +[package.dependencies] +flake8-polyfill = ">=1.0.2,<2" + +[[package]] +name = "platformdirs" +version = "2.4.1" +description = "A small Python module for determining appropriate platform-specific dirs, e.g. a \"user data dir\"." 
+optional = false +python-versions = ">=3.7" +files = [ + {file = "platformdirs-2.4.1-py3-none-any.whl", hash = "sha256:1d7385c7db91728b83efd0ca99a5afb296cab9d0ed8313a45ed8ba17967ecfca"}, + {file = "platformdirs-2.4.1.tar.gz", hash = "sha256:440633ddfebcc36264232365d7840a970e75e1018d15b4327d11f91909045fda"}, +] + +[package.extras] +docs = ["Sphinx (>=4)", "furo (>=2021.7.5b38)", "proselint (>=0.10.2)", "sphinx-autodoc-typehints (>=1.12)"] +test = ["appdirs (==1.4.4)", "pytest (>=6)", "pytest-cov (>=2.7)", "pytest-mock (>=3.6)"] + +[[package]] +name = "pluggy" +version = "1.0.0" +description = "plugin and hook calling mechanisms for python" +optional = false +python-versions = ">=3.6" +files = [ + {file = "pluggy-1.0.0-py2.py3-none-any.whl", hash = "sha256:74134bbf457f031a36d68416e1509f34bd5ccc019f0bcc952c7b909d06b37bd3"}, + {file = "pluggy-1.0.0.tar.gz", hash = "sha256:4224373bacce55f955a878bf9cfa763c1e360858e330072059e10bad68531159"}, +] + +[package.extras] +dev = ["pre-commit", "tox"] +testing = ["pytest", "pytest-benchmark"] + +[[package]] +name = "psycopg2-binary" +version = "2.9.3" +description = "psycopg2 - Python-PostgreSQL Database Adapter" +optional = false +python-versions = ">=3.6" +files = [ + {file = "psycopg2-binary-2.9.3.tar.gz", hash = "sha256:761df5313dc15da1502b21453642d7599d26be88bff659382f8f9747c7ebea4e"}, + {file = "psycopg2_binary-2.9.3-cp310-cp310-macosx_10_14_x86_64.macosx_10_9_intel.macosx_10_9_x86_64.macosx_10_10_intel.macosx_10_10_x86_64.whl", hash = "sha256:539b28661b71da7c0e428692438efbcd048ca21ea81af618d845e06ebfd29478"}, + {file = "psycopg2_binary-2.9.3-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:2f2534ab7dc7e776a263b463a16e189eb30e85ec9bbe1bff9e78dae802608932"}, + {file = "psycopg2_binary-2.9.3-cp310-cp310-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6e82d38390a03da28c7985b394ec3f56873174e2c88130e6966cb1c946508e65"}, + {file = "psycopg2_binary-2.9.3-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", 
hash = "sha256:57804fc02ca3ce0dbfbef35c4b3a4a774da66d66ea20f4bda601294ad2ea6092"}, + {file = "psycopg2_binary-2.9.3-cp310-cp310-manylinux_2_24_aarch64.whl", hash = "sha256:083a55275f09a62b8ca4902dd11f4b33075b743cf0d360419e2051a8a5d5ff76"}, + {file = "psycopg2_binary-2.9.3-cp310-cp310-manylinux_2_24_ppc64le.whl", hash = "sha256:0a29729145aaaf1ad8bafe663131890e2111f13416b60e460dae0a96af5905c9"}, + {file = "psycopg2_binary-2.9.3-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:3a79d622f5206d695d7824cbf609a4f5b88ea6d6dab5f7c147fc6d333a8787e4"}, + {file = "psycopg2_binary-2.9.3-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:090f3348c0ab2cceb6dfbe6bf721ef61262ddf518cd6cc6ecc7d334996d64efa"}, + {file = "psycopg2_binary-2.9.3-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:a9e1f75f96ea388fbcef36c70640c4efbe4650658f3d6a2967b4cc70e907352e"}, + {file = "psycopg2_binary-2.9.3-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:c3ae8e75eb7160851e59adc77b3a19a976e50622e44fd4fd47b8b18208189d42"}, + {file = "psycopg2_binary-2.9.3-cp310-cp310-win32.whl", hash = "sha256:7b1e9b80afca7b7a386ef087db614faebbf8839b7f4db5eb107d0f1a53225029"}, + {file = "psycopg2_binary-2.9.3-cp310-cp310-win_amd64.whl", hash = "sha256:8b344adbb9a862de0c635f4f0425b7958bf5a4b927c8594e6e8d261775796d53"}, + {file = "psycopg2_binary-2.9.3-cp36-cp36m-macosx_10_14_x86_64.macosx_10_9_intel.macosx_10_9_x86_64.macosx_10_10_intel.macosx_10_10_x86_64.whl", hash = "sha256:e847774f8ffd5b398a75bc1c18fbb56564cda3d629fe68fd81971fece2d3c67e"}, + {file = "psycopg2_binary-2.9.3-cp36-cp36m-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:68641a34023d306be959101b345732360fc2ea4938982309b786f7be1b43a4a1"}, + {file = "psycopg2_binary-2.9.3-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3303f8807f342641851578ee7ed1f3efc9802d00a6f83c101d21c608cb864460"}, + {file = "psycopg2_binary-2.9.3-cp36-cp36m-manylinux_2_24_aarch64.whl", hash = 
"sha256:e3699852e22aa68c10de06524a3721ade969abf382da95884e6a10ff798f9281"}, + {file = "psycopg2_binary-2.9.3-cp36-cp36m-manylinux_2_24_ppc64le.whl", hash = "sha256:526ea0378246d9b080148f2d6681229f4b5964543c170dd10bf4faaab6e0d27f"}, + {file = "psycopg2_binary-2.9.3-cp36-cp36m-musllinux_1_1_aarch64.whl", hash = "sha256:b1c8068513f5b158cf7e29c43a77eb34b407db29aca749d3eb9293ee0d3103ca"}, + {file = "psycopg2_binary-2.9.3-cp36-cp36m-musllinux_1_1_i686.whl", hash = "sha256:15803fa813ea05bef089fa78835118b5434204f3a17cb9f1e5dbfd0b9deea5af"}, + {file = "psycopg2_binary-2.9.3-cp36-cp36m-musllinux_1_1_ppc64le.whl", hash = "sha256:152f09f57417b831418304c7f30d727dc83a12761627bb826951692cc6491e57"}, + {file = "psycopg2_binary-2.9.3-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:404224e5fef3b193f892abdbf8961ce20e0b6642886cfe1fe1923f41aaa75c9d"}, + {file = "psycopg2_binary-2.9.3-cp36-cp36m-win32.whl", hash = "sha256:1f6b813106a3abdf7b03640d36e24669234120c72e91d5cbaeb87c5f7c36c65b"}, + {file = "psycopg2_binary-2.9.3-cp36-cp36m-win_amd64.whl", hash = "sha256:2d872e3c9d5d075a2e104540965a1cf898b52274a5923936e5bfddb58c59c7c2"}, + {file = "psycopg2_binary-2.9.3-cp37-cp37m-macosx_10_14_x86_64.macosx_10_9_intel.macosx_10_9_x86_64.macosx_10_10_intel.macosx_10_10_x86_64.whl", hash = "sha256:10bb90fb4d523a2aa67773d4ff2b833ec00857f5912bafcfd5f5414e45280fb1"}, + {file = "psycopg2_binary-2.9.3-cp37-cp37m-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:874a52ecab70af13e899f7847b3e074eeb16ebac5615665db33bce8a1009cf33"}, + {file = "psycopg2_binary-2.9.3-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a29b3ca4ec9defec6d42bf5feb36bb5817ba3c0230dd83b4edf4bf02684cd0ae"}, + {file = "psycopg2_binary-2.9.3-cp37-cp37m-manylinux_2_24_aarch64.whl", hash = "sha256:12b11322ea00ad8db8c46f18b7dfc47ae215e4df55b46c67a94b4effbaec7094"}, + {file = "psycopg2_binary-2.9.3-cp37-cp37m-manylinux_2_24_ppc64le.whl", hash = 
"sha256:53293533fcbb94c202b7c800a12c873cfe24599656b341f56e71dd2b557be063"}, + {file = "psycopg2_binary-2.9.3-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:c381bda330ddf2fccbafab789d83ebc6c53db126e4383e73794c74eedce855ef"}, + {file = "psycopg2_binary-2.9.3-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:9d29409b625a143649d03d0fd7b57e4b92e0ecad9726ba682244b73be91d2fdb"}, + {file = "psycopg2_binary-2.9.3-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:183a517a3a63503f70f808b58bfbf962f23d73b6dccddae5aa56152ef2bcb232"}, + {file = "psycopg2_binary-2.9.3-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:15c4e4cfa45f5a60599d9cec5f46cd7b1b29d86a6390ec23e8eebaae84e64554"}, + {file = "psycopg2_binary-2.9.3-cp37-cp37m-win32.whl", hash = "sha256:adf20d9a67e0b6393eac162eb81fb10bc9130a80540f4df7e7355c2dd4af9fba"}, + {file = "psycopg2_binary-2.9.3-cp37-cp37m-win_amd64.whl", hash = "sha256:2f9ffd643bc7349eeb664eba8864d9e01f057880f510e4681ba40a6532f93c71"}, + {file = "psycopg2_binary-2.9.3-cp38-cp38-macosx_10_14_x86_64.macosx_10_9_intel.macosx_10_9_x86_64.macosx_10_10_intel.macosx_10_10_x86_64.whl", hash = "sha256:def68d7c21984b0f8218e8a15d514f714d96904265164f75f8d3a70f9c295667"}, + {file = "psycopg2_binary-2.9.3-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:e6aa71ae45f952a2205377773e76f4e3f27951df38e69a4c95440c779e013560"}, + {file = "psycopg2_binary-2.9.3-cp38-cp38-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:dffc08ca91c9ac09008870c9eb77b00a46b3378719584059c034b8945e26b272"}, + {file = "psycopg2_binary-2.9.3-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:280b0bb5cbfe8039205c7981cceb006156a675362a00fe29b16fbc264e242834"}, + {file = "psycopg2_binary-2.9.3-cp38-cp38-manylinux_2_24_aarch64.whl", hash = "sha256:af9813db73395fb1fc211bac696faea4ca9ef53f32dc0cfa27e4e7cf766dcf24"}, + {file = "psycopg2_binary-2.9.3-cp38-cp38-manylinux_2_24_ppc64le.whl", hash = 
"sha256:63638d875be8c2784cfc952c9ac34e2b50e43f9f0a0660b65e2a87d656b3116c"}, + {file = "psycopg2_binary-2.9.3-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:ffb7a888a047696e7f8240d649b43fb3644f14f0ee229077e7f6b9f9081635bd"}, + {file = "psycopg2_binary-2.9.3-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:0c9d5450c566c80c396b7402895c4369a410cab5a82707b11aee1e624da7d004"}, + {file = "psycopg2_binary-2.9.3-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:d1c1b569ecafe3a69380a94e6ae09a4789bbb23666f3d3a08d06bbd2451f5ef1"}, + {file = "psycopg2_binary-2.9.3-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:8fc53f9af09426a61db9ba357865c77f26076d48669f2e1bb24d85a22fb52307"}, + {file = "psycopg2_binary-2.9.3-cp38-cp38-win32.whl", hash = "sha256:6472a178e291b59e7f16ab49ec8b4f3bdada0a879c68d3817ff0963e722a82ce"}, + {file = "psycopg2_binary-2.9.3-cp38-cp38-win_amd64.whl", hash = "sha256:35168209c9d51b145e459e05c31a9eaeffa9a6b0fd61689b48e07464ffd1a83e"}, + {file = "psycopg2_binary-2.9.3-cp39-cp39-macosx_10_14_x86_64.macosx_10_9_intel.macosx_10_9_x86_64.macosx_10_10_intel.macosx_10_10_x86_64.whl", hash = "sha256:47133f3f872faf28c1e87d4357220e809dfd3fa7c64295a4a148bcd1e6e34ec9"}, + {file = "psycopg2_binary-2.9.3-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:b3a24a1982ae56461cc24f6680604fffa2c1b818e9dc55680da038792e004d18"}, + {file = "psycopg2_binary-2.9.3-cp39-cp39-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:91920527dea30175cc02a1099f331aa8c1ba39bf8b7762b7b56cbf54bc5cce42"}, + {file = "psycopg2_binary-2.9.3-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:887dd9aac71765ac0d0bac1d0d4b4f2c99d5f5c1382d8b770404f0f3d0ce8a39"}, + {file = "psycopg2_binary-2.9.3-cp39-cp39-manylinux_2_24_aarch64.whl", hash = "sha256:1f14c8b0942714eb3c74e1e71700cbbcb415acbc311c730370e70c578a44a25c"}, + {file = "psycopg2_binary-2.9.3-cp39-cp39-manylinux_2_24_ppc64le.whl", hash = 
"sha256:7af0dd86ddb2f8af5da57a976d27cd2cd15510518d582b478fbb2292428710b4"}, + {file = "psycopg2_binary-2.9.3-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:93cd1967a18aa0edd4b95b1dfd554cf15af657cb606280996d393dadc88c3c35"}, + {file = "psycopg2_binary-2.9.3-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:bda845b664bb6c91446ca9609fc69f7db6c334ec5e4adc87571c34e4f47b7ddb"}, + {file = "psycopg2_binary-2.9.3-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:01310cf4cf26db9aea5158c217caa92d291f0500051a6469ac52166e1a16f5b7"}, + {file = "psycopg2_binary-2.9.3-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:99485cab9ba0fa9b84f1f9e1fef106f44a46ef6afdeec8885e0b88d0772b49e8"}, + {file = "psycopg2_binary-2.9.3-cp39-cp39-win32.whl", hash = "sha256:46f0e0a6b5fa5851bbd9ab1bc805eef362d3a230fbdfbc209f4a236d0a7a990d"}, + {file = "psycopg2_binary-2.9.3-cp39-cp39-win_amd64.whl", hash = "sha256:accfe7e982411da3178ec690baaceaad3c278652998b2c45828aaac66cd8285f"}, +] + +[[package]] +name = "py" +version = "1.11.0" +description = "library with cross-python path, ini-parsing, io, code, log facilities" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" +files = [ + {file = "py-1.11.0-py2.py3-none-any.whl", hash = "sha256:607c53218732647dff4acdfcd50cb62615cedf612e72d1724fb1a0cc6405b378"}, + {file = "py-1.11.0.tar.gz", hash = "sha256:51c75c4126074b472f746a24399ad32f6053d1b34b68d2fa41e558e6f4a98719"}, +] + +[[package]] +name = "pycodestyle" +version = "2.7.0" +description = "Python style guide checker" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" +files = [ + {file = "pycodestyle-2.7.0-py2.py3-none-any.whl", hash = "sha256:514f76d918fcc0b55c6680472f0a37970994e07bbb80725808c17089be302068"}, + {file = "pycodestyle-2.7.0.tar.gz", hash = "sha256:c389c1d06bf7904078ca03399a4816f974a1d590090fecea0c63ec26ebaf1cef"}, +] + +[[package]] +name = "pycparser" +version = "2.21" +description = "C parser in Python" +optional 
= false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" +files = [ + {file = "pycparser-2.21-py2.py3-none-any.whl", hash = "sha256:8ee45429555515e1f6b185e78100aea234072576aa43ab53aefcae078162fca9"}, + {file = "pycparser-2.21.tar.gz", hash = "sha256:e644fdec12f7872f86c58ff790da456218b10f863970249516d60a5eaca77206"}, +] + +[[package]] +name = "pydantic" +version = "1.9.2" +description = "Data validation and settings management using python type hints" +optional = false +python-versions = ">=3.6.1" +files = [ + {file = "pydantic-1.9.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:9c9e04a6cdb7a363d7cb3ccf0efea51e0abb48e180c0d31dca8d247967d85c6e"}, + {file = "pydantic-1.9.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:fafe841be1103f340a24977f61dee76172e4ae5f647ab9e7fd1e1fca51524f08"}, + {file = "pydantic-1.9.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:afacf6d2a41ed91fc631bade88b1d319c51ab5418870802cedb590b709c5ae3c"}, + {file = "pydantic-1.9.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3ee0d69b2a5b341fc7927e92cae7ddcfd95e624dfc4870b32a85568bd65e6131"}, + {file = "pydantic-1.9.2-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:ff68fc85355532ea77559ede81f35fff79a6a5543477e168ab3a381887caea76"}, + {file = "pydantic-1.9.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:c0f5e142ef8217019e3eef6ae1b6b55f09a7a15972958d44fbd228214cede567"}, + {file = "pydantic-1.9.2-cp310-cp310-win_amd64.whl", hash = "sha256:615661bfc37e82ac677543704437ff737418e4ea04bef9cf11c6d27346606044"}, + {file = "pydantic-1.9.2-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:328558c9f2eed77bd8fffad3cef39dbbe3edc7044517f4625a769d45d4cf7555"}, + {file = "pydantic-1.9.2-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2bd446bdb7755c3a94e56d7bdfd3ee92396070efa8ef3a34fab9579fe6aa1d84"}, + {file = 
"pydantic-1.9.2-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e0b214e57623a535936005797567231a12d0da0c29711eb3514bc2b3cd008d0f"}, + {file = "pydantic-1.9.2-cp36-cp36m-musllinux_1_1_i686.whl", hash = "sha256:d8ce3fb0841763a89322ea0432f1f59a2d3feae07a63ea2c958b2315e1ae8adb"}, + {file = "pydantic-1.9.2-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:b34ba24f3e2d0b39b43f0ca62008f7ba962cff51efa56e64ee25c4af6eed987b"}, + {file = "pydantic-1.9.2-cp36-cp36m-win_amd64.whl", hash = "sha256:84d76ecc908d917f4684b354a39fd885d69dd0491be175f3465fe4b59811c001"}, + {file = "pydantic-1.9.2-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:4de71c718c9756d679420c69f216776c2e977459f77e8f679a4a961dc7304a56"}, + {file = "pydantic-1.9.2-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5803ad846cdd1ed0d97eb00292b870c29c1f03732a010e66908ff48a762f20e4"}, + {file = "pydantic-1.9.2-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a8c5360a0297a713b4123608a7909e6869e1b56d0e96eb0d792c27585d40757f"}, + {file = "pydantic-1.9.2-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:cdb4272678db803ddf94caa4f94f8672e9a46bae4a44f167095e4d06fec12979"}, + {file = "pydantic-1.9.2-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:19b5686387ea0d1ea52ecc4cffb71abb21702c5e5b2ac626fd4dbaa0834aa49d"}, + {file = "pydantic-1.9.2-cp37-cp37m-win_amd64.whl", hash = "sha256:32e0b4fb13ad4db4058a7c3c80e2569adbd810c25e6ca3bbd8b2a9cc2cc871d7"}, + {file = "pydantic-1.9.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:91089b2e281713f3893cd01d8e576771cd5bfdfbff5d0ed95969f47ef6d676c3"}, + {file = "pydantic-1.9.2-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:e631c70c9280e3129f071635b81207cad85e6c08e253539467e4ead0e5b219aa"}, + {file = "pydantic-1.9.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:4b3946f87e5cef3ba2e7bd3a4eb5a20385fe36521d6cc1ebf3c08a6697c6cfb3"}, + {file = "pydantic-1.9.2-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:5565a49effe38d51882cb7bac18bda013cdb34d80ac336428e8908f0b72499b0"}, + {file = "pydantic-1.9.2-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:bd67cb2c2d9602ad159389c29e4ca964b86fa2f35c2faef54c3eb28b4efd36c8"}, + {file = "pydantic-1.9.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:4aafd4e55e8ad5bd1b19572ea2df546ccace7945853832bb99422a79c70ce9b8"}, + {file = "pydantic-1.9.2-cp38-cp38-win_amd64.whl", hash = "sha256:d70916235d478404a3fa8c997b003b5f33aeac4686ac1baa767234a0f8ac2326"}, + {file = "pydantic-1.9.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:f0ca86b525264daa5f6b192f216a0d1e860b7383e3da1c65a1908f9c02f42801"}, + {file = "pydantic-1.9.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:1061c6ee6204f4f5a27133126854948e3b3d51fcc16ead2e5d04378c199b2f44"}, + {file = "pydantic-1.9.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e78578f0c7481c850d1c969aca9a65405887003484d24f6110458fb02cca7747"}, + {file = "pydantic-1.9.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:5da164119602212a3fe7e3bc08911a89db4710ae51444b4224c2382fd09ad453"}, + {file = "pydantic-1.9.2-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:7ead3cd020d526f75b4188e0a8d71c0dbbe1b4b6b5dc0ea775a93aca16256aeb"}, + {file = "pydantic-1.9.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:7d0f183b305629765910eaad707800d2f47c6ac5bcfb8c6397abdc30b69eeb15"}, + {file = "pydantic-1.9.2-cp39-cp39-win_amd64.whl", hash = "sha256:f1a68f4f65a9ee64b6ccccb5bf7e17db07caebd2730109cb8a95863cfa9c4e55"}, + {file = "pydantic-1.9.2-py3-none-any.whl", hash = "sha256:78a4d6bdfd116a559aeec9a4cfe77dda62acc6233f8b56a716edad2651023e5e"}, + {file = "pydantic-1.9.2.tar.gz", hash = 
"sha256:8cb0bc509bfb71305d7a59d00163d5f9fc4530f0881ea32c74ff4f74c85f3d3d"}, +] + +[package.dependencies] +email-validator = {version = ">=1.0.3", optional = true, markers = "extra == \"email\""} +python-dotenv = {version = ">=0.10.4", optional = true, markers = "extra == \"dotenv\""} +typing-extensions = ">=3.7.4.3" + +[package.extras] +dotenv = ["python-dotenv (>=0.10.4)"] +email = ["email-validator (>=1.0.3)"] + +[[package]] +name = "pydocstyle" +version = "6.1.1" +description = "Python docstring style checker" +optional = false +python-versions = ">=3.6" +files = [ + {file = "pydocstyle-6.1.1-py3-none-any.whl", hash = "sha256:6987826d6775056839940041beef5c08cc7e3d71d63149b48e36727f70144dc4"}, + {file = "pydocstyle-6.1.1.tar.gz", hash = "sha256:1d41b7c459ba0ee6c345f2eb9ae827cab14a7533a88c5c6f7e94923f72df92dc"}, +] + +[package.dependencies] +snowballstemmer = "*" + +[package.extras] +toml = ["toml"] + +[[package]] +name = "pyflakes" +version = "2.3.1" +description = "passive checker of Python programs" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" +files = [ + {file = "pyflakes-2.3.1-py2.py3-none-any.whl", hash = "sha256:7893783d01b8a89811dd72d7dfd4d84ff098e5eed95cfa8905b22bbffe52efc3"}, + {file = "pyflakes-2.3.1.tar.gz", hash = "sha256:f5bc8ecabc05bb9d291eb5203d6810b49040f6ff446a756326104746cc00c1db"}, +] + +[[package]] +name = "pygments" +version = "2.11.2" +description = "Pygments is a syntax highlighting package written in Python." 
+optional = false +python-versions = ">=3.5" +files = [ + {file = "Pygments-2.11.2-py3-none-any.whl", hash = "sha256:44238f1b60a76d78fc8ca0528ee429702aae011c265fe6a8dd8b63049ae41c65"}, + {file = "Pygments-2.11.2.tar.gz", hash = "sha256:4e426f72023d88d03b2fa258de560726ce890ff3b630f88c21cbb8b2503b8c6a"}, +] + +[[package]] +name = "pyjwt" +version = "2.4.0" +description = "JSON Web Token implementation in Python" +optional = false +python-versions = ">=3.6" +files = [ + {file = "PyJWT-2.4.0-py3-none-any.whl", hash = "sha256:72d1d253f32dbd4f5c88eaf1fdc62f3a19f676ccbadb9dbc5d07e951b2b26daf"}, + {file = "PyJWT-2.4.0.tar.gz", hash = "sha256:d42908208c699b3b973cbeb01a969ba6a96c821eefb1c5bfe4c390c01d67abba"}, +] + +[package.extras] +crypto = ["cryptography (>=3.3.1)"] +dev = ["coverage[toml] (==5.0.4)", "cryptography (>=3.3.1)", "mypy", "pre-commit", "pytest (>=6.0.0,<7.0.0)", "sphinx", "sphinx-rtd-theme", "zope.interface"] +docs = ["sphinx", "sphinx-rtd-theme", "zope.interface"] +tests = ["coverage[toml] (==5.0.4)", "pytest (>=6.0.0,<7.0.0)"] + +[[package]] +name = "pyparsing" +version = "3.0.7" +description = "Python parsing module" +optional = false +python-versions = ">=3.6" +files = [ + {file = "pyparsing-3.0.7-py3-none-any.whl", hash = "sha256:a6c06a88f252e6c322f65faf8f418b16213b51bdfaece0524c1c1bc30c63c484"}, + {file = "pyparsing-3.0.7.tar.gz", hash = "sha256:18ee9022775d270c55187733956460083db60b37d0d0fb357445f3094eed3eea"}, +] + +[package.extras] +diagrams = ["jinja2", "railroad-diagrams"] + +[[package]] +name = "pypika" +version = "0.48.9" +description = "A SQL query builder API for Python" +optional = false +python-versions = "*" +files = [ + {file = "PyPika-0.48.9.tar.gz", hash = "sha256:838836a61747e7c8380cd1b7ff638694b7a7335345d0f559b04b2cd832ad5378"}, +] + +[[package]] +name = "pytest" +version = "7.1.2" +description = "pytest: simple powerful testing with Python" +optional = false +python-versions = ">=3.7" +files = [ + {file = 
"pytest-7.1.2-py3-none-any.whl", hash = "sha256:13d0e3ccfc2b6e26be000cb6568c832ba67ba32e719443bfe725814d3c42433c"}, + {file = "pytest-7.1.2.tar.gz", hash = "sha256:a06a0425453864a270bc45e71f783330a7428defb4230fb5e6a731fde06ecd45"}, +] + +[package.dependencies] +atomicwrites = {version = ">=1.0", markers = "sys_platform == \"win32\""} +attrs = ">=19.2.0" +colorama = {version = "*", markers = "sys_platform == \"win32\""} +iniconfig = "*" +packaging = "*" +pluggy = ">=0.12,<2.0" +py = ">=1.8.2" +tomli = ">=1.0.0" + +[package.extras] +testing = ["argcomplete", "hypothesis (>=3.56)", "mock", "nose", "pygments (>=2.7.2)", "requests", "xmlschema"] + +[[package]] +name = "pytest-asyncio" +version = "0.19.0" +description = "Pytest support for asyncio" +optional = false +python-versions = ">=3.7" +files = [ + {file = "pytest-asyncio-0.19.0.tar.gz", hash = "sha256:ac4ebf3b6207259750bc32f4c1d8fcd7e79739edbc67ad0c58dd150b1d072fed"}, + {file = "pytest_asyncio-0.19.0-py3-none-any.whl", hash = "sha256:7a97e37cfe1ed296e2e84941384bdd37c376453912d397ed39293e0916f521fa"}, +] + +[package.dependencies] +pytest = ">=6.1.0" + +[package.extras] +testing = ["coverage (>=6.2)", "flaky (>=3.5.0)", "hypothesis (>=5.7.1)", "mypy (>=0.931)", "pytest-trio (>=0.7.0)"] + +[[package]] +name = "pytest-cov" +version = "3.0.0" +description = "Pytest plugin for measuring coverage." 
+optional = false +python-versions = ">=3.6" +files = [ + {file = "pytest-cov-3.0.0.tar.gz", hash = "sha256:e7f0f5b1617d2210a2cabc266dfe2f4c75a8d32fb89eafb7ad9d06f6d076d470"}, + {file = "pytest_cov-3.0.0-py3-none-any.whl", hash = "sha256:578d5d15ac4a25e5f961c938b85a05b09fdaae9deef3bb6de9a6e766622ca7a6"}, +] + +[package.dependencies] +coverage = {version = ">=5.2.1", extras = ["toml"]} +pytest = ">=4.6" + +[package.extras] +testing = ["fields", "hunter", "process-tests", "pytest-xdist", "six", "virtualenv"] + +[[package]] +name = "pytest-env" +version = "0.6.2" +description = "py.test plugin that allows you to add environment variables." +optional = false +python-versions = "*" +files = [ + {file = "pytest-env-0.6.2.tar.gz", hash = "sha256:7e94956aef7f2764f3c147d216ce066bf6c42948bb9e293169b1b1c880a580c2"}, +] + +[package.dependencies] +pytest = ">=2.6.0" + +[[package]] +name = "pytest-forked" +version = "1.4.0" +description = "run tests in isolated forked subprocesses" +optional = false +python-versions = ">=3.6" +files = [ + {file = "pytest-forked-1.4.0.tar.gz", hash = "sha256:8b67587c8f98cbbadfdd804539ed5455b6ed03802203485dd2f53c1422d7440e"}, + {file = "pytest_forked-1.4.0-py3-none-any.whl", hash = "sha256:bbbb6717efc886b9d64537b41fb1497cfaf3c9601276be8da2cccfea5a3c8ad8"}, +] + +[package.dependencies] +py = "*" +pytest = ">=3.10" + +[[package]] +name = "pytest-xdist" +version = "2.5.0" +description = "pytest xdist plugin for distributed testing and loop-on-failing modes" +optional = false +python-versions = ">=3.6" +files = [ + {file = "pytest-xdist-2.5.0.tar.gz", hash = "sha256:4580deca3ff04ddb2ac53eba39d76cb5dd5edeac050cb6fbc768b0dd712b4edf"}, + {file = "pytest_xdist-2.5.0-py3-none-any.whl", hash = "sha256:6fe5c74fec98906deb8f2d2b616b5c782022744978e7bd4695d39c8f42d0ce65"}, +] + +[package.dependencies] +execnet = ">=1.1" +pytest = ">=6.2.0" +pytest-forked = "*" + +[package.extras] +psutil = ["psutil (>=3.0)"] +setproctitle = ["setproctitle"] +testing = 
["filelock"] + +[[package]] +name = "python-dotenv" +version = "0.19.2" +description = "Read key-value pairs from a .env file and set them as environment variables" +optional = false +python-versions = ">=3.5" +files = [ + {file = "python-dotenv-0.19.2.tar.gz", hash = "sha256:a5de49a31e953b45ff2d2fd434bbc2670e8db5273606c1e737cc6b93eff3655f"}, + {file = "python_dotenv-0.19.2-py2.py3-none-any.whl", hash = "sha256:32b2bdc1873fd3a3c346da1c6db83d0053c3c62f28f1f38516070c4c8971b1d3"}, +] + +[package.extras] +cli = ["click (>=5.0)"] + +[[package]] +name = "python-multipart" +version = "0.0.20" +description = "A streaming multipart parser for Python" +optional = false +python-versions = ">=3.8" +files = [ + {file = "python_multipart-0.0.20-py3-none-any.whl", hash = "sha256:8a62d3a8335e06589fe01f2a3e178cdcc632f3fbe0d492ad9ee0ec35aab1f104"}, + {file = "python_multipart-0.0.20.tar.gz", hash = "sha256:8dd0cab45b8e23064ae09147625994d090fa46f5b0d1e13af944c331a7fa9d13"}, +] + +[[package]] +name = "python-slugify" +version = "6.1.2" +description = "A Python slugify application that also handles Unicode" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, !=3.5.*" +files = [ + {file = "python-slugify-6.1.2.tar.gz", hash = "sha256:272d106cb31ab99b3496ba085e3fea0e9e76dcde967b5e9992500d1f785ce4e1"}, + {file = "python_slugify-6.1.2-py2.py3-none-any.whl", hash = "sha256:7b2c274c308b62f4269a9ba701aa69a797e9bca41aeee5b3a9e79e36b6656927"}, +] + +[package.dependencies] +text-unidecode = ">=1.3" + +[package.extras] +unidecode = ["Unidecode (>=1.1.1)"] + +[[package]] +name = "pyyaml" +version = "6.0" +description = "YAML parser and emitter for Python" +optional = false +python-versions = ">=3.6" +files = [ + {file = "PyYAML-6.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:d4db7c7aef085872ef65a8fd7d6d09a14ae91f691dec3e87ee5ee0539d516f53"}, + {file = "PyYAML-6.0-cp310-cp310-macosx_11_0_arm64.whl", hash = 
"sha256:9df7ed3b3d2e0ecfe09e14741b857df43adb5a3ddadc919a2d94fbdf78fea53c"}, + {file = "PyYAML-6.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:77f396e6ef4c73fdc33a9157446466f1cff553d979bd00ecb64385760c6babdc"}, + {file = "PyYAML-6.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a80a78046a72361de73f8f395f1f1e49f956c6be882eed58505a15f3e430962b"}, + {file = "PyYAML-6.0-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:f84fbc98b019fef2ee9a1cb3ce93e3187a6df0b2538a651bfb890254ba9f90b5"}, + {file = "PyYAML-6.0-cp310-cp310-win32.whl", hash = "sha256:2cd5df3de48857ed0544b34e2d40e9fac445930039f3cfe4bcc592a1f836d513"}, + {file = "PyYAML-6.0-cp310-cp310-win_amd64.whl", hash = "sha256:daf496c58a8c52083df09b80c860005194014c3698698d1a57cbcfa182142a3a"}, + {file = "PyYAML-6.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:d4b0ba9512519522b118090257be113b9468d804b19d63c71dbcf4a48fa32358"}, + {file = "PyYAML-6.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:81957921f441d50af23654aa6c5e5eaf9b06aba7f0a19c18a538dc7ef291c5a1"}, + {file = "PyYAML-6.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:afa17f5bc4d1b10afd4466fd3a44dc0e245382deca5b3c353d8b757f9e3ecb8d"}, + {file = "PyYAML-6.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:dbad0e9d368bb989f4515da330b88a057617d16b6a8245084f1b05400f24609f"}, + {file = "PyYAML-6.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:432557aa2c09802be39460360ddffd48156e30721f5e8d917f01d31694216782"}, + {file = "PyYAML-6.0-cp311-cp311-win32.whl", hash = "sha256:bfaef573a63ba8923503d27530362590ff4f576c626d86a9fed95822a8255fd7"}, + {file = "PyYAML-6.0-cp311-cp311-win_amd64.whl", hash = "sha256:01b45c0191e6d66c470b6cf1b9531a771a83c1c4208272ead47a3ae4f2f603bf"}, + {file = "PyYAML-6.0-cp36-cp36m-macosx_10_9_x86_64.whl", hash = 
"sha256:897b80890765f037df3403d22bab41627ca8811ae55e9a722fd0392850ec4d86"}, + {file = "PyYAML-6.0-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:50602afada6d6cbfad699b0c7bb50d5ccffa7e46a3d738092afddc1f9758427f"}, + {file = "PyYAML-6.0-cp36-cp36m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:48c346915c114f5fdb3ead70312bd042a953a8ce5c7106d5bfb1a5254e47da92"}, + {file = "PyYAML-6.0-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:98c4d36e99714e55cfbaaee6dd5badbc9a1ec339ebfc3b1f52e293aee6bb71a4"}, + {file = "PyYAML-6.0-cp36-cp36m-win32.whl", hash = "sha256:0283c35a6a9fbf047493e3a0ce8d79ef5030852c51e9d911a27badfde0605293"}, + {file = "PyYAML-6.0-cp36-cp36m-win_amd64.whl", hash = "sha256:07751360502caac1c067a8132d150cf3d61339af5691fe9e87803040dbc5db57"}, + {file = "PyYAML-6.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:819b3830a1543db06c4d4b865e70ded25be52a2e0631ccd2f6a47a2822f2fd7c"}, + {file = "PyYAML-6.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:473f9edb243cb1935ab5a084eb238d842fb8f404ed2193a915d1784b5a6b5fc0"}, + {file = "PyYAML-6.0-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:0ce82d761c532fe4ec3f87fc45688bdd3a4c1dc5e0b4a19814b9009a29baefd4"}, + {file = "PyYAML-6.0-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:231710d57adfd809ef5d34183b8ed1eeae3f76459c18fb4a0b373ad56bedcdd9"}, + {file = "PyYAML-6.0-cp37-cp37m-win32.whl", hash = "sha256:c5687b8d43cf58545ade1fe3e055f70eac7a5a1a0bf42824308d868289a95737"}, + {file = "PyYAML-6.0-cp37-cp37m-win_amd64.whl", hash = "sha256:d15a181d1ecd0d4270dc32edb46f7cb7733c7c508857278d3d378d14d606db2d"}, + {file = "PyYAML-6.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:0b4624f379dab24d3725ffde76559cff63d9ec94e1736b556dacdfebe5ab6d4b"}, + {file = 
"PyYAML-6.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:213c60cd50106436cc818accf5baa1aba61c0189ff610f64f4a3e8c6726218ba"}, + {file = "PyYAML-6.0-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:9fa600030013c4de8165339db93d182b9431076eb98eb40ee068700c9c813e34"}, + {file = "PyYAML-6.0-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:277a0ef2981ca40581a47093e9e2d13b3f1fbbeffae064c1d21bfceba2030287"}, + {file = "PyYAML-6.0-cp38-cp38-win32.whl", hash = "sha256:d4eccecf9adf6fbcc6861a38015c2a64f38b9d94838ac1810a9023a0609e1b78"}, + {file = "PyYAML-6.0-cp38-cp38-win_amd64.whl", hash = "sha256:1e4747bc279b4f613a09eb64bba2ba602d8a6664c6ce6396a4d0cd413a50ce07"}, + {file = "PyYAML-6.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:055d937d65826939cb044fc8c9b08889e8c743fdc6a32b33e2390f66013e449b"}, + {file = "PyYAML-6.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:e61ceaab6f49fb8bdfaa0f92c4b57bcfbea54c09277b1b4f7ac376bfb7a7c174"}, + {file = "PyYAML-6.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d67d839ede4ed1b28a4e8909735fc992a923cdb84e618544973d7dfc71540803"}, + {file = "PyYAML-6.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:cba8c411ef271aa037d7357a2bc8f9ee8b58b9965831d9e51baf703280dc73d3"}, + {file = "PyYAML-6.0-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:40527857252b61eacd1d9af500c3337ba8deb8fc298940291486c465c8b46ec0"}, + {file = "PyYAML-6.0-cp39-cp39-win32.whl", hash = "sha256:b5b9eccad747aabaaffbc6064800670f0c297e52c12754eb1d976c57e4f74dcb"}, + {file = "PyYAML-6.0-cp39-cp39-win_amd64.whl", hash = "sha256:b3d267842bf12586ba6c734f89d1f5b871df0273157918b0ccefa29deb05c21c"}, + {file = "PyYAML-6.0.tar.gz", hash = "sha256:68fb519c14306fec9720a2a5b45bc9f0c8d1b9c72adf45c37baedfcd949c35a2"}, +] + +[[package]] +name = 
"restructuredtext-lint" +version = "1.3.2" +description = "reStructuredText linter" +optional = false +python-versions = "*" +files = [ + {file = "restructuredtext_lint-1.3.2.tar.gz", hash = "sha256:d3b10a1fe2ecac537e51ae6d151b223b78de9fafdd50e5eb6b08c243df173c80"}, +] + +[package.dependencies] +docutils = ">=0.11,<1.0" + +[[package]] +name = "rfc3986" +version = "1.5.0" +description = "Validating URI References per RFC 3986" +optional = false +python-versions = "*" +files = [ + {file = "rfc3986-1.5.0-py2.py3-none-any.whl", hash = "sha256:a86d6e1f5b1dc238b218b012df0aa79409667bb209e58da56d0b94704e712a97"}, + {file = "rfc3986-1.5.0.tar.gz", hash = "sha256:270aaf10d87d0d4e095063c65bf3ddbc6ee3d0b226328ce21e036f946e421835"}, +] + +[package.dependencies] +idna = {version = "*", optional = true, markers = "extra == \"idna2008\""} + +[package.extras] +idna2008 = ["idna"] + +[[package]] +name = "six" +version = "1.16.0" +description = "Python 2 and 3 compatibility utilities" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*" +files = [ + {file = "six-1.16.0-py2.py3-none-any.whl", hash = "sha256:8abb2f1d86890a2dfb989f9a77cfcfd3e47c2a354b01111771326f8aa26e0254"}, + {file = "six-1.16.0.tar.gz", hash = "sha256:1e61c37477a1626458e36f7b1d82aa5c9b094fa4802892072e49de9c60c4c926"}, +] + +[[package]] +name = "smmap" +version = "5.0.0" +description = "A pure Python implementation of a sliding window memory map manager" +optional = false +python-versions = ">=3.6" +files = [ + {file = "smmap-5.0.0-py3-none-any.whl", hash = "sha256:2aba19d6a040e78d8b09de5c57e96207b09ed71d8e55ce0959eeee6c8e190d94"}, + {file = "smmap-5.0.0.tar.gz", hash = "sha256:c840e62059cd3be204b0c9c9f74be2c09d5648eddd4580d9314c3ecde0b30936"}, +] + +[[package]] +name = "sniffio" +version = "1.2.0" +description = "Sniff out which async library your code is running under" +optional = false +python-versions = ">=3.5" +files = [ + {file = "sniffio-1.2.0-py3-none-any.whl", hash = 
"sha256:471b71698eac1c2112a40ce2752bb2f4a4814c22a54a3eed3676bc0f5ca9f663"}, + {file = "sniffio-1.2.0.tar.gz", hash = "sha256:c4666eecec1d3f50960c6bdf61ab7bc350648da6c126e3cf6898d8cd4ddcd3de"}, +] + +[[package]] +name = "snowballstemmer" +version = "2.2.0" +description = "This package provides 29 stemmers for 28 languages generated from Snowball algorithms." +optional = false +python-versions = "*" +files = [ + {file = "snowballstemmer-2.2.0-py2.py3-none-any.whl", hash = "sha256:c8e1716e83cc398ae16824e5572ae04e0d9fc2c6b985fb0f900f5f0c96ecba1a"}, + {file = "snowballstemmer-2.2.0.tar.gz", hash = "sha256:09b16deb8547d3412ad7b590689584cd0fe25ec8db3be37788be3810cbf19cb1"}, +] + +[[package]] +name = "sqlalchemy" +version = "1.4.31" +description = "Database Abstraction Library" +optional = false +python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,>=2.7" +files = [ + {file = "SQLAlchemy-1.4.31-cp27-cp27m-macosx_10_14_x86_64.whl", hash = "sha256:c3abc34fed19fdeaead0ced8cf56dd121f08198008c033596aa6aae7cc58f59f"}, + {file = "SQLAlchemy-1.4.31-cp27-cp27m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:8d0949b11681380b4a50ac3cd075e4816afe9fa4a8c8ae006c1ca26f0fa40ad8"}, + {file = "SQLAlchemy-1.4.31-cp27-cp27m-win32.whl", hash = "sha256:f3b7ec97e68b68cb1f9ddb82eda17b418f19a034fa8380a0ac04e8fe01532875"}, + {file = "SQLAlchemy-1.4.31-cp27-cp27m-win_amd64.whl", hash = "sha256:81f2dd355b57770fdf292b54f3e0a9823ec27a543f947fa2eb4ec0df44f35f0d"}, + {file = "SQLAlchemy-1.4.31-cp27-cp27mu-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:4ad31cec8b49fd718470328ad9711f4dc703507d434fd45461096da0a7135ee0"}, + {file = "SQLAlchemy-1.4.31-cp310-cp310-macosx_10_15_x86_64.whl", hash = "sha256:05fa14f279d43df68964ad066f653193187909950aa0163320b728edfc400167"}, + {file = "SQLAlchemy-1.4.31-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:dccff41478050e823271642837b904d5f9bda3f5cf7d371ce163f00a694118d6"}, + {file = 
"SQLAlchemy-1.4.31-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:57205844f246bab9b666a32f59b046add8995c665d9ecb2b7b837b087df90639"}, + {file = "SQLAlchemy-1.4.31-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ea8210090a816d48a4291a47462bac750e3bc5c2442e6d64f7b8137a7c3f9ac5"}, + {file = "SQLAlchemy-1.4.31-cp310-cp310-win32.whl", hash = "sha256:2e216c13ecc7fcdcbb86bb3225425b3ed338e43a8810c7089ddb472676124b9b"}, + {file = "SQLAlchemy-1.4.31-cp310-cp310-win_amd64.whl", hash = "sha256:e3a86b59b6227ef72ffc10d4b23f0fe994bef64d4667eab4fb8cd43de4223bec"}, + {file = "SQLAlchemy-1.4.31-cp36-cp36m-macosx_10_14_x86_64.whl", hash = "sha256:2fd4d3ca64c41dae31228b80556ab55b6489275fb204827f6560b65f95692cf3"}, + {file = "SQLAlchemy-1.4.31-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6f22c040d196f841168b1456e77c30a18a3dc16b336ddbc5a24ce01ab4e95ae0"}, + {file = "SQLAlchemy-1.4.31-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:c0c7171aa5a57e522a04a31b84798b6c926234cb559c0939840c3235cf068813"}, + {file = "SQLAlchemy-1.4.31-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d046a9aeba9bc53e88a41e58beb72b6205abb9a20f6c136161adf9128e589db5"}, + {file = "SQLAlchemy-1.4.31-cp36-cp36m-win32.whl", hash = "sha256:d86132922531f0dc5a4f424c7580a472a924dd737602638e704841c9cb24aea2"}, + {file = "SQLAlchemy-1.4.31-cp36-cp36m-win_amd64.whl", hash = "sha256:ca68c52e3cae491ace2bf39b35fef4ce26c192fd70b4cd90f040d419f70893b5"}, + {file = "SQLAlchemy-1.4.31-cp37-cp37m-macosx_10_14_x86_64.whl", hash = "sha256:cf2cd387409b12d0a8b801610d6336ee7d24043b6dd965950eaec09b73e7262f"}, + {file = "SQLAlchemy-1.4.31-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:bb4b15fb1f0aafa65cbdc62d3c2078bea1ceecbfccc9a1f23a2113c9ac1191fa"}, + {file = "SQLAlchemy-1.4.31-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:c317ddd7c586af350a6aef22b891e84b16bff1a27886ed5b30f15c1ed59caeaa"}, + {file = "SQLAlchemy-1.4.31-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3c7ed6c69debaf6198fadb1c16ae1253a29a7670bbf0646f92582eb465a0b999"}, + {file = "SQLAlchemy-1.4.31-cp37-cp37m-win32.whl", hash = "sha256:6a01ec49ca54ce03bc14e10de55dfc64187a2194b3b0e5ac0fdbe9b24767e79e"}, + {file = "SQLAlchemy-1.4.31-cp37-cp37m-win_amd64.whl", hash = "sha256:330eb45395874cc7787214fdd4489e2afb931bc49e0a7a8f9cd56d6e9c5b1639"}, + {file = "SQLAlchemy-1.4.31-cp38-cp38-macosx_10_14_x86_64.whl", hash = "sha256:5e9c7b3567edbc2183607f7d9f3e7e89355b8f8984eec4d2cd1e1513c8f7b43f"}, + {file = "SQLAlchemy-1.4.31-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:de85c26a5a1c72e695ab0454e92f60213b4459b8d7c502e0be7a6369690eeb1a"}, + {file = "SQLAlchemy-1.4.31-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:975f5c0793892c634c4920057da0de3a48bbbbd0a5c86f5fcf2f2fedf41b76da"}, + {file = "SQLAlchemy-1.4.31-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d5c20c8415173b119762b6110af64448adccd4d11f273fb9f718a9865b88a99c"}, + {file = "SQLAlchemy-1.4.31-cp38-cp38-win32.whl", hash = "sha256:b35dca159c1c9fa8a5f9005e42133eed82705bf8e243da371a5e5826440e65ca"}, + {file = "SQLAlchemy-1.4.31-cp38-cp38-win_amd64.whl", hash = "sha256:b7b20c88873675903d6438d8b33fba027997193e274b9367421e610d9da76c08"}, + {file = "SQLAlchemy-1.4.31-cp39-cp39-macosx_10_15_x86_64.whl", hash = "sha256:85e4c244e1de056d48dae466e9baf9437980c19fcde493e0db1a0a986e6d75b4"}, + {file = 
"SQLAlchemy-1.4.31-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e79e73d5ee24196d3057340e356e6254af4d10e1fc22d3207ea8342fc5ffb977"}, + {file = "SQLAlchemy-1.4.31-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:15a03261aa1e68f208e71ae3cd845b00063d242cbf8c87348a0c2c0fc6e1f2ac"}, + {file = "SQLAlchemy-1.4.31-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0ddc5e5ccc0160e7ad190e5c61eb57560f38559e22586955f205e537cda26034"}, + {file = "SQLAlchemy-1.4.31-cp39-cp39-win32.whl", hash = "sha256:289465162b1fa1e7a982f8abe59d26a8331211cad4942e8031d2b7db1f75e649"}, + {file = "SQLAlchemy-1.4.31-cp39-cp39-win_amd64.whl", hash = "sha256:9e4fb2895b83993831ba2401b6404de953fdbfa9d7d4fa6a4756294a83bbc94f"}, + {file = "SQLAlchemy-1.4.31.tar.gz", hash = "sha256:582b59d1e5780a447aada22b461e50b404a9dc05768da1d87368ad8190468418"}, +] + +[package.dependencies] +greenlet = {version = "!=0.4.17", markers = "python_version >= \"3\" and (platform_machine == \"aarch64\" or platform_machine == \"ppc64le\" or platform_machine == \"x86_64\" or platform_machine == \"amd64\" or platform_machine == \"AMD64\" or platform_machine == \"win32\" or platform_machine == \"WIN32\")"} + +[package.extras] +aiomysql = ["aiomysql", "greenlet (!=0.4.17)"] +aiosqlite = ["aiosqlite", "greenlet (!=0.4.17)", "typing-extensions (!=3.10.0.1)"] +asyncio = ["greenlet (!=0.4.17)"] +asyncmy = ["asyncmy (>=0.2.3)", "greenlet (!=0.4.17)"] +mariadb-connector = ["mariadb (>=1.0.1)"] +mssql = ["pyodbc"] +mssql-pymssql = ["pymssql"] +mssql-pyodbc = ["pyodbc"] +mypy = ["mypy (>=0.910)", "sqlalchemy2-stubs"] +mysql = ["mysqlclient (>=1.4.0)", "mysqlclient (>=1.4.0,<2)"] +mysql-connector = ["mysql-connector-python"] +oracle = ["cx-oracle (>=7)", "cx-oracle (>=7,<8)"] +postgresql = ["psycopg2 (>=2.7)"] +postgresql-asyncpg = ["asyncpg", "greenlet (!=0.4.17)"] +postgresql-pg8000 = 
["pg8000 (>=1.16.6)"] +postgresql-psycopg2binary = ["psycopg2-binary"] +postgresql-psycopg2cffi = ["psycopg2cffi"] +pymysql = ["pymysql", "pymysql (<1)"] +sqlcipher = ["sqlcipher3-binary"] + +[[package]] +name = "starlette" +version = "0.19.1" +description = "The little ASGI library that shines." +optional = false +python-versions = ">=3.6" +files = [ + {file = "starlette-0.19.1-py3-none-any.whl", hash = "sha256:5a60c5c2d051f3a8eb546136aa0c9399773a689595e099e0877704d5888279bf"}, + {file = "starlette-0.19.1.tar.gz", hash = "sha256:c6d21096774ecb9639acad41b86b7706e52ba3bf1dc13ea4ed9ad593d47e24c7"}, +] + +[package.dependencies] +anyio = ">=3.4.0,<5" +typing-extensions = {version = ">=3.10.0", markers = "python_version < \"3.10\""} + +[package.extras] +full = ["itsdangerous", "jinja2", "python-multipart", "pyyaml", "requests"] + +[[package]] +name = "stevedore" +version = "3.5.0" +description = "Manage dynamic plugins for Python applications" +optional = false +python-versions = ">=3.6" +files = [ + {file = "stevedore-3.5.0-py3-none-any.whl", hash = "sha256:a547de73308fd7e90075bb4d301405bebf705292fa90a90fc3bcf9133f58616c"}, + {file = "stevedore-3.5.0.tar.gz", hash = "sha256:f40253887d8712eaa2bb0ea3830374416736dc8ec0e22f5a65092c1174c44335"}, +] + +[package.dependencies] +pbr = ">=2.0.0,<2.1.0 || >2.1.0" + +[[package]] +name = "testfixtures" +version = "6.18.3" +description = "A collection of helpers and mock objects for unit tests and doc tests." 
+optional = false +python-versions = "*" +files = [ + {file = "testfixtures-6.18.3-py2.py3-none-any.whl", hash = "sha256:6ddb7f56a123e1a9339f130a200359092bd0a6455e31838d6c477e8729bb7763"}, + {file = "testfixtures-6.18.3.tar.gz", hash = "sha256:2600100ae96ffd082334b378e355550fef8b4a529a6fa4c34f47130905c7426d"}, +] + +[package.extras] +build = ["setuptools-git", "twine", "wheel"] +docs = ["django", "django (<2)", "mock", "sphinx", "sybil", "twisted", "zope.component"] +test = ["django", "django (<2)", "mock", "pytest (>=3.6)", "pytest-cov", "pytest-django", "sybil", "twisted", "zope.component"] + +[[package]] +name = "text-unidecode" +version = "1.3" +description = "The most basic Text::Unidecode port" +optional = false +python-versions = "*" +files = [ + {file = "text-unidecode-1.3.tar.gz", hash = "sha256:bad6603bb14d279193107714b288be206cac565dfa49aa5b105294dd5c4aab93"}, + {file = "text_unidecode-1.3-py2.py3-none-any.whl", hash = "sha256:1311f10e8b895935241623731c2ba64f4c455287888b18189350b67134a822e8"}, +] + +[[package]] +name = "tomli" +version = "2.0.0" +description = "A lil' TOML parser" +optional = false +python-versions = ">=3.7" +files = [ + {file = "tomli-2.0.0-py3-none-any.whl", hash = "sha256:b5bde28da1fed24b9bd1d4d2b8cba62300bfb4ec9a6187a957e8ddb9434c5224"}, + {file = "tomli-2.0.0.tar.gz", hash = "sha256:c292c34f58502a1eb2bbb9f5bbc9a5ebc37bee10ffb8c2d6bbdfa8eb13cc14e1"}, +] + +[[package]] +name = "typing-extensions" +version = "3.10.0.2" +description = "Backported and Experimental Type Hints for Python 3.5+" +optional = false +python-versions = "*" +files = [ + {file = "typing_extensions-3.10.0.2-py2-none-any.whl", hash = "sha256:d8226d10bc02a29bcc81df19a26e56a9647f8b0a6d4a83924139f4a8b01f17b7"}, + {file = "typing_extensions-3.10.0.2-py3-none-any.whl", hash = "sha256:f1d25edafde516b146ecd0613dabcc61409817af4766fbbcfb8d1ad4ec441a34"}, + {file = "typing_extensions-3.10.0.2.tar.gz", hash = 
"sha256:49f75d16ff11f1cd258e1b988ccff82a3ca5570217d7ad8c5f48205dd99a677e"}, +] + +[[package]] +name = "unidecode" +version = "1.3.4" +description = "ASCII transliterations of Unicode text" +optional = false +python-versions = ">=3.5" +files = [ + {file = "Unidecode-1.3.4-py3-none-any.whl", hash = "sha256:afa04efcdd818a93237574791be9b2817d7077c25a068b00f8cff7baa4e59257"}, + {file = "Unidecode-1.3.4.tar.gz", hash = "sha256:8e4352fb93d5a735c788110d2e7ac8e8031eb06ccbfe8d324ab71735015f9342"}, +] + +[[package]] +name = "uvicorn" +version = "0.18.2" +description = "The lightning-fast ASGI server." +optional = false +python-versions = ">=3.7" +files = [ + {file = "uvicorn-0.18.2-py3-none-any.whl", hash = "sha256:c19a057deb1c5bb060946e2e5c262fc01590c6529c0af2c3d9ce941e89bc30e0"}, + {file = "uvicorn-0.18.2.tar.gz", hash = "sha256:cade07c403c397f9fe275492a48c1b869efd175d5d8a692df649e6e7e2ed8f4e"}, +] + +[package.dependencies] +click = ">=7.0" +h11 = ">=0.8" + +[package.extras] +standard = ["PyYAML (>=5.1)", "colorama (>=0.4)", "httptools (>=0.4.0)", "python-dotenv (>=0.13)", "uvloop (>=0.14.0,!=0.15.0,!=0.15.1)", "watchfiles (>=0.13)", "websockets (>=10.0)"] + +[[package]] +name = "wemake-python-styleguide" +version = "0.16.1" +description = "The strictest and most opinionated python linter ever" +optional = false +python-versions = ">=3.6,<4.0" +files = [ + {file = "wemake-python-styleguide-0.16.1.tar.gz", hash = "sha256:4fcd78dd55732679b5fc8bc37fd7e04bbaa5cdc1b1a829ad265e8f6b0d853cf6"}, + {file = "wemake_python_styleguide-0.16.1-py3-none-any.whl", hash = "sha256:202c22ecfee1f5caf0555048602cd52f2435cd57903e6b0cd46b5aaa3f652140"}, +] + +[package.dependencies] +astor = ">=0.8,<0.9" +attrs = "*" +darglint = ">=1.2,<2.0" +flake8 = ">=3.7,<5" +flake8-bandit = ">=2.1,<4" +flake8-broken-line = ">=0.3,<0.5" +flake8-bugbear = ">=20.1,<23.0" +flake8-commas = ">=2.0,<3.0" +flake8-comprehensions = ">=3.1,<4.0" +flake8-debugger = ">=4.0,<5.0" +flake8-docstrings = ">=1.3,<2.0" 
+flake8-eradicate = ">=1.0,<2.0" +flake8-isort = ">=4.0,<5.0" +flake8-quotes = ">=3.0,<4.0" +flake8-rst-docstrings = ">=0.2,<0.3" +flake8-string-format = ">=0.3,<0.4" +pep8-naming = ">=0.11,<0.13" +pygments = ">=2.4,<3.0" +typing_extensions = ">=3.6,<5.0" + +[[package]] +name = "win32-setctime" +version = "1.1.0" +description = "A small Python utility to set file creation time on Windows" +optional = false +python-versions = ">=3.5" +files = [ + {file = "win32_setctime-1.1.0-py3-none-any.whl", hash = "sha256:231db239e959c2fe7eb1d7dc129f11172354f98361c4fa2d6d2d7e278baa8aad"}, + {file = "win32_setctime-1.1.0.tar.gz", hash = "sha256:15cf5750465118d6929ae4de4eb46e8edae9a5634350c01ba582df868e932cb2"}, +] + +[package.extras] +dev = ["black (>=19.3b0)", "pytest (>=4.6.2)"] + +[metadata] +lock-version = "2.0" +python-versions = "^3.9" +content-hash = "0ace1defba055bbfdaec67c0468aea75b31bffb2009f8b2c97659366c23a27ab" diff --git a/backend/postman/Conduit.postman_collection.json b/backend/postman/Conduit.postman_collection.json new file mode 100644 index 0000000..98e2021 --- /dev/null +++ b/backend/postman/Conduit.postman_collection.json @@ -0,0 +1,2136 @@ +{ + "info": { + "_postman_id": "0574ad8a-a525-43ae-8e1e-5fd9756037f4", + "name": "Conduit", + "description": "Collection for testing the Conduit API\n\nhttps://github.com/gothinkster/realworld", + "schema": "https://schema.getpostman.com/json/collection/v2.1.0/collection.json" + }, + "item": [ + { + "name": "Auth", + "item": [ + { + "name": "Register", + "event": [ + { + "listen": "test", + "script": { + "type": "text/javascript", + "exec": [ + "if (!(environment.isIntegrationTest)) {", + "var responseJSON = JSON.parse(responseBody);", + "", + "tests['Response contains \"user\" property'] = responseJSON.hasOwnProperty('user');", + "", + "var user = responseJSON.user || {};", + "", + "tests['User has \"email\" property'] = user.hasOwnProperty('email');", + "tests['User has \"username\" property'] = 
user.hasOwnProperty('username');", + "tests['User has \"bio\" property'] = user.hasOwnProperty('bio');", + "tests['User has \"image\" property'] = user.hasOwnProperty('image');", + "tests['User has \"token\" property'] = user.hasOwnProperty('token');", + "}", + "" + ] + } + } + ], + "request": { + "method": "POST", + "header": [ + { + "key": "Content-Type", + "value": "application/json" + }, + { + "key": "X-Requested-With", + "value": "XMLHttpRequest" + } + ], + "body": { + "mode": "raw", + "raw": "{\"user\":{\"email\":\"{{EMAIL}}\", \"password\":\"{{PASSWORD}}\", \"username\":\"{{USERNAME}}\"}}" + }, + "url": { + "raw": "{{APIURL}}/users", + "host": [ + "{{APIURL}}" + ], + "path": [ + "users" + ] + } + }, + "response": [] + }, + { + "name": "Login", + "event": [ + { + "listen": "test", + "script": { + "type": "text/javascript", + "exec": [ + "var responseJSON = JSON.parse(responseBody);", + "", + "tests['Response contains \"user\" property'] = responseJSON.hasOwnProperty('user');", + "", + "var user = responseJSON.user || {};", + "", + "tests['User has \"email\" property'] = user.hasOwnProperty('email');", + "tests['User has \"username\" property'] = user.hasOwnProperty('username');", + "tests['User has \"bio\" property'] = user.hasOwnProperty('bio');", + "tests['User has \"image\" property'] = user.hasOwnProperty('image');", + "tests['User has \"token\" property'] = user.hasOwnProperty('token');", + "" + ] + } + } + ], + "request": { + "method": "POST", + "header": [ + { + "key": "Content-Type", + "value": "application/json" + }, + { + "key": "X-Requested-With", + "value": "XMLHttpRequest" + } + ], + "body": { + "mode": "raw", + "raw": "{\"user\":{\"email\":\"{{EMAIL}}\", \"password\":\"{{PASSWORD}}\"}}" + }, + "url": { + "raw": "{{APIURL}}/users/login", + "host": [ + "{{APIURL}}" + ], + "path": [ + "users", + "login" + ] + } + }, + "response": [] + }, + { + "name": "Login and Remember Token", + "event": [ + { + "listen": "test", + "script": { + "id": 
"a7674032-bf09-4ae7-8224-4afa2fb1a9f9", + "type": "text/javascript", + "exec": [ + "var responseJSON = JSON.parse(responseBody);", + "", + "tests['Response contains \"user\" property'] = responseJSON.hasOwnProperty('user');", + "", + "var user = responseJSON.user || {};", + "", + "tests['User has \"email\" property'] = user.hasOwnProperty('email');", + "tests['User has \"username\" property'] = user.hasOwnProperty('username');", + "tests['User has \"bio\" property'] = user.hasOwnProperty('bio');", + "tests['User has \"image\" property'] = user.hasOwnProperty('image');", + "tests['User has \"token\" property'] = user.hasOwnProperty('token');", + "", + "if(tests['User has \"token\" property']){", + " pm.globals.set('token', user.token);", + "}", + "", + "tests['Global variable \"token\" has been set'] = pm.globals.get('token') === user.token;", + "" + ] + } + } + ], + "request": { + "method": "POST", + "header": [ + { + "key": "Content-Type", + "value": "application/json" + }, + { + "key": "X-Requested-With", + "value": "XMLHttpRequest" + } + ], + "body": { + "mode": "raw", + "raw": "{\"user\":{\"email\":\"{{EMAIL}}\", \"password\":\"{{PASSWORD}}\"}}" + }, + "url": { + "raw": "{{APIURL}}/users/login", + "host": [ + "{{APIURL}}" + ], + "path": [ + "users", + "login" + ] + } + }, + "response": [] + }, + { + "name": "Current User", + "event": [ + { + "listen": "test", + "script": { + "type": "text/javascript", + "exec": [ + "var responseJSON = JSON.parse(responseBody);", + "", + "tests['Response contains \"user\" property'] = responseJSON.hasOwnProperty('user');", + "", + "var user = responseJSON.user || {};", + "", + "tests['User has \"email\" property'] = user.hasOwnProperty('email');", + "tests['User has \"username\" property'] = user.hasOwnProperty('username');", + "tests['User has \"bio\" property'] = user.hasOwnProperty('bio');", + "tests['User has \"image\" property'] = user.hasOwnProperty('image');", + "tests['User has \"token\" property'] = 
user.hasOwnProperty('token');", + "" + ] + } + } + ], + "request": { + "method": "GET", + "header": [ + { + "key": "Content-Type", + "value": "application/json" + }, + { + "key": "X-Requested-With", + "value": "XMLHttpRequest" + }, + { + "key": "Authorization", + "value": "Token {{token}}" + } + ], + "body": { + "mode": "raw", + "raw": "" + }, + "url": { + "raw": "{{APIURL}}/user", + "host": [ + "{{APIURL}}" + ], + "path": [ + "user" + ] + } + }, + "response": [] + }, + { + "name": "Update User", + "event": [ + { + "listen": "test", + "script": { + "type": "text/javascript", + "exec": [ + "var responseJSON = JSON.parse(responseBody);", + "", + "tests['Response contains \"user\" property'] = responseJSON.hasOwnProperty('user');", + "", + "var user = responseJSON.user || {};", + "", + "tests['User has \"email\" property'] = user.hasOwnProperty('email');", + "tests['User has \"username\" property'] = user.hasOwnProperty('username');", + "tests['User has \"bio\" property'] = user.hasOwnProperty('bio');", + "tests['User has \"image\" property'] = user.hasOwnProperty('image');", + "tests['User has \"token\" property'] = user.hasOwnProperty('token');", + "" + ] + } + } + ], + "request": { + "method": "PUT", + "header": [ + { + "key": "Content-Type", + "value": "application/json" + }, + { + "key": "X-Requested-With", + "value": "XMLHttpRequest" + }, + { + "key": "Authorization", + "value": "Token {{token}}" + } + ], + "body": { + "mode": "raw", + "raw": "{\"user\":{\"email\":\"{{EMAIL}}\"}}" + }, + "url": { + "raw": "{{APIURL}}/user", + "host": [ + "{{APIURL}}" + ], + "path": [ + "user" + ] + } + }, + "response": [] + } + ] + }, + { + "name": "Articles", + "item": [ + { + "name": "All Articles", + "event": [ + { + "listen": "test", + "script": { + "type": "text/javascript", + "exec": [ + "var is200Response = responseCode.code === 200;", + "", + "tests['Response code is 200 OK'] = is200Response;", + "", + "if(is200Response){", + " var responseJSON = 
JSON.parse(responseBody);", + "", + " tests['Response contains \"articles\" property'] = responseJSON.hasOwnProperty('articles');", + " tests['Response contains \"articlesCount\" property'] = responseJSON.hasOwnProperty('articlesCount');", + " tests['articlesCount is an integer'] = Number.isInteger(responseJSON.articlesCount);", + "", + " if(responseJSON.articles.length){", + " var article = responseJSON.articles[0];", + "", + " tests['Article has \"title\" property'] = article.hasOwnProperty('title');", + " tests['Article has \"slug\" property'] = article.hasOwnProperty('slug');", + " tests['Article has \"body\" property'] = article.hasOwnProperty('body');", + " tests['Article has \"createdAt\" property'] = article.hasOwnProperty('createdAt');", + " tests['Article\\'s \"createdAt\" property is an ISO 8601 timestamp'] = /^\\d{4,}-[01]\\d-[0-3]\\dT[0-2]\\d:[0-5]\\d:[0-5]\\d.\\d+(?:[+-][0-2]\\d:[0-5]\\d|Z)$/.test(article.createdAt);", + " tests['Article has \"updatedAt\" property'] = article.hasOwnProperty('updatedAt');", + " tests['Article\\'s \"updatedAt\" property is an ISO 8601 timestamp'] = /^\\d{4,}-[01]\\d-[0-3]\\dT[0-2]\\d:[0-5]\\d:[0-5]\\d.\\d+(?:[+-][0-2]\\d:[0-5]\\d|Z)$/.test(article.updatedAt);", + " tests['Article has \"description\" property'] = article.hasOwnProperty('description');", + " tests['Article has \"tagList\" property'] = article.hasOwnProperty('tagList');", + " tests['Article\\'s \"tagList\" property is an Array'] = Array.isArray(article.tagList);", + " tests['Article has \"author\" property'] = article.hasOwnProperty('author');", + " tests['Article has \"favorited\" property'] = article.hasOwnProperty('favorited');", + " tests['Article has \"favoritesCount\" property'] = article.hasOwnProperty('favoritesCount');", + " tests['favoritesCount is an integer'] = Number.isInteger(article.favoritesCount);", + " } else {", + " tests['articlesCount is 0 when feed is empty'] = responseJSON.articlesCount === 0;", + " }", + "}", + "" + ] + } + } + ], + 
"request": { + "method": "GET", + "header": [ + { + "key": "Content-Type", + "value": "application/json" + }, + { + "key": "X-Requested-With", + "value": "XMLHttpRequest" + } + ], + "body": { + "mode": "raw", + "raw": "" + }, + "url": { + "raw": "{{APIURL}}/articles", + "host": [ + "{{APIURL}}" + ], + "path": [ + "articles" + ] + } + }, + "response": [] + }, + { + "name": "Articles by Author", + "event": [ + { + "listen": "test", + "script": { + "type": "text/javascript", + "exec": [ + "var is200Response = responseCode.code === 200;", + "", + "tests['Response code is 200 OK'] = is200Response;", + "", + "if(is200Response){", + " var responseJSON = JSON.parse(responseBody);", + "", + " tests['Response contains \"articles\" property'] = responseJSON.hasOwnProperty('articles');", + " tests['Response contains \"articlesCount\" property'] = responseJSON.hasOwnProperty('articlesCount');", + " tests['articlesCount is an integer'] = Number.isInteger(responseJSON.articlesCount);", + "", + " if(responseJSON.articles.length){", + " var article = responseJSON.articles[0];", + "", + " tests['Article has \"title\" property'] = article.hasOwnProperty('title');", + " tests['Article has \"slug\" property'] = article.hasOwnProperty('slug');", + " tests['Article has \"body\" property'] = article.hasOwnProperty('body');", + " tests['Article has \"createdAt\" property'] = article.hasOwnProperty('createdAt');", + " tests['Article\\'s \"createdAt\" property is an ISO 8601 timestamp'] = /^\\d{4,}-[01]\\d-[0-3]\\dT[0-2]\\d:[0-5]\\d:[0-5]\\d.\\d+(?:[+-][0-2]\\d:[0-5]\\d|Z)$/.test(article.createdAt);", + " tests['Article has \"updatedAt\" property'] = article.hasOwnProperty('updatedAt');", + " tests['Article\\'s \"updatedAt\" property is an ISO 8601 timestamp'] = /^\\d{4,}-[01]\\d-[0-3]\\dT[0-2]\\d:[0-5]\\d:[0-5]\\d.\\d+(?:[+-][0-2]\\d:[0-5]\\d|Z)$/.test(article.updatedAt);", + " tests['Article has \"description\" property'] = article.hasOwnProperty('description');", + " tests['Article has 
\"tagList\" property'] = article.hasOwnProperty('tagList');", + " tests['Article\\'s \"tagList\" property is an Array'] = Array.isArray(article.tagList);", + " tests['Article has \"author\" property'] = article.hasOwnProperty('author');", + " tests['Article has \"favorited\" property'] = article.hasOwnProperty('favorited');", + " tests['Article has \"favoritesCount\" property'] = article.hasOwnProperty('favoritesCount');", + " tests['favoritesCount is an integer'] = Number.isInteger(article.favoritesCount);", + " } else {", + " tests['articlesCount is 0 when feed is empty'] = responseJSON.articlesCount === 0;", + " }", + "}", + "" + ] + } + } + ], + "request": { + "method": "GET", + "header": [ + { + "key": "Content-Type", + "value": "application/json" + }, + { + "key": "X-Requested-With", + "value": "XMLHttpRequest" + } + ], + "body": { + "mode": "raw", + "raw": "" + }, + "url": { + "raw": "{{APIURL}}/articles?author=johnjacob", + "host": [ + "{{APIURL}}" + ], + "path": [ + "articles" + ], + "query": [ + { + "key": "author", + "value": "johnjacob" + } + ] + } + }, + "response": [] + }, + { + "name": "Articles Favorited by Username", + "event": [ + { + "listen": "test", + "script": { + "type": "text/javascript", + "exec": [ + "var is200Response = responseCode.code === 200;", + "", + "tests['Response code is 200 OK'] = is200Response;", + "", + "if(is200Response){", + " var responseJSON = JSON.parse(responseBody);", + " ", + " tests['Response contains \"articles\" property'] = responseJSON.hasOwnProperty('articles');", + " tests['Response contains \"articlesCount\" property'] = responseJSON.hasOwnProperty('articlesCount');", + " tests['articlesCount is an integer'] = Number.isInteger(responseJSON.articlesCount);", + "", + " if(responseJSON.articles.length){", + " var article = responseJSON.articles[0];", + "", + " tests['Article has \"title\" property'] = article.hasOwnProperty('title');", + " tests['Article has \"slug\" property'] = article.hasOwnProperty('slug');", 
+ " tests['Article has \"body\" property'] = article.hasOwnProperty('body');", + " tests['Article has \"createdAt\" property'] = article.hasOwnProperty('createdAt');", + " tests['Article\\'s \"createdAt\" property is an ISO 8601 timestamp'] = /^\\d{4,}-[01]\\d-[0-3]\\dT[0-2]\\d:[0-5]\\d:[0-5]\\d.\\d+(?:[+-][0-2]\\d:[0-5]\\d|Z)$/.test(article.createdAt);", + " tests['Article has \"updatedAt\" property'] = article.hasOwnProperty('updatedAt');", + " tests['Article\\'s \"updatedAt\" property is an ISO 8601 timestamp'] = /^\\d{4,}-[01]\\d-[0-3]\\dT[0-2]\\d:[0-5]\\d:[0-5]\\d.\\d+(?:[+-][0-2]\\d:[0-5]\\d|Z)$/.test(article.updatedAt);", + " tests['Article has \"description\" property'] = article.hasOwnProperty('description');", + " tests['Article has \"tagList\" property'] = article.hasOwnProperty('tagList');", + " tests['Article\\'s \"tagList\" property is an Array'] = Array.isArray(article.tagList);", + " tests['Article has \"author\" property'] = article.hasOwnProperty('author');", + " tests['Article has \"favorited\" property'] = article.hasOwnProperty('favorited');", + " tests['Article has \"favoritesCount\" property'] = article.hasOwnProperty('favoritesCount');", + " tests['favoritesCount is an integer'] = Number.isInteger(article.favoritesCount);", + " } else {", + " tests['articlesCount is 0 when feed is empty'] = responseJSON.articlesCount === 0;", + " }", + "}", + "" + ] + } + } + ], + "request": { + "method": "GET", + "header": [ + { + "key": "Content-Type", + "value": "application/json" + }, + { + "key": "X-Requested-With", + "value": "XMLHttpRequest" + } + ], + "body": { + "mode": "raw", + "raw": "" + }, + "url": { + "raw": "{{APIURL}}/articles?favorited=jane", + "host": [ + "{{APIURL}}" + ], + "path": [ + "articles" + ], + "query": [ + { + "key": "favorited", + "value": "jane" + } + ] + } + }, + "response": [] + }, + { + "name": "Articles by Tag", + "event": [ + { + "listen": "test", + "script": { + "type": "text/javascript", + "exec": [ + "var is200Response 
= responseCode.code === 200;", + "", + "tests['Response code is 200 OK'] = is200Response;", + "", + "if(is200Response){", + " var responseJSON = JSON.parse(responseBody);", + "", + " tests['Response contains \"articles\" property'] = responseJSON.hasOwnProperty('articles');", + " tests['Response contains \"articlesCount\" property'] = responseJSON.hasOwnProperty('articlesCount');", + " tests['articlesCount is an integer'] = Number.isInteger(responseJSON.articlesCount);", + "", + " if(responseJSON.articles.length){", + " var article = responseJSON.articles[0];", + "", + " tests['Article has \"title\" property'] = article.hasOwnProperty('title');", + " tests['Article has \"slug\" property'] = article.hasOwnProperty('slug');", + " tests['Article has \"body\" property'] = article.hasOwnProperty('body');", + " tests['Article has \"createdAt\" property'] = article.hasOwnProperty('createdAt');", + " tests['Article\\'s \"createdAt\" property is an ISO 8601 timestamp'] = /^\\d{4,}-[01]\\d-[0-3]\\dT[0-2]\\d:[0-5]\\d:[0-5]\\d.\\d+(?:[+-][0-2]\\d:[0-5]\\d|Z)$/.test(article.createdAt);", + " tests['Article has \"updatedAt\" property'] = article.hasOwnProperty('updatedAt');", + " tests['Article\\'s \"updatedAt\" property is an ISO 8601 timestamp'] = /^\\d{4,}-[01]\\d-[0-3]\\dT[0-2]\\d:[0-5]\\d:[0-5]\\d.\\d+(?:[+-][0-2]\\d:[0-5]\\d|Z)$/.test(article.updatedAt);", + " tests['Article has \"description\" property'] = article.hasOwnProperty('description');", + " tests['Article has \"tagList\" property'] = article.hasOwnProperty('tagList');", + " tests['Article\\'s \"tagList\" property is an Array'] = Array.isArray(article.tagList);", + " tests['Article has \"author\" property'] = article.hasOwnProperty('author');", + " tests['Article has \"favorited\" property'] = article.hasOwnProperty('favorited');", + " tests['Article has \"favoritesCount\" property'] = article.hasOwnProperty('favoritesCount');", + " tests['favoritesCount is an integer'] = 
Number.isInteger(article.favoritesCount);", + " } else {", + " tests['articlesCount is 0 when feed is empty'] = responseJSON.articlesCount === 0;", + " }", + "}", + "" + ] + } + } + ], + "request": { + "method": "GET", + "header": [ + { + "key": "Content-Type", + "value": "application/json" + }, + { + "key": "X-Requested-With", + "value": "XMLHttpRequest" + } + ], + "body": { + "mode": "raw", + "raw": "" + }, + "url": { + "raw": "{{APIURL}}/articles?tag=dragons", + "host": [ + "{{APIURL}}" + ], + "path": [ + "articles" + ], + "query": [ + { + "key": "tag", + "value": "dragons" + } + ] + } + }, + "response": [] + } + ] + }, + { + "name": "Articles, Favorite, Comments", + "item": [ + { + "name": "Create Article", + "event": [ + { + "listen": "test", + "script": { + "id": "e711dbf8-8065-4ba8-8b74-f1639a7d8208", + "type": "text/javascript", + "exec": [ + "var responseJSON = JSON.parse(responseBody);", + "", + "tests['Response contains \"article\" property'] = responseJSON.hasOwnProperty('article');", + "", + "var article = responseJSON.article || {};", + "", + "tests['Article has \"title\" property'] = article.hasOwnProperty('title');", + "tests['Article has \"slug\" property'] = article.hasOwnProperty('slug');", + "pm.globals.set('slug', article.slug);", + "", + "tests['Article has \"body\" property'] = article.hasOwnProperty('body');", + "tests['Article has \"createdAt\" property'] = article.hasOwnProperty('createdAt');", + "tests['Article\\'s \"createdAt\" property is an ISO 8601 timestamp'] = /^\\d{4,}-[01]\\d-[0-3]\\dT[0-2]\\d:[0-5]\\d:[0-5]\\d.\\d+(?:[+-][0-2]\\d:[0-5]\\d|Z)$/.test(article.createdAt);", + "tests['Article has \"updatedAt\" property'] = article.hasOwnProperty('updatedAt');", + "tests['Article\\'s \"updatedAt\" property is an ISO 8601 timestamp'] = /^\\d{4,}-[01]\\d-[0-3]\\dT[0-2]\\d:[0-5]\\d:[0-5]\\d.\\d+(?:[+-][0-2]\\d:[0-5]\\d|Z)$/.test(article.updatedAt);", + "tests['Article has \"description\" property'] = 
article.hasOwnProperty('description');", + "tests['Article has \"tagList\" property'] = article.hasOwnProperty('tagList');", + "tests['Article\\'s \"tagList\" property is an Array'] = Array.isArray(article.tagList);", + "tests['Article has \"author\" property'] = article.hasOwnProperty('author');", + "tests['Article has \"favorited\" property'] = article.hasOwnProperty('favorited');", + "tests['Article has \"favoritesCount\" property'] = article.hasOwnProperty('favoritesCount');", + "tests['favoritesCount is an integer'] = Number.isInteger(article.favoritesCount);", + "" + ] + } + } + ], + "request": { + "method": "POST", + "header": [ + { + "key": "Content-Type", + "value": "application/json" + }, + { + "key": "X-Requested-With", + "value": "XMLHttpRequest" + }, + { + "key": "Authorization", + "value": "Token {{token}}" + } + ], + "body": { + "mode": "raw", + "raw": "{\"article\":{\"title\":\"How to train your dragon\", \"description\":\"Ever wonder how?\", \"body\":\"Very carefully.\", \"tagList\":[\"dragons\",\"training\"]}}" + }, + "url": { + "raw": "{{APIURL}}/articles", + "host": [ + "{{APIURL}}" + ], + "path": [ + "articles" + ] + } + }, + "response": [] + }, + { + "name": "Feed", + "event": [ + { + "listen": "test", + "script": { + "type": "text/javascript", + "exec": [ + "var is200Response = responseCode.code === 200;", + "", + "tests['Response code is 200 OK'] = is200Response;", + "", + "if(is200Response){", + " var responseJSON = JSON.parse(responseBody);", + "", + " tests['Response contains \"articles\" property'] = responseJSON.hasOwnProperty('articles');", + " tests['Response contains \"articlesCount\" property'] = responseJSON.hasOwnProperty('articlesCount');", + " tests['articlesCount is an integer'] = Number.isInteger(responseJSON.articlesCount);", + "", + " if(responseJSON.articles.length){", + " var article = responseJSON.articles[0];", + "", + " tests['Article has \"title\" property'] = article.hasOwnProperty('title');", + " tests['Article has 
\"slug\" property'] = article.hasOwnProperty('slug');", + " tests['Article has \"body\" property'] = article.hasOwnProperty('body');", + " tests['Article has \"createdAt\" property'] = article.hasOwnProperty('createdAt');", + " tests['Article\\'s \"createdAt\" property is an ISO 8601 timestamp'] = /^\\d{4,}-[01]\\d-[0-3]\\dT[0-2]\\d:[0-5]\\d:[0-5]\\d.\\d+(?:[+-][0-2]\\d:[0-5]\\d|Z)$/.test(article.createdAt);", + " tests['Article has \"updatedAt\" property'] = article.hasOwnProperty('updatedAt');", + " tests['Article\\'s \"updatedAt\" property is an ISO 8601 timestamp'] = /^\\d{4,}-[01]\\d-[0-3]\\dT[0-2]\\d:[0-5]\\d:[0-5]\\d.\\d+(?:[+-][0-2]\\d:[0-5]\\d|Z)$/.test(article.updatedAt);", + " tests['Article has \"description\" property'] = article.hasOwnProperty('description');", + " tests['Article has \"tagList\" property'] = article.hasOwnProperty('tagList');", + " tests['Article\\'s \"tagList\" property is an Array'] = Array.isArray(article.tagList);", + " tests['Article has \"author\" property'] = article.hasOwnProperty('author');", + " tests['Article has \"favorited\" property'] = article.hasOwnProperty('favorited');", + " tests['Article has \"favoritesCount\" property'] = article.hasOwnProperty('favoritesCount');", + " tests['favoritesCount is an integer'] = Number.isInteger(article.favoritesCount);", + " } else {", + " tests['articlesCount is 0 when feed is empty'] = responseJSON.articlesCount === 0;", + " }", + "}", + "" + ] + } + } + ], + "request": { + "method": "GET", + "header": [ + { + "key": "Content-Type", + "value": "application/json" + }, + { + "key": "X-Requested-With", + "value": "XMLHttpRequest" + }, + { + "key": "Authorization", + "value": "Token {{token}}" + } + ], + "body": { + "mode": "raw", + "raw": "" + }, + "url": { + "raw": "{{APIURL}}/articles/feed", + "host": [ + "{{APIURL}}" + ], + "path": [ + "articles", + "feed" + ] + } + }, + "response": [] + }, + { + "name": "All Articles", + "event": [ + { + "listen": "test", + "script": { + "type": 
"text/javascript", + "exec": [ + "var is200Response = responseCode.code === 200;", + "", + "tests['Response code is 200 OK'] = is200Response;", + "", + "if(is200Response){", + " var responseJSON = JSON.parse(responseBody);", + "", + " tests['Response contains \"articles\" property'] = responseJSON.hasOwnProperty('articles');", + " tests['Response contains \"articlesCount\" property'] = responseJSON.hasOwnProperty('articlesCount');", + " tests['articlesCount is an integer'] = Number.isInteger(responseJSON.articlesCount);", + "", + " if(responseJSON.articles.length){", + " var article = responseJSON.articles[0];", + "", + " tests['Article has \"title\" property'] = article.hasOwnProperty('title');", + " tests['Article has \"slug\" property'] = article.hasOwnProperty('slug');", + " tests['Article has \"body\" property'] = article.hasOwnProperty('body');", + " tests['Article has \"createdAt\" property'] = article.hasOwnProperty('createdAt');", + " tests['Article\\'s \"createdAt\" property is an ISO 8601 timestamp'] = /^\\d{4,}-[01]\\d-[0-3]\\dT[0-2]\\d:[0-5]\\d:[0-5]\\d.\\d+(?:[+-][0-2]\\d:[0-5]\\d|Z)$/.test(article.createdAt);", + " tests['Article has \"updatedAt\" property'] = article.hasOwnProperty('updatedAt');", + " tests['Article\\'s \"updatedAt\" property is an ISO 8601 timestamp'] = /^\\d{4,}-[01]\\d-[0-3]\\dT[0-2]\\d:[0-5]\\d:[0-5]\\d.\\d+(?:[+-][0-2]\\d:[0-5]\\d|Z)$/.test(article.updatedAt);", + " tests['Article has \"description\" property'] = article.hasOwnProperty('description');", + " tests['Article has \"tagList\" property'] = article.hasOwnProperty('tagList');", + " tests['Article\\'s \"tagList\" property is an Array'] = Array.isArray(article.tagList);", + " tests['Article has \"author\" property'] = article.hasOwnProperty('author');", + " tests['Article has \"favorited\" property'] = article.hasOwnProperty('favorited');", + " tests['Article has \"favoritesCount\" property'] = article.hasOwnProperty('favoritesCount');", + " tests['favoritesCount is an 
integer'] = Number.isInteger(article.favoritesCount);", + " } else {", + " tests['articlesCount is 0 when feed is empty'] = responseJSON.articlesCount === 0;", + " }", + "}", + "" + ] + } + } + ], + "request": { + "method": "GET", + "header": [ + { + "key": "Content-Type", + "value": "application/json" + }, + { + "key": "X-Requested-With", + "value": "XMLHttpRequest" + }, + { + "key": "Authorization", + "value": "Token {{token}}" + } + ], + "body": { + "mode": "raw", + "raw": "" + }, + "url": { + "raw": "{{APIURL}}/articles", + "host": [ + "{{APIURL}}" + ], + "path": [ + "articles" + ] + } + }, + "response": [] + }, + { + "name": "All Articles with auth", + "event": [ + { + "listen": "test", + "script": { + "type": "text/javascript", + "exec": [ + "var is200Response = responseCode.code === 200;", + "", + "tests['Response code is 200 OK'] = is200Response;", + "", + "if(is200Response){", + " var responseJSON = JSON.parse(responseBody);", + "", + " tests['Response contains \"articles\" property'] = responseJSON.hasOwnProperty('articles');", + " tests['Response contains \"articlesCount\" property'] = responseJSON.hasOwnProperty('articlesCount');", + " tests['articlesCount is an integer'] = Number.isInteger(responseJSON.articlesCount);", + "", + " if(responseJSON.articles.length){", + " var article = responseJSON.articles[0];", + "", + " tests['Article has \"title\" property'] = article.hasOwnProperty('title');", + " tests['Article has \"slug\" property'] = article.hasOwnProperty('slug');", + " tests['Article has \"body\" property'] = article.hasOwnProperty('body');", + " tests['Article has \"createdAt\" property'] = article.hasOwnProperty('createdAt');", + " tests['Article\\'s \"createdAt\" property is an ISO 8601 timestamp'] = /^\\d{4,}-[01]\\d-[0-3]\\dT[0-2]\\d:[0-5]\\d:[0-5]\\d.\\d+(?:[+-][0-2]\\d:[0-5]\\d|Z)$/.test(article.createdAt);", + " tests['Article has \"updatedAt\" property'] = article.hasOwnProperty('updatedAt');", + " tests['Article\\'s \"updatedAt\" 
property is an ISO 8601 timestamp'] = /^\\d{4,}-[01]\\d-[0-3]\\dT[0-2]\\d:[0-5]\\d:[0-5]\\d.\\d+(?:[+-][0-2]\\d:[0-5]\\d|Z)$/.test(article.updatedAt);", + " tests['Article has \"description\" property'] = article.hasOwnProperty('description');", + " tests['Article has \"tagList\" property'] = article.hasOwnProperty('tagList');", + " tests['Article\\'s \"tagList\" property is an Array'] = Array.isArray(article.tagList);", + " tests['Article has \"author\" property'] = article.hasOwnProperty('author');", + " tests['Article has \"favorited\" property'] = article.hasOwnProperty('favorited');", + " tests['Article has \"favoritesCount\" property'] = article.hasOwnProperty('favoritesCount');", + " tests['favoritesCount is an integer'] = Number.isInteger(article.favoritesCount);", + " } else {", + " tests['articlesCount is 0 when feed is empty'] = responseJSON.articlesCount === 0;", + " }", + "}", + "" + ] + } + } + ], + "request": { + "method": "GET", + "header": [ + { + "key": "Content-Type", + "value": "application/json" + }, + { + "key": "X-Requested-With", + "value": "XMLHttpRequest" + }, + { + "key": "Authorization", + "value": "Token {{token}}" + } + ], + "body": { + "mode": "raw", + "raw": "" + }, + "url": { + "raw": "{{APIURL}}/articles", + "host": [ + "{{APIURL}}" + ], + "path": [ + "articles" + ] + } + }, + "response": [] + }, + { + "name": "Articles by Author", + "event": [ + { + "listen": "test", + "script": { + "type": "text/javascript", + "exec": [ + "var is200Response = responseCode.code === 200;", + "", + "tests['Response code is 200 OK'] = is200Response;", + "", + "if(is200Response){", + " var responseJSON = JSON.parse(responseBody);", + "", + " tests['Response contains \"articles\" property'] = responseJSON.hasOwnProperty('articles');", + " tests['Response contains \"articlesCount\" property'] = responseJSON.hasOwnProperty('articlesCount');", + " tests['articlesCount is an integer'] = Number.isInteger(responseJSON.articlesCount);", + "", + " 
if(responseJSON.articles.length){", + " var article = responseJSON.articles[0];", + "", + " tests['Article has \"title\" property'] = article.hasOwnProperty('title');", + " tests['Article has \"slug\" property'] = article.hasOwnProperty('slug');", + " tests['Article has \"body\" property'] = article.hasOwnProperty('body');", + " tests['Article has \"createdAt\" property'] = article.hasOwnProperty('createdAt');", + " tests['Article\\'s \"createdAt\" property is an ISO 8601 timestamp'] = /^\\d{4,}-[01]\\d-[0-3]\\dT[0-2]\\d:[0-5]\\d:[0-5]\\d.\\d+(?:[+-][0-2]\\d:[0-5]\\d|Z)$/.test(article.createdAt);", + " tests['Article has \"updatedAt\" property'] = article.hasOwnProperty('updatedAt');", + " tests['Article\\'s \"updatedAt\" property is an ISO 8601 timestamp'] = /^\\d{4,}-[01]\\d-[0-3]\\dT[0-2]\\d:[0-5]\\d:[0-5]\\d.\\d+(?:[+-][0-2]\\d:[0-5]\\d|Z)$/.test(article.updatedAt);", + " tests['Article has \"description\" property'] = article.hasOwnProperty('description');", + " tests['Article has \"tagList\" property'] = article.hasOwnProperty('tagList');", + " tests['Article\\'s \"tagList\" property is an Array'] = Array.isArray(article.tagList);", + " tests['Article has \"author\" property'] = article.hasOwnProperty('author');", + " tests['Article has \"favorited\" property'] = article.hasOwnProperty('favorited');", + " tests['Article has \"favoritesCount\" property'] = article.hasOwnProperty('favoritesCount');", + " tests['favoritesCount is an integer'] = Number.isInteger(article.favoritesCount);", + " } else {", + " tests['articlesCount is 0 when feed is empty'] = responseJSON.articlesCount === 0;", + " }", + "}", + "" + ] + } + } + ], + "request": { + "method": "GET", + "header": [ + { + "key": "Content-Type", + "value": "application/json" + }, + { + "key": "X-Requested-With", + "value": "XMLHttpRequest" + }, + { + "key": "Authorization", + "value": "Token {{token}}" + } + ], + "body": { + "mode": "raw", + "raw": "" + }, + "url": { + "raw": 
"{{APIURL}}/articles?author={{USERNAME}}", + "host": [ + "{{APIURL}}" + ], + "path": [ + "articles" + ], + "query": [ + { + "key": "author", + "value": "{{USERNAME}}" + } + ] + } + }, + "response": [] + }, + { + "name": "Articles by Author with auth", + "event": [ + { + "listen": "test", + "script": { + "type": "text/javascript", + "exec": [ + "var is200Response = responseCode.code === 200;", + "", + "tests['Response code is 200 OK'] = is200Response;", + "", + "if(is200Response){", + " var responseJSON = JSON.parse(responseBody);", + "", + " tests['Response contains \"articles\" property'] = responseJSON.hasOwnProperty('articles');", + " tests['Response contains \"articlesCount\" property'] = responseJSON.hasOwnProperty('articlesCount');", + " tests['articlesCount is an integer'] = Number.isInteger(responseJSON.articlesCount);", + "", + " if(responseJSON.articles.length){", + " var article = responseJSON.articles[0];", + "", + " tests['Article has \"title\" property'] = article.hasOwnProperty('title');", + " tests['Article has \"slug\" property'] = article.hasOwnProperty('slug');", + " tests['Article has \"body\" property'] = article.hasOwnProperty('body');", + " tests['Article has \"createdAt\" property'] = article.hasOwnProperty('createdAt');", + " tests['Article\\'s \"createdAt\" property is an ISO 8601 timestamp'] = /^\\d{4,}-[01]\\d-[0-3]\\dT[0-2]\\d:[0-5]\\d:[0-5]\\d.\\d+(?:[+-][0-2]\\d:[0-5]\\d|Z)$/.test(article.createdAt);", + " tests['Article has \"updatedAt\" property'] = article.hasOwnProperty('updatedAt');", + " tests['Article\\'s \"updatedAt\" property is an ISO 8601 timestamp'] = /^\\d{4,}-[01]\\d-[0-3]\\dT[0-2]\\d:[0-5]\\d:[0-5]\\d.\\d+(?:[+-][0-2]\\d:[0-5]\\d|Z)$/.test(article.updatedAt);", + " tests['Article has \"description\" property'] = article.hasOwnProperty('description');", + " tests['Article has \"tagList\" property'] = article.hasOwnProperty('tagList');", + " tests['Article\\'s \"tagList\" property is an Array'] = 
Array.isArray(article.tagList);", + " tests['Article has \"author\" property'] = article.hasOwnProperty('author');", + " tests['Article has \"favorited\" property'] = article.hasOwnProperty('favorited');", + " tests['Article has \"favoritesCount\" property'] = article.hasOwnProperty('favoritesCount');", + " tests['favoritesCount is an integer'] = Number.isInteger(article.favoritesCount);", + " } else {", + " tests['articlesCount is 0 when feed is empty'] = responseJSON.articlesCount === 0;", + " }", + "}", + "" + ] + } + } + ], + "request": { + "method": "GET", + "header": [ + { + "key": "Content-Type", + "value": "application/json" + }, + { + "key": "X-Requested-With", + "value": "XMLHttpRequest" + }, + { + "key": "Authorization", + "value": "Token {{token}}" + } + ], + "body": { + "mode": "raw", + "raw": "" + }, + "url": { + "raw": "{{APIURL}}/articles?author={{USERNAME}}", + "host": [ + "{{APIURL}}" + ], + "path": [ + "articles" + ], + "query": [ + { + "key": "author", + "value": "{{USERNAME}}" + } + ] + } + }, + "response": [] + }, + { + "name": "Articles Favorited by Username", + "event": [ + { + "listen": "test", + "script": { + "type": "text/javascript", + "exec": [ + "var is200Response = responseCode.code === 200;", + "", + "tests['Response code is 200 OK'] = is200Response;", + "", + "if(is200Response){", + " var responseJSON = JSON.parse(responseBody);", + " ", + " tests['Response contains \"articles\" property'] = responseJSON.hasOwnProperty('articles');", + " tests['Response contains \"articlesCount\" property'] = responseJSON.hasOwnProperty('articlesCount');", + " tests['articlesCount is an integer'] = Number.isInteger(responseJSON.articlesCount);", + "", + " if(responseJSON.articles.length){", + " var article = responseJSON.articles[0];", + "", + " tests['Article has \"title\" property'] = article.hasOwnProperty('title');", + " tests['Article has \"slug\" property'] = article.hasOwnProperty('slug');", + " tests['Article has \"body\" property'] = 
article.hasOwnProperty('body');", + " tests['Article has \"createdAt\" property'] = article.hasOwnProperty('createdAt');", + " tests['Article\\'s \"createdAt\" property is an ISO 8601 timestamp'] = /^\\d{4,}-[01]\\d-[0-3]\\dT[0-2]\\d:[0-5]\\d:[0-5]\\d.\\d+(?:[+-][0-2]\\d:[0-5]\\d|Z)$/.test(article.createdAt);", + " tests['Article has \"updatedAt\" property'] = article.hasOwnProperty('updatedAt');", + " tests['Article\\'s \"updatedAt\" property is an ISO 8601 timestamp'] = /^\\d{4,}-[01]\\d-[0-3]\\dT[0-2]\\d:[0-5]\\d:[0-5]\\d.\\d+(?:[+-][0-2]\\d:[0-5]\\d|Z)$/.test(article.updatedAt);", + " tests['Article has \"description\" property'] = article.hasOwnProperty('description');", + " tests['Article has \"tagList\" property'] = article.hasOwnProperty('tagList');", + " tests['Article\\'s \"tagList\" property is an Array'] = Array.isArray(article.tagList);", + " tests['Article has \"author\" property'] = article.hasOwnProperty('author');", + " tests['Article has \"favorited\" property'] = article.hasOwnProperty('favorited');", + " tests['Article has \"favoritesCount\" property'] = article.hasOwnProperty('favoritesCount');", + " tests['favoritesCount is an integer'] = Number.isInteger(article.favoritesCount);", + " } else {", + " tests['articlesCount is 0 when feed is empty'] = responseJSON.articlesCount === 0;", + " }", + "}", + "" + ] + } + } + ], + "request": { + "method": "GET", + "header": [ + { + "key": "Content-Type", + "value": "application/json" + }, + { + "key": "X-Requested-With", + "value": "XMLHttpRequest" + }, + { + "key": "Authorization", + "value": "Token {{token}}" + } + ], + "body": { + "mode": "raw", + "raw": "" + }, + "url": { + "raw": "{{APIURL}}/articles?favorited=jane", + "host": [ + "{{APIURL}}" + ], + "path": [ + "articles" + ], + "query": [ + { + "key": "favorited", + "value": "jane" + } + ] + } + }, + "response": [] + }, + { + "name": "Articles Favorited by Username with auth", + "event": [ + { + "listen": "test", + "script": { + "type": 
"text/javascript", + "exec": [ + "var is200Response = responseCode.code === 200;", + "", + "tests['Response code is 200 OK'] = is200Response;", + "", + "if(is200Response){", + " var responseJSON = JSON.parse(responseBody);", + " ", + " tests['Response contains \"articles\" property'] = responseJSON.hasOwnProperty('articles');", + " tests['Response contains \"articlesCount\" property'] = responseJSON.hasOwnProperty('articlesCount');", + " tests['articlesCount is an integer'] = Number.isInteger(responseJSON.articlesCount);", + "", + " if(responseJSON.articles.length){", + " var article = responseJSON.articles[0];", + "", + " tests['Article has \"title\" property'] = article.hasOwnProperty('title');", + " tests['Article has \"slug\" property'] = article.hasOwnProperty('slug');", + " tests['Article has \"body\" property'] = article.hasOwnProperty('body');", + " tests['Article has \"createdAt\" property'] = article.hasOwnProperty('createdAt');", + " tests['Article\\'s \"createdAt\" property is an ISO 8601 timestamp'] = /^\\d{4,}-[01]\\d-[0-3]\\dT[0-2]\\d:[0-5]\\d:[0-5]\\d.\\d+(?:[+-][0-2]\\d:[0-5]\\d|Z)$/.test(article.createdAt);", + " tests['Article has \"updatedAt\" property'] = article.hasOwnProperty('updatedAt');", + " tests['Article\\'s \"updatedAt\" property is an ISO 8601 timestamp'] = /^\\d{4,}-[01]\\d-[0-3]\\dT[0-2]\\d:[0-5]\\d:[0-5]\\d.\\d+(?:[+-][0-2]\\d:[0-5]\\d|Z)$/.test(article.updatedAt);", + " tests['Article has \"description\" property'] = article.hasOwnProperty('description');", + " tests['Article has \"tagList\" property'] = article.hasOwnProperty('tagList');", + " tests['Article\\'s \"tagList\" property is an Array'] = Array.isArray(article.tagList);", + " tests['Article has \"author\" property'] = article.hasOwnProperty('author');", + " tests['Article has \"favorited\" property'] = article.hasOwnProperty('favorited');", + " tests['Article has \"favoritesCount\" property'] = article.hasOwnProperty('favoritesCount');", + " tests['favoritesCount is an 
integer'] = Number.isInteger(article.favoritesCount);", + " } else {", + " tests['articlesCount is 0 when feed is empty'] = responseJSON.articlesCount === 0;", + " }", + "}", + "" + ] + } + } + ], + "request": { + "method": "GET", + "header": [ + { + "key": "Content-Type", + "value": "application/json" + }, + { + "key": "X-Requested-With", + "value": "XMLHttpRequest" + }, + { + "key": "Authorization", + "value": "Token {{token}}" + } + ], + "body": { + "mode": "raw", + "raw": "" + }, + "url": { + "raw": "{{APIURL}}/articles?favorited=jane", + "host": [ + "{{APIURL}}" + ], + "path": [ + "articles" + ], + "query": [ + { + "key": "favorited", + "value": "jane" + } + ] + } + }, + "response": [] + }, + { + "name": "Single Article by slug", + "event": [ + { + "listen": "test", + "script": { + "type": "text/javascript", + "exec": [ + "var responseJSON = JSON.parse(responseBody);", + "", + "tests['Response contains \"article\" property'] = responseJSON.hasOwnProperty('article');", + "", + "var article = responseJSON.article || {};", + "", + "tests['Article has \"title\" property'] = article.hasOwnProperty('title');", + "tests['Article has \"slug\" property'] = article.hasOwnProperty('slug');", + "tests['Article has \"body\" property'] = article.hasOwnProperty('body');", + "tests['Article has \"createdAt\" property'] = article.hasOwnProperty('createdAt');", + "tests['Article\\'s \"createdAt\" property is an ISO 8601 timestamp'] = /^\\d{4,}-[01]\\d-[0-3]\\dT[0-2]\\d:[0-5]\\d:[0-5]\\d.\\d+(?:[+-][0-2]\\d:[0-5]\\d|Z)$/.test(article.createdAt);", + "tests['Article has \"updatedAt\" property'] = article.hasOwnProperty('updatedAt');", + "tests['Article\\'s \"updatedAt\" property is an ISO 8601 timestamp'] = /^\\d{4,}-[01]\\d-[0-3]\\dT[0-2]\\d:[0-5]\\d:[0-5]\\d.\\d+(?:[+-][0-2]\\d:[0-5]\\d|Z)$/.test(article.updatedAt);", + "tests['Article has \"description\" property'] = article.hasOwnProperty('description');", + "tests['Article has \"tagList\" property'] = 
article.hasOwnProperty('tagList');", + "tests['Article\\'s \"tagList\" property is an Array'] = Array.isArray(article.tagList);", + "tests['Article has \"author\" property'] = article.hasOwnProperty('author');", + "tests['Article has \"favorited\" property'] = article.hasOwnProperty('favorited');", + "tests['Article has \"favoritesCount\" property'] = article.hasOwnProperty('favoritesCount');", + "tests['favoritesCount is an integer'] = Number.isInteger(article.favoritesCount);", + "" + ] + } + } + ], + "request": { + "method": "GET", + "header": [ + { + "key": "Content-Type", + "value": "application/json" + }, + { + "key": "X-Requested-With", + "value": "XMLHttpRequest" + }, + { + "key": "Authorization", + "value": "Token {{token}}" + } + ], + "body": { + "mode": "raw", + "raw": "" + }, + "url": { + "raw": "{{APIURL}}/articles/{{slug}}", + "host": [ + "{{APIURL}}" + ], + "path": [ + "articles", + "{{slug}}" + ] + } + }, + "response": [] + }, + { + "name": "Articles by Tag", + "event": [ + { + "listen": "test", + "script": { + "type": "text/javascript", + "exec": [ + "var is200Response = responseCode.code === 200;", + "", + "tests['Response code is 200 OK'] = is200Response;", + "", + "if(is200Response){", + " var responseJSON = JSON.parse(responseBody);", + "", + " tests['Response contains \"articles\" property'] = responseJSON.hasOwnProperty('articles');", + " tests['Response contains \"articlesCount\" property'] = responseJSON.hasOwnProperty('articlesCount');", + " tests['articlesCount is an integer'] = Number.isInteger(responseJSON.articlesCount);", + "", + " if(responseJSON.articles.length){", + " var article = responseJSON.articles[0];", + "", + " tests['Article has \"title\" property'] = article.hasOwnProperty('title');", + " tests['Article has \"slug\" property'] = article.hasOwnProperty('slug');", + " tests['Article has \"body\" property'] = article.hasOwnProperty('body');", + " tests['Article has \"createdAt\" property'] = 
article.hasOwnProperty('createdAt');", + " tests['Article\\'s \"createdAt\" property is an ISO 8601 timestamp'] = /^\\d{4,}-[01]\\d-[0-3]\\dT[0-2]\\d:[0-5]\\d:[0-5]\\d.\\d+(?:[+-][0-2]\\d:[0-5]\\d|Z)$/.test(article.createdAt);", + " tests['Article has \"updatedAt\" property'] = article.hasOwnProperty('updatedAt');", + " tests['Article\\'s \"updatedAt\" property is an ISO 8601 timestamp'] = /^\\d{4,}-[01]\\d-[0-3]\\dT[0-2]\\d:[0-5]\\d:[0-5]\\d.\\d+(?:[+-][0-2]\\d:[0-5]\\d|Z)$/.test(article.updatedAt);", + " tests['Article has \"description\" property'] = article.hasOwnProperty('description');", + " tests['Article has \"tagList\" property'] = article.hasOwnProperty('tagList');", + " tests['Article\\'s \"tagList\" property is an Array'] = Array.isArray(article.tagList);", + " tests['Article has \"author\" property'] = article.hasOwnProperty('author');", + " tests['Article has \"favorited\" property'] = article.hasOwnProperty('favorited');", + " tests['Article has \"favoritesCount\" property'] = article.hasOwnProperty('favoritesCount');", + " tests['favoritesCount is an integer'] = Number.isInteger(article.favoritesCount);", + " } else {", + " tests['articlesCount is 0 when feed is empty'] = responseJSON.articlesCount === 0;", + " }", + "}", + "" + ] + } + } + ], + "request": { + "method": "GET", + "header": [ + { + "key": "Content-Type", + "value": "application/json" + }, + { + "key": "X-Requested-With", + "value": "XMLHttpRequest" + }, + { + "key": "Authorization", + "value": "Token {{token}}" + } + ], + "body": { + "mode": "raw", + "raw": "" + }, + "url": { + "raw": "{{APIURL}}/articles?tag=dragons", + "host": [ + "{{APIURL}}" + ], + "path": [ + "articles" + ], + "query": [ + { + "key": "tag", + "value": "dragons" + } + ] + } + }, + "response": [] + }, + { + "name": "Update Article", + "event": [ + { + "listen": "test", + "script": { + "type": "text/javascript", + "exec": [ + "if (!(environment.isIntegrationTest)) {", + "var responseJSON = 
JSON.parse(responseBody);", + "", + "tests['Response contains \"article\" property'] = responseJSON.hasOwnProperty('article');", + "", + "var article = responseJSON.article || {};", + "", + "tests['Article has \"title\" property'] = article.hasOwnProperty('title');", + "tests['Article has \"slug\" property'] = article.hasOwnProperty('slug');", + "tests['Article has \"body\" property'] = article.hasOwnProperty('body');", + "tests['Article has \"createdAt\" property'] = article.hasOwnProperty('createdAt');", + "tests['Article\\'s \"createdAt\" property is an ISO 8601 timestamp'] = /^\\d{4,}-[01]\\d-[0-3]\\dT[0-2]\\d:[0-5]\\d:[0-5]\\d.\\d+(?:[+-][0-2]\\d:[0-5]\\d|Z)$/.test(article.createdAt);", + "tests['Article has \"updatedAt\" property'] = article.hasOwnProperty('updatedAt');", + "tests['Article\\'s \"updatedAt\" property is an ISO 8601 timestamp'] = /^\\d{4,}-[01]\\d-[0-3]\\dT[0-2]\\d:[0-5]\\d:[0-5]\\d.\\d+(?:[+-][0-2]\\d:[0-5]\\d|Z)$/.test(article.updatedAt);", + "tests['Article has \"description\" property'] = article.hasOwnProperty('description');", + "tests['Article has \"tagList\" property'] = article.hasOwnProperty('tagList');", + "tests['Article\\'s \"tagList\" property is an Array'] = Array.isArray(article.tagList);", + "tests['Article has \"author\" property'] = article.hasOwnProperty('author');", + "tests['Article has \"favorited\" property'] = article.hasOwnProperty('favorited');", + "tests['Article has \"favoritesCount\" property'] = article.hasOwnProperty('favoritesCount');", + "tests['favoritesCount is an integer'] = Number.isInteger(article.favoritesCount);", + "}", + "" + ] + } + } + ], + "request": { + "method": "PUT", + "header": [ + { + "key": "Content-Type", + "value": "application/json" + }, + { + "key": "X-Requested-With", + "value": "XMLHttpRequest" + }, + { + "key": "Authorization", + "value": "Token {{token}}" + } + ], + "body": { + "mode": "raw", + "raw": "{\"article\":{\"body\":\"With two hands\"}}" + }, + "url": { + "raw": 
"{{APIURL}}/articles/{{slug}}", + "host": [ + "{{APIURL}}" + ], + "path": [ + "articles", + "{{slug}}" + ] + } + }, + "response": [] + }, + { + "name": "Favorite Article", + "event": [ + { + "listen": "test", + "script": { + "type": "text/javascript", + "exec": [ + "var responseJSON = JSON.parse(responseBody);", + "", + "tests['Response contains \"article\" property'] = responseJSON.hasOwnProperty('article');", + "", + "var article = responseJSON.article || {};", + "", + "tests['Article has \"title\" property'] = article.hasOwnProperty('title');", + "tests['Article has \"slug\" property'] = article.hasOwnProperty('slug');", + "tests['Article has \"body\" property'] = article.hasOwnProperty('body');", + "tests['Article has \"createdAt\" property'] = article.hasOwnProperty('createdAt');", + "tests['Article\\'s \"createdAt\" property is an ISO 8601 timestamp'] = /^\\d{4,}-[01]\\d-[0-3]\\dT[0-2]\\d:[0-5]\\d:[0-5]\\d.\\d+(?:[+-][0-2]\\d:[0-5]\\d|Z)$/.test(article.createdAt);", + "tests['Article has \"updatedAt\" property'] = article.hasOwnProperty('updatedAt');", + "tests['Article\\'s \"updatedAt\" property is an ISO 8601 timestamp'] = /^\\d{4,}-[01]\\d-[0-3]\\dT[0-2]\\d:[0-5]\\d:[0-5]\\d.\\d+(?:[+-][0-2]\\d:[0-5]\\d|Z)$/.test(article.updatedAt);", + "tests['Article has \"description\" property'] = article.hasOwnProperty('description');", + "tests['Article has \"tagList\" property'] = article.hasOwnProperty('tagList');", + "tests['Article\\'s \"tagList\" property is an Array'] = Array.isArray(article.tagList);", + "tests['Article has \"author\" property'] = article.hasOwnProperty('author');", + "tests['Article has \"favorited\" property'] = article.hasOwnProperty('favorited');", + "tests[\"Article's 'favorited' property is true\"] = article.favorited === true;", + "tests['Article has \"favoritesCount\" property'] = article.hasOwnProperty('favoritesCount');", + "tests['favoritesCount is an integer'] = Number.isInteger(article.favoritesCount);", + "tests[\"Article's 
'favoritesCount' property is greater than 0\"] = article.favoritesCount > 0;", + "" + ] + } + } + ], + "request": { + "method": "POST", + "header": [ + { + "key": "Content-Type", + "value": "application/json" + }, + { + "key": "X-Requested-With", + "value": "XMLHttpRequest" + }, + { + "key": "Authorization", + "value": "Token {{token}}" + } + ], + "body": { + "mode": "raw", + "raw": "" + }, + "url": { + "raw": "{{APIURL}}/articles/{{slug}}/favorite", + "host": [ + "{{APIURL}}" + ], + "path": [ + "articles", + "{{slug}}", + "favorite" + ] + } + }, + "response": [] + }, + { + "name": "Unfavorite Article", + "event": [ + { + "listen": "test", + "script": { + "type": "text/javascript", + "exec": [ + "var responseJSON = JSON.parse(responseBody);", + "", + "tests['Response contains \"article\" property'] = responseJSON.hasOwnProperty('article');", + "", + "var article = responseJSON.article || {};", + "", + "tests['Article has \"title\" property'] = article.hasOwnProperty('title');", + "tests['Article has \"slug\" property'] = article.hasOwnProperty('slug');", + "tests['Article has \"body\" property'] = article.hasOwnProperty('body');", + "tests['Article has \"createdAt\" property'] = article.hasOwnProperty('createdAt');", + "tests['Article\\'s \"createdAt\" property is an ISO 8601 timestamp'] = /^\\d{4,}-[01]\\d-[0-3]\\dT[0-2]\\d:[0-5]\\d:[0-5]\\d.\\d+(?:[+-][0-2]\\d:[0-5]\\d|Z)$/.test(article.createdAt);", + "tests['Article has \"updatedAt\" property'] = article.hasOwnProperty('updatedAt');", + "tests['Article\\'s \"updatedAt\" property is an ISO 8601 timestamp'] = /^\\d{4,}-[01]\\d-[0-3]\\dT[0-2]\\d:[0-5]\\d:[0-5]\\d.\\d+(?:[+-][0-2]\\d:[0-5]\\d|Z)$/.test(article.updatedAt);", + "tests['Article has \"description\" property'] = article.hasOwnProperty('description');", + "tests['Article has \"tagList\" property'] = article.hasOwnProperty('tagList');", + "tests['Article\\'s \"tagList\" property is an Array'] = Array.isArray(article.tagList);", + "tests['Article has 
\"author\" property'] = article.hasOwnProperty('author');", + "tests['Article has \"favorited\" property'] = article.hasOwnProperty('favorited');", + "tests['Article has \"favoritesCount\" property'] = article.hasOwnProperty('favoritesCount');", + "tests['favoritesCount is an integer'] = Number.isInteger(article.favoritesCount);", + "tests[\"Article's \\\"favorited\\\" property is false\"] = article.favorited === false;", + "" + ] + } + } + ], + "request": { + "method": "DELETE", + "header": [ + { + "key": "Content-Type", + "value": "application/json" + }, + { + "key": "X-Requested-With", + "value": "XMLHttpRequest" + }, + { + "key": "Authorization", + "value": "Token {{token}}" + } + ], + "body": { + "mode": "raw", + "raw": "" + }, + "url": { + "raw": "{{APIURL}}/articles/{{slug}}/favorite", + "host": [ + "{{APIURL}}" + ], + "path": [ + "articles", + "{{slug}}", + "favorite" + ] + } + }, + "response": [] + }, + { + "name": "Create Comment for Article", + "event": [ + { + "listen": "test", + "script": { + "id": "9f90c364-cc68-4728-961a-85eb00197d7b", + "type": "text/javascript", + "exec": [ + "var responseJSON = JSON.parse(responseBody);", + "", + "tests['Response contains \"comment\" property'] = responseJSON.hasOwnProperty('comment');", + "", + "var comment = responseJSON.comment || {};", + "", + "tests['Comment has \"id\" property'] = comment.hasOwnProperty('id');", + "pm.globals.set('commentId', comment.id);", + "", + "tests['Comment has \"body\" property'] = comment.hasOwnProperty('body');", + "tests['Comment has \"createdAt\" property'] = comment.hasOwnProperty('createdAt');", + "tests['\"createdAt\" property is an ISO 8601 timestamp'] = /^\\d{4,}-[01]\\d-[0-3]\\dT[0-2]\\d:[0-5]\\d:[0-5]\\d.\\d+(?:[+-][0-2]\\d:[0-5]\\d|Z)$/.test(comment.createdAt);", + "tests['Comment has \"updatedAt\" property'] = comment.hasOwnProperty('updatedAt');", + "tests['\"updatedAt\" property is an ISO 8601 timestamp'] = 
/^\\d{4,}-[01]\\d-[0-3]\\dT[0-2]\\d:[0-5]\\d:[0-5]\\d.\\d+(?:[+-][0-2]\\d:[0-5]\\d|Z)$/.test(comment.updatedAt);", + "tests['Comment has \"author\" property'] = comment.hasOwnProperty('author');", + "" + ] + } + } + ], + "request": { + "method": "POST", + "header": [ + { + "key": "Content-Type", + "value": "application/json" + }, + { + "key": "X-Requested-With", + "value": "XMLHttpRequest" + }, + { + "key": "Authorization", + "value": "Token {{token}}" + } + ], + "body": { + "mode": "raw", + "raw": "{\"comment\":{\"body\":\"Thank you so much!\"}}" + }, + "url": { + "raw": "{{APIURL}}/articles/{{slug}}/comments", + "host": [ + "{{APIURL}}" + ], + "path": [ + "articles", + "{{slug}}", + "comments" + ] + } + }, + "response": [] + }, + { + "name": "All Comments for Article", + "event": [ + { + "listen": "test", + "script": { + "type": "text/javascript", + "exec": [ + "var is200Response = responseCode.code === 200", + "", + "tests['Response code is 200 OK'] = is200Response;", + "", + "if(is200Response){", + " var responseJSON = JSON.parse(responseBody);", + "", + " tests['Response contains \"comments\" property'] = responseJSON.hasOwnProperty('comments');", + "", + " if(responseJSON.comments.length){", + " var comment = responseJSON.comments[0];", + "", + " tests['Comment has \"id\" property'] = comment.hasOwnProperty('id');", + " tests['Comment has \"body\" property'] = comment.hasOwnProperty('body');", + " tests['Comment has \"createdAt\" property'] = comment.hasOwnProperty('createdAt');", + " tests['\"createdAt\" property is an ISO 8601 timestamp'] = /^\\d{4,}-[01]\\d-[0-3]\\dT[0-2]\\d:[0-5]\\d:[0-5]\\d.\\d+(?:[+-][0-2]\\d:[0-5]\\d|Z)$/.test(comment.createdAt);", + " tests['Comment has \"updatedAt\" property'] = comment.hasOwnProperty('updatedAt');", + " tests['\"updatedAt\" property is an ISO 8601 timestamp'] = /^\\d{4,}-[01]\\d-[0-3]\\dT[0-2]\\d:[0-5]\\d:[0-5]\\d.\\d+(?:[+-][0-2]\\d:[0-5]\\d|Z)$/.test(comment.updatedAt);", + " tests['Comment has \"author\" 
property'] = comment.hasOwnProperty('author');", + " }", + "}", + "" + ] + } + } + ], + "request": { + "method": "GET", + "header": [ + { + "key": "Content-Type", + "value": "application/json" + }, + { + "key": "X-Requested-With", + "value": "XMLHttpRequest" + }, + { + "key": "Authorization", + "value": "Token {{token}}" + } + ], + "body": { + "mode": "raw", + "raw": "" + }, + "url": { + "raw": "{{APIURL}}/articles/{{slug}}/comments", + "host": [ + "{{APIURL}}" + ], + "path": [ + "articles", + "{{slug}}", + "comments" + ] + } + }, + "response": [] + }, + { + "name": "Delete Comment for Article", + "request": { + "method": "DELETE", + "header": [ + { + "key": "Content-Type", + "value": "application/json" + }, + { + "key": "X-Requested-With", + "value": "XMLHttpRequest" + }, + { + "key": "Authorization", + "value": "Token {{token}}" + } + ], + "body": { + "mode": "raw", + "raw": "" + }, + "url": { + "raw": "{{APIURL}}/articles/{{slug}}/comments/{{commentId}}", + "host": [ + "{{APIURL}}" + ], + "path": [ + "articles", + "{{slug}}", + "comments", + "{{commentId}}" + ] + } + }, + "response": [] + }, + { + "name": "Delete Article", + "request": { + "method": "DELETE", + "header": [ + { + "key": "Content-Type", + "value": "application/json" + }, + { + "key": "X-Requested-With", + "value": "XMLHttpRequest" + }, + { + "key": "Authorization", + "value": "Token {{token}}" + } + ], + "body": { + "mode": "raw", + "raw": "" + }, + "url": { + "raw": "{{APIURL}}/articles/{{slug}}", + "host": [ + "{{APIURL}}" + ], + "path": [ + "articles", + "{{slug}}" + ] + } + }, + "response": [] + } + ], + "event": [ + { + "listen": "prerequest", + "script": { + "id": "67853a4a-e972-4573-a295-dad12a46a9d7", + "type": "text/javascript", + "exec": [ + "" + ] + } + }, + { + "listen": "test", + "script": { + "id": "3057f989-15e4-484e-b8fa-a041043d0ac0", + "type": "text/javascript", + "exec": [ + "" + ] + } + } + ] + }, + { + "name": "Profiles", + "item": [ + { + "name": "Register Celeb", + "event": 
[ + { + "listen": "test", + "script": { + "type": "text/javascript", + "exec": [ + "if (!(environment.isIntegrationTest)) {", + "var responseJSON = JSON.parse(responseBody);", + "", + "tests['Response contains \"user\" property'] = responseJSON.hasOwnProperty('user');", + "", + "var user = responseJSON.user || {};", + "", + "tests['User has \"email\" property'] = user.hasOwnProperty('email');", + "tests['User has \"username\" property'] = user.hasOwnProperty('username');", + "tests['User has \"bio\" property'] = user.hasOwnProperty('bio');", + "tests['User has \"image\" property'] = user.hasOwnProperty('image');", + "tests['User has \"token\" property'] = user.hasOwnProperty('token');", + "}", + "" + ] + } + } + ], + "request": { + "method": "POST", + "header": [ + { + "key": "Content-Type", + "value": "application/json" + }, + { + "key": "X-Requested-With", + "value": "XMLHttpRequest" + } + ], + "body": { + "mode": "raw", + "raw": "{\"user\":{\"email\":\"celeb_{{EMAIL}}\", \"password\":\"{{PASSWORD}}\", \"username\":\"celeb_{{USERNAME}}\"}}" + }, + "url": { + "raw": "{{APIURL}}/users", + "host": [ + "{{APIURL}}" + ], + "path": [ + "users" + ] + } + }, + "response": [] + }, + { + "name": "Profile", + "event": [ + { + "listen": "test", + "script": { + "type": "text/javascript", + "exec": [ + "if (!(environment.isIntegrationTest)) {", + "var is200Response = responseCode.code === 200;", + "", + "tests['Response code is 200 OK'] = is200Response;", + "", + "if(is200Response){", + " var responseJSON = JSON.parse(responseBody);", + "", + " tests['Response contains \"profile\" property'] = responseJSON.hasOwnProperty('profile');", + " ", + " var profile = responseJSON.profile || {};", + " ", + " tests['Profile has \"username\" property'] = profile.hasOwnProperty('username');", + " tests['Profile has \"bio\" property'] = profile.hasOwnProperty('bio');", + " tests['Profile has \"image\" property'] = profile.hasOwnProperty('image');", + " tests['Profile has \"following\" 
property'] = profile.hasOwnProperty('following');", + "}", + "}", + "" + ] + } + } + ], + "request": { + "method": "GET", + "header": [ + { + "key": "Content-Type", + "value": "application/json" + }, + { + "key": "X-Requested-With", + "value": "XMLHttpRequest" + }, + { + "key": "Authorization", + "value": "Token {{token}}" + } + ], + "body": { + "mode": "raw", + "raw": "" + }, + "url": { + "raw": "{{APIURL}}/profiles/celeb_{{USERNAME}}", + "host": [ + "{{APIURL}}" + ], + "path": [ + "profiles", + "celeb_{{USERNAME}}" + ] + } + }, + "response": [] + }, + { + "name": "Follow Profile", + "event": [ + { + "listen": "test", + "script": { + "type": "text/javascript", + "exec": [ + "if (!(environment.isIntegrationTest)) {", + "var is200Response = responseCode.code === 200;", + "", + "tests['Response code is 200 OK'] = is200Response;", + "", + "if(is200Response){", + " var responseJSON = JSON.parse(responseBody);", + "", + " tests['Response contains \"profile\" property'] = responseJSON.hasOwnProperty('profile');", + " ", + " var profile = responseJSON.profile || {};", + " ", + " tests['Profile has \"username\" property'] = profile.hasOwnProperty('username');", + " tests['Profile has \"bio\" property'] = profile.hasOwnProperty('bio');", + " tests['Profile has \"image\" property'] = profile.hasOwnProperty('image');", + " tests['Profile has \"following\" property'] = profile.hasOwnProperty('following');", + " tests['Profile\\'s \"following\" property is true'] = profile.following === true;", + "}", + "}", + "" + ] + } + } + ], + "request": { + "method": "POST", + "header": [ + { + "key": "Content-Type", + "value": "application/json" + }, + { + "key": "X-Requested-With", + "value": "XMLHttpRequest" + }, + { + "key": "Authorization", + "value": "Token {{token}}" + } + ], + "body": { + "mode": "raw", + "raw": "{\"user\":{\"email\":\"{{EMAIL}}\"}}" + }, + "url": { + "raw": "{{APIURL}}/profiles/celeb_{{USERNAME}}/follow", + "host": [ + "{{APIURL}}" + ], + "path": [ + "profiles", 
+ "celeb_{{USERNAME}}", + "follow" + ] + } + }, + "response": [] + }, + { + "name": "Unfollow Profile", + "event": [ + { + "listen": "test", + "script": { + "type": "text/javascript", + "exec": [ + "if (!(environment.isIntegrationTest)) {", + "var is200Response = responseCode.code === 200;", + "", + "tests['Response code is 200 OK'] = is200Response;", + "", + "if(is200Response){", + " var responseJSON = JSON.parse(responseBody);", + "", + " tests['Response contains \"profile\" property'] = responseJSON.hasOwnProperty('profile');", + " ", + " var profile = responseJSON.profile || {};", + " ", + " tests['Profile has \"username\" property'] = profile.hasOwnProperty('username');", + " tests['Profile has \"bio\" property'] = profile.hasOwnProperty('bio');", + " tests['Profile has \"image\" property'] = profile.hasOwnProperty('image');", + " tests['Profile has \"following\" property'] = profile.hasOwnProperty('following');", + " tests['Profile\\'s \"following\" property is false'] = profile.following === false;", + "}", + "}", + "" + ] + } + } + ], + "request": { + "method": "DELETE", + "header": [ + { + "key": "Content-Type", + "value": "application/json" + }, + { + "key": "X-Requested-With", + "value": "XMLHttpRequest" + }, + { + "key": "Authorization", + "value": "Token {{token}}" + } + ], + "body": { + "mode": "raw", + "raw": "" + }, + "url": { + "raw": "{{APIURL}}/profiles/celeb_{{USERNAME}}/follow", + "host": [ + "{{APIURL}}" + ], + "path": [ + "profiles", + "celeb_{{USERNAME}}", + "follow" + ] + } + }, + "response": [] + } + ] + }, + { + "name": "Tags", + "item": [ + { + "name": "All Tags", + "event": [ + { + "listen": "test", + "script": { + "type": "text/javascript", + "exec": [ + "var is200Response = responseCode.code === 200;", + "", + "tests['Response code is 200 OK'] = is200Response;", + "", + "if(is200Response){", + " var responseJSON = JSON.parse(responseBody);", + " ", + " tests['Response contains \"tags\" property'] = 
responseJSON.hasOwnProperty('tags');", + " tests['\"tags\" property returned as array'] = Array.isArray(responseJSON.tags);", + "}", + "" + ] + } + } + ], + "request": { + "method": "GET", + "header": [ + { + "key": "Content-Type", + "value": "application/json" + }, + { + "key": "X-Requested-With", + "value": "XMLHttpRequest" + } + ], + "body": { + "mode": "raw", + "raw": "" + }, + "url": { + "raw": "{{APIURL}}/tags", + "host": [ + "{{APIURL}}" + ], + "path": [ + "tags" + ] + } + }, + "response": [] + } + ] + } + ] +} \ No newline at end of file diff --git a/backend/postman/run-api-tests.sh b/backend/postman/run-api-tests.sh new file mode 100644 index 0000000..2337fd8 --- /dev/null +++ b/backend/postman/run-api-tests.sh @@ -0,0 +1,16 @@ +#!/usr/bin/env bash +set -x + +SCRIPTDIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" >/dev/null && pwd )" + +APIURL=${APIURL:-https://conduit.productionready.io/api} +USERNAME=${USERNAME:-u`date +%s`} +EMAIL=${EMAIL:-$USERNAME@mail.com} +PASSWORD=${PASSWORD:-password} + +npx newman run $SCRIPTDIR/Conduit.postman_collection.json \ + --delay-request 500 \ + --global-var "APIURL=$APIURL" \ + --global-var "USERNAME=$USERNAME" \ + --global-var "EMAIL=$EMAIL" \ + --global-var "PASSWORD=$PASSWORD" \ No newline at end of file diff --git a/backend/prod.env b/backend/prod.env new file mode 100644 index 0000000..d284540 --- /dev/null +++ b/backend/prod.env @@ -0,0 +1,17 @@ +# 邮件发信配置 +MAIL_FROM=orjiance@163.com + +SMTP_HOST=smtp.163.com +SMTP_PORT=465 +SMTP_USER=orjiance@163.com +SMTP_PASSWORD=NFZqrTavzBGDQLyQ +SMTP_TLS=true + +# 验证码 +EMAIL_CODE_EXPIRES_MINUTES=10 +# 可选:逗号分隔的场景列表(不写就用默认) +# EMAIL_CODE_SCENES=register,reset,login + +SECRET_KEY=secret +DEBUG=True +DATABASE_URL=postgresql://postgres:74110ZSH@localhost/aivise diff --git a/backend/pyproject.toml b/backend/pyproject.toml new file mode 100644 index 0000000..96d0cfc --- /dev/null +++ b/backend/pyproject.toml @@ -0,0 +1,72 @@ +package-mode = false +[tool.poetry] +name = 
"fastapi-realworld-example-app" +version = "0.0.0" +description = "Backend logic implementation for https://github.com/gothinkster/realworld with awesome FastAPI" +authors = ["Nik Sidnev "] +license = "MIT" + +[tool.poetry.dependencies] +python = "^3.9" +uvicorn = "^0.18.2" +fastapi = "^0.79.1" +pydantic = { version = "^1.9", extras = ["email", "dotenv"] } +passlib = { version = "^1.7", extras = ["bcrypt"] } +pyjwt = "^2.4" +databases = "^0.6.1" +asyncpg = "^0.26.0" +psycopg2-binary = "^2.9.3" +aiosql = "^6.2" +pypika = "^0.48.9" +alembic = "^1.8" +python-slugify = "^6.1" +Unidecode = "^1.3" +loguru = "^0.6.0" +python-multipart = "^0.0.20" + +[tool.poetry.dev-dependencies] +black = "^22.6.0" +isort = "^5.10" +autoflake = "^1.4" +wemake-python-styleguide = "^0.16.1" +mypy = "^0.971" +flake8-fixme = "^1.1" +pytest = "^7.1" +pytest-cov = "^3.0" +pytest-asyncio = "^0.19.0" +pytest-env = "^0.6.2" +pytest-xdist = "^2.4.0" +httpx = "^0.23.0" +asgi-lifespan = "^1.0.1" + +[tool.isort] +profile = "black" +src_paths = ["app", "tests"] +combine_as_imports = true + +[tool.pytest.ini_options] +testpaths = "tests" +filterwarnings = "error" +addopts = ''' + --strict-markers + --tb=short + --cov=app + --cov=tests + --cov-branch + --cov-report=term-missing + --cov-report=html + --cov-report=xml + --no-cov-on-fail + --cov-fail-under=100 + --numprocesses=auto + --asyncio-mode=auto +''' +env = [ + "SECRET_KEY=secret", + "MAX_CONNECTIONS_COUNT=1", + "MIN_CONNECTIONS_COUNT=1" +] + +[build-system] +requires = ["poetry>=1.0"] +build-backend = "poetry.masonry.api" diff --git a/backend/scripts/format b/backend/scripts/format new file mode 100644 index 0000000..64a9b14 --- /dev/null +++ b/backend/scripts/format @@ -0,0 +1,8 @@ +#!/usr/bin/env bash + +set -e + +isort --force-single-line-imports app tests +autoflake --recursive --remove-all-unused-imports --remove-unused-variables --in-place app tests +black app tests +isort app tests diff --git a/backend/scripts/lint b/backend/scripts/lint new 
file mode 100644 index 0000000..ea56cfe --- /dev/null +++ b/backend/scripts/lint @@ -0,0 +1,11 @@ +#!/usr/bin/env bash + +set -e +set -x + + +flake8 app --exclude=app/db/migrations +mypy app + +black --check app --diff +isort --check-only app diff --git a/backend/scripts/test b/backend/scripts/test new file mode 100644 index 0000000..23f48d1 --- /dev/null +++ b/backend/scripts/test @@ -0,0 +1,6 @@ +#!/usr/bin/env bash + +set -e +set -x + +pytest --cov=app --cov=tests --cov-report=term-missing --cov-config=setup.cfg ${@} diff --git a/backend/scripts/test-cov-html b/backend/scripts/test-cov-html new file mode 100644 index 0000000..de5f3b1 --- /dev/null +++ b/backend/scripts/test-cov-html @@ -0,0 +1,6 @@ +#!/usr/bin/env bash + +set -e +set -x + +bash scripts/test --cov-report=html ${@} diff --git a/backend/setup.cfg b/backend/setup.cfg new file mode 100644 index 0000000..c880ac5 --- /dev/null +++ b/backend/setup.cfg @@ -0,0 +1,88 @@ +[coverage:report] +precision = 2 +exclude_lines = + pragma: no cover + raise NotImplementedError + raise NotImplemented + +[coverage:run] +source = app +branch = True + +[mypy] +plugins = pydantic.mypy + +strict_optional = True +warn_redundant_casts = True +warn_unused_ignores = True +disallow_any_generics = True +check_untyped_defs = True + +disallow_untyped_defs = True + +[pydantic-mypy] +init_forbid_extra = True +init_typed = True +warn_required_dynamic_aliases = True +warn_untyped_fields = True + +[mypy-sqlalchemy.*] +ignore_missing_imports = True + +[mypy-alembic.*] +ignore_missing_imports = True + +[mypy-loguru.*] +ignore_missing_imports = True + +[mypy-asyncpg.*] +ignore_missing_imports = True + +[mypy-bcrypt.*] +ignore_missing_imports = True + +[mypy-passlib.*] +ignore_missing_imports = True + +[mypy-slugify.*] +ignore_missing_imports = True + +[mypy-pypika.*] +ignore_missing_imports = True + +[flake8] +format = wemake +max-line-length = 88 +per-file-ignores = + # ignore error on builtin names for TypedTable classes, since just 
mapper for SQL table + app/db/queries/tables.py: WPS125, + + # ignore black disabling in some places for queries building using pypika + app/db/repositories/*.py: E800, + + app/api/dependencies/authentication.py: WPS201, +ignore = + # common errors: + # FastAPI architecture requires a lot of functions calls as default arguments, so ignore it here. + B008, + # docs are missing in this project. + D, RST + + # WPS: 3xx + # IMO, but the obligation to specify the base class is redundant. + WPS306, + + # WPS: 4xx + # FastAPI architecture requires a lot of complex calls as default arguments, so ignore it here. + WPS404, + # again, FastAPI DI architecture involves a lot of nested functions as DI providers. + WPS430, + # used for pypika operations + WPS465, + + # WPS: 6xx + # pydantic defines models in dataclasses model style, but not supported by WPS. + WPS601, +no-accept-encodings = True +nested-classes-whitelist=Config +inline-quotes = double diff --git a/backend/smtp_test.py b/backend/smtp_test.py new file mode 100644 index 0000000..f917723 --- /dev/null +++ b/backend/smtp_test.py @@ -0,0 +1,44 @@ +# smtp_test.py +import os, smtplib, ssl +from email.message import EmailMessage +from dotenv import load_dotenv + +load_dotenv(".env") + +SMTP_HOST = os.getenv("SMTP_HOST", "smtp.163.com") +SMTP_USER = os.getenv("SMTP_USER") # 必须是完整邮箱 +SMTP_PASS = os.getenv("SMTP_PASSWORD") # 163“客户端授权码” +FROM = os.getenv("MAIL_FROM", SMTP_USER) # 建议与 SMTP_USER 相同 +TO = SMTP_USER # 先给自己发一封 + +assert SMTP_USER and SMTP_PASS, "缺少 SMTP_USER/SMTP_PASSWORD 环境变量" + +def make_msg(): + m = EmailMessage() + m["Subject"] = "SMTP 测试 - 465/587" + m["From"] = FROM + m["To"] = TO + m.set_content("hello from smtp_test.py") + return m + +ctx = ssl.create_default_context() + +# 先 465/SSL +try: + print(">>> try SSL 465") + with smtplib.SMTP_SSL(SMTP_HOST, 465, context=ctx, timeout=20) as s: + s.set_debuglevel(1) + s.login(SMTP_USER, SMTP_PASS) + s.send_message(make_msg()) + print("OK via 465/SSL") +except 
Exception as e: + print("465 failed:", e) + print(">>> fallback STARTTLS 587") + with smtplib.SMTP(SMTP_HOST, 587, timeout=20) as s: + s.set_debuglevel(1) + s.ehlo() + s.starttls(context=ctx) + s.ehlo() + s.login(SMTP_USER, SMTP_PASS) + s.send_message(make_msg()) + print("OK via 587/STARTTLS") diff --git a/backend/static/uploads/01871f886d654df8a25a092c3f7f4566.png b/backend/static/uploads/01871f886d654df8a25a092c3f7f4566.png new file mode 100644 index 0000000..b403195 Binary files /dev/null and b/backend/static/uploads/01871f886d654df8a25a092c3f7f4566.png differ diff --git a/backend/static/uploads/155a5f36ed29400db45bd8f9a81de010.jpg b/backend/static/uploads/155a5f36ed29400db45bd8f9a81de010.jpg new file mode 100644 index 0000000..4ba4c53 Binary files /dev/null and b/backend/static/uploads/155a5f36ed29400db45bd8f9a81de010.jpg differ diff --git a/backend/static/uploads/247363ac66ca4a4d870efdb0623e6b16.jpg b/backend/static/uploads/247363ac66ca4a4d870efdb0623e6b16.jpg new file mode 100644 index 0000000..4ba4c53 Binary files /dev/null and b/backend/static/uploads/247363ac66ca4a4d870efdb0623e6b16.jpg differ diff --git a/backend/static/uploads/2e90e2c103c9449587061dd2f1b9a526.png b/backend/static/uploads/2e90e2c103c9449587061dd2f1b9a526.png new file mode 100644 index 0000000..94c8180 Binary files /dev/null and b/backend/static/uploads/2e90e2c103c9449587061dd2f1b9a526.png differ diff --git a/backend/static/uploads/3c9238c408d344cda8c07a952cdbf2aa.jpg b/backend/static/uploads/3c9238c408d344cda8c07a952cdbf2aa.jpg new file mode 100644 index 0000000..4ba4c53 Binary files /dev/null and b/backend/static/uploads/3c9238c408d344cda8c07a952cdbf2aa.jpg differ diff --git a/backend/static/uploads/439318501e914f1d91b2d9ff46c21b89.png b/backend/static/uploads/439318501e914f1d91b2d9ff46c21b89.png new file mode 100644 index 0000000..8dd0bc1 Binary files /dev/null and b/backend/static/uploads/439318501e914f1d91b2d9ff46c21b89.png differ diff --git 
a/backend/static/uploads/5cb0458d8a714cc8b3870d4c1260da4f.png b/backend/static/uploads/5cb0458d8a714cc8b3870d4c1260da4f.png new file mode 100644 index 0000000..a80a5c6 Binary files /dev/null and b/backend/static/uploads/5cb0458d8a714cc8b3870d4c1260da4f.png differ diff --git a/backend/static/uploads/70f26a2fd47f4f5c91ed99982ee9c12c.jpg b/backend/static/uploads/70f26a2fd47f4f5c91ed99982ee9c12c.jpg new file mode 100644 index 0000000..4ba4c53 Binary files /dev/null and b/backend/static/uploads/70f26a2fd47f4f5c91ed99982ee9c12c.jpg differ diff --git a/backend/static/uploads/7205bc5d8e7e4b5698c899e3a655e863.png b/backend/static/uploads/7205bc5d8e7e4b5698c899e3a655e863.png new file mode 100644 index 0000000..a386f81 Binary files /dev/null and b/backend/static/uploads/7205bc5d8e7e4b5698c899e3a655e863.png differ diff --git a/backend/static/uploads/729ba05ddff64951bf6e16e23bac0d49.jpg b/backend/static/uploads/729ba05ddff64951bf6e16e23bac0d49.jpg new file mode 100644 index 0000000..4ba4c53 Binary files /dev/null and b/backend/static/uploads/729ba05ddff64951bf6e16e23bac0d49.jpg differ diff --git a/backend/static/uploads/7c8780b2f6f04ce2aee3df8a84a0d038.png b/backend/static/uploads/7c8780b2f6f04ce2aee3df8a84a0d038.png new file mode 100644 index 0000000..4f53cd3 Binary files /dev/null and b/backend/static/uploads/7c8780b2f6f04ce2aee3df8a84a0d038.png differ diff --git a/backend/static/uploads/7ccf739206d94e958fcee838e6ece28d.png b/backend/static/uploads/7ccf739206d94e958fcee838e6ece28d.png new file mode 100644 index 0000000..b403195 Binary files /dev/null and b/backend/static/uploads/7ccf739206d94e958fcee838e6ece28d.png differ diff --git a/backend/static/uploads/88868ad4f0774fc78df0998e24ada8ba.png b/backend/static/uploads/88868ad4f0774fc78df0998e24ada8ba.png new file mode 100644 index 0000000..8cc667a Binary files /dev/null and b/backend/static/uploads/88868ad4f0774fc78df0998e24ada8ba.png differ diff --git a/backend/static/uploads/951dc37cd7e247ecb89a4c79f02cbe7d.jpg 
b/backend/static/uploads/951dc37cd7e247ecb89a4c79f02cbe7d.jpg new file mode 100644 index 0000000..29bba00 Binary files /dev/null and b/backend/static/uploads/951dc37cd7e247ecb89a4c79f02cbe7d.jpg differ diff --git a/backend/static/uploads/9a767f0a538a482fa0d3be2b3beea924.png b/backend/static/uploads/9a767f0a538a482fa0d3be2b3beea924.png new file mode 100644 index 0000000..8be43f6 Binary files /dev/null and b/backend/static/uploads/9a767f0a538a482fa0d3be2b3beea924.png differ diff --git a/backend/static/uploads/bc824c6a93b34e9bad3db5e593506ce9.png b/backend/static/uploads/bc824c6a93b34e9bad3db5e593506ce9.png new file mode 100644 index 0000000..004b979 Binary files /dev/null and b/backend/static/uploads/bc824c6a93b34e9bad3db5e593506ce9.png differ diff --git a/backend/static/uploads/c73a19ff9b574bf386edfaa464578a7c.png b/backend/static/uploads/c73a19ff9b574bf386edfaa464578a7c.png new file mode 100644 index 0000000..35b2906 Binary files /dev/null and b/backend/static/uploads/c73a19ff9b574bf386edfaa464578a7c.png differ diff --git a/backend/static/uploads/d11286e3137749a8896fc6d4bc7b7998.png b/backend/static/uploads/d11286e3137749a8896fc6d4bc7b7998.png new file mode 100644 index 0000000..7875134 Binary files /dev/null and b/backend/static/uploads/d11286e3137749a8896fc6d4bc7b7998.png differ diff --git a/backend/static/uploads/d36a7dc2704749739786f7d75f3b6934.png b/backend/static/uploads/d36a7dc2704749739786f7d75f3b6934.png new file mode 100644 index 0000000..b238b07 Binary files /dev/null and b/backend/static/uploads/d36a7dc2704749739786f7d75f3b6934.png differ diff --git a/backend/static/uploads/dcd890a0bc0c4dde94fd48d5c2374ae6.jpg b/backend/static/uploads/dcd890a0bc0c4dde94fd48d5c2374ae6.jpg new file mode 100644 index 0000000..4ba4c53 Binary files /dev/null and b/backend/static/uploads/dcd890a0bc0c4dde94fd48d5c2374ae6.jpg differ diff --git a/backend/static/uploads/e3abe52e2f754547be20f2e7a2e71da7.png b/backend/static/uploads/e3abe52e2f754547be20f2e7a2e71da7.png new file 
mode 100644 index 0000000..c10fbb5 Binary files /dev/null and b/backend/static/uploads/e3abe52e2f754547be20f2e7a2e71da7.png differ diff --git a/backend/static/uploads/f08d59bf9c854593ab63d1a9974bb6aa.png b/backend/static/uploads/f08d59bf9c854593ab63d1a9974bb6aa.png new file mode 100644 index 0000000..bc5fd96 Binary files /dev/null and b/backend/static/uploads/f08d59bf9c854593ab63d1a9974bb6aa.png differ diff --git a/backend/static/uploads/f377cc7ca4df43cca6599a9a54bfc3d3.png b/backend/static/uploads/f377cc7ca4df43cca6599a9a54bfc3d3.png new file mode 100644 index 0000000..01cfc99 Binary files /dev/null and b/backend/static/uploads/f377cc7ca4df43cca6599a9a54bfc3d3.png differ diff --git a/backend/tests/__init__.py b/backend/tests/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/backend/tests/conftest.py b/backend/tests/conftest.py new file mode 100644 index 0000000..d6a217f --- /dev/null +++ b/backend/tests/conftest.py @@ -0,0 +1,93 @@ +from os import environ + +import pytest +from asgi_lifespan import LifespanManager +from asyncpg.pool import Pool +from fastapi import FastAPI +from httpx import AsyncClient + +from app.db.repositories.articles import ArticlesRepository +from app.db.repositories.users import UsersRepository +from app.models.domain.articles import Article +from app.models.domain.users import UserInDB +from app.services import jwt +from tests.fake_asyncpg_pool import FakeAsyncPGPool + +environ["APP_ENV"] = "test" + + +@pytest.fixture +def app() -> FastAPI: + from app.main import get_application # local import for testing purpose + + return get_application() + + +@pytest.fixture +async def initialized_app(app: FastAPI) -> FastAPI: + async with LifespanManager(app): + app.state.pool = await FakeAsyncPGPool.create_pool(app.state.pool) + yield app + + +@pytest.fixture +def pool(initialized_app: FastAPI) -> Pool: + return initialized_app.state.pool + + +@pytest.fixture +async def client(initialized_app: FastAPI) -> AsyncClient: + async with 
AsyncClient( + app=initialized_app, + base_url="http://testserver", + headers={"Content-Type": "application/json"}, + ) as client: + yield client + + +@pytest.fixture +def authorization_prefix() -> str: + from app.core.config import get_app_settings + + settings = get_app_settings() + jwt_token_prefix = settings.jwt_token_prefix + + return jwt_token_prefix + + +@pytest.fixture +async def test_user(pool: Pool) -> UserInDB: + async with pool.acquire() as conn: + return await UsersRepository(conn).create_user( + email="test@test.com", password="password", username="username" + ) + + +@pytest.fixture +async def test_article(test_user: UserInDB, pool: Pool) -> Article: + async with pool.acquire() as connection: + articles_repo = ArticlesRepository(connection) + return await articles_repo.create_article( + slug="test-slug", + title="Test Slug", + description="Slug for tests", + body="Test " * 100, + author=test_user, + tags=["tests", "testing", "pytest"], + ) + + +@pytest.fixture +def token(test_user: UserInDB) -> str: + return jwt.create_access_token_for_user(test_user, environ["SECRET_KEY"]) + + +@pytest.fixture +def authorized_client( + client: AsyncClient, token: str, authorization_prefix: str +) -> AsyncClient: + client.headers = { + "Authorization": f"{authorization_prefix} {token}", + **client.headers, + } + return client diff --git a/backend/tests/fake_asyncpg_pool.py b/backend/tests/fake_asyncpg_pool.py new file mode 100644 index 0000000..e442a0f --- /dev/null +++ b/backend/tests/fake_asyncpg_pool.py @@ -0,0 +1,46 @@ +from types import TracebackType +from typing import Optional, Type + +from asyncpg import Connection +from asyncpg.pool import Pool + + +class FakeAsyncPGPool: + def __init__(self, pool: Pool) -> None: + self._pool = pool + self._conn = None + self._tx = None + + @classmethod + async def create_pool(cls, pool: Pool) -> "FakeAsyncPGPool": + pool = cls(pool) + conn = await pool._pool.acquire() + tx = conn.transaction() + await tx.start() + pool._conn 
= conn + pool._tx = tx + return pool + + async def close(self) -> None: + await self._tx.rollback() + await self._pool.release(self._conn) + await self._pool.close() + + def acquire(self, *, timeout: Optional[float] = None) -> "FakePoolAcquireContent": + return FakePoolAcquireContent(self) + + +class FakePoolAcquireContent: + def __init__(self, pool: FakeAsyncPGPool) -> None: + self._pool = pool + + async def __aenter__(self) -> Connection: + return self._pool._conn + + async def __aexit__( + self, + exc_type: Optional[Type[Exception]], + exc: Optional[Exception], + tb: Optional[TracebackType], + ) -> None: + pass diff --git a/backend/tests/test_api/__init__.py b/backend/tests/test_api/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/backend/tests/test_api/test_errors/__init__.py b/backend/tests/test_api/test_errors/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/backend/tests/test_api/test_errors/test_422_error.py b/backend/tests/test_api/test_errors/test_422_error.py new file mode 100644 index 0000000..e3102b7 --- /dev/null +++ b/backend/tests/test_api/test_errors/test_422_error.py @@ -0,0 +1,20 @@ +import pytest +from fastapi import FastAPI +from httpx import AsyncClient +from starlette.status import HTTP_422_UNPROCESSABLE_ENTITY + +pytestmark = pytest.mark.asyncio + + +async def test_frw_validation_error_format(app: FastAPI): + @app.get("/wrong_path/{param}") + def route_for_test(param: int) -> None: # pragma: no cover + pass + + async with AsyncClient(base_url="http://testserver", app=app) as client: + response = await client.get("/wrong_path/asd") + + assert response.status_code == HTTP_422_UNPROCESSABLE_ENTITY + + error_data = response.json() + assert "errors" in error_data diff --git a/backend/tests/test_api/test_errors/test_error.py b/backend/tests/test_api/test_errors/test_error.py new file mode 100644 index 0000000..841cdd1 --- /dev/null +++ b/backend/tests/test_api/test_errors/test_error.py @@ -0,0 +1,16 @@ 
+import pytest +from fastapi import FastAPI +from httpx import AsyncClient +from starlette.status import HTTP_404_NOT_FOUND + +pytestmark = pytest.mark.asyncio + + +async def test_frw_validation_error_format(app: FastAPI): + async with AsyncClient(base_url="http://testserver", app=app) as client: + response = await client.get("/wrong_path/asd") + + assert response.status_code == HTTP_404_NOT_FOUND + + error_data = response.json() + assert "errors" in error_data diff --git a/backend/tests/test_api/test_routes/__init__.py b/backend/tests/test_api/test_routes/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/backend/tests/test_api/test_routes/test_articles.py b/backend/tests/test_api/test_routes/test_articles.py new file mode 100644 index 0000000..07bd06c --- /dev/null +++ b/backend/tests/test_api/test_routes/test_articles.py @@ -0,0 +1,575 @@ +import pytest +from asyncpg.pool import Pool +from fastapi import FastAPI +from httpx import AsyncClient +from starlette import status + +from app.db.errors import EntityDoesNotExist +from app.db.repositories.articles import ArticlesRepository +from app.db.repositories.profiles import ProfilesRepository +from app.db.repositories.users import UsersRepository +from app.models.domain.articles import Article +from app.models.domain.users import UserInDB +from app.models.schemas.articles import ArticleInResponse, ListOfArticlesInResponse + +pytestmark = pytest.mark.asyncio + + +async def test_user_can_not_create_article_with_duplicated_slug( + app: FastAPI, authorized_client: AsyncClient, test_article: Article +) -> None: + article_data = { + "title": "Test Slug", + "body": "does not matter", + "description": "¯\\_(ツ)_/¯", + } + response = await authorized_client.post( + app.url_path_for("articles:create-article"), json={"article": article_data} + ) + assert response.status_code == status.HTTP_400_BAD_REQUEST + + +async def test_user_can_create_article( + app: FastAPI, authorized_client: AsyncClient, test_user: 
UserInDB +) -> None: + article_data = { + "title": "Test Slug", + "body": "does not matter", + "description": "¯\\_(ツ)_/¯", + } + response = await authorized_client.post( + app.url_path_for("articles:create-article"), json={"article": article_data} + ) + article = ArticleInResponse(**response.json()) + assert article.article.title == article_data["title"] + assert article.article.author.username == test_user.username + + +async def test_not_existing_tags_will_be_created_without_duplication( + app: FastAPI, authorized_client: AsyncClient, test_user: UserInDB +) -> None: + article_data = { + "title": "Test Slug", + "body": "does not matter", + "description": "¯\\_(ツ)_/¯", + "tagList": ["tag1", "tag2", "tag3", "tag3"], + } + response = await authorized_client.post( + app.url_path_for("articles:create-article"), json={"article": article_data} + ) + article = ArticleInResponse(**response.json()) + assert set(article.article.tags) == {"tag1", "tag2", "tag3"} + + +@pytest.mark.parametrize( + "api_method, route_name", + (("GET", "articles:get-article"), ("PUT", "articles:update-article")), +) +async def test_user_can_not_retrieve_not_existing_article( + app: FastAPI, + authorized_client: AsyncClient, + test_article: Article, + api_method: str, + route_name: str, +) -> None: + response = await authorized_client.request( + api_method, app.url_path_for(route_name, slug="wrong-slug") + ) + assert response.status_code == status.HTTP_404_NOT_FOUND + + +async def test_user_can_retrieve_article_if_exists( + app: FastAPI, authorized_client: AsyncClient, test_article: Article +) -> None: + response = await authorized_client.get( + app.url_path_for("articles:get-article", slug=test_article.slug) + ) + article = ArticleInResponse(**response.json()) + assert article.article == test_article + + +@pytest.mark.parametrize( + "update_field, update_value, extra_updates", + ( + ("title", "New Title", {"slug": "new-title"}), + ("description", "new description", {}), + ("body", "new body", 
{}), + ), +) +async def test_user_can_update_article( + app: FastAPI, + authorized_client: AsyncClient, + test_article: Article, + update_field: str, + update_value: str, + extra_updates: dict, +) -> None: + response = await authorized_client.put( + app.url_path_for("articles:update-article", slug=test_article.slug), + json={"article": {update_field: update_value}}, + ) + + assert response.status_code == status.HTTP_200_OK + + article = ArticleInResponse(**response.json()).article + article_as_dict = article.dict() + assert article_as_dict[update_field] == update_value + + for extra_field, extra_value in extra_updates.items(): + assert article_as_dict[extra_field] == extra_value + + exclude_fields = {update_field, *extra_updates.keys(), "updated_at"} + assert article.dict(exclude=exclude_fields) == test_article.dict( + exclude=exclude_fields + ) + + +@pytest.mark.parametrize( + "api_method, route_name", + (("PUT", "articles:update-article"), ("DELETE", "articles:delete-article")), +) +async def test_user_can_not_modify_article_that_is_not_authored_by_him( + app: FastAPI, + authorized_client: AsyncClient, + pool: Pool, + api_method: str, + route_name: str, +) -> None: + async with pool.acquire() as connection: + users_repo = UsersRepository(connection) + user = await users_repo.create_user( + username="test_author", email="author@email.com", password="password" + ) + articles_repo = ArticlesRepository(connection) + await articles_repo.create_article( + slug="test-slug", + title="Test Slug", + description="Slug for tests", + body="Test " * 100, + author=user, + tags=["tests", "testing", "pytest"], + ) + + response = await authorized_client.request( + api_method, + app.url_path_for(route_name, slug="test-slug"), + json={"article": {"title": "Updated Title"}}, + ) + assert response.status_code == status.HTTP_403_FORBIDDEN + + +async def test_user_can_delete_his_article( + app: FastAPI, + authorized_client: AsyncClient, + test_article: Article, + pool: Pool, +) -> None: 
+ await authorized_client.delete( + app.url_path_for("articles:delete-article", slug=test_article.slug) + ) + + async with pool.acquire() as connection: + articles_repo = ArticlesRepository(connection) + with pytest.raises(EntityDoesNotExist): + await articles_repo.get_article_by_slug(slug=test_article.slug) + + +@pytest.mark.parametrize( + "api_method, route_name, favorite_state", + ( + ("POST", "articles:mark-article-favorite", True), + ("DELETE", "articles:unmark-article-favorite", False), + ), +) +async def test_user_can_change_favorite_state( + app: FastAPI, + authorized_client: AsyncClient, + test_article: Article, + test_user: UserInDB, + pool: Pool, + api_method: str, + route_name: str, + favorite_state: bool, +) -> None: + if not favorite_state: + async with pool.acquire() as connection: + articles_repo = ArticlesRepository(connection) + await articles_repo.add_article_into_favorites( + article=test_article, user=test_user + ) + + await authorized_client.request( + api_method, app.url_path_for(route_name, slug=test_article.slug) + ) + + response = await authorized_client.get( + app.url_path_for("articles:get-article", slug=test_article.slug) + ) + + article = ArticleInResponse(**response.json()) + + assert article.article.favorited == favorite_state + assert article.article.favorites_count == int(favorite_state) + + +@pytest.mark.parametrize( + "api_method, route_name, favorite_state", + ( + ("POST", "articles:mark-article-favorite", True), + ("DELETE", "articles:unmark-article-favorite", False), + ), +) +async def test_user_can_not_change_article_state_twice( + app: FastAPI, + authorized_client: AsyncClient, + test_article: Article, + test_user: UserInDB, + pool: Pool, + api_method: str, + route_name: str, + favorite_state: bool, +) -> None: + if favorite_state: + async with pool.acquire() as connection: + articles_repo = ArticlesRepository(connection) + await articles_repo.add_article_into_favorites( + article=test_article, user=test_user + ) + + 
response = await authorized_client.request( + api_method, app.url_path_for(route_name, slug=test_article.slug) + ) + + assert response.status_code == status.HTTP_400_BAD_REQUEST + + +async def test_empty_feed_if_user_has_not_followings( + app: FastAPI, + authorized_client: AsyncClient, + test_article: Article, + test_user: UserInDB, + pool: Pool, +) -> None: + async with pool.acquire() as connection: + users_repo = UsersRepository(connection) + articles_repo = ArticlesRepository(connection) + + for i in range(5): + user = await users_repo.create_user( + username=f"user-{i}", email=f"user-{i}@email.com", password="password" + ) + for j in range(5): + await articles_repo.create_article( + slug=f"slug-{i}-{j}", + title="tmp", + description="tmp", + body="tmp", + author=user, + tags=[f"tag-{i}-{j}"], + ) + + response = await authorized_client.get( + app.url_path_for("articles:get-user-feed-articles") + ) + + articles = ListOfArticlesInResponse(**response.json()) + assert articles.articles == [] + + +async def test_user_will_receive_only_following_articles( + app: FastAPI, + authorized_client: AsyncClient, + test_article: Article, + test_user: UserInDB, + pool: Pool, +) -> None: + following_author_username = "user-2" + async with pool.acquire() as connection: + users_repo = UsersRepository(connection) + profiles_repo = ProfilesRepository(connection) + articles_repo = ArticlesRepository(connection) + + for i in range(5): + user = await users_repo.create_user( + username=f"user-{i}", email=f"user-{i}@email.com", password="password" + ) + if i == 2: + await profiles_repo.add_user_into_followers( + target_user=user, requested_user=test_user + ) + + for j in range(5): + await articles_repo.create_article( + slug=f"slug-{i}-{j}", + title="tmp", + description="tmp", + body="tmp", + author=user, + tags=[f"tag-{i}-{j}"], + ) + + response = await authorized_client.get( + app.url_path_for("articles:get-user-feed-articles") + ) + + articles_from_response = 
ListOfArticlesInResponse(**response.json()) + assert len(articles_from_response.articles) == 5 + + all_from_following = ( + article.author.username == following_author_username + for article in articles_from_response.articles + ) + assert all(all_from_following) + + +async def test_user_receiving_feed_with_limit_and_offset( + app: FastAPI, + authorized_client: AsyncClient, + test_article: Article, + test_user: UserInDB, + pool: Pool, +) -> None: + async with pool.acquire() as connection: + users_repo = UsersRepository(connection) + profiles_repo = ProfilesRepository(connection) + articles_repo = ArticlesRepository(connection) + + for i in range(5): + user = await users_repo.create_user( + username=f"user-{i}", email=f"user-{i}@email.com", password="password" + ) + if i == 2: + await profiles_repo.add_user_into_followers( + target_user=user, requested_user=test_user + ) + + for j in range(5): + await articles_repo.create_article( + slug=f"slug-{i}-{j}", + title="tmp", + description="tmp", + body="tmp", + author=user, + tags=[f"tag-{i}-{j}"], + ) + + full_response = await authorized_client.get( + app.url_path_for("articles:get-user-feed-articles") + ) + full_articles = ListOfArticlesInResponse(**full_response.json()) + + response = await authorized_client.get( + app.url_path_for("articles:get-user-feed-articles"), + params={"limit": 2, "offset": 3}, + ) + + articles_from_response = ListOfArticlesInResponse(**response.json()) + assert full_articles.articles[3:] == articles_from_response.articles + + +async def test_article_will_contain_only_attached_tags( + app: FastAPI, authorized_client: AsyncClient, test_user: UserInDB, pool: Pool +) -> None: + attached_tags = ["tag1", "tag3"] + + async with pool.acquire() as connection: + articles_repo = ArticlesRepository(connection) + + await articles_repo.create_article( + slug=f"test-slug", + title="tmp", + description="tmp", + body="tmp", + author=test_user, + tags=attached_tags, + ) + + for i in range(5): + await 
articles_repo.create_article( + slug=f"slug-{i}", + title="tmp", + description="tmp", + body="tmp", + author=test_user, + tags=[f"tag-{i}"], + ) + + response = await authorized_client.get( + app.url_path_for("articles:get-article", slug="test-slug") + ) + article = ArticleInResponse(**response.json()) + assert len(article.article.tags) == len(attached_tags) + assert set(article.article.tags) == set(attached_tags) + + +@pytest.mark.parametrize( + "tag, result", (("", 7), ("tag1", 1), ("tag2", 2), ("wrong", 0)) +) +async def test_filtering_by_tags( + app: FastAPI, + authorized_client: AsyncClient, + test_user: UserInDB, + pool: Pool, + tag: str, + result: int, +) -> None: + async with pool.acquire() as connection: + articles_repo = ArticlesRepository(connection) + + await articles_repo.create_article( + slug=f"slug-1", + title="tmp", + description="tmp", + body="tmp", + author=test_user, + tags=["tag1", "tag2"], + ) + await articles_repo.create_article( + slug=f"slug-2", + title="tmp", + description="tmp", + body="tmp", + author=test_user, + tags=["tag2"], + ) + + for i in range(5, 10): + await articles_repo.create_article( + slug=f"slug-{i}", + title="tmp", + description="tmp", + body="tmp", + author=test_user, + tags=[f"tag-{i}"], + ) + + response = await authorized_client.get( + app.url_path_for("articles:list-articles"), params={"tag": tag} + ) + articles = ListOfArticlesInResponse(**response.json()) + assert articles.articles_count == result + + +@pytest.mark.parametrize( + "author, result", (("", 8), ("author1", 1), ("author2", 2), ("wrong", 0)) +) +async def test_filtering_by_authors( + app: FastAPI, + authorized_client: AsyncClient, + test_user: UserInDB, + pool: Pool, + author: str, + result: int, +) -> None: + async with pool.acquire() as connection: + users_repo = UsersRepository(connection) + articles_repo = ArticlesRepository(connection) + + author1 = await users_repo.create_user( + username="author1", email="author1@email.com", password="password" + ) + 
author2 = await users_repo.create_user( + username="author2", email="author2@email.com", password="password" + ) + + await articles_repo.create_article( + slug=f"slug-1", title="tmp", description="tmp", body="tmp", author=author1 + ) + await articles_repo.create_article( + slug=f"slug-2-1", title="tmp", description="tmp", body="tmp", author=author2 + ) + await articles_repo.create_article( + slug=f"slug-2-2", title="tmp", description="tmp", body="tmp", author=author2 + ) + + for i in range(5, 10): + await articles_repo.create_article( + slug=f"slug-{i}", + title="tmp", + description="tmp", + body="tmp", + author=test_user, + ) + + response = await authorized_client.get( + app.url_path_for("articles:list-articles"), params={"author": author} + ) + articles = ListOfArticlesInResponse(**response.json()) + assert articles.articles_count == result + + +@pytest.mark.parametrize( + "favorited, result", (("", 7), ("fan1", 1), ("fan2", 2), ("wrong", 0)) +) +async def test_filtering_by_favorited( + app: FastAPI, + authorized_client: AsyncClient, + test_user: UserInDB, + pool: Pool, + favorited: str, + result: int, +) -> None: + async with pool.acquire() as connection: + users_repo = UsersRepository(connection) + articles_repo = ArticlesRepository(connection) + + fan1 = await users_repo.create_user( + username="fan1", email="fan1@email.com", password="password" + ) + fan2 = await users_repo.create_user( + username="fan2", email="fan2@email.com", password="password" + ) + + article1 = await articles_repo.create_article( + slug=f"slug-1", title="tmp", description="tmp", body="tmp", author=test_user + ) + article2 = await articles_repo.create_article( + slug=f"slug-2", title="tmp", description="tmp", body="tmp", author=test_user + ) + + await articles_repo.add_article_into_favorites(article=article1, user=fan1) + await articles_repo.add_article_into_favorites(article=article1, user=fan2) + await articles_repo.add_article_into_favorites(article=article2, user=fan2) + + for i in 
range(5, 10): + await articles_repo.create_article( + slug=f"slug-{i}", + title="tmp", + description="tmp", + body="tmp", + author=test_user, + ) + + response = await authorized_client.get( + app.url_path_for("articles:list-articles"), params={"favorited": favorited} + ) + articles = ListOfArticlesInResponse(**response.json()) + assert articles.articles_count == result + + +async def test_filtering_with_limit_and_offset( + app: FastAPI, authorized_client: AsyncClient, test_user: UserInDB, pool: Pool +) -> None: + async with pool.acquire() as connection: + articles_repo = ArticlesRepository(connection) + + for i in range(5, 10): + await articles_repo.create_article( + slug=f"slug-{i}", + title="tmp", + description="tmp", + body="tmp", + author=test_user, + ) + + full_response = await authorized_client.get( + app.url_path_for("articles:list-articles") + ) + full_articles = ListOfArticlesInResponse(**full_response.json()) + + response = await authorized_client.get( + app.url_path_for("articles:list-articles"), params={"limit": 2, "offset": 3} + ) + + articles_from_response = ListOfArticlesInResponse(**response.json()) + assert full_articles.articles[3:] == articles_from_response.articles diff --git a/backend/tests/test_api/test_routes/test_authentication.py b/backend/tests/test_api/test_routes/test_authentication.py new file mode 100644 index 0000000..8298b82 --- /dev/null +++ b/backend/tests/test_api/test_routes/test_authentication.py @@ -0,0 +1,32 @@ +import pytest +from fastapi import FastAPI +from httpx import AsyncClient +from starlette.status import HTTP_403_FORBIDDEN + +from app.models.domain.users import User +from app.services.jwt import create_access_token_for_user + +pytestmark = pytest.mark.asyncio + + +async def test_unable_to_login_with_wrong_jwt_prefix( + app: FastAPI, client: AsyncClient, token: str +) -> None: + response = await client.get( + app.url_path_for("users:get-current-user"), + headers={"Authorization": f"WrongPrefix {token}"}, + ) + assert 
response.status_code == HTTP_403_FORBIDDEN + + +async def test_unable_to_login_when_user_does_not_exist_any_more( + app: FastAPI, client: AsyncClient, authorization_prefix: str +) -> None: + token = create_access_token_for_user( + User(username="user", email="email@email.com"), "secret" + ) + response = await client.get( + app.url_path_for("users:get-current-user"), + headers={"Authorization": f"{authorization_prefix} {token}"}, + ) + assert response.status_code == HTTP_403_FORBIDDEN diff --git a/backend/tests/test_api/test_routes/test_comments.py b/backend/tests/test_api/test_routes/test_comments.py new file mode 100644 index 0000000..d93813b --- /dev/null +++ b/backend/tests/test_api/test_routes/test_comments.py @@ -0,0 +1,96 @@ +import pytest +from asyncpg.pool import Pool +from fastapi import FastAPI +from httpx import AsyncClient +from starlette import status + +from app.db.repositories.comments import CommentsRepository +from app.db.repositories.users import UsersRepository +from app.models.domain.articles import Article +from app.models.schemas.comments import CommentInResponse, ListOfCommentsInResponse + +pytestmark = pytest.mark.asyncio + + +async def test_user_can_add_comment_for_article( + app: FastAPI, authorized_client: AsyncClient, test_article: Article +) -> None: + created_comment_response = await authorized_client.post( + app.url_path_for("comments:create-comment-for-article", slug=test_article.slug), + json={"comment": {"body": "comment"}}, + ) + + created_comment = CommentInResponse(**created_comment_response.json()) + + comments_for_article_response = await authorized_client.get( + app.url_path_for("comments:get-comments-for-article", slug=test_article.slug) + ) + + comments = ListOfCommentsInResponse(**comments_for_article_response.json()) + + assert created_comment.comment == comments.comments[0] + + +async def test_user_can_delete_own_comment( + app: FastAPI, authorized_client: AsyncClient, test_article: Article +) -> None: + 
created_comment_response = await authorized_client.post( + app.url_path_for("comments:create-comment-for-article", slug=test_article.slug), + json={"comment": {"body": "comment"}}, + ) + + created_comment = CommentInResponse(**created_comment_response.json()) + + await authorized_client.delete( + app.url_path_for( + "comments:delete-comment-from-article", + slug=test_article.slug, + comment_id=str(created_comment.comment.id_), + ) + ) + + comments_for_article_response = await authorized_client.get( + app.url_path_for("comments:get-comments-for-article", slug=test_article.slug) + ) + + comments = ListOfCommentsInResponse(**comments_for_article_response.json()) + + assert len(comments.comments) == 0 + + +async def test_user_can_not_delete_not_authored_comment( + app: FastAPI, authorized_client: AsyncClient, test_article: Article, pool: Pool +) -> None: + async with pool.acquire() as connection: + users_repo = UsersRepository(connection) + user = await users_repo.create_user( + username="test_author", email="author@email.com", password="password" + ) + comments_repo = CommentsRepository(connection) + comment = await comments_repo.create_comment_for_article( + body="tmp", article=test_article, user=user + ) + + forbidden_response = await authorized_client.delete( + app.url_path_for( + "comments:delete-comment-from-article", + slug=test_article.slug, + comment_id=str(comment.id_), + ) + ) + + assert forbidden_response.status_code == status.HTTP_403_FORBIDDEN + + +async def test_user_will_receive_error_for_not_existing_comment( + app: FastAPI, authorized_client: AsyncClient, test_article: Article +) -> None: + not_found_response = await authorized_client.delete( + app.url_path_for( + "comments:delete-comment-from-article", + slug=test_article.slug, + comment_id="1", + ) + ) + + assert not_found_response.status_code == status.HTTP_404_NOT_FOUND diff --git a/backend/tests/test_api/test_routes/test_login.py b/backend/tests/test_api/test_routes/test_login.py new file mode 
100644 index 0000000..c2f14d6 --- /dev/null +++ b/backend/tests/test_api/test_routes/test_login.py @@ -0,0 +1,34 @@ +import pytest +from fastapi import FastAPI +from httpx import AsyncClient +from starlette.status import HTTP_200_OK, HTTP_400_BAD_REQUEST + +from app.models.domain.users import UserInDB + +pytestmark = pytest.mark.asyncio + + +async def test_user_successful_login( + app: FastAPI, client: AsyncClient, test_user: UserInDB +) -> None: + login_json = {"user": {"email": "test@test.com", "password": "password"}} + + response = await client.post(app.url_path_for("auth:login"), json=login_json) + assert response.status_code == HTTP_200_OK + + +@pytest.mark.parametrize( + "credentials_part, credentials_value", + (("email", "wrong@test.com"), ("password", "wrong")), +) +async def test_user_login_when_credential_part_does_not_match( + app: FastAPI, + client: AsyncClient, + test_user: UserInDB, + credentials_part: str, + credentials_value: str, +) -> None: + login_json = {"user": {"email": "test@test.com", "password": "password"}} + login_json["user"][credentials_part] = credentials_value + response = await client.post(app.url_path_for("auth:login"), json=login_json) + assert response.status_code == HTTP_400_BAD_REQUEST diff --git a/backend/tests/test_api/test_routes/test_profiles.py b/backend/tests/test_api/test_routes/test_profiles.py new file mode 100644 index 0000000..e32298f --- /dev/null +++ b/backend/tests/test_api/test_routes/test_profiles.py @@ -0,0 +1,181 @@ +import pytest +from asyncpg.pool import Pool +from fastapi import FastAPI +from httpx import AsyncClient +from starlette import status + +from app.db.repositories.profiles import ProfilesRepository +from app.db.repositories.users import UsersRepository +from app.models.domain.users import UserInDB +from app.models.schemas.profiles import ProfileInResponse + +pytestmark = pytest.mark.asyncio + + +async def test_unregistered_user_will_receive_profile_without_following( + app: FastAPI, client: 
AsyncClient, test_user: UserInDB +) -> None: + response = await client.get( + app.url_path_for("profiles:get-profile", username=test_user.username) + ) + profile = ProfileInResponse(**response.json()) + assert profile.profile.username == test_user.username + assert not profile.profile.following + + +async def test_user_that_does_not_follows_another_will_receive_profile_without_follow( + app: FastAPI, authorized_client: AsyncClient, pool: Pool +) -> None: + async with pool.acquire() as conn: + users_repo = UsersRepository(conn) + user = await users_repo.create_user( + username="user_for_following", + email="test-for-following@email.com", + password="password", + ) + + response = await authorized_client.get( + app.url_path_for("profiles:get-profile", username=user.username) + ) + profile = ProfileInResponse(**response.json()) + assert profile.profile.username == user.username + assert not profile.profile.following + + +async def test_user_that_follows_another_will_receive_profile_with_follow( + app: FastAPI, authorized_client: AsyncClient, pool: Pool, test_user: UserInDB +) -> None: + async with pool.acquire() as conn: + users_repo = UsersRepository(conn) + user = await users_repo.create_user( + username="user_for_following", + email="test-for-following@email.com", + password="password", + ) + + profiles_repo = ProfilesRepository(conn) + await profiles_repo.add_user_into_followers( + target_user=user, requested_user=test_user + ) + + response = await authorized_client.get( + app.url_path_for("profiles:get-profile", username=user.username) + ) + profile = ProfileInResponse(**response.json()) + assert profile.profile.username == user.username + assert profile.profile.following + + +@pytest.mark.parametrize( + "api_method, route_name", + ( + ("GET", "profiles:get-profile"), + ("POST", "profiles:follow-user"), + ("DELETE", "profiles:unsubscribe-from-user"), + ), +) +async def test_user_can_not_retrieve_not_existing_profile( + app: FastAPI, authorized_client: AsyncClient, 
api_method: str, route_name: str +) -> None: + response = await authorized_client.request( + api_method, app.url_path_for(route_name, username="not_existing_user") + ) + assert response.status_code == status.HTTP_404_NOT_FOUND + + +@pytest.mark.parametrize( + "api_method, route_name, following", + ( + ("POST", "profiles:follow-user", True), + ("DELETE", "profiles:unsubscribe-from-user", False), + ), +) +async def test_user_can_change_following_for_another_user( + app: FastAPI, + authorized_client: AsyncClient, + pool: Pool, + test_user: UserInDB, + api_method: str, + route_name: str, + following: bool, +) -> None: + async with pool.acquire() as conn: + users_repo = UsersRepository(conn) + user = await users_repo.create_user( + username="user_for_following", + email="test-for-following@email.com", + password="password", + ) + + if not following: + profiles_repo = ProfilesRepository(conn) + await profiles_repo.add_user_into_followers( + target_user=user, requested_user=test_user + ) + + change_following_response = await authorized_client.request( + api_method, app.url_path_for(route_name, username=user.username) + ) + assert change_following_response.status_code == status.HTTP_200_OK + + response = await authorized_client.get( + app.url_path_for("profiles:get-profile", username=user.username) + ) + profile = ProfileInResponse(**response.json()) + assert profile.profile.username == user.username + assert profile.profile.following == following + + +@pytest.mark.parametrize( + "api_method, route_name, following", + ( + ("POST", "profiles:follow-user", True), + ("DELETE", "profiles:unsubscribe-from-user", False), + ), +) +async def test_user_can_not_change_following_state_to_the_same_twice( + app: FastAPI, + authorized_client: AsyncClient, + pool: Pool, + test_user: UserInDB, + api_method: str, + route_name: str, + following: bool, +) -> None: + async with pool.acquire() as conn: + users_repo = UsersRepository(conn) + user = await users_repo.create_user( + 
username="user_for_following", + email="test-for-following@email.com", + password="password", + ) + + if following: + profiles_repo = ProfilesRepository(conn) + await profiles_repo.add_user_into_followers( + target_user=user, requested_user=test_user + ) + + response = await authorized_client.request( + api_method, app.url_path_for(route_name, username=user.username) + ) + + assert response.status_code == status.HTTP_400_BAD_REQUEST + + +@pytest.mark.parametrize( + "api_method, route_name", + (("POST", "profiles:follow-user"), ("DELETE", "profiles:unsubscribe-from-user")), +) +async def test_user_can_not_change_following_state_for_him_self( + app: FastAPI, + authorized_client: AsyncClient, + test_user: UserInDB, + api_method: str, + route_name: str, +) -> None: + response = await authorized_client.request( + api_method, app.url_path_for(route_name, username=test_user.username) + ) + + assert response.status_code == status.HTTP_400_BAD_REQUEST diff --git a/backend/tests/test_api/test_routes/test_registration.py b/backend/tests/test_api/test_routes/test_registration.py new file mode 100644 index 0000000..89cdb83 --- /dev/null +++ b/backend/tests/test_api/test_routes/test_registration.py @@ -0,0 +1,56 @@ +import pytest +from asyncpg.pool import Pool +from fastapi import FastAPI +from httpx import AsyncClient +from starlette.status import HTTP_201_CREATED, HTTP_400_BAD_REQUEST + +from app.db.repositories.users import UsersRepository +from app.models.domain.users import UserInDB + +pytestmark = pytest.mark.asyncio + + +async def test_user_success_registration( + app: FastAPI, client: AsyncClient, pool: Pool +) -> None: + email, username, password = "test@test.com", "username", "password" + registration_json = { + "user": {"email": email, "username": username, "password": password} + } + response = await client.post( + app.url_path_for("auth:register"), json=registration_json + ) + assert response.status_code == HTTP_201_CREATED + + async with pool.acquire() as conn: + 
# NOTE(review): this span of the paste is a whitespace-mangled git diff — the
# commit's "+" line prefixes and all newlines were folded into a few very long
# lines, leaving the Python unparseable.  The body below reconstructs the test
# modules carried by that diff with their original formatting restored.  Each
# banner names the file the diff was creating.  Non-Python fragments at the
# tail of the span are preserved as comments so no content is lost.

# ----------------------------------------------------------------------------
# tail of backend/tests/test_api/test_routes/test_registration.py
# ----------------------------------------------------------------------------
# The enclosing test function begins before this chunk; its visible tail was:
#
#     repo = UsersRepository(conn)
#     user = await repo.get_user_by_email(email=email)
#     assert user.email == email
#     assert user.username == username
#     assert user.check_password(password)
#
# (left as a comment rather than guessed at — the def line is outside view)


@pytest.mark.parametrize(
    "credentials_part, credentials_value",
    # NOTE(review): these values presumably match the credentials owned by the
    # `test_user` fixture ("free-email@tset.com" may be deliberate fixture
    # data rather than a typo of "test.com") — confirm against conftest.
    (("username", "free_username"), ("email", "free-email@tset.com")),
)
async def test_failed_user_registration_when_some_credentials_are_taken(
    app: FastAPI,
    client: AsyncClient,
    test_user: UserInDB,
    credentials_part: str,
    credentials_value: str,
) -> None:
    """Registering with an already-taken username or email must return 400."""
    registration_json = {
        "user": {
            "email": "test@test.com",
            "username": "username",
            "password": "password",
        }
    }
    # Overwrite exactly one credential with the taken value under test.
    registration_json["user"][credentials_part] = credentials_value

    response = await client.post(
        app.url_path_for("auth:register"), json=registration_json
    )
    # HTTP_400_BAD_REQUEST is imported at the (truncated) top of this module.
    assert response.status_code == HTTP_400_BAD_REQUEST


# ============================================================================
# backend/tests/test_api/test_routes/test_tags.py  (new file in the diff)
# ============================================================================
import pytest
from asyncpg.pool import Pool
from fastapi import FastAPI
from httpx import AsyncClient

from app.db.repositories.tags import TagsRepository

pytestmark = pytest.mark.asyncio


async def test_empty_list_when_no_tags_exist(
    app: FastAPI, client: AsyncClient
) -> None:
    """With no tags in the database, the endpoint returns an empty envelope."""
    response = await client.get(app.url_path_for("tags:get-all"))
    assert response.json() == {"tags": []}


async def test_list_of_tags_when_tags_exist(
    app: FastAPI, client: AsyncClient, pool: Pool
) -> None:
    """Tags created via the repository come back deduplicated from the API."""
    tags = ["tag1", "tag2", "tag3", "tag4", "tag1"]  # "tag1" duplicated on purpose

    async with pool.acquire() as conn:
        tags_repo = TagsRepository(conn)
        await tags_repo.create_tags_that_dont_exist(tags=tags)

    response = await client.get(app.url_path_for("tags:get-all"))
    tags_from_response = response.json()["tags"]
    # The API must collapse the duplicate and return only known tags.
    assert len(tags_from_response) == len(set(tags))
    assert all(tag in tags for tag in tags_from_response)


# ============================================================================
# backend/tests/test_api/test_routes/test_users.py  (new file in the diff)
# ============================================================================
import pytest
from asyncpg.pool import Pool
from fastapi import FastAPI
from httpx import AsyncClient
from starlette import status

from app.db.repositories.users import UsersRepository
from app.models.domain.users import UserInDB
from app.models.schemas.users import UserInResponse

pytestmark = pytest.mark.asyncio


@pytest.fixture(params=("", "value", "Token value", "JWT value", "Bearer value"))
def wrong_authorization_header(request) -> str:
    """A selection of malformed/mis-schemed Authorization header values."""
    return request.param


@pytest.mark.parametrize(
    "api_method, route_name",
    (("GET", "users:get-current-user"), ("PUT", "users:update-current-user")),
)
async def test_user_can_not_access_own_profile_if_not_logged_in(
    app: FastAPI,
    client: AsyncClient,
    test_user: UserInDB,
    api_method: str,
    route_name: str,
) -> None:
    """Unauthenticated requests to the current-user routes are rejected."""
    response = await client.request(api_method, app.url_path_for(route_name))
    assert response.status_code == status.HTTP_403_FORBIDDEN


@pytest.mark.parametrize(
    "api_method, route_name",
    (("GET", "users:get-current-user"), ("PUT", "users:update-current-user")),
)
async def test_user_can_not_retrieve_own_profile_if_wrong_token(
    app: FastAPI,
    client: AsyncClient,
    test_user: UserInDB,
    api_method: str,
    route_name: str,
    wrong_authorization_header: str,
) -> None:
    """Malformed Authorization headers must also yield 403."""
    response = await client.request(
        api_method,
        app.url_path_for(route_name),
        headers={"Authorization": wrong_authorization_header},
    )
    assert response.status_code == status.HTTP_403_FORBIDDEN


async def test_user_can_retrieve_own_profile(
    app: FastAPI, authorized_client: AsyncClient, test_user: UserInDB, token: str
) -> None:
    """An authorized client gets back its own profile with the right email."""
    response = await authorized_client.get(
        app.url_path_for("users:get-current-user")
    )
    assert response.status_code == status.HTTP_200_OK

    user_profile = UserInResponse(**response.json())
    assert user_profile.user.email == test_user.email


@pytest.mark.parametrize(
    "update_field, update_value",
    (
        ("username", "new_username"),
        ("email", "new_email@email.com"),
        ("bio", "new bio"),
        ("image", "http://testhost.com/imageurl"),
    ),
)
async def test_user_can_update_own_profile(
    app: FastAPI,
    authorized_client: AsyncClient,
    test_user: UserInDB,
    token: str,
    update_value: str,
    update_field: str,
) -> None:
    """Each updatable profile field round-trips through the PUT endpoint."""
    response = await authorized_client.put(
        app.url_path_for("users:update-current-user"),
        json={"user": {update_field: update_value}},
    )
    assert response.status_code == status.HTTP_200_OK

    user_profile = UserInResponse(**response.json()).dict()
    assert user_profile["user"][update_field] == update_value


async def test_user_can_change_password(
    app: FastAPI,
    authorized_client: AsyncClient,
    test_user: UserInDB,
    token: str,
    pool: Pool,
) -> None:
    """Changing the password is persisted — verified straight from the DB."""
    response = await authorized_client.put(
        app.url_path_for("users:update-current-user"),
        json={"user": {"password": "new_password"}},
    )
    assert response.status_code == status.HTTP_200_OK
    user_profile = UserInResponse(**response.json())

    async with pool.acquire() as connection:
        users_repo = UsersRepository(connection)
        user = await users_repo.get_user_by_username(
            username=user_profile.user.username
        )

    assert user.check_password("new_password")


@pytest.mark.parametrize(
    "credentials_part, credentials_value",
    (("username", "taken_username"), ("email", "taken@email.com")),
)
async def test_user_can_not_take_already_used_credentials(
    app: FastAPI,
    authorized_client: AsyncClient,
    pool: Pool,
    token: str,
    credentials_part: str,
    credentials_value: str,
) -> None:
    """Updating to a username/email owned by another user must return 400."""
    # Seed a second user that owns the credential we will try to steal.
    user_dict = {
        "username": "not_taken_username",
        "password": "password",
        "email": "free_email@email.com",
    }
    user_dict.update({credentials_part: credentials_value})
    async with pool.acquire() as conn:
        users_repo = UsersRepository(conn)
        await users_repo.create_user(**user_dict)

    response = await authorized_client.put(
        app.url_path_for("users:update-current-user"),
        json={"user": {credentials_part: credentials_value}},
    )
    assert response.status_code == status.HTTP_400_BAD_REQUEST


# ============================================================================
# backend/tests/test_db/__init__.py                 (new, empty)
# backend/tests/test_db/test_queries/__init__.py    (new, empty)
# backend/tests/test_db/test_queries/test_tables.py (new file in the diff)
# ============================================================================
from app.db.queries.tables import TypedTable


def test_typed_table_uses_explicit_name() -> None:
    """An explicitly-named table renders its given name as SQL."""
    assert TypedTable("table_name").get_sql() == "table_name"


def test_typed_table_use_class_attribute_as_table_name() -> None:
    """`__table__` overrides the class name as the table name."""

    class NewTable(TypedTable):
        __table__ = "new_table"

    assert NewTable().get_table_name() == "new_table"


def test_typed_table_use_class_name_as_table_name() -> None:
    """Without `__table__`, the subclass name itself is the table name."""

    class NewTable(TypedTable):
        ...

    assert NewTable().get_table_name() == "NewTable"


# ============================================================================
# backend/tests/test_schemas/__init__.py     (new, empty)
# backend/tests/test_schemas/test_rw_model.py (new file in the diff)
# ============================================================================
from datetime import datetime

from app.models.domain.rwmodel import convert_datetime_to_realworld


def test_api_datetime_is_in_realworld_format() -> None:
    """Datetimes are serialized in the RealWorld API format (trailing 'Z')."""
    dt = datetime.fromisoformat("2019-10-27T02:21:42.844640")
    assert convert_datetime_to_realworld(dt) == "2019-10-27T02:21:42.844640Z"


# ============================================================================
# backend/tests/test_services/__init__.py  (new, empty)
# backend/tests/test_services/test_jwt.py  (new file in the diff)
# ============================================================================
from datetime import timedelta

import jwt
import pytest

from app.models.domain.users import UserInDB
from app.services.jwt import (
    ALGORITHM,
    create_access_token_for_user,
    create_jwt_token,
    get_username_from_token,
)


def test_creating_jwt_token() -> None:
    """A freshly-minted token decodes back to the payload it was given."""
    token = create_jwt_token(
        jwt_content={"content": "payload"},
        secret_key="secret",
        expires_delta=timedelta(minutes=1),
    )
    parsed_payload = jwt.decode(token, "secret", algorithms=[ALGORITHM])

    assert parsed_payload["content"] == "payload"


def test_creating_token_for_user(test_user: UserInDB) -> None:
    """User access tokens carry the username claim."""
    token = create_access_token_for_user(user=test_user, secret_key="secret")
    parsed_payload = jwt.decode(token, "secret", algorithms=[ALGORITHM])

    assert parsed_payload["username"] == test_user.username


def test_retrieving_token_from_user(test_user: UserInDB) -> None:
    """The username round-trips: token in, same username out."""
    token = create_access_token_for_user(user=test_user, secret_key="secret")
    username = get_username_from_token(token, "secret")
    assert username == test_user.username


def test_error_when_wrong_token() -> None:
    """Garbage tokens raise ValueError rather than leaking jwt exceptions."""
    with pytest.raises(ValueError):
        get_username_from_token("asdf", "asdf")


def test_error_when_wrong_token_shape() -> None:
    """A valid token missing the username claim is also rejected."""
    token = create_jwt_token(
        jwt_content={"content": "payload"},
        secret_key="secret",
        expires_delta=timedelta(minutes=1),
    )
    with pytest.raises(ValueError):
        get_username_from_token(token, "secret")


# ----------------------------------------------------------------------------
# Remaining (non-Python) diff content in this span, preserved verbatim:
#
# frontend/.gitignore (new file):
#     # Nuxt dev/build outputs
#     .output
#     .data
#     .nuxt
#     .nitro
#     .cache
#     dist
#
#     # Node dependencies
#     node_modules
#
#     # Logs
#     logs
#     *.log
#
#     # Misc
#     .DS_Store
#     .fleet
#     .idea
#
#     # Local env files
#     .env
#     .env.*
#     !.env.example
#
# frontend/.yarn/install-state.gz  -- new binary file (no text content)
#
# frontend/.yarnrc.yml (new file):
#     nodeLinker: node-modules
#
# frontend/README.md begins here (new file); its body continues in the next
# chunk of the paste:
#     # Nuxt Minimal Starter
#
#     Look at the [Nuxt documentation](https://nuxt.com/docs/getting-started/introduction) to learn more.
# ----------------------------------------------------------------------------
+ +## Setup + +Make sure to install dependencies: + +```bash +# npm +npm install + +# pnpm +pnpm install + +# yarn +yarn install + +# bun +bun install +``` + +## Development Server + +Start the development server on `http://localhost:3000`: + +```bash +# npm +npm run dev + +# pnpm +pnpm dev + +# yarn +yarn dev + +# bun +bun run dev +``` + +## Production + +Build the application for production: + +```bash +# npm +npm run build + +# pnpm +pnpm build + +# yarn +yarn build + +# bun +bun run build +``` + +Locally preview production build: + +```bash +# npm +npm run preview + +# pnpm +pnpm preview + +# yarn +yarn preview + +# bun +bun run preview +``` + +Check out the [deployment documentation](https://nuxt.com/docs/getting-started/deployment) for more information. diff --git a/frontend/app/app.vue b/frontend/app/app.vue new file mode 100644 index 0000000..3418bd3 --- /dev/null +++ b/frontend/app/app.vue @@ -0,0 +1,87 @@ + + + + + diff --git a/frontend/app/assets/background/home-banner-1.png b/frontend/app/assets/background/home-banner-1.png new file mode 100644 index 0000000..979a54d Binary files /dev/null and b/frontend/app/assets/background/home-banner-1.png differ diff --git a/frontend/app/assets/background/home-banner.png b/frontend/app/assets/background/home-banner.png new file mode 100644 index 0000000..e1064b9 Binary files /dev/null and b/frontend/app/assets/background/home-banner.png differ diff --git a/frontend/app/assets/login/login-bg.jpg b/frontend/app/assets/login/login-bg.jpg new file mode 100644 index 0000000..a386f81 Binary files /dev/null and b/frontend/app/assets/login/login-bg.jpg differ diff --git a/frontend/app/assets/logo.png b/frontend/app/assets/logo.png new file mode 100644 index 0000000..9caf16a Binary files /dev/null and b/frontend/app/assets/logo.png differ diff --git a/frontend/app/assets/logoleft.png b/frontend/app/assets/logoleft.png new file mode 100644 index 0000000..595574e Binary files /dev/null and b/frontend/app/assets/logoleft.png 
differ diff --git a/frontend/app/components/AuthCard.vue b/frontend/app/components/AuthCard.vue new file mode 100644 index 0000000..7324e87 --- /dev/null +++ b/frontend/app/components/AuthCard.vue @@ -0,0 +1,373 @@ + + + + + diff --git a/frontend/app/components/ConfirmDialog.vue b/frontend/app/components/ConfirmDialog.vue new file mode 100644 index 0000000..94ca69f --- /dev/null +++ b/frontend/app/components/ConfirmDialog.vue @@ -0,0 +1,326 @@ + + + + + + diff --git a/frontend/app/components/FlashBanner.vue b/frontend/app/components/FlashBanner.vue new file mode 100644 index 0000000..ba1da27 --- /dev/null +++ b/frontend/app/components/FlashBanner.vue @@ -0,0 +1,51 @@ + + + + + diff --git a/frontend/app/components/RichEditor.vue b/frontend/app/components/RichEditor.vue new file mode 100644 index 0000000..be46552 --- /dev/null +++ b/frontend/app/components/RichEditor.vue @@ -0,0 +1,392 @@ + + + + + diff --git a/frontend/app/components/RightDrawer.vue b/frontend/app/components/RightDrawer.vue new file mode 100644 index 0000000..59f6dcd --- /dev/null +++ b/frontend/app/components/RightDrawer.vue @@ -0,0 +1,794 @@ + +