forked from HSE_team/BetterCallPraskovia
finalised backend
This commit is contained in:
parent
036741c0bf
commit
c20626c179
115
backend/alembic.ini
Normal file
115
backend/alembic.ini
Normal file
@ -0,0 +1,115 @@
|
||||
# A generic, single database configuration.

[alembic]
# path to migration scripts
script_location = alembic

# template used to generate migration file names; The default value is %%(rev)s_%%(slug)s
# Uncomment the line below if you want the files to be prepended with date and time
# file_template = %%(year)d_%%(month).2d_%%(day).2d_%%(hour).2d%%(minute).2d-%%(rev)s_%%(slug)s

# sys.path path, will be prepended to sys.path if present.
# defaults to the current working directory.
prepend_sys_path = .

# timezone to use when rendering the date within the migration file
# as well as the filename.
# If specified, requires the python-dateutil library that can be
# installed by adding `alembic[tz]` to the pip requirements
# string value is passed to dateutil.tz.gettz()
# leave blank for localtime
# timezone =

# max length of characters to apply to the
# "slug" field
# truncate_slug_length = 40

# set to 'true' to run the environment during
# the 'revision' command, regardless of autogenerate
# revision_environment = false

# set to 'true' to allow .pyc and .pyo files without
# a source .py file to be detected as revisions in the
# versions/ directory
# sourceless = false

# version location specification; This defaults
# to alembic/versions. When using multiple version
# directories, initial revisions must be specified with --version-path.
# The path separator used here should be the separator specified by "version_path_separator" below.
# version_locations = %(here)s/bar:%(here)s/bat:alembic/versions

# version path separator; As mentioned above, this is the character used to split
# version_locations. The default within new alembic.ini files is "os", which uses os.pathsep.
# If this key is omitted entirely, it falls back to the legacy behavior of splitting on spaces and/or commas.
# Valid values for version_path_separator are:
#
# version_path_separator = :
# version_path_separator = ;
# version_path_separator = space
version_path_separator = os # Use os.pathsep. Default configuration used for new projects.

# set to 'true' to search source files recursively
# in each "version_locations" directory
# new in Alembic version 1.10
# recursive_version_locations = false

# the output encoding used when revision files
# are written from script.py.mako
# output_encoding = utf-8

# NOTE: placeholder only — alembic/env.py overwrites this URL at runtime
# from the application's settings.database_url (with the asyncpg driver).
sqlalchemy.url = driver://user:pass@localhost/dbname


[post_write_hooks]
# post_write_hooks defines scripts or Python functions that are run
# on newly generated revision scripts. See the documentation for further
# detail and examples

# format using "black" - use the console_scripts runner, against the "black" entrypoint
# hooks = black
# black.type = console_scripts
# black.entrypoint = black
# black.options = -l 79 REVISION_SCRIPT_FILENAME

# lint with attempts to fix using "ruff" - use the exec runner, execute a binary
# hooks = ruff
# ruff.type = exec
# ruff.executable = %(here)s/.venv/bin/ruff
# ruff.options = --fix REVISION_SCRIPT_FILENAME

# Logging configuration
[loggers]
keys = root,sqlalchemy,alembic

[handlers]
keys = console

[formatters]
keys = generic

[logger_root]
level = WARN
handlers = console
qualname =

[logger_sqlalchemy]
level = WARN
handlers =
qualname = sqlalchemy.engine

[logger_alembic]
level = INFO
handlers =
qualname = alembic

[handler_console]
class = StreamHandler
args = (sys.stderr,)
level = NOTSET
formatter = generic

[formatter_generic]
format = %(levelname)-5.5s [%(name)s] %(message)s
datefmt = %H:%M:%S
|
||||
|
||||
85
backend/alembic/env.py
Normal file
85
backend/alembic/env.py
Normal file
@ -0,0 +1,85 @@
|
||||
"""
Alembic environment configuration.
"""
from logging.config import fileConfig
from sqlalchemy import pool
from sqlalchemy.engine import Connection
from sqlalchemy.ext.asyncio import async_engine_from_config
from alembic import context
import sys
from pathlib import Path

# Make the backend package root importable so the `src.*` imports below
# resolve when Alembic is invoked from the `backend/` directory.
sys.path.insert(0, str(Path(__file__).resolve().parents[1]))

# The models import is for its side effect: each model registers itself on
# Base.metadata, which `alembic revision --autogenerate` inspects.
from src.infrastructure.database.base import Base
from src.infrastructure.database.models import *
from src.shared.config import settings

# Alembic Config object, provides access to values in alembic.ini.
config = context.config

# Configure Python logging from the ini file, if one was supplied.
if config.config_file_name is not None:
    fileConfig(config.config_file_name)

# Override the placeholder URL in alembic.ini with the application's
# database URL, forcing the asyncpg driver for async migrations.
config.set_main_option("sqlalchemy.url", settings.database_url.replace("postgresql://", "postgresql+asyncpg://"))

# Metadata that autogenerate diffs against the live database.
target_metadata = Base.metadata
|
||||
|
||||
|
||||
def run_migrations_offline() -> None:
    """Run migrations in 'offline' mode.

    The context is configured with only a database URL — no Engine and
    therefore no DBAPI is required. Calls to context.execute() emit the
    generated SQL to the script output instead of a live connection.
    """
    context.configure(
        url=config.get_main_option("sqlalchemy.url"),
        target_metadata=target_metadata,
        literal_binds=True,
        dialect_opts={"paramstyle": "named"},
    )

    with context.begin_transaction():
        context.run_migrations()
|
||||
|
||||
|
||||
def do_run_migrations(connection: Connection) -> None:
    """Bind the Alembic context to an open connection and run migrations."""
    context.configure(
        connection=connection,
        target_metadata=target_metadata,
    )

    with context.begin_transaction():
        context.run_migrations()
|
||||
|
||||
|
||||
async def run_migrations_online() -> None:
    """Run migrations in 'online' mode.

    Builds an async Engine from the ini-file section and runs the
    migrations through a real database connection.
    """
    # get_section() returns None when the section is absent; default to an
    # empty dict so the URL assignment below cannot raise TypeError.
    configuration = config.get_section(config.config_ini_section, {})
    # Force the asyncpg driver, mirroring the module-level set_main_option.
    configuration["sqlalchemy.url"] = settings.database_url.replace("postgresql://", "postgresql+asyncpg://")
    connectable = async_engine_from_config(
        configuration,
        prefix="sqlalchemy.",
        poolclass=pool.NullPool,
    )

    async with connectable.connect() as connection:
        # Alembic's migration machinery is sync; run it on the async
        # connection via run_sync.
        await connection.run_sync(do_run_migrations)

    await connectable.dispose()
|
||||
|
||||
|
||||
# Entry point: Alembic sets offline/online mode from the command line.
if context.is_offline_mode():
    run_migrations_offline()
else:
    # asyncio is only needed on the online (async engine) path, hence the
    # lazy import.
    import asyncio
    asyncio.run(run_migrations_online())
|
||||
|
||||
25
backend/alembic/script.py.mako
Normal file
25
backend/alembic/script.py.mako
Normal file
@ -0,0 +1,25 @@
|
||||
## Mako template rendered by Alembic for every new revision file.
## Lines starting with '##' are template comments and are not rendered.
"""${message}

Revision ID: ${up_revision}
Revises: ${down_revision | comma,n}
Create Date: ${create_date}

"""
from alembic import op
import sqlalchemy as sa
${imports if imports else ""}

# revision identifiers, used by Alembic.
revision = ${repr(up_revision)}
down_revision = ${repr(down_revision)}
branch_labels = ${repr(branch_labels)}
depends_on = ${repr(depends_on)}


def upgrade() -> None:
    ${upgrades if upgrades else "pass"}


def downgrade() -> None:
    ${downgrades if downgrades else "pass"}
|
||||
|
||||
111
backend/alembic/versions/001_initial_migration.py
Normal file
111
backend/alembic/versions/001_initial_migration.py
Normal file
@ -0,0 +1,111 @@
|
||||
"""Initial migration

Revision ID: 001
Revises:
Create Date: 2024-01-01 00:00:00.000000

"""
from alembic import op
import sqlalchemy as sa
from sqlalchemy.dialects import postgresql

# revision identifiers, used by Alembic.
revision = '001'
down_revision = None  # first migration in the chain
branch_labels = None
depends_on = None
|
||||
|
||||
|
||||
def upgrade() -> None:
    """Create the initial schema: users, collections, documents,
    embeddings, conversations, messages, collection_access.

    Tables are created parents-first so foreign keys resolve.
    """
    # Users, keyed by Telegram account (unique + indexed for lookups).
    op.create_table(
        'users',
        sa.Column('user_id', postgresql.UUID(as_uuid=True), nullable=False),
        sa.Column('telegram_id', sa.String(), nullable=False),
        sa.Column('role', sa.String(), nullable=False),
        sa.Column('created_at', sa.DateTime(), nullable=False),
        sa.PrimaryKeyConstraint('user_id'),
        sa.UniqueConstraint('telegram_id')
    )
    op.create_index(op.f('ix_users_telegram_id'), 'users', ['telegram_id'], unique=True)

    # Document collections owned by a user.
    op.create_table(
        'collections',
        sa.Column('collection_id', postgresql.UUID(as_uuid=True), nullable=False),
        sa.Column('name', sa.String(), nullable=False),
        sa.Column('description', sa.Text(), nullable=True),
        sa.Column('owner_id', postgresql.UUID(as_uuid=True), nullable=False),
        sa.Column('is_public', sa.Boolean(), nullable=False),
        sa.Column('created_at', sa.DateTime(), nullable=False),
        sa.ForeignKeyConstraint(['owner_id'], ['users.user_id'], ),
        sa.PrimaryKeyConstraint('collection_id')
    )

    # Documents belonging to a collection.
    # NOTE(review): 'metadata' is a reserved attribute name on SQLAlchemy
    # declarative models, so the ORM side must map this column under a
    # different attribute name — confirm against the models module.
    op.create_table(
        'documents',
        sa.Column('document_id', postgresql.UUID(as_uuid=True), nullable=False),
        sa.Column('collection_id', postgresql.UUID(as_uuid=True), nullable=False),
        sa.Column('title', sa.String(), nullable=False),
        sa.Column('content', sa.Text(), nullable=False),
        sa.Column('metadata', postgresql.JSON(astext_type=sa.Text()), nullable=True),
        sa.Column('created_at', sa.DateTime(), nullable=False),
        sa.ForeignKeyConstraint(['collection_id'], ['collections.collection_id'], ),
        sa.PrimaryKeyConstraint('document_id')
    )

    # Per-document embedding vectors, stored as JSON.
    op.create_table(
        'embeddings',
        sa.Column('embedding_id', postgresql.UUID(as_uuid=True), nullable=False),
        sa.Column('document_id', postgresql.UUID(as_uuid=True), nullable=False),
        sa.Column('embedding', postgresql.JSON(astext_type=sa.Text()), nullable=True),
        sa.Column('model_version', sa.String(), nullable=True),
        sa.Column('created_at', sa.DateTime(), nullable=False),
        sa.ForeignKeyConstraint(['document_id'], ['documents.document_id'], ),
        sa.PrimaryKeyConstraint('embedding_id')
    )

    # A user's chat session against one collection.
    op.create_table(
        'conversations',
        sa.Column('conversation_id', postgresql.UUID(as_uuid=True), nullable=False),
        sa.Column('user_id', postgresql.UUID(as_uuid=True), nullable=False),
        sa.Column('collection_id', postgresql.UUID(as_uuid=True), nullable=False),
        sa.Column('created_at', sa.DateTime(), nullable=False),
        sa.Column('updated_at', sa.DateTime(), nullable=False),
        sa.ForeignKeyConstraint(['collection_id'], ['collections.collection_id'], ),
        sa.ForeignKeyConstraint(['user_id'], ['users.user_id'], ),
        sa.PrimaryKeyConstraint('conversation_id')
    )

    # Individual messages in a conversation; 'sources' holds the cited
    # documents as JSON.
    op.create_table(
        'messages',
        sa.Column('message_id', postgresql.UUID(as_uuid=True), nullable=False),
        sa.Column('conversation_id', postgresql.UUID(as_uuid=True), nullable=False),
        sa.Column('content', sa.Text(), nullable=False),
        sa.Column('role', sa.String(), nullable=False),
        sa.Column('sources', postgresql.JSON(astext_type=sa.Text()), nullable=True),
        sa.Column('created_at', sa.DateTime(), nullable=False),
        sa.ForeignKeyConstraint(['conversation_id'], ['conversations.conversation_id'], ),
        sa.PrimaryKeyConstraint('message_id')
    )

    # Sharing table: which users may access which collections; one grant
    # per (user, collection) pair.
    op.create_table(
        'collection_access',
        sa.Column('access_id', postgresql.UUID(as_uuid=True), nullable=False),
        sa.Column('user_id', postgresql.UUID(as_uuid=True), nullable=False),
        sa.Column('collection_id', postgresql.UUID(as_uuid=True), nullable=False),
        sa.Column('created_at', sa.DateTime(), nullable=False),
        sa.ForeignKeyConstraint(['collection_id'], ['collections.collection_id'], ),
        sa.ForeignKeyConstraint(['user_id'], ['users.user_id'], ),
        sa.PrimaryKeyConstraint('access_id'),
        sa.UniqueConstraint('user_id', 'collection_id', name='uq_user_collection')
    )
|
||||
|
||||
|
||||
def downgrade() -> None:
    """Drop every table created by upgrade().

    Drops run in reverse creation order so foreign-key dependents are
    removed before the tables they reference.
    """
    op.drop_table('collection_access')
    op.drop_table('messages')
    op.drop_table('conversations')
    op.drop_table('embeddings')
    op.drop_table('documents')
    op.drop_table('collections')
    op.drop_index(op.f('ix_users_telegram_id'), table_name='users')
    op.drop_table('users')
|
||||
|
||||
83
backend/src/presentation/main.py
Normal file
83
backend/src/presentation/main.py
Normal file
@ -0,0 +1,83 @@
|
||||
"""
FastAPI application entry point.
"""
import sys
import os  # NOTE(review): appears unused in this module — confirm before removing

# Ensure the in-container app root is on sys.path so `src.*` imports
# resolve when the module is launched directly (e.g. in the Docker image).
if '/app' not in sys.path:
    sys.path.insert(0, '/app')

from fastapi import FastAPI
from fastapi.middleware.cors import CORSMiddleware
from contextlib import asynccontextmanager
from dishka.integrations.fastapi import setup_dishka
from dishka import Container
from src.shared.config import settings
from src.shared.exceptions import LawyerAIException
from src.shared.di_container import create_container
from src.presentation.middleware.error_handler import exception_handler
from src.presentation.api.v1 import users, collections, documents, conversations, messages
from src.infrastructure.database.base import engine, Base
|
||||
|
||||
|
||||
@asynccontextmanager
async def lifespan(app: FastAPI):
    """Application lifespan.

    Startup: builds the DI container, wires it into FastAPI, and performs a
    best-effort creation of any missing database tables.
    Shutdown: closes the container and disposes the engine — now guaranteed
    via try/finally even when shutdown is triggered by an error.
    """
    import logging  # local import: the module has no logging import at top level
    logger = logging.getLogger(__name__)

    container = create_container()
    setup_dishka(container, app)
    try:
        # Best-effort schema creation for environments that do not run
        # Alembic migrations; existing tables are left untouched.
        async with engine.begin() as conn:
            await conn.run_sync(Base.metadata.create_all)
    except Exception as e:
        # Deliberately non-fatal (the schema may already exist or be managed
        # by migrations), but routed through logging instead of print().
        logger.warning("Примечание при создании таблиц: %s", e)
    try:
        yield
    finally:
        # Release DI and DB resources even if shutdown raised.
        await container.close()
        await engine.dispose()
|
||||
|
||||
|
||||
# Application instance; `lifespan` handles DI wiring and DB setup/teardown.
app = FastAPI(
    title=settings.APP_NAME,
    description="API для системы ИИ-юриста",
    version="1.0.0",
    lifespan=lifespan
)

# CORS: allowed origins come from settings; methods and headers are open.
app.add_middleware(
    CORSMiddleware,
    allow_origins=settings.CORS_ORIGINS,
    allow_credentials=True,
    allow_methods=["*"],
    allow_headers=["*"],
)

# All domain exceptions funnel through a single JSON error handler.
app.add_exception_handler(LawyerAIException, exception_handler)

# Versioned API routers.
app.include_router(users.router, prefix="/api/v1")
app.include_router(collections.router, prefix="/api/v1")
app.include_router(documents.router, prefix="/api/v1")
app.include_router(conversations.router, prefix="/api/v1")
app.include_router(messages.router, prefix="/api/v1")

# The admin router is optional: the application still starts without it.
try:
    from src.presentation.api.v1 import admin
    app.include_router(admin.router, prefix="/api/v1")
except (ImportError, AttributeError) as e:
    print(f"Админ-панель не загружена: {e}")
    pass
|
||||
|
||||
|
||||
@app.get("/")
async def root():
    """Root endpoint: basic API metadata for quick smoke checks."""
    payload = {
        "message": "Добро пожаловать в API ИИ-юриста",
        "version": "1.0.0",
        "docs": "/docs",
    }
    return payload
|
||||
|
||||
|
||||
@app.get("/health")
async def health_check():
    """Liveness-probe endpoint."""
    return dict(status="ok")
|
||||
|
||||
4
backend/src/presentation/middleware/__init__.py
Normal file
4
backend/src/presentation/middleware/__init__.py
Normal file
@ -0,0 +1,4 @@
|
||||
"""
Presentation-layer middleware package (authentication, error handling).
"""
|
||||
|
||||
50
backend/src/presentation/middleware/auth_middleware.py
Normal file
50
backend/src/presentation/middleware/auth_middleware.py
Normal file
@ -0,0 +1,50 @@
|
||||
"""
Authorization middleware for the Telegram bot.

Authorization for the Telegram bot is deliberately simple:
1. The bot receives the user_id from Telegram with every message
2. The bot forwards that user_id to the backend in the X-Telegram-ID header
3. The backend identifies the user by telegram_id
"""
from fastapi import HTTPException, Request
from src.domain.entities.user import User
from src.domain.repositories.user_repository import IUserRepository
from src.domain.entities.user import UserRole
|
||||
|
||||
|
||||
async def get_current_user(
    request: Request,
    user_repo: IUserRepository
) -> User:
    """Resolve the current user from the ``X-Telegram-ID`` header.

    The Telegram bot forwards the Telegram user id in that header; a
    previously unseen id gets a fresh ``UserRole.USER`` account on first
    contact.

    NOTE(review): the header is trusted as-is — this assumes only the bot
    can reach the backend; confirm network-level isolation.

    Args:
        request: Incoming FastAPI request.
        user_repo: User repository used for lookup and creation.

    Returns:
        The existing or newly created user.

    Raises:
        HTTPException: 401 when the header is missing or empty.
    """
    telegram_id = request.headers.get("X-Telegram-ID")
    if not telegram_id:
        raise HTTPException(
            status_code=401,
            detail="Не указан X-Telegram-ID. Бот должен передавать user_id в заголовке."
        )

    existing = await user_repo.get_by_telegram_id(telegram_id)
    if existing:
        return existing

    # First contact: register the Telegram account with the default role.
    return await user_repo.create(User(telegram_id=telegram_id, role=UserRole.USER))
|
||||
|
||||
36
backend/src/presentation/middleware/error_handler.py
Normal file
36
backend/src/presentation/middleware/error_handler.py
Normal file
@ -0,0 +1,36 @@
|
||||
"""
Error handler for FastAPI.
"""
from fastapi import Request, status
from fastapi.responses import JSONResponse
from src.shared.exceptions import (
    LawyerAIException,
    NotFoundError,
    UnauthorizedError,
    ForbiddenError,
    ValidationError,
    DatabaseError
)
|
||||
|
||||
|
||||
async def exception_handler(request: Request, exc: LawyerAIException) -> JSONResponse:
    """Translate domain exceptions into JSON error responses.

    The first matching entry in the ordered table wins, mirroring an
    if/elif isinstance chain; anything unmatched falls back to 500.
    """
    status_by_type = (
        (NotFoundError, status.HTTP_404_NOT_FOUND),
        (UnauthorizedError, status.HTTP_401_UNAUTHORIZED),
        (ForbiddenError, status.HTTP_403_FORBIDDEN),
        (ValidationError, status.HTTP_400_BAD_REQUEST),
        (DatabaseError, status.HTTP_500_INTERNAL_SERVER_ERROR),
    )

    status_code = status.HTTP_500_INTERNAL_SERVER_ERROR
    for exc_type, code in status_by_type:
        if isinstance(exc, exc_type):
            status_code = code
            break

    return JSONResponse(
        status_code=status_code,
        content={"detail": str(exc), "type": exc.__class__.__name__}
    )
|
||||
|
||||
143
backend/src/shared/di_container.py
Normal file
143
backend/src/shared/di_container.py
Normal file
@ -0,0 +1,143 @@
|
||||
"""
|
||||
DI контейнер на основе dishka
|
||||
"""
|
||||
from contextlib import asynccontextmanager
from typing import AsyncIterable

from dishka import Container, Provider, Scope, provide
from fastapi import Request
from sqlalchemy.ext.asyncio import AsyncSession

from src.application.services.document_parser_service import DocumentParserService
from src.application.use_cases.collection_use_cases import CollectionUseCases
from src.application.use_cases.conversation_use_cases import ConversationUseCases
from src.application.use_cases.document_use_cases import DocumentUseCases
from src.application.use_cases.message_use_cases import MessageUseCases
from src.application.use_cases.user_use_cases import UserUseCases
from src.domain.entities.user import User
from src.domain.repositories.collection_access_repository import ICollectionAccessRepository
from src.domain.repositories.collection_repository import ICollectionRepository
from src.domain.repositories.conversation_repository import IConversationRepository
from src.domain.repositories.document_repository import IDocumentRepository
from src.domain.repositories.message_repository import IMessageRepository
from src.domain.repositories.user_repository import IUserRepository
from src.infrastructure.database.base import AsyncSessionLocal
from src.infrastructure.external.deepseek_client import DeepSeekClient
from src.infrastructure.external.yandex_ocr import YandexOCRService
from src.infrastructure.repositories.postgresql.collection_access_repository import PostgreSQLCollectionAccessRepository
from src.infrastructure.repositories.postgresql.collection_repository import PostgreSQLCollectionRepository
from src.infrastructure.repositories.postgresql.conversation_repository import PostgreSQLConversationRepository
from src.infrastructure.repositories.postgresql.document_repository import PostgreSQLDocumentRepository
from src.infrastructure.repositories.postgresql.message_repository import PostgreSQLMessageRepository
from src.infrastructure.repositories.postgresql.user_repository import PostgreSQLUserRepository
|
||||
|
||||
|
||||
class DatabaseProvider(Provider):
    """Provides the request-scoped SQLAlchemy AsyncSession."""

    @provide(scope=Scope.REQUEST)
    async def get_db(self) -> AsyncIterable[AsyncSession]:
        # dishka treats an async-generator provider annotated as
        # AsyncIterable[T] as a context-managed dependency: the session is
        # opened when the request scope starts and closed by the
        # `async with` when the scope exits. The previous
        # @asynccontextmanager wrapper made the provider return a context
        # manager object instead of the session itself, and the extra
        # session.close() in `finally` duplicated what `async with` does.
        async with AsyncSessionLocal() as session:
            yield session
|
||||
|
||||
|
||||
class RepositoryProvider(Provider):
    """Request-scoped bindings from repository interfaces to their
    PostgreSQL implementations; each factory receives the per-request
    AsyncSession from DatabaseProvider.

    NOTE: dishka derives the bindings from the exact signatures and return
    annotations of these methods — keep them in sync with the interfaces.
    """

    @provide(scope=Scope.REQUEST)
    def get_user_repository(self, session: AsyncSession) -> IUserRepository:
        return PostgreSQLUserRepository(session)

    @provide(scope=Scope.REQUEST)
    def get_collection_repository(self, session: AsyncSession) -> ICollectionRepository:
        return PostgreSQLCollectionRepository(session)

    @provide(scope=Scope.REQUEST)
    def get_document_repository(self, session: AsyncSession) -> IDocumentRepository:
        return PostgreSQLDocumentRepository(session)

    @provide(scope=Scope.REQUEST)
    def get_conversation_repository(self, session: AsyncSession) -> IConversationRepository:
        return PostgreSQLConversationRepository(session)

    @provide(scope=Scope.REQUEST)
    def get_message_repository(self, session: AsyncSession) -> IMessageRepository:
        return PostgreSQLMessageRepository(session)

    @provide(scope=Scope.REQUEST)
    def get_collection_access_repository(self, session: AsyncSession) -> ICollectionAccessRepository:
        return PostgreSQLCollectionAccessRepository(session)
|
||||
|
||||
|
||||
class ServiceProvider(Provider):
    """App-scoped (singleton) external services: one shared instance of
    each client for the whole application lifetime."""

    @provide(scope=Scope.APP)
    def get_ocr_service(self) -> YandexOCRService:
        return YandexOCRService()

    @provide(scope=Scope.APP)
    def get_deepseek_client(self) -> DeepSeekClient:
        return DeepSeekClient()

    @provide(scope=Scope.APP)
    def get_parser_service(self, ocr_service: YandexOCRService) -> DocumentParserService:
        # Parser depends on the OCR service for image/scan extraction.
        return DocumentParserService(ocr_service)
|
||||
|
||||
|
||||
class AuthProvider(Provider):
    """Resolves the current User from the incoming HTTP request."""

    @provide(scope=Scope.REQUEST)
    async def get_current_user(self, request: Request, user_repo: IUserRepository) -> User:
        # Imported lazily to avoid a circular import between this DI module
        # and the presentation-layer middleware.
        from src.presentation.middleware.auth_middleware import get_current_user
        return await get_current_user(request, user_repo)
|
||||
|
||||
|
||||
class UseCaseProvider(Provider):
    """Request-scoped application use cases, assembled from the
    repositories and services provided above."""

    @provide(scope=Scope.REQUEST)
    def get_user_use_cases(
        self,
        user_repo: IUserRepository
    ) -> UserUseCases:
        return UserUseCases(user_repo)

    @provide(scope=Scope.REQUEST)
    def get_collection_use_cases(
        self,
        collection_repo: ICollectionRepository,
        access_repo: ICollectionAccessRepository,
        user_repo: IUserRepository
    ) -> CollectionUseCases:
        return CollectionUseCases(collection_repo, access_repo, user_repo)

    @provide(scope=Scope.REQUEST)
    def get_document_use_cases(
        self,
        document_repo: IDocumentRepository,
        collection_repo: ICollectionRepository,
        parser_service: DocumentParserService
    ) -> DocumentUseCases:
        return DocumentUseCases(document_repo, collection_repo, parser_service)

    @provide(scope=Scope.REQUEST)
    def get_conversation_use_cases(
        self,
        conversation_repo: IConversationRepository,
        collection_repo: ICollectionRepository,
        access_repo: ICollectionAccessRepository
    ) -> ConversationUseCases:
        return ConversationUseCases(conversation_repo, collection_repo, access_repo)

    @provide(scope=Scope.REQUEST)
    def get_message_use_cases(
        self,
        message_repo: IMessageRepository,
        conversation_repo: IConversationRepository
    ) -> MessageUseCases:
        return MessageUseCases(message_repo, conversation_repo)
|
||||
|
||||
|
||||
def create_container() -> "AsyncContainer":
    """Assemble the dishka container with all application providers.

    NOTE(review): dishka exposes no public ``Container()`` constructor or
    ``add_provider`` method — containers are built with the factory
    functions. The async factory is required here because the FastAPI
    lifespan awaits ``container.close()`` and the DB provider is an async
    generator. Confirm against the pinned dishka version.
    """
    from dishka import make_async_container

    return make_async_container(
        DatabaseProvider(),
        RepositoryProvider(),
        ServiceProvider(),
        AuthProvider(),
        UseCaseProvider(),
    )
|
||||
|
||||
Loading…
x
Reference in New Issue
Block a user