feat: implement core app architecture

- project structure with app/, db/, core/, bot/, factories/
- implemented logic in middlewares
- added full i18n support with FTL files
- created user model, repository
- initialized Alembic with base migration
- added bot, dispatcher, redis, session pool and i18n factories
- configured logger, enums, config loader
- added Dockerfile, Makefile, .env.example, and docker-compose.yml
This commit is contained in:
Ilay
2025-05-28 22:42:30 +05:00
parent 288456fd30
commit c848e3b01c
66 changed files with 1899 additions and 0 deletions

69
.env.example Normal file
View File

@@ -0,0 +1,69 @@
### BOT CONFIGURATION ###
# Telegram bot token
BOT_TOKEN=change_me
# Autogenerated if not set
BOT_SECRET_TOKEN=change_me
# Developer Telegram user ID
BOT_DEV_ID=change_me
# Domain used for webhook (e.g. example.com)
BOT_DOMAIN=change_me
## OPTIONAL ##
# BOT_HOST=0.0.0.0
# BOT_PORT=5000
# BOT_WEBHOOK_PORT=443
# BOT_RESET_WEBHOOK=true
# BOT_DROP_PENDING_UPDATES=false
# BOT_SETUP_COMMANDS=true
### DATABASE CONFIGURATION ###
DB_NAME=remnashop
DB_USER=remnashop
DB_PASSWORD=change_me
## OPTIONAL ##
# DB_HOST=remnashop-db
# DB_PORT=5432
### REDIS CONFIGURATION ###
## OPTIONAL ##
# REDIS_HOST=remnashop-redis
# REDIS_PORT=6379
# REDIS_NAME=0
# REDIS_USERNAME=remnashop
# REDIS_PASSWORD=password
### LOGGING CONFIGURATION ###
## OPTIONAL ##
# LOG_LEVEL=DEBUG
# LOG_FORMAT="%(asctime)s | %(name)s | %(levelname)s | %(message)s"
# LOG_ARCHIVE_FORMAT=zip
### I18N CONFIGURATION ###
## OPTIONAL ##
# I18N_LOCALES_DIR=app/assets/locales
# I18N_LOCALES=en,ru
# I18N_DEFAULT_LOCALE=en
### SQLALCHEMY CONFIGURATION ###
## OPTIONAL ##
# ALCHEMY_ECHO=false
# ALCHEMY_ECHO_POOL=false
# ALCHEMY_POOL_SIZE=25
# ALCHEMY_MAX_OVERFLOW=25
# ALCHEMY_POOL_TIMEOUT=10
# ALCHEMY_POOL_RECYCLE=3600

5
.vscode/extensions.json vendored Normal file
View File

@@ -0,0 +1,5 @@
{
    "recommendations": [
        "ms-python.black-formatter"
    ]
}

7
.vscode/settings.json vendored Normal file
View File

@@ -0,0 +1,7 @@
{
"[python]": {
"editor.defaultFormatter": "ms-python.black-formatter"
},
"black-formatter.args": ["--line-length", "100"],
"editor.rulers": [100]
}

12
Dockerfile Normal file
View File

@@ -0,0 +1,12 @@
FROM python:3.12-slim-bullseye

# Make / the import root so "app.*" packages resolve.
ENV PYTHONPATH=/

# Copy only the project manifest first so the dependency layer is cached
# independently of application-code changes.
COPY pyproject.toml /
RUN pip install poetry \
    && poetry config virtualenvs.create false \
    && poetry install --no-interaction --no-ansi

COPY ./app /app

# Apply DB migrations, then serve the FastAPI application.
# NOTE(review): no poetry.lock is copied, so builds are not reproducible — confirm.
ENTRYPOINT ["sh", "-c", "poetry run alembic -c /app/db/alembic.ini upgrade head && uvicorn app.__main__:app --host 0.0.0.0 --port 5000"]

19
Makefile Normal file
View File

@@ -0,0 +1,19 @@
ALEMBIC_INI=app/db/alembic.ini
.PHONY: migration
migration:
ifndef message
$(error message is undefined. Use: make migration message="Your message")
endif
alembic -c $(ALEMBIC_INI) revision --autogenerate -m "$(message)"
.PHONY: migrate
migrate:
alembic -c $(ALEMBIC_INI) upgrade head
.PHONY: downgrade
downgrade:
ifndef rev
$(error rev is undefined. Use: make downgrade rev=<revision>)
endif
alembic -c $(ALEMBIC_INI) downgrade $(rev)

0
app/__init__.py Normal file
View File

79
app/__main__.py Normal file
View File

@@ -0,0 +1,79 @@
import logging
from contextlib import asynccontextmanager
from typing import AsyncGenerator, Optional
import uvicorn
from aiogram import Bot, Dispatcher
from aiogram.types import Update
from fastapi import FastAPI, Request
from starlette.middleware.cors import CORSMiddleware
from app.bot.commands import commands_delete, commands_setup
from app.bot.webhook import webhook_shutdown, webhook_startup
from app.core.config import AppConfig
from app.core.constants import HEADER_SECRET_TOKEN
from app.core.logger import setup_logging
from app.factories import create_bot, create_dispatcher
config = AppConfig.get()
setup_logging(config.logging)
logger = logging.getLogger(__name__)
@asynccontextmanager
async def lifespan(application: FastAPI) -> AsyncGenerator[None, None]:
    """FastAPI lifespan: build the bot and dispatcher, register the webhook
    and bot commands on startup; tear both down again on shutdown."""
    logger.info("Starting application")
    bot: Bot = create_bot(config.bot.token.get_secret_value())
    dispatcher: Dispatcher = create_dispatcher(config)
    # Expose both on app.state so the webhook route can reach them.
    application.state.bot = bot
    application.state.dispatcher = dispatcher
    await webhook_startup(bot, dispatcher, config)
    await commands_setup(bot, config)
    yield
    # Everything below the yield runs on application shutdown.
    await commands_delete(bot, config)
    await webhook_shutdown(bot, config)
    logger.info("Stopping application")
app = FastAPI(
lifespan=lifespan,
docs_url=None,
redoc_url=None,
openapi_url=None,
)
app.add_middleware(
CORSMiddleware,
allow_origins=config.origins,
allow_credentials=True,
allow_methods=["*"],
allow_headers=["*"],
)
@app.post(config.bot.webhook_path)
async def webhook(request: Request) -> Optional[dict]:
    """Receive a Telegram update, authenticate it, and feed it to the dispatcher.

    Fix: the original declared an `update: Update` body parameter (so FastAPI
    parsed and validated the payload) and then parsed/validated the same body a
    second time by hand, discarding the first result. The body is now read once.

    Auth failures are answered with an error payload but still HTTP 200;
    presumably intentional so Telegram does not retry them — TODO confirm.
    """
    bot: Bot = request.app.state.bot
    dispatcher: Dispatcher = request.app.state.dispatcher
    # Telegram echoes the secret token passed to set_webhook() in this header.
    secret_token = request.headers.get(HEADER_SECRET_TOKEN)
    if not secret_token:
        logger.error("Missing secret token")
        return {"status": "error", "message": "Missing secret token"}
    if secret_token != config.bot.secret_token.get_secret_value():
        logger.error("Wrong secret token")
        return {"status": "error", "message": "Wrong secret token"}
    # Parse once, binding the bot so update shortcuts work downstream.
    update = Update.model_validate(await request.json(), context={"bot": bot})
    await dispatcher.feed_webhook_update(bot, update)
    return {"ok": True}
if __name__ == "__main__":
uvicorn.run(app, host=config.bot.host, port=config.bot.port)

View File

@@ -0,0 +1 @@
button = button

View File

@@ -0,0 +1 @@
main_menu = main menu

View File

View File

View File

@@ -0,0 +1 @@
button = кнопка

View File

@@ -0,0 +1 @@
main_menu = главное меню

View File

View File

0
app/bot/__init__.py Normal file
View File

30
app/bot/commands.py Normal file
View File

@@ -0,0 +1,30 @@
import logging
from aiogram import Bot
from aiogram.types import BotCommandScopeAllPrivateChats
from app.core.config import AppConfig
from app.core.enums import Command
logger = logging.getLogger(__name__)
async def commands_setup(bot: Bot, config: AppConfig) -> None:
    """Register the bot's command menu for all private chats (if enabled)."""
    if config.bot.setup_commands is False:
        return
    commands = [Command.START.value]
    ok = await bot.set_my_commands(commands=commands, scope=BotCommandScopeAllPrivateChats())
    if ok:
        logger.info("Bot commands successfully set")
    else:
        logger.error("Failed to set bot commands")


async def commands_delete(bot: Bot, config: AppConfig) -> None:
    """Remove the bot's command menu from all private chats (if enabled)."""
    if config.bot.setup_commands is False:
        return
    ok = await bot.delete_my_commands(scope=BotCommandScopeAllPrivateChats())
    if ok:
        logger.info("Bot commands successfully deleted")
    else:
        logger.error("Failed to delete bot commands")

View File

@@ -0,0 +1,5 @@
from .is_private import IsPrivate
__all__ = [
"IsPrivate",
]

View File

@@ -0,0 +1,8 @@
from aiogram.enums import ChatType
from aiogram.filters import BaseFilter
from aiogram.types import Chat
class IsPrivate(BaseFilter):
    """Filter that passes only updates originating from a private chat."""

    async def __call__(self, event_chat: Chat) -> bool:
        chat_type = event_chat.type
        return chat_type == ChatType.PRIVATE

View File

@@ -0,0 +1,15 @@
from .base import EventTypedMiddleware
from .error import ErrorMiddleware
from .garbage import GarbageMiddleware
from .i18n import I18nMiddleware
from .throttling import ThrottlingMiddleware
from .user import UserMiddleware
__all__ = [
"EventTypedMiddleware",
"ErrorMiddleware",
"GarbageMiddleware",
"I18nMiddleware",
"ThrottlingMiddleware",
"UserMiddleware",
]

View File

@@ -0,0 +1,34 @@
import logging
from abc import ABC
from typing import ClassVar, Final
from aiogram import BaseMiddleware, Router
from app.core.enums import MiddlewareEventType
DEFAULT_UPDATE_TYPES: Final[list[MiddlewareEventType]] = [
MiddlewareEventType.MESSAGE,
MiddlewareEventType.CALLBACK_QUERY,
]
logger = logging.getLogger(__name__)
class EventTypedMiddleware(BaseMiddleware, ABC):
    """Middleware base that registers itself on a router for a fixed set of update types."""

    # Subclasses override this with the update types they care about.
    __event_types__: ClassVar[list[MiddlewareEventType]] = DEFAULT_UPDATE_TYPES

    def _attach(self, router: Router, outer: bool) -> None:
        # Register on every observer this middleware declares interest in.
        for event_type in self.__event_types__:
            observer = router.observers[event_type]
            if outer:
                observer.outer_middleware(self)
            else:
                observer.middleware(self)
        kind = "outer" if outer else "inner"
        names = ", ".join(t.value for t in self.__event_types__)
        logger.debug(f"{self.__class__.__name__} set as {kind} middleware for: {names}")

    def setup_inner(self, router: Router) -> None:
        """Attach as an inner (post-filter) middleware."""
        self._attach(router, outer=False)

    def setup_outer(self, router: Router) -> None:
        """Attach as an outer (pre-filter) middleware."""
        self._attach(router, outer=True)

View File

@@ -0,0 +1,59 @@
import logging
from typing import Any, Awaitable, Callable, Optional
from aiogram.exceptions import (
TelegramBadRequest,
TelegramForbiddenError,
TelegramNotFound,
)
from aiogram.types import ErrorEvent
from aiogram.types import User as AiogramUser
from aiogram.utils.formatting import Bold, Text
from app.core.enums import MiddlewareEventType
from .base import EventTypedMiddleware
logger = logging.getLogger(__name__)
class ErrorMiddleware(EventTypedMiddleware):
    """Middleware for the error observer: logs unhandled handler exceptions.

    Known Telegram API errors get a per-user log line; every error is also
    logged with a full traceback. Forwarding details to the developer is TODO.
    """

    __event_types__ = [MiddlewareEventType.ERROR]

    def __init__(self) -> None:
        logger.debug("Error Middleware initialized")

    async def __call__(
        self,
        handler: Callable[[ErrorEvent, dict[str, Any]], Awaitable[Any]],
        event: ErrorEvent,
        data: dict[str, Any],
    ) -> Any:
        # Fix: the original accessed event.update.message.from_user without
        # checking that message is not None (AttributeError for callback
        # updates) and left aiogram_user unbound when neither branch matched
        # (UnboundLocalError below). Initialize to None and guard both reads.
        aiogram_user: Optional[AiogramUser] = None
        if event.update.message and event.update.message.from_user:
            aiogram_user = event.update.message.from_user
        elif event.update.callback_query:
            aiogram_user = event.update.callback_query.from_user
        user_tag = (
            f"[User:{aiogram_user.id} ({aiogram_user.full_name})]"
            if aiogram_user is not None
            else "[User:unknown]"
        )
        if isinstance(event.exception, TelegramForbiddenError):
            logger.info(f"{user_tag} Blocked the bot")
        elif isinstance(event.exception, TelegramBadRequest):
            logger.warning(f"{user_tag} Bad request")
        elif isinstance(event.exception, TelegramNotFound):
            logger.warning(f"{user_tag} Not found")
        # Always record the full traceback for diagnosis.
        logger.exception(f"Update: {event.update}\nException: {event.exception}")
        try:
            # Truncated to stay under Telegram's 1024-char caption limit.
            text = Text(
                Bold((type(event.exception).__name__)), f": {str(event.exception)[:1021]}..."
            )
            # TODO: send error details (text) to developer
        except TelegramBadRequest as exception:
            logger.warning(f"Failed to send error details: {exception}")
        except Exception as exception:
            logger.error(f"Unexpected error in error handler: {exception}")
        return await handler(event, data)

View File

@@ -0,0 +1,35 @@
import logging
from typing import Any, Awaitable, Callable, Optional
from aiogram.types import Message
from aiogram.types import User as AiogramUser
from app.core.enums import Command, MiddlewareEventType
from .base import EventTypedMiddleware
logger = logging.getLogger(__name__)
class GarbageMiddleware(EventTypedMiddleware):
    """Deletes every incoming user message except the /start command,
    keeping dialog-driven chats free of stray messages."""

    __event_types__ = [MiddlewareEventType.MESSAGE]

    def __init__(self) -> None:
        logger.debug("Garbage Middleware initialized")

    async def __call__(
        self,
        handler: Callable[[Message, dict[str, Any]], Awaitable[Any]],
        event: Message,
        data: dict[str, Any],
    ) -> Any:
        sender: Optional[AiogramUser] = event.from_user
        if sender is None:
            return await handler(event, data)
        start_command = f"/{Command.START.value.command}"
        is_own_message = sender.id == event.bot.id
        if not is_own_message and event.text != start_command:
            await event.delete()
            logger.debug(f"[User:{sender.id} ({sender.full_name})] Message deleted")
        return await handler(event, data)

View File

@@ -0,0 +1,44 @@
import logging
from typing import Any, Awaitable, Callable, Optional, Union
from aiogram.types import CallbackQuery, Message
from fluent.runtime import FluentLocalization
from app.bot.widgets.i18n_format import I18N_FORMAT_KEY
from app.core.constants import USER_KEY
from app.core.enums import Locale, MiddlewareEventType
from app.db.models import User
from .base import EventTypedMiddleware
logger = logging.getLogger(__name__)
class I18nMiddleware(EventTypedMiddleware):
    """Injects the user's FluentLocalization formatter into handler data.

    Downstream widgets (I18NFormat) read it via I18N_FORMAT_KEY.
    """

    __event_types__ = [MiddlewareEventType.MESSAGE, MiddlewareEventType.CALLBACK_QUERY]

    def __init__(
        self,
        locales: dict[Locale, FluentLocalization],
        default_locale: Locale,
    ) -> None:
        self.locales = locales
        self.default_locale = default_locale
        logger.debug("I18n Middleware initialized")

    async def __call__(
        self,
        handler: Callable[[Union[Message, CallbackQuery], dict[str, Any]], Awaitable[Any]],
        event: Union[Message, CallbackQuery],
        data: dict[str, Any],
    ) -> Any:
        user: Optional[User] = data.get(USER_KEY)
        if user is None:
            # No resolved user (e.g. UserMiddleware skipped the event) — nothing to localize.
            return await handler(event, data)
        lang = user.language
        # Fix: fall back to default_locale instead of raising KeyError when the
        # user's stored language has no loaded FTL bundle; the original indexed
        # self.locales directly and never used default_locale at all.
        locale = self.locales.get(lang)
        if locale is None:
            logger.warning(
                f"[User:{user.telegram_id} ({user.name})] Unknown locale {lang!r}, "
                f"falling back to: {self.default_locale}"
            )
            locale = self.locales[self.default_locale]
        else:
            logger.debug(f"[User:{user.telegram_id} ({user.name})] Using locale: {lang}")
        data[I18N_FORMAT_KEY] = locale.format_value
        return await handler(event, data)

View File

@@ -0,0 +1,67 @@
import logging
from typing import Any, Awaitable, Callable, MutableMapping, Optional, Union
from aiogram.dispatcher.flags import get_flag
from aiogram.types import CallbackQuery, Message
from aiogram.types import User as AiogramUser
from cachetools import TTLCache
from app.core.constants import THROTTLING_KEY
from app.core.enums import MiddlewareEventType
from .base import EventTypedMiddleware
logger = logging.getLogger(__name__)
DEFAULT_KEY = "default"
DEFAULT_TTL = 0.5
class ThrottlingMiddleware(EventTypedMiddleware):
    """Per-user rate limiting backed by TTL caches.

    Handlers can pick a throttling bucket via the THROTTLING_KEY flag;
    a user id present in a bucket's cache means "currently throttled".
    """

    __event_types__ = [MiddlewareEventType.MESSAGE, MiddlewareEventType.CALLBACK_QUERY]

    def __init__(
        self,
        default_key: str = DEFAULT_KEY,
        default_ttl: float = DEFAULT_TTL,
        ttl_map: Optional[dict[str, float]] = None,
    ) -> None:
        """ttl_map maps bucket names to TTL seconds; the default bucket always exists."""
        ttl_map = ttl_map or {}
        if default_key not in ttl_map:
            ttl_map[default_key] = default_ttl
        self.default_key = default_key
        self.caches: dict[str, MutableMapping[int, None]] = {}
        for name, ttl in ttl_map.items():
            self.caches[name] = TTLCache(maxsize=10_000, ttl=ttl)
        logger.debug("Throttling Middleware initialized")

    async def __call__(
        self,
        handler: Callable[[Union[Message, CallbackQuery], dict[str, Any]], Awaitable[Any]],
        event: Union[Message, CallbackQuery],
        data: dict[str, Any],
    ) -> Any:
        # Fix: only Message has a successful_payment attribute; the original
        # bare access raised AttributeError for every CallbackQuery update.
        if isinstance(event, Message) and event.successful_payment:  # TODO: check payment
            logger.debug("SuccessfulPayment event skipping throttling")
            return await handler(event, data)
        aiogram_user: Optional[AiogramUser] = event.from_user
        if aiogram_user is None:
            return await handler(event, data)
        key = get_flag(handler=data, name=THROTTLING_KEY, default=self.default_key)
        # Fix: fall back to the configured default bucket, not the module-level
        # DEFAULT_KEY — a custom default_key would have made this raise KeyError.
        cache = self.caches.get(key, self.caches[self.default_key])
        if aiogram_user.id in cache:
            logger.warning(f"[User:{aiogram_user.id} ({aiogram_user.full_name})] Throttled")
            return None
        else:
            logger.debug(f"[User:{aiogram_user.id} ({aiogram_user.full_name})] Not throttled")
            cache[aiogram_user.id] = None
        return await handler(event, data)

View File

@@ -0,0 +1,58 @@
from __future__ import annotations
import logging
from typing import TYPE_CHECKING, Any, Awaitable, Callable, Optional
from aiogram.types import Message
from aiogram.types import User as AiogramUser
from aiogram_i18n import I18nMiddleware
from app.core.config import AppConfig
from app.core.constants import (
I18N_MIDDLEWARE_KEY,
SESSION_POOL_KEY,
USER_KEY,
USER_SERVICE_KEY,
)
from app.core.enums import MiddlewareEventType
from app.db.services import UserService
from .base import EventTypedMiddleware
if TYPE_CHECKING:
from app.db.models import User
logger = logging.getLogger(__name__)
class UserMiddleware(EventTypedMiddleware):
    """Loads (or lazily creates) the DB user for each incoming message and
    injects both the UserService and the resolved User into handler data."""

    __event_types__ = [MiddlewareEventType.MESSAGE]

    def __init__(self) -> None:
        logger.debug("User Middleware initialized")

    async def __call__(
        self,
        handler: Callable[[Message, dict[str, Any]], Awaitable[Any]],
        event: Message,
        data: dict[str, Any],
    ) -> Optional[Any]:
        aiogram_user: Optional[AiogramUser] = event.from_user
        # Bots and anonymous events never get DB users.
        if aiogram_user is None or aiogram_user.is_bot:
            return await handler(event, data)
        user_service = data[USER_SERVICE_KEY] = UserService(session_pool=data[SESSION_POOL_KEY])
        user: Optional[User] = await user_service.get(telegram_id=aiogram_user.id)
        if user is None:
            # First contact: create the user with a locale from the i18n middleware.
            i18n: I18nMiddleware = data[I18N_MIDDLEWARE_KEY]
            config: AppConfig = data["config"]  # NOTE: matches constants.CONFIG_KEY
            # Fix: simplified the `True if ... else False` anti-idiom — the
            # comparison already yields a bool.
            is_dev = config.bot.dev_id == aiogram_user.id
            user = await user_service.create(aiogram_user=aiogram_user, i18n=i18n, is_dev=is_dev)
        if user.is_bot_blocked:
            # The user is talking to us again, so clear the "blocked" flag.
            await user_service.set_bot_blocked(telegram_id=aiogram_user.id, blocked=False)
        data[USER_KEY] = user
        return await handler(event, data)

View File

@@ -0,0 +1,11 @@
from . import main_menu, profile
routers = [
main_menu.router,
main_menu.dialog,
profile.dialog,
]
__all__ = [
"routers",
]

View File

@@ -0,0 +1,7 @@
from .dialog import dialog
from .handler import router
__all__ = [
"dialog",
"router",
]

View File

@@ -0,0 +1,19 @@
from aiogram_dialog import Dialog, Window
from app.bot.states import MainMenuState, ProfileState
from app.bot.widgets import I18NFormat, IgnoreInput
# Main-menu dialog: a single window rendering the localized "main_menu" text.
dialog = Dialog(
    Window(
        # Localized via the FTL key "main_menu" (see assets/locales).
        I18NFormat("main_menu"),
        # Row(
        #     Start(
        #         text=Const("Profile"),
        #         id="profile",
        #         state=ProfileState.profile,
        #     ),
        # ),
        # Swallow any stray user input so the window is not re-rendered.
        IgnoreInput(),
        state=MainMenuState.main_menu,
    ),
)

View File

@@ -0,0 +1,30 @@
import logging
from aiogram import Router
from aiogram.filters import CommandStart
from aiogram.types import Message
from aiogram_dialog import DialogManager, ShowMode, StartMode
from app.core.config import AppConfig
from app.db.models import User
from .dialog import MainMenuState
logger = logging.getLogger(__name__)
router = Router()
@router.message(CommandStart())
async def command_start_handler(
    message: Message,
    config: AppConfig,
    dialog_manager: DialogManager,
    user: User,
) -> None:
    """Handle /start: reset the dialog stack and open the main menu."""
    logger.info(f"[User:{user.telegram_id} ({user.name})] Opened main menu")
    # Delete the /start message and send the menu as a fresh message.
    dialog_manager.show_mode = ShowMode.DELETE_AND_SEND
    await dialog_manager.start(MainMenuState.main_menu, mode=StartMode.RESET_STACK)

View File

@@ -0,0 +1,5 @@
from .dialog import dialog
__all__ = [
"dialog",
]

View File

@@ -0,0 +1,23 @@
from aiogram.fsm.state import State, StatesGroup
from aiogram.types import CallbackQuery
from aiogram_dialog import Dialog, DialogManager, ShowMode, Window
from aiogram_dialog.widgets.kbd import Back, Button, Row, Start, SwitchTo
from aiogram_dialog.widgets.text import Const, Format
from app.bot.states import MainMenuState, ProfileState
from app.bot.widgets import IgnoreInput
# Profile dialog: placeholder window with a button back to the main menu.
dialog = Dialog(
    Window(
        # NOTE(review): hard-coded text — presumably should become an FTL key
        # like the main menu; confirm.
        Const("Profile"),
        Row(
            Start(
                text=Const("Back to Main Menu"),
                id="main_menu",
                state=MainMenuState.main_menu,
            ),
        ),
        # Swallow stray user input without re-rendering the window.
        IgnoreInput(),
        state=ProfileState.profile,
    )
)

9
app/bot/states.py Normal file
View File

@@ -0,0 +1,9 @@
from aiogram.fsm.state import State, StatesGroup
class MainMenuState(StatesGroup):
    # Single window of the main-menu dialog.
    main_menu = State()


class ProfileState(StatesGroup):
    # Single window of the profile dialog.
    profile = State()

30
app/bot/webhook.py Normal file
View File

@@ -0,0 +1,30 @@
import logging
from aiogram import Bot, Dispatcher
from app.core.config import AppConfig
logger = logging.getLogger(__name__)
async def webhook_startup(bot: Bot, dispatcher: Dispatcher, config: AppConfig) -> None:
    """Register the Telegram webhook for this bot.

    Subscribes only to the update types the dispatcher actually handles and
    sets the secret token used to authenticate incoming webhook requests.
    """
    url = config.bot.webhook_url
    if await bot.set_webhook(
        url=url.get_secret_value(),
        allowed_updates=dispatcher.resolve_used_update_types(),
        secret_token=config.bot.secret_token.get_secret_value(),
        drop_pending_updates=config.bot.drop_pending_updates,
    ):
        # Fix: dropped needless f-prefixes on placeholder-free literals (F541).
        logger.info("Bot webhook set successfully")
        logger.debug(f"Webhook url: '{url.get_secret_value()}'")
    else:
        logger.error("Failed to set bot webhook")


async def webhook_shutdown(bot: Bot, config: AppConfig) -> None:
    """Delete the webhook on shutdown when BOT_RESET_WEBHOOK is enabled."""
    if not config.bot.reset_webhook:
        return
    if await bot.delete_webhook():
        logger.info("Bot webhook deleted successfully")
    else:
        logger.error("Failed to delete bot webhook")

View File

@@ -0,0 +1,7 @@
from .i18n_format import I18NFormat
from .ignore_input import IgnoreInput
__all__ = [
"I18NFormat",
"IgnoreInput",
]

View File

@@ -0,0 +1,29 @@
from typing import Any, Protocol
from aiogram_dialog.api.protocols import DialogManager
from aiogram_dialog.widgets.common import WhenCondition
from aiogram_dialog.widgets.text import Text
from app.core.constants import I18N_FORMAT_KEY
class Values(Protocol):
    """Structural type: anything indexable with arbitrary keys (mapping-like)."""

    def __getitem__(self, item: Any) -> Any:
        raise NotImplementedError


def default_format_text(text: str, data: Values) -> str:
    """Fallback formatter used when no i18n formatter is provided:
    substitutes {placeholders} in *text* from *data* via str.format_map."""
    formatted = text.format_map(data)
    return formatted
class I18NFormat(Text):
    """Dialog text widget that renders an FTL message key using the formatter
    placed in middleware data by I18nMiddleware; falls back to plain
    str.format_map when no formatter is present."""

    def __init__(self, text: str, when: WhenCondition = None) -> None:
        super().__init__(when)
        # The FTL message key (or format string for the fallback path).
        self.text = text

    async def _render_text(self, data: dict, manager: DialogManager) -> str:
        formatter = manager.middleware_data.get(I18N_FORMAT_KEY, default_format_text)
        return formatter(self.text, data)

View File

@@ -0,0 +1,14 @@
from aiogram.types import Message
from aiogram_dialog import DialogManager, DialogProtocol, ShowMode
from aiogram_dialog.widgets.input import BaseInput
class IgnoreInput(BaseInput):
    # Input widget that consumes any message sent while a dialog window is
    # open without re-rendering the window, effectively ignoring user input.
    async def process_message(
        self,
        message: Message,
        dialog: DialogProtocol,
        dialog_manager: DialogManager,
    ) -> bool:
        """Mark the message as handled while suppressing a window refresh."""
        dialog_manager.show_mode = ShowMode.NO_UPDATE
        # Returning True tells aiogram_dialog the input was consumed.
        return True

0
app/core/__init__.py Normal file
View File

143
app/core/config.py Normal file
View File

@@ -0,0 +1,143 @@
import re
import secrets
from functools import lru_cache
from pathlib import Path
from typing import Optional, Self
from pydantic import Field, Secret, SecretStr, field_validator
from pydantic_settings import BaseSettings, SettingsConfigDict
from app.core.constants import API_V1, WEBHOOK_PATH
from app.core.enums import ArchiveFormat, Locale, LogLevel
BASE_DIR = Path(__file__).resolve().parents[2]
DEFAULT_ASSETS_DIR = BASE_DIR / "app" / "assets"
DEFAULT_LOCALES_DIR = DEFAULT_ASSETS_DIR / "locales"
DEFAULT_I18N_LOCALES = [Locale.EN, Locale.RU]
DEFAULT_I18N_LOCALE = Locale.EN
DEFAULT_BOT_HOST = "127.0.0.1"
DEFAULT_BOT_PORT = 5000
DEFAULT_BOT_WEBHOOK_PORT = 443
DEFAULT_BOT_RESET_WEBHOOK = True
DEFAULT_BOT_DROP_PENDING_UPDATES = False
DEFAULT_BOT_SETUP_COMMANDS = True
DEFAULT_DB_HOST = "remnashop-db"
DEFAULT_DB_PORT = 5432
DEFAULT_DB_NAME = "remnashop"
DEFAULT_DB_USER = "remnashop"
DEFAULT_REDIS_HOST = "remnashop-redis"
DEFAULT_REDIS_PORT = 6379
DEFAULT_REDIS_NAME = "0"
DEFAULT_LOG_LEVEL = LogLevel.DEBUG
DEFAULT_LOG_FORMAT = "%(asctime)s | %(name)s | %(levelname)s | %(message)s"
DEFAULT_LOG_ARCHIVE_FORMAT = ArchiveFormat.ZIP
class BotConfig(BaseSettings, env_prefix="BOT_"):
    """Telegram bot settings, read from BOT_*-prefixed environment variables."""

    token: SecretStr  # BOT_TOKEN (required)
    # Autogenerated when unset; used to authenticate incoming webhook requests.
    secret_token: SecretStr = Field(default_factory=lambda: SecretStr(secrets.token_hex()))
    dev_id: int  # developer's Telegram user id
    domain: Secret[str]  # public domain the webhook is served from
    host: str = DEFAULT_BOT_HOST
    port: int = DEFAULT_BOT_PORT
    webhook_port: int = DEFAULT_BOT_WEBHOOK_PORT
    reset_webhook: bool = DEFAULT_BOT_RESET_WEBHOOK
    drop_pending_updates: bool = DEFAULT_BOT_DROP_PENDING_UPDATES
    setup_commands: bool = DEFAULT_BOT_SETUP_COMMANDS

    @field_validator("domain")
    @classmethod
    def validate_domain(cls, field: Secret[str]) -> Secret[str]:
        """Reject empty or syntactically invalid domain names."""
        DOMAIN_REGEX = r"^(?:[a-zA-Z0-9-]+\.)+[a-zA-Z]{2,}$"
        domain = field.get_secret_value()
        if not domain:
            raise ValueError("Domain cannot be empty")
        if not re.match(DOMAIN_REGEX, domain):
            raise ValueError("Invalid domain format")
        return field

    @property
    def webhook_path(self) -> str:
        # e.g. "/api/v1/webhook"
        return f"{API_V1}{WEBHOOK_PATH}"

    @property
    def webhook_url(self) -> SecretStr:
        # Full public webhook URL; wrapped in SecretStr because it embeds the domain.
        url = f"https://{self.domain.get_secret_value()}:{self.webhook_port}{self.webhook_path}"
        return SecretStr(url)
class DatabaseConfig(BaseSettings, env_prefix="DB_"):
    """PostgreSQL connection settings, read from DB_*-prefixed env variables."""

    host: str = DEFAULT_DB_HOST
    port: int = DEFAULT_DB_PORT
    name: str = DEFAULT_DB_NAME
    user: str = DEFAULT_DB_USER
    password: Secret[str]

    def dsn(self, scheme: str = "postgresql+asyncpg") -> str:
        """Build a SQLAlchemy-style DSN for this database."""
        credentials = f"{self.user}:{self.password.get_secret_value()}"
        location = f"{self.host}:{self.port}/{self.name}"
        return f"{scheme}://{credentials}@{location}"
class RedisConfig(BaseSettings, env_prefix="REDIS_"):
    """Redis connection settings, read from REDIS_*-prefixed env variables."""

    host: str = DEFAULT_REDIS_HOST
    port: int = DEFAULT_REDIS_PORT
    name: str = DEFAULT_REDIS_NAME
    username: Optional[str] = None
    password: Optional[SecretStr] = None

    def dsn(self, scheme: str = "redis") -> str:
        """Build a Redis URL.

        Fix: also supports password-only auth (classic `requirepass`, URL form
        `redis://:password@host`); the original silently dropped the credentials
        unless BOTH username and password were set. Username+password ACL auth
        behaves exactly as before.
        """
        address = f"{self.host}:{self.port}/{self.name}"
        if self.password is not None:
            user = self.username or ""
            return f"{scheme}://{user}:{self.password.get_secret_value()}@{address}"
        return f"{scheme}://{address}"
class LoggingConfig(BaseSettings, env_prefix="LOG_"):
    """Logging settings, read from LOG_*-prefixed environment variables."""

    level: LogLevel = DEFAULT_LOG_LEVEL
    format: str = DEFAULT_LOG_FORMAT
    # Compression used for rotated log files ("zip" or "gz").
    archive_format: ArchiveFormat = DEFAULT_LOG_ARCHIVE_FORMAT


class I18nConfig(BaseSettings, env_prefix="I18N_"):
    """Localization settings, read from I18N_*-prefixed environment variables."""

    locales_dir: Path = DEFAULT_LOCALES_DIR
    locales: list[Locale] = DEFAULT_I18N_LOCALES
    default_locale: Locale = DEFAULT_I18N_LOCALE
class SQLAlchemyConfig(BaseSettings, env_prefix="ALCHEMY_"):  # TODO
    """SQLAlchemy engine/pool settings, read from ALCHEMY_*-prefixed env variables."""

    echo: bool = False  # log SQL statements
    echo_pool: bool = False  # log connection-pool events
    pool_size: int = 25
    max_overflow: int = 25
    pool_timeout: int = 10  # seconds to wait for a free connection
    pool_recycle: int = 3600  # recycle connections older than this (seconds)
class AppConfig(BaseSettings):
    """Root application configuration aggregating all sub-configs.

    Each sub-config reads its own env-prefixed variables; default_factory
    ensures missing optional sections still construct from the environment.
    """

    bot: BotConfig = Field(default_factory=BotConfig)
    db: DatabaseConfig = Field(default_factory=DatabaseConfig)
    redis: RedisConfig = Field(default_factory=RedisConfig)
    logging: LoggingConfig = Field(default_factory=LoggingConfig)
    i18n: I18nConfig = Field(default_factory=I18nConfig)
    alchemy: SQLAlchemyConfig = Field(default_factory=SQLAlchemyConfig)
    origins: list[str] = []  # TODO: CORS allowed origins; empty list blocks cross-origin calls
    model_config = SettingsConfigDict(
        extra="ignore",
        env_file=BASE_DIR / ".env",
        env_file_encoding="utf-8",
    )

    @classmethod
    @lru_cache
    def get(cls) -> Self:
        """Return a process-wide cached configuration instance.

        NOTE(review): lru_cache keys on `cls` and keeps the class alive for
        the process lifetime — acceptable here for a singleton config.
        """
        return cls()

23
app/core/constants.py Normal file
View File

@@ -0,0 +1,23 @@
# Constants for the application
from datetime import timezone
API_V1: str = "/api/v1"
WEBHOOK_PATH: str = "/webhook"
HEADER_SECRET_TOKEN: str = "x-telegram-bot-api-secret-token"
TIMEZONE = timezone.utc
# Resource file names for i18n
RESOURCE_MESSAGES = "messages.ftl"
RESOURCE_BUTTONS = "buttons.ftl"
RESOURCE_NOTIFICATIONS = "notifications.ftl"
RESOURCE_POPUPS = "popups.ftl"
RESOURCE_I18N = [RESOURCE_MESSAGES, RESOURCE_BUTTONS, RESOURCE_NOTIFICATIONS, RESOURCE_POPUPS]
# Keys for aiogram data
USER_KEY = "user"
USER_SERVICE_KEY = "user_service"
THROTTLING_KEY = "throttling_key"
I18N_MIDDLEWARE_KEY = "i18n_middleware"
SESSION_POOL_KEY = "session_pool"
I18N_FORMAT_KEY = "aiogd_i18n_format"
CONFIG_KEY = "config"

92
app/core/enums.py Normal file
View File

@@ -0,0 +1,92 @@
from enum import Enum, StrEnum, auto
from aiogram.types import BotCommand
class UserRole(StrEnum):
    """Access roles assignable to bot users."""

    ADMIN = auto()
    USER = auto()


class SubscriptionStatus(StrEnum):
    """Subscription lifecycle states (members not yet defined)."""

    pass


class PaymentMethod(StrEnum):
    """Supported payment providers (members not yet defined)."""

    pass


class Command(Enum):
    """Telegram bot commands exposed in the command menu.

    Values are aiogram BotCommand objects, passed directly to set_my_commands.
    """

    START = BotCommand(command="start", description="Restart bot")
    HELP = BotCommand(command="help", description="Show help")
class Locale(StrEnum):
AR = auto() # Arabic
AZ = auto() # Azerbaijani
BE = auto() # Belarusian
CS = auto() # Czech
DE = auto() # German
EN = auto() # English
ES = auto() # Spanish
FA = auto() # Persian
FR = auto() # French
HE = auto() # Hebrew
HI = auto() # Hindi
ID = auto() # Indonesian
IT = auto() # Italian
JA = auto() # Japanese
KK = auto() # Kazakh
KO = auto() # Korean
MS = auto() # Malay
NL = auto() # Dutch
PL = auto() # Polish
PT = auto() # Portuguese
RO = auto() # Romanian
RU = auto() # Russian
SR = auto() # Serbian
TR = auto() # Turkish
UK = auto() # Ukrainian
UZ = auto() # Uzbek
VI = auto() # Vietnamese
class MiddlewareEventType(StrEnum): # https://docs.aiogram.dev/en/latest/api/types/update.html
UPDATE = auto()
MESSAGE = auto()
EDITED_MESSAGE = auto()
CHANNEL_POST = auto()
EDITED_CHANNEL_POST = auto()
BUSINESS_CONNECTION = auto()
BUSINESS_MESSAGE = auto()
EDITED_BUSINESS_MESSAGE = auto()
DELETED_BUSINESS_MESSAGES = auto()
MESSAGE_REACTION = auto()
MESSAGE_REACTION_COUNT = auto()
INLINE_QUERY = auto()
CHOSEN_INLINE_RESULT = auto()
CALLBACK_QUERY = auto()
SHIPPING_QUERY = auto()
PRE_CHECKOUT_QUERY = auto()
PURCHASED_PAID_MEDIA = auto()
POLL = auto()
POLL_ANSWER = auto()
MY_CHAT_MEMBER = auto()
CHAT_MEMBER = auto()
CHAT_JOIN_REQUEST = auto()
CHAT_BOOST = auto()
REMOVED_CHAT_BOOST = auto()
ERROR = auto()
class LogLevel(StrEnum):
INFO = auto()
DEBUG = auto()
WARNING = auto()
ERROR = auto()
CRITICAL = auto()
class ArchiveFormat(StrEnum):
ZIP = auto()
GZ = auto()

130
app/core/logger.py Normal file
View File

@@ -0,0 +1,130 @@
import logging
import os
import tarfile
import time
import zipfile
from datetime import datetime
from logging.handlers import TimedRotatingFileHandler
from app.core.config import LoggingConfig
from app.core.constants import TIMEZONE
from app.core.enums import ArchiveFormat
LOG_DIR = "logs"
LOG_FILENAME = "app.log"
LOG_WHEN = "midnight"
LOG_INTERVAL = 1
LOG_ENCODING = "utf-8"
logger = logging.getLogger(__name__)
class UTCFormatter(logging.Formatter):
    # Render all log timestamps in UTC regardless of the host timezone.
    converter = time.gmtime
class ArchiveRotatingFileHandler(TimedRotatingFileHandler):
    """TimedRotatingFileHandler that compresses rotated logs into zip/gz archives.

    After each timed rollover the rotated log file is packed into a
    timestamped archive next to the log file and the plain rotated
    files are removed.
    """

    def __init__(
        self,
        filename,
        when="h",
        interval=1,
        backupCount=0,
        encoding=None,
        delay=False,
        utc=True,
        atTime=None,
        errors=None,
        archive_format=ArchiveFormat.ZIP.value,  # "zip" or "gz"
    ):
        super().__init__(
            filename, when, interval, backupCount, encoding, delay, utc, atTime, errors
        )
        self.archive_format = archive_format
        logger.debug(f"Initialized with format: {self.archive_format}")

    def doRollover(self) -> None:
        """Perform the normal timed rollover, then archive and clean up."""
        super().doRollover()
        # Archive name reflects the time of the rollover, not of the log data.
        timestamp = datetime.now(TIMEZONE).strftime("%Y-%m-%d_%H-%M-%S")
        dir_name = os.path.dirname(self.baseFilename)
        archive_name = os.path.join(dir_name, f"{timestamp}.{self.archive_format}")
        self._archive_log_file(archive_name)
        self._remove_old_logs()

    def _archive_log_file(self, archive_name: str) -> None:
        # Dispatch on the configured format; unknown formats are silently skipped.
        logger.info(f"Archiving {self.baseFilename} to {archive_name}")
        if os.path.exists(self.baseFilename):
            if self.archive_format == ArchiveFormat.ZIP.value:
                self._archive_to_zip(archive_name)
            elif self.archive_format == ArchiveFormat.GZ.value:
                self._archive_to_gz(archive_name)
        else:
            logger.warning(f"Log file {self.baseFilename} does not exist, skipping archive")

    def _archive_to_zip(self, archive_name: str) -> None:
        # NOTE(review): assumes getFilesToDelete() is non-empty here; with no
        # rotated files present, [0] would raise IndexError — confirm.
        log = self.getFilesToDelete()[0]
        new_log_name = self._get_log_filename(archive_name)
        with zipfile.ZipFile(archive_name, "w", zipfile.ZIP_DEFLATED) as archive:
            archive.write(filename=log, arcname=new_log_name)

    def _archive_to_gz(self, archive_name: str) -> None:
        # Same assumption as _archive_to_zip regarding getFilesToDelete().
        log = self.getFilesToDelete()[0]
        new_log_name = self._get_log_filename(archive_name)
        with tarfile.open(archive_name, "w:gz") as archive:
            archive.add(name=log, arcname=new_log_name)

    def _get_log_filename(self, archive_name: str) -> str:
        # "2025-01-01_00-00-00.zip" -> "2025-01-01_00-00-00.log"
        return os.path.splitext(os.path.basename(archive_name))[0] + ".log"

    def _remove_old_logs(self) -> None:
        # Delete the plain rotated files now that they have been archived.
        files_to_delete = self.getFilesToDelete()
        logger.debug(f"Removing old log files: {files_to_delete}")
        for file in files_to_delete:
            if os.path.exists(file):
                try:
                    os.remove(file)
                    logger.debug(f"Successfully deleted old log file: {file}")
                except Exception as exception:
                    logger.error(f"Error deleting {file}: {exception}")
def setup_logging(config: LoggingConfig) -> None:
    """Configure root logging: UTC-formatted output to both an archiving
    rotating file handler (logs/app.log) and stderr."""
    os.makedirs(LOG_DIR, exist_ok=True)
    log_file = os.path.join(LOG_DIR, LOG_FILENAME)
    formatter = UTCFormatter(config.format)
    file_handler = ArchiveRotatingFileHandler(
        filename=log_file,
        when=LOG_WHEN,
        interval=LOG_INTERVAL,
        encoding=LOG_ENCODING,
        archive_format=config.archive_format.value,
    )
    file_handler.setFormatter(formatter)
    stream_handler = logging.StreamHandler()
    stream_handler.setFormatter(formatter)
    logging.basicConfig(
        # Fall back to INFO if the configured level name is unknown.
        level=getattr(logging, config.level.value.upper(), logging.INFO),
        handlers=[file_handler, stream_handler],
    )
    logger.debug(
        f"Logging configuration: level={config.level.value}, "
        f"format={config.format}, archive_format={config.archive_format.value}"
    )
    # Suppresses logs to avoid unnecessary output
    aiogram_logger = logging.getLogger("aiogram.event")
    aiogram_logger.setLevel(logging.CRITICAL)
    # NOTE(review): DEBUG does not suppress anything — this makes aiogram_dialog
    # *more* verbose, contradicting the comment above; confirm intent.
    aiogram_dialog_logger = logging.getLogger("aiogram_dialog")
    aiogram_dialog_logger.setLevel(logging.DEBUG)
    asyncio_logger = logging.getLogger("asyncio")
    asyncio_logger.setLevel(logging.WARNING)

9
app/db/__init__.py Normal file
View File

@@ -0,0 +1,9 @@
from .context import SQLSessionContext
from .repositories import Repository
from .uow import UoW
__all__ = [
"SQLSessionContext",
"Repository",
"UoW",
]

119
app/db/alembic.ini Normal file
View File

@@ -0,0 +1,119 @@
# A generic, single database configuration.
[alembic]
# path to migration scripts
# Use forward slashes (/) also on windows to provide an os agnostic path
script_location = app/db/migrations
# template used to generate migration file names; The default value is %%(rev)s_%%(slug)s
# Uncomment the line below if you want the files to be prepended with date and time
# see https://alembic.sqlalchemy.org/en/latest/tutorial.html#editing-the-ini-file
# for all available tokens
# file_template = %%(year)d_%%(month).2d_%%(day).2d_%%(hour).2d%%(minute).2d-%%(rev)s_%%(slug)s
# sys.path path, will be prepended to sys.path if present.
# defaults to the current working directory.
prepend_sys_path = .
# timezone to use when rendering the date within the migration file
# as well as the filename.
# If specified, requires the python>=3.9 or backports.zoneinfo library and tzdata library.
# Any required deps can be installed by adding `alembic[tz]` to the pip requirements
# string value is passed to ZoneInfo()
# leave blank for localtime
# timezone =
# max length of characters to apply to the "slug" field
# truncate_slug_length = 40
# set to 'true' to run the environment during
# the 'revision' command, regardless of autogenerate
# revision_environment = false
# set to 'true' to allow .pyc and .pyo files without
# a source .py file to be detected as revisions in the
# versions/ directory
# sourceless = false
# version location specification; This defaults
# to migrations/versions. When using multiple version
# directories, initial revisions must be specified with --version-path.
# The path separator used here should be the separator specified by "version_path_separator" below.
# version_locations = %(here)s/bar:%(here)s/bat:migrations/versions
# version path separator; As mentioned above, this is the character used to split
# version_locations. The default within new alembic.ini files is "os", which uses os.pathsep.
# If this key is omitted entirely, it falls back to the legacy behavior of splitting on spaces and/or commas.
# Valid values for version_path_separator are:
#
# version_path_separator = :
# version_path_separator = ;
# version_path_separator = space
# version_path_separator = newline
#
# Use os.pathsep. Default configuration used for new projects.
version_path_separator = os
# set to 'true' to search source files recursively
# in each "version_locations" directory
# new in Alembic version 1.10
# recursive_version_locations = false
# the output encoding used when revision files
# are written from script.py.mako
# output_encoding = utf-8
sqlalchemy.url = driver://user:pass@localhost/dbname
[post_write_hooks]
# post_write_hooks defines scripts or Python functions that are run
# on newly generated revision scripts. See the documentation for further
# detail and examples
# format using "black" - use the console_scripts runner, against the "black" entrypoint
# hooks = black
# black.type = console_scripts
# black.entrypoint = black
# black.options = -l 79 REVISION_SCRIPT_FILENAME
# lint with attempts to fix using "ruff" - use the exec runner, execute a binary
# hooks = ruff
# ruff.type = exec
# ruff.executable = %(here)s/.venv/bin/ruff
# ruff.options = check --fix REVISION_SCRIPT_FILENAME
# Logging configuration
[loggers]
keys = root,sqlalchemy,alembic
[handlers]
keys = console
[formatters]
keys = generic
[logger_root]
level = WARNING
handlers = console
qualname =
[logger_sqlalchemy]
level = WARNING
handlers =
qualname = sqlalchemy.engine
[logger_alembic]
level = INFO
handlers =
qualname = alembic
[handler_console]
class = StreamHandler
args = (sys.stderr,)
level = NOTSET
formatter = generic
[formatter_generic]
format = %(levelname)-5.5s [%(name)s] %(message)s
datefmt = %H:%M:%S

35
app/db/context.py Normal file
View File

@@ -0,0 +1,35 @@
import asyncio
from types import TracebackType
from typing import Optional
from sqlalchemy.ext.asyncio import AsyncSession, async_sessionmaker
from .repositories import Repository
from .uow import UoW
class SQLSessionContext:
    """Async context manager yielding a (Repository, UoW) pair on one session.

    Opens a session from the pool on enter and guarantees it is closed on
    exit, even if the surrounding task is cancelled.
    """

    # Factory used to open a fresh AsyncSession per context entry.
    _session_pool: async_sessionmaker[AsyncSession]
    # The currently open session, or None outside the context.
    _session: Optional[AsyncSession]

    __slots__ = ("_session_pool", "_session")

    def __init__(self, session_pool: async_sessionmaker[AsyncSession]) -> None:
        self._session_pool = session_pool
        self._session = None

    async def __aenter__(self) -> tuple[Repository, UoW]:
        # Enter the session's own async context so its internal setup runs.
        self._session = await self._session_pool().__aenter__()
        return Repository(session=self._session), UoW(session=self._session)

    async def __aexit__(
        self,
        exc_type: Optional[type[BaseException]],
        exc_value: Optional[BaseException],
        traceback: Optional[TracebackType],
    ) -> None:
        if self._session is None:
            return
        # Shield close() so an outer cancellation cannot leave the session
        # (and its underlying connection) open.
        task: asyncio.Task[None] = asyncio.create_task(self._session.close())
        await asyncio.shield(task)
        self._session = None

71
app/db/migrations/env.py Normal file
View File

@@ -0,0 +1,71 @@
import asyncio
from logging.config import fileConfig
from alembic import context
from alembic.script import ScriptDirectory
from sqlalchemy.engine import Connection
from sqlalchemy.ext.asyncio import AsyncEngine, create_async_engine
from app.core.config import AppConfig
from app.db.models import Base
config = context.config
db_config = AppConfig.get().db
if config.config_file_name is not None:
fileConfig(config.config_file_name)
target_metadata = Base.metadata
def process_revision_directives(context, revision, directives):
    """Assign sequential, zero-padded revision ids (0001, 0002, ...).

    Replaces Alembic's random revision hashes with an incrementing counter
    derived from the current head revision.
    """
    migration_script = directives[0]
    head_revision = ScriptDirectory.from_config(context.config).get_current_head()
    if head_revision is None:
        new_rev_id = 1
    else:
        # int() handles leading zeros directly; the previous
        # int(head_revision.lstrip("0")) raised ValueError when the head
        # revision was all zeros (e.g. "0000" -> "").
        new_rev_id = int(head_revision) + 1
    migration_script.rev_id = f"{new_rev_id:04}"
def run_migrations_offline() -> None:
    """Run migrations in 'offline' mode: emit SQL to output without a DBAPI."""
    url = config.get_main_option("sqlalchemy.url")
    context.configure(
        url=url,
        target_metadata=target_metadata,
        # literal_binds inlines parameter values into the generated SQL.
        literal_binds=True,
        dialect_opts={"paramstyle": "named"},
        process_revision_directives=process_revision_directives,
    )
    with context.begin_transaction():
        context.run_migrations()
def do_run_migrations(connection: Connection) -> None:
    """Configure the Alembic context on an open (sync) connection and migrate."""
    context.configure(
        connection=connection,
        target_metadata=target_metadata,
        process_revision_directives=process_revision_directives,
    )
    with context.begin_transaction():
        context.run_migrations()
async def run_migrations_online() -> None:
    """Run migrations against a live database via an async engine."""
    connectable: AsyncEngine = create_async_engine(url=db_config.dsn())
    async with connectable.connect() as connection:
        # Alembic's migration machinery is synchronous; run it inside the
        # async connection via run_sync.
        await connection.run_sync(do_run_migrations)
    await connectable.dispose()


# Module entry point: Alembic imports this file; dispatch on run mode.
if context.is_offline_mode():
    run_migrations_offline()
else:
    asyncio.run(run_migrations_online())

View File

@@ -0,0 +1,26 @@
"""${message}
Revision ID: ${up_revision}
Revises: ${down_revision | comma,n}
Create Date: ${create_date}
"""
from typing import Sequence, Union
from alembic import op
import sqlalchemy as sa
${imports if imports else ""}
# revision identifiers, used by Alembic.
revision: str = ${repr(up_revision)}
down_revision: Union[str, None] = ${repr(down_revision)}
branch_labels: Union[str, Sequence[str], None] = ${repr(branch_labels)}
depends_on: Union[str, Sequence[str], None] = ${repr(depends_on)}
def upgrade() -> None:
${upgrades if upgrades else "pass"}
def downgrade() -> None:
${downgrades if downgrades else "pass"}

View File

@@ -0,0 +1,46 @@
"""initial
Revision ID: 0001
Revises:
Create Date: 2025-05-28 07:11:51.154866
"""
from typing import Sequence, Union
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision: str = '0001'
down_revision: Union[str, None] = None
branch_labels: Union[str, Sequence[str], None] = None
depends_on: Union[str, Sequence[str], None] = None
def upgrade() -> None:
    """Create the ``users`` table and its ``userrole`` enum."""
    # ### commands auto generated by Alembic - please adjust! ###
    op.create_table('users',
    sa.Column('id', sa.Integer(), autoincrement=True, nullable=False),
    sa.Column('telegram_id', sa.BigInteger(), nullable=False),
    sa.Column('name', sa.String(), nullable=False),
    sa.Column('role', sa.Enum('ADMIN', 'USER', name='userrole'), nullable=False),
    sa.Column('language', sa.String(), nullable=False),
    sa.Column('balance', sa.Numeric(precision=20, scale=8), nullable=False),
    sa.Column('personal_discount', sa.Float(), nullable=False),
    sa.Column('purchase_discount', sa.Float(), nullable=False),
    sa.Column('is_blocked', sa.Boolean(), nullable=False),
    sa.Column('is_bot_blocked', sa.Boolean(), nullable=False),
    sa.Column('is_trial_used', sa.Boolean(), nullable=False),
    sa.Column('created_at', sa.TIMESTAMP(timezone=True), nullable=False),
    sa.Column('updated_at', sa.TIMESTAMP(timezone=True), nullable=False),
    sa.PrimaryKeyConstraint('id'),
    sa.UniqueConstraint('telegram_id')
    )
    # ### end Alembic commands ###
def downgrade() -> None:
    """Drop the ``users`` table."""
    # ### commands auto generated by Alembic - please adjust! ###
    op.drop_table('users')
    # ### end Alembic commands ###

View File

@@ -0,0 +1,7 @@
from .base import Base
from .user import User
__all__ = [
"Base",
"User",
]

5
app/db/models/base.py Normal file
View File

@@ -0,0 +1,5 @@
from sqlalchemy.orm import DeclarativeBase
class Base(DeclarativeBase):
    """Declarative base shared by all ORM models."""
    pass

39
app/db/models/user.py Normal file
View File

@@ -0,0 +1,39 @@
from datetime import datetime
from decimal import Decimal
from sqlalchemy import TIMESTAMP, BigInteger, Boolean, Enum, Float, Numeric, String
from sqlalchemy.orm import Mapped, mapped_column
from app.core.constants import TIMEZONE
from app.core.enums import UserRole
from .base import Base
class User(Base):
    """Telegram user account with balance, discounts and status flags."""

    __tablename__ = "users"

    # Surrogate primary key.
    id: Mapped[int] = mapped_column(primary_key=True, autoincrement=True)
    # Telegram user id; unique per account.
    telegram_id: Mapped[int] = mapped_column(BigInteger, unique=True, nullable=False)
    # Display name captured from the Telegram profile.
    name: Mapped[str] = mapped_column(String, nullable=False)
    role: Mapped[UserRole] = mapped_column(Enum(UserRole), nullable=False, default=UserRole.USER)
    # Locale code used for i18n lookups.
    language: Mapped[str] = mapped_column(String, nullable=False)
    # Numeric(20, 8) keeps exact precision for monetary amounts.
    balance: Mapped[Decimal] = mapped_column(Numeric(20, 8), nullable=False, default=0)
    personal_discount: Mapped[float] = mapped_column(Float, default=0.0, nullable=False)
    purchase_discount: Mapped[float] = mapped_column(Float, default=0.0, nullable=False)
    # True when the user is blocked by an admin.
    is_blocked: Mapped[bool] = mapped_column(Boolean, default=False, nullable=False)
    # True when the user has blocked the bot on Telegram.
    is_bot_blocked: Mapped[bool] = mapped_column(Boolean, default=False, nullable=False)
    is_trial_used: Mapped[bool] = mapped_column(Boolean, default=False, nullable=False)
    # Timestamps are timezone-aware and default to "now" in TIMEZONE.
    created_at: Mapped[datetime] = mapped_column(
        TIMESTAMP(timezone=True), nullable=False, default=lambda: datetime.now(TIMEZONE)
    )
    updated_at: Mapped[datetime] = mapped_column(
        TIMESTAMP(timezone=True),
        nullable=False,
        default=lambda: datetime.now(TIMEZONE),
        onupdate=lambda: datetime.now(TIMEZONE),
    )

View File

@@ -0,0 +1,7 @@
from .general import Repository
from .users import UsersRepository
__all__ = [
"Repository",
"UsersRepository",
]

View File

@@ -0,0 +1,68 @@
from __future__ import annotations
from typing import Any, Optional, TypeVar, Union, cast
from sqlalchemy import ColumnExpressionArgument, delete, select, update
from sqlalchemy.ext.asyncio import AsyncSession
from sqlalchemy.orm import InstrumentedAttribute
from ..uow import UoW
T = TypeVar("T", bound=Any)
ColumnClauseType = Union[
type[T],
InstrumentedAttribute[T],
]
class BaseRepository:
    """Generic CRUD helpers over an AsyncSession, shared by concrete repos."""

    # Session all queries execute against.
    session: AsyncSession
    # Unit-of-work bound to the same session.
    uow: UoW

    def __init__(self, session: AsyncSession) -> None:
        self.session = session
        self.uow = UoW(session=session)

    async def _get(
        self,
        model: ColumnClauseType[T],
        *conditions: ColumnExpressionArgument[Any],
    ) -> Optional[T]:
        """Return the first row of *model* matching *conditions*, or None."""
        return cast(Optional[T], await self.session.scalar(select(model).where(*conditions)))

    async def _get_many(
        self,
        model: ColumnClauseType[T],
        *conditions: ColumnExpressionArgument[Any],
    ) -> list[T]:
        """Return all rows of *model* matching *conditions*."""
        return list(await self.session.scalars(select(model).where(*conditions)))

    async def _update(
        self,
        model: ColumnClauseType[T],
        conditions: list[ColumnExpressionArgument[Any]],
        load_result: bool = True,
        **kwargs: Any,
    ) -> Optional[T]:
        """UPDATE rows matching *conditions* with **kwargs and commit.

        With no kwargs no UPDATE is issued; the current row is fetched
        instead when *load_result* is True. When *load_result* is True the
        updated row is returned via RETURNING, otherwise None.
        """
        if not kwargs:
            if not load_result:
                return None
            return cast(Optional[T], await self._get(model, *conditions))
        query = update(model).where(*conditions).values(**kwargs)
        if load_result:
            query = query.returning(model)
        result = await self.session.execute(query)
        await self.session.commit()
        return result.scalar_one_or_none() if load_result else None

    async def _delete(
        self,
        model: ColumnClauseType[T],
        *conditions: ColumnExpressionArgument[Any],
    ) -> bool:
        """Delete rows matching *conditions*, commit, and report if any matched."""
        result = await self.session.execute(delete(model).where(*conditions))
        await self.session.commit()
        return cast(bool, result.rowcount > 0)

View File

@@ -0,0 +1,14 @@
from __future__ import annotations
from sqlalchemy.ext.asyncio import AsyncSession
from .base import BaseRepository
from .users import UsersRepository
class Repository(BaseRepository):
    """Aggregate repository exposing per-table repositories on one session."""

    # Accessors for the users table.
    users: UsersRepository

    def __init__(self, session: AsyncSession) -> None:
        super().__init__(session=session)
        self.users = UsersRepository(session=session)

View File

@@ -0,0 +1,20 @@
from typing import Any, Optional
from ..models import User
from .base import BaseRepository
class UsersRepository(BaseRepository):
    """User-table accessors keyed by Telegram id."""

    async def get(self, telegram_id: int) -> Optional[User]:
        """Return the user with *telegram_id*, or None if absent."""
        return await self._get(User, User.telegram_id == telegram_id)

    async def update(self, telegram_id: int, **kwargs: Any) -> Optional[User]:
        """Apply **kwargs to the matching user row.

        NOTE(review): load_result=False makes this always return None
        despite the Optional[User] annotation — confirm whether callers
        ever need the updated row.
        """
        return await self._update(
            model=User,
            conditions=[User.telegram_id == telegram_id],
            load_result=False,
            **kwargs,
        )

    async def delete(self, telegram_id: int) -> bool:
        """Delete the matching user; True if a row was removed."""
        return await self._delete(User, User.telegram_id == telegram_id)

View File

@@ -0,0 +1,5 @@
from .user import UserService
__all__ = [
"UserService",
]

67
app/db/services/user.py Normal file
View File

@@ -0,0 +1,67 @@
from __future__ import annotations
import logging
from typing import TYPE_CHECKING, Any, Awaitable, Callable, Optional
from aiogram.types import User as AiogramUser
from sqlalchemy.ext.asyncio import AsyncSession, async_sessionmaker
from app.core.enums import UserRole
from .. import SQLSessionContext
from ..models import User
if TYPE_CHECKING:
from app.bot.middlewares import I18nMiddleware
logger = logging.getLogger(__name__)
class UserService:
    """Database-facing operations on User rows, one short-lived session each."""

    # Factory producing a fresh AsyncSession per operation.
    session_pool: async_sessionmaker[AsyncSession]

    def __init__(self, session_pool: async_sessionmaker[AsyncSession]) -> None:
        self.session_pool = session_pool

    async def create(
        self,
        aiogram_user: AiogramUser,
        i18n: I18nMiddleware,
        is_dev: bool = False,
    ) -> User:
        """Persist a new User built from the incoming aiogram user.

        Falls back to the default locale when the Telegram language code is
        not among the configured locales; developers get the ADMIN role.
        """
        async with SQLSessionContext(self.session_pool) as (repository, uow):
            user = User(
                telegram_id=aiogram_user.id,
                name=aiogram_user.full_name,
                language=(
                    aiogram_user.language_code
                    if aiogram_user.language_code in i18n.locales
                    else i18n.default_locale
                ),
                role=UserRole.ADMIN if is_dev else UserRole.USER,
            )
            await uow.commit(user)
            logger.info(f"[User:{user.telegram_id} ({user.name})] Created in database")
            return user

    async def _get(
        self,
        getter: Callable[[Any], Awaitable[Optional[User]]],
        key: Any,
    ) -> Optional[User]:
        # Indirection hook for fetching by an arbitrary key; currently a
        # plain passthrough to *getter*.
        return await getter(key)

    async def get(self, telegram_id: int) -> Optional[User]:
        """Return the user with *telegram_id*, or None."""
        async with SQLSessionContext(self.session_pool) as (repository, uow):
            return await self._get(repository.users.get, telegram_id)

    async def update(self, user: User, **kwargs: Any) -> None:
        """Apply **kwargs to both the in-memory *user* and its database row."""
        for key, value in kwargs.items():
            setattr(user, key, value)
        async with SQLSessionContext(self.session_pool) as (repository, uow):
            await repository.users.update(telegram_id=user.telegram_id, **kwargs)

    async def set_bot_blocked(self, user: User, blocked: bool) -> None:
        """Record whether the user has blocked the bot on Telegram."""
        async with SQLSessionContext(self.session_pool) as (repository, uow):
            await repository.users.update(telegram_id=user.telegram_id, is_bot_blocked=blocked)
            logger.info(f"[User:{user.telegram_id} ({user.name})] Set is_bot_blocked -> {blocked}")

25
app/db/uow.py Normal file
View File

@@ -0,0 +1,25 @@
from sqlalchemy.ext.asyncio import AsyncSession
from .models import Base
class UoW:
    """Minimal unit-of-work wrapper over a single AsyncSession."""

    # The session all operations run against.
    session: AsyncSession

    __slots__ = ("session",)

    def __init__(self, session: AsyncSession) -> None:
        self.session = session

    async def commit(self, *instances: Base) -> None:
        """Stage *instances* and commit the session."""
        self.session.add_all(instances)
        await self.session.commit()

    async def merge(self, *instances: Base) -> None:
        """Merge detached *instances* into the session.

        NOTE(review): unlike commit()/delete(), this does not commit —
        confirm callers commit separately.
        """
        for instance in instances:
            await self.session.merge(instance)

    async def delete(self, *instances: Base) -> None:
        """Delete *instances*, committing once at the end."""
        for instance in instances:
            await self.session.delete(instance)
        await self.session.commit()

13
app/factories/__init__.py Normal file
View File

@@ -0,0 +1,13 @@
from .bot import create_bot
from .dispatcher import create_dispatcher
from .i18n import create_i18n_middleware
from .redis import create_redis
from .session_pool import create_session_pool
__all__ = [
"create_bot",
"create_dispatcher",
"create_i18n_middleware",
"create_redis",
"create_session_pool",
]

10
app/factories/bot.py Normal file
View File

@@ -0,0 +1,10 @@
from aiogram import Bot
from aiogram.client.default import DefaultBotProperties
from aiogram.enums import ParseMode
def create_bot(token: str) -> Bot:
    """Build a Bot whose messages are HTML-formatted by default."""
    default_properties = DefaultBotProperties(parse_mode=ParseMode.HTML)
    return Bot(token=token, default=default_properties)

View File

@@ -0,0 +1,64 @@
from __future__ import annotations
import logging
from typing import TYPE_CHECKING
from aiogram import Dispatcher
from aiogram.fsm.storage.base import DefaultKeyBuilder
from aiogram.fsm.storage.redis import RedisStorage
from aiogram_dialog import setup_dialogs
from redis.asyncio import Redis
from app.bot.filters import IsPrivate
from app.bot.middlewares import (
ErrorMiddleware,
GarbageMiddleware,
I18nMiddleware,
ThrottlingMiddleware,
UserMiddleware,
)
from app.bot.routers import routers
from .i18n import create_i18n_middleware
from .redis import create_redis
from .session_pool import create_session_pool
if TYPE_CHECKING:
from app.core.config import AppConfig
logger = logging.getLogger(__name__)
def create_dispatcher(config: AppConfig) -> Dispatcher:
    """Assemble the Dispatcher: Redis FSM storage, middlewares, routers, dialogs.

    Registration order is significant: outer middlewares run before update
    filters, inner ones after (request -> outer -> filter -> inner -> handler).
    """
    # with_destiny=True is required by aiogram_dialog's FSM keying — verify.
    key_builder = DefaultKeyBuilder(with_destiny=True)
    redis: Redis = create_redis(url=config.redis.dsn())
    error_middleware = ErrorMiddleware()
    user_middleware = UserMiddleware()
    i18n_middleware: I18nMiddleware = create_i18n_middleware(config)
    garbage_middleware = GarbageMiddleware()
    throttling_middleware = ThrottlingMiddleware()
    dispatcher = Dispatcher(
        storage=RedisStorage(
            redis=redis,
            key_builder=key_builder,
        ),
        config=config,
        session_pool=create_session_pool(config=config),
        # redis= # TODO: redis repository for cache
        i18n_middleware=i18n_middleware,
    )
    # request -> outer -> filter -> inner -> handler #
    # Only private chats are processed at all.
    dispatcher.update.filter(IsPrivate())
    error_middleware.setup_outer(dispatcher)
    user_middleware.setup_outer(dispatcher)
    i18n_middleware.setup_inner(dispatcher)
    garbage_middleware.setup_inner(dispatcher)
    throttling_middleware.setup_outer(dispatcher)
    dispatcher.include_routers(*routers)
    setup_dialogs(dispatcher)
    return dispatcher

28
app/factories/i18n.py Normal file
View File

@@ -0,0 +1,28 @@
from __future__ import annotations
import logging
from typing import TYPE_CHECKING
from fluent.runtime import FluentLocalization, FluentResourceLoader
from app.bot.middlewares import I18nMiddleware
from app.core.constants import RESOURCE_I18N
if TYPE_CHECKING:
from app.core.config import AppConfig
logger = logging.getLogger(__name__)
def create_i18n_middleware(config: AppConfig) -> I18nMiddleware:
    """Build the i18n middleware with one FluentLocalization per configured locale."""
    resource_loader = FluentResourceLoader(f"{config.i18n.locales_dir}/{{locale}}")
    localizations = {}
    for locale in config.i18n.locales:
        fallback_chain = [locale, config.i18n.default_locale]
        localizations[locale] = FluentLocalization(
            fallback_chain,
            RESOURCE_I18N,
            resource_loader,
        )
    logger.debug(f"Available locales: {list(localizations.keys())}")
    return I18nMiddleware(localizations, config.i18n.default_locale)

5
app/factories/redis.py Normal file
View File

@@ -0,0 +1,5 @@
from redis.asyncio import ConnectionPool, Redis
def create_redis(url: str) -> Redis:
    """Create a Redis client backed by a connection pool built from *url*."""
    pool = ConnectionPool.from_url(url=url)
    return Redis(connection_pool=pool)

View File

@@ -0,0 +1,23 @@
from __future__ import annotations
from sqlalchemy.ext.asyncio import (
AsyncEngine,
AsyncSession,
async_sessionmaker,
create_async_engine,
)
from app.core.config import AppConfig
def create_session_pool(config: AppConfig) -> async_sessionmaker[AsyncSession]:
    """Create the async engine from config and wrap it in a session factory."""
    alchemy = config.alchemy
    engine: AsyncEngine = create_async_engine(
        url=config.db.dsn(),
        echo=alchemy.echo,
        echo_pool=alchemy.echo_pool,
        pool_size=alchemy.pool_size,
        max_overflow=alchemy.max_overflow,
        pool_timeout=alchemy.pool_timeout,
        pool_recycle=alchemy.pool_recycle,
    )
    # expire_on_commit=False keeps loaded attributes usable after commit.
    return async_sessionmaker(engine, expire_on_commit=False)

71
docker-compose.yml Normal file
View File

@@ -0,0 +1,71 @@
services:
remnashop-db:
image: postgres:17
container_name: "remnashop-db"
hostname: remnashop-db
restart: always
ports:
- "5432:5432"
env_file:
- .env
environment:
- POSTGRES_USER=${DB_USER}
- POSTGRES_PASSWORD=${DB_PASSWORD}
- POSTGRES_DB=${DB_NAME}
- TZ=UTC
volumes:
- remnashop-db-data:/var/lib/postgresql/data
networks:
- remnashop-network
healthcheck:
test: ["CMD-SHELL", "pg_isready -U $${POSTGRES_USER} -d $${POSTGRES_DB}"]
interval: 3s
timeout: 10s
retries: 3
remnashop-redis:
image: redis:8
container_name: remnashop-redis
hostname: remnashop-redis
restart: always
networks:
- remnashop-network
volumes:
- remnashop-redis-data:/data
healthcheck:
test: ["CMD", "redis-cli", "ping"]
interval: 3s
timeout: 10s
retries: 3
remnashop:
build: .
container_name: "remnashop"
hostname: remnashop
restart: always
env_file:
- .env
networks:
- remnashop-network
depends_on:
remnashop-db:
condition: service_healthy
remnashop-redis:
condition: service_healthy
volumes:
- ./app/assets:/app/assets
- ./logs:/logs
networks:
remnashop-network:
external: true
volumes:
remnashop-db-data:
driver: local
external: false
name: remnashop-db-data
remnashop-redis-data:
driver: local
external: false
name: remnashop-redis-data

0
logs/.gitkeep Normal file
View File