Compare commits

..

2 Commits

Author SHA1 Message Date
4f995d530b update README.md 2023-11-05 04:24:44 +03:00
9732426a7e update sqlalchemy to v2 2023-11-05 04:19:05 +03:00
31 changed files with 3036 additions and 1492 deletions

65
sqlalchemy_study/Makefile Normal file
View File

@ -0,0 +1,65 @@
# COLORS — terminal escape sequences used by the `help` target.
GREEN  := $(shell tput -Txterm setaf 2)
WHITE  := $(shell tput -Txterm setaf 7)
YELLOW := $(shell tput -Txterm setaf 3)
RESET  := $(shell tput -Txterm sgr0)

# Width of the target-name column in `make help` output.
TARGET_MAX_CHAR_NUM := 25

.DEFAULT_GOAL := help

# Every command-style target is phony so a stray file with the same name
# cannot shadow it. (Fixed: `lint-ruff` was missing and a nonexistent
# `lint-imports` target was listed.)
.PHONY: help format lint check-style check-import-sorting lint-typing lint-ruff lint-complexity lint-deps

PY_TARGET_DIRS = src

## Reformat the code (autoflake, pyupgrade, isort, black)
format:
	autoflake --recursive $(PY_TARGET_DIRS) --in-place --remove-unused-variables --remove-all-unused-imports --ignore-init-module-imports --remove-duplicate-keys --ignore-pass-statements
	# `|| true` (not `| true`): pyup_dirs exits non-zero when it rewrites
	# files; keep its output visible and don't abort the format target.
	pyup_dirs --py311-plus $(PY_TARGET_DIRS) || true
	isort --color --quiet $(PY_TARGET_DIRS)
	black $(PY_TARGET_DIRS)

## Check code style (black)
check-style:
	black --check $(PY_TARGET_DIRS)

## Check import sorting (isort)
check-import-sorting:
	isort --check-only $(PY_TARGET_DIRS)

## Check typing (mypy)
lint-typing:
	mypy $(PY_TARGET_DIRS)

## Check code complexity (flake8)
lint-complexity:
	flake8 $(PY_TARGET_DIRS)

## Run the ruff linter
lint-ruff:
	ruff $(PY_TARGET_DIRS)

## Check dependencies
lint-deps:
	poetry run poetry check
	poetry run pip check
	poetry run safety check --full-report
	poetry run pip-audit

## Run all linters
lint: lint-ruff lint-typing lint-complexity check-style check-import-sorting lint-deps

## Show help
help:
	@echo ''
	@echo 'Usage:'
	@echo '  ${YELLOW}make${RESET} ${GREEN}<target>${RESET}'
	@echo ''
	@echo 'Targets:'
	@awk '/^[a-zA-Z\-_0-9]+:/ { \
		helpMessage = match(lastLine, /^## (.*)/); \
		if (helpMessage) { \
			helpCommand = $$1; sub(/:$$/, "", helpCommand); \
			helpMessage = substr(lastLine, RSTART + 3, RLENGTH); \
			printf "  ${YELLOW}%-$(TARGET_MAX_CHAR_NUM)s${RESET} ${GREEN}%s${RESET}\n", helpCommand, helpMessage; \
		} \
	} \
	{ lastLine = $$0 }' $(MAKEFILE_LIST)
	@echo ''

View File

@ -1,4 +1,4 @@
# SQLALCHEMY STUDY # SQLALCHEMY v2 queries STUDY
--- ---
@ -16,11 +16,11 @@ cp ./src/config/.env.template ./src/config/.env
*Note: Change USE_DATABASE variable to 'mysql' for MySQL training or 'postgres' for Postgres use.* *Note: Change USE_DATABASE variable to 'mysql' for MySQL training or 'postgres' for Postgres use.*
*Default is MySQL* *Default is Postgres*
## Run without app in docker: ## Manual fill database with data:
Requires python > 3.11 and poetry 1.3.1 Requires python >= 3.11 and poetry >= 1.3.1
- **install poetry dependencies:** - **install poetry dependencies:**
```bash ```bash
@ -28,24 +28,34 @@ poetry install
poetry shell poetry shell
``` ```
- **run for mysql:** ```docker-compose -f docker-compose.mysql.yaml up``` - **run for mysql:** ```docker compose -f docker-compose.mysql.yaml up```
- **run for postgres:** ```docker-compose -f docker-compose.postgres.yaml up``` - **run for postgres:** ```docker compose -f docker-compose.postgres.yaml up```
- **run initial data:** ```python ./src/data/fill_data.py``` - **run initial data:**
```bash
cd src
python data/fill_data.py
```
## Run all in docker: ## Fill database with full docker script:
**run for mysql:** **run for mysql:**
```bash ```bash
docker-compose -f docker-compose.mysql.yaml -f docker-compose.docker.yaml up docker compose -f docker-compose.mysql.yaml -f docker-compose.docker.yaml up
``` ```
**run for postgres:** **run for postgres:**
```bash ```bash
docker-compose -f docker-compose.postgres.yaml -f docker-compose.docker.yaml up docker compose -f docker-compose.postgres.yaml -f docker-compose.docker.yaml up
``` ```
*Note: docker will start all migrations automatically. You don't need creation data step* *Note: docker will start all migrations automatically. You don't need creation data step*
## Training queries: ## Training queries:
```bash
python data/get_data.py
```
## Help info: ## Help info:
### Create alembic migrations: ### Create alembic migrations:
@ -97,7 +107,8 @@ docker exec -it sqlalchemy_study_db psql -d sqlalchemy_study -U balsh
## Clean database ## Clean database
```bash ```bash
docker-compose -f docker-compose.mysql.yaml down -v docker compose -f docker-compose.mysql.yaml down -v
docker compose -f docker-compose.postgres.yaml down -v
``` ```
## Known issues: ## Known issues:

View File

@ -33,7 +33,7 @@ services:
- db - db
command: > command: >
bash -c "/app/scripts/docker-entrypoint.sh bash -c "/app/scripts/docker-entrypoint.sh
&& /app/scripts/alembic-init-migrate.sh && python data/fill_data.py && python main.py
&& sleep infinity" && sleep infinity"
volumes: volumes:
- ./src:/app/src/ - ./src:/app/src/

View File

@ -8,7 +8,7 @@ volumes:
services: services:
db: db:
image: mysql:8.0.31 image: mysql:8.2.0
platform: linux/amd64 platform: linux/amd64
container_name: "sqlalchemy_study_db" container_name: "sqlalchemy_study_db"
hostname: 'db_host' hostname: 'db_host'

View File

@ -8,7 +8,7 @@ volumes:
services: services:
db: db:
image: postgres:14.6 image: postgres:16.0
container_name: "sqlalchemy_study_db" container_name: "sqlalchemy_study_db"
hostname: 'db_host' hostname: 'db_host'
restart: unless-stopped restart: unless-stopped

View File

@ -1,5 +1,5 @@
FROM --platform=linux/amd64 python:3.11.1 FROM --platform=linux/amd64 python:3.11.6
ARG USER ARG USER

File diff suppressed because it is too large Load Diff

View File

@ -1,28 +1,171 @@
[tool.poetry] [tool.poetry]
name = "sqlalchemy_study_project" name = "sqlalchemy_study_project"
version = "1.0.1" version = "2.0.7"
description = "for study sqlalchemy async models" description = "for study sqlalchemy async models"
authors = ["Dmitry Afanasyev <Balshbox@gmail.com>"] authors = ["Dmitry Afanasyev <Balshbox@gmail.com>"]
[build-system]
requires = ["poetry-core>=1.6.1"]
build-backend = "poetry.core.masonry.api"
[tool.poetry.dependencies] [tool.poetry.dependencies]
python = "^3.11" python = "^3.11"
SQLAlchemy = "^1.4" sqlalchemy = {version = "^2.0", extras=["mypy"]}
SQLAlchemy-Utils = "^0.38.2" pydantic-settings = "^2.0.3"
pydantic = {version = "^1.9.1", extras = ["email"]} pydantic = {version = "^2.4", extras = ["email"]}
factory-boy = "^3.2.1" factory-boy = "^3.3"
Faker = "^15.0.0" Faker = "^19"
loguru = "^0.6.0" loguru = "^0.7"
alembic = "^1.8.0" alembic = "^1.12"
python-dotenv = "^0.20.0" python-dotenv = "^1.0"
asyncpg = "^0.27.0" asyncpg = "^0.28"
asyncmy = "^0.2.5" asyncmy = "^0.2.8"
PyMySQL = "^1.0.2" PyMySQL = "^1.1"
cryptography = "^39.0" cryptography = "^41.0"
psycopg2-binary = "^2.9.3" psycopg2-binary = "^2.9"
[tool.poetry.dev-dependencies] [tool.poetry.dev-dependencies]
ipython = "^8.4.0" ipython = "^8.17"
safety = "^2.3.5"
pip-audit = "^2.6"
pyupgrade = "^3.10"
isort = "^5.12"
black = "^23.10"
mypy = "^1.6"
types-PyMySQL = "^1.0"
types-python-dateutil = "^2.8"
autoflake = "^2.2"
flake8 = "^6.1"
flake8-logging-format = "^0.9"
flake8-comprehensions = "^3.14"
flake8-eradicate = "^1.5"
flake8-deprecated = "^2.2"
flake8-bugbear = "^23.7"
flake8-warnings = "^0.4"
flake8-debugger = "^4.1"
flake8-annotations-complexity = "^0.0.8"
flake8-fixme = "^1.1"
flake8-simplify = "^0.21"
flake8-variables-names = "^0.0.6"
flake8-bandit = "^4.1"
flake8-tidy-imports = "^4.10"
flake8-noqa = "^1.3"
flake8-useless-assert = "^0.4"
flake8-mock = "^0.4"
flake8-comments = "^0.1"
Flake8-pyproject = "^1.2.3"
ruff = "^0.1"
[tool.flake8]
inline-quotes = "double"
max-line-length = 120
max-expression-complexity = 10
max-complexity = 10
ban-relative-imports = true
nested-classes-whitelist = ["Config", "Meta"]
pytest-parametrize-names-type = "csv"
exclude = [
".cache/*",
".pytest_cache/*",
"*/__pycache__/*",
]
ignore = [
# use isort instead
"I",
# use black style
"E203", "W", "G004", "VNE003",
# user FastAPI Depends in function calls
"B008"
]
per-file-ignores = []
[tool.autoflake]
in-place = true
ignore-init-module-imports = true
remove-unused-variables = true
remove-all-unused-imports = true
remove-duplicate-keys = true
[tool.isort]
profile = "black"
multi_line_output = 3
src_paths = ["src",]
combine_as_imports = true
[tool.mypy]
allow_redefinition = false
namespace_packages = true
check_untyped_defs = true
disallow_untyped_decorators = false
disallow_any_explicit = false
disallow_any_generics = true
disallow_untyped_calls = true
disallow_untyped_defs = true
ignore_errors = false
ignore_missing_imports = true
implicit_reexport = false
local_partial_types = true
strict_optional = true
strict_equality = true
show_error_codes = true
no_implicit_optional = true
warn_unused_ignores = true
warn_redundant_casts = true
warn_unused_configs = true
warn_unreachable = true
warn_no_return = true
exclude = [
"src/migrations/versions/*"
]
plugins = [
"sqlalchemy.ext.mypy.plugin",
]
[tool.black]
line-length = 120
target-version = ['py311']
[tool.coverage.run]
relative_files = true
concurrency = ["greenlet", "thread"]
[tool.coverage.report]
sort = "cover"
skip_covered = true
[tool.ruff]
extend-select = ["F", "I", "PL", "E", "W", "C4", "PT", "B", "T10", "SIM", "TID", "T20", "PGH", "S", "RET", "ERA", "PIE", "UP", "ASYNC", "ISC", "PERF", "DTZ", "TRY", "C90"]
ignore = ["S105", "S106", "PGH003", "TRY003", "TRY004", "PT001", "PT023", "I001"]
line-length = 120
output-format="grouped"
[tool.ruff.per-file-ignores]
"src/data/factories.py" = ["DTZ005", ]
"src/data/get_data.py" = ["T201"]
"src/db/utils.py" = ["PERF401"]
[tool.ruff.pylint]
max-args = 15
[tool.ruff.flake8-bugbear]
# Allow default arguments like, e.g., `data: List[str] = fastapi.Query(None)`.
extend-immutable-calls = []
[tool.ruff.flake8-pytest-style]
parametrize-names-type = "csv"
[tool.ruff.mccabe]
max-complexity = 15
[tool.ruff.isort]
force-wrap-aliases = true
combine-as-imports = true
[tool.ruff.flake8-quotes]
inline-quotes = "double"
[build-system]
requires = ["poetry-core>=1.0.0"]
build-backend = "poetry.core.masonry.api"

View File

@ -1,186 +0,0 @@
from sqlalchemy import create_engine
from sqlalchemy import Table, Column, String, MetaData, DATETIME, CHAR, INTEGER
from sqlalchemy.orm import Session, sessionmaker
from datetime import datetime, timezone, timedelta
from pathlib import Path
from decouple import AutoConfig
BASE_DIR = PurePath(__file__).parent.parent
config = AutoConfig(search_path=BASE_DIR.joinpath('config'))
DATABASE_USER = config('POSTGRES_USER')
DATABASE_NAME = config('POSTGRES_DB')
DATABASE_PASSWORD = config('POSTGRES_PASSWORD')
DATABASE_HOST = config('DATABASE_HOST')
DATABASE_PORT = config('DATABASE_PORT')
engine = create_engine(
f'postgresql+psycopg2://{DATABASE_USER}:{DATABASE_PASSWORD}@'
f'{DATABASE_HOST}:{DATABASE_PORT}/{DATABASE_NAME}')
session_factory = sessionmaker(engine)
session = session_factory()
meta = MetaData(engine)
def get_now(offset):
_offset = timezone(timedelta(hours=offset))
now = datetime.now(_offset)
return now
announce = Table('accounts_announce', meta,
Column('id', INTEGER, primary_key=True),
Column('announce', String, nullable=True, default=''),
Column('created', DATETIME),
Column('author', CHAR, nullable=False),
)
bot_users_table = Table('accounts_botusers', meta,
Column('id', INTEGER, primary_key=True),
Column('chat_id', CHAR, nullable=False),
Column('nickname', CHAR, nullable=True, ),
Column('name', CHAR, nullable=True, ),
Column('telephone', CHAR, nullable=True),
Column('location', CHAR, nullable=True, default=''),
Column('user_created', DATETIME)
)
users_messages = Table('accounts_usersmessages', meta,
Column('id', INTEGER, primary_key=True),
Column('chat_id_id', INTEGER, nullable=True),
Column('nickname', CHAR, nullable=True),
Column('name', CHAR, nullable=True),
Column('message', String, nullable=False),
Column('location', CHAR, nullable=True),
Column('message_time', DATETIME),
Column('status', CHAR, nullable=True, default='')
)
reply_messages = Table('accounts_messagesreplys', meta,
Column('id', INTEGER, primary_key=True),
Column('chat_id_id', INTEGER, nullable=True),
Column('nickname', CHAR, nullable=True),
Column('name', CHAR, nullable=True),
Column('message', String, nullable=False),
Column('message_time', DATETIME),
Column('status', CHAR, nullable=True, default='')
)
def db_insert_or_update(chat_id, nickname=None, name=None,
telephone=None, location=None,
):
with engine.connect() as conn:
try:
insert_statement = bot_users_table.insert().values(chat_id=chat_id,
nickname=nickname,
name=name,
telephone=telephone,
location=location,
user_created=get_now(3)
)
conn.execute(insert_statement)
except:
insert_statement = bot_users_table.update().values(nickname=nickname,
name=name,
telephone=telephone
).\
where(bot_users_table.c.chat_id == chat_id)
conn.execute(insert_statement)
def db_get_contact_number(chat_id):
try:
user = session.query(bot_users_table)\
.filter(bot_users_table.c.chat_id == chat_id).one()
return user.telephone
except:
pass
def db_get_location(chat_id):
try:
user = session.query(bot_users_table)\
.filter(bot_users_table.c.chat_id == chat_id).one()
return user.location
except:
pass
def db_get_id(chat_id):
try:
user = session.query(bot_users_table) \
.filter(bot_users_table.c.chat_id == chat_id).one()
return user.id
except(Exception) as e:
print('ERORO chat ID', e)
pass
def db_update_location(chat_id, location):
with engine.connect() as conn:
try:
insert_statement = bot_users_table.update().values(location=location). \
where(bot_users_table.c.chat_id == chat_id)
conn.execute(insert_statement)
except Exception as e:
print('ERROR!!!!!!!!!!!!!!!!', e)
pass
def db_insert_reply_message(chat_id_id, nickname=None, name=None, reply_message=None):
with engine.connect() as conn:
insert_statement = reply_messages.insert().values(chat_id_id=chat_id_id,
nickname=nickname,
name=name,
message=reply_message,
message_time=get_now(3)
)
conn.execute(insert_statement)
def db_insert_user_message(chat_id_id, nickname=None, location=None,
name=None, message=None):
with engine.connect() as conn:
insert_statement = users_messages.insert().values(chat_id_id=chat_id_id,
nickname=nickname,
name=name,
message=message,
location=location,
message_time=get_now(3)
)
conn.execute(insert_statement)
def db_insert_announce(author, bot_announce):
with engine.connect() as conn:
insert_statement = announce.insert().values(announce=bot_announce,
author=author,
created=get_now(3)
)
conn.execute(insert_statement)
# usage:
# db_insert_or_update(chat_id='417070387', nickname='Balsh', name='Dmitry', telephone='23432432')
# print(db_get_contact_number('417070387'))
# db_insert_reply_message(chat_id='1660356916', reply_message='asdasd')
# db_update_location(chat_id='1660356916', location='lsdkjfldskj')
# print(db_get_id('417070387'))

View File

View File

@ -2,7 +2,7 @@
# ==== DB provider ====: 'mysql' -> MySQL use | 'postgres' -> Postgres use # ==== DB provider ====: 'mysql' -> MySQL use | 'postgres' -> Postgres use
USE_DATABASE=mysql USE_DATABASE=postgres
# ==== DB common ==== # ==== DB common ====

View File

@ -1,5 +1,5 @@
from datetime import datetime, timedelta from datetime import datetime, timedelta
from typing import Optional from typing import Any, Optional
import factory import factory
from factory import fuzzy from factory import fuzzy
@ -8,10 +8,10 @@ from faker import Faker
from db.dependencies import get_sync_db_session from db.dependencies import get_sync_db_session
from db.models.coin import Coin, CoinType from db.models.coin import Coin, CoinType
from db.models.department import Department, EmployeeDepartments from db.models.department import Department, EmployeeDepartments
from db.models.skills import Skill, EmployeesSkills from db.models.skills import EmployeesSkills, Skill
from db.models.user import User, Employee from db.models.user import Employee, User
faker = Faker('ru_RU') faker = Faker("ru_RU")
Session = get_sync_db_session() Session = get_sync_db_session()
@ -20,130 +20,109 @@ Session = get_sync_db_session()
class BaseModelFactory(factory.alchemy.SQLAlchemyModelFactory): class BaseModelFactory(factory.alchemy.SQLAlchemyModelFactory):
class Meta: class Meta:
abstract = True abstract = True
sqlalchemy_session_persistence = 'commit' sqlalchemy_session_persistence = "commit"
sqlalchemy_session = Session sqlalchemy_session = Session
class UserFactory(BaseModelFactory): class UserFactory(BaseModelFactory):
id = factory.Sequence(lambda n: n + 1) id = factory.Sequence(lambda n: n + 1)
username = faker.profile(fields=['username'])['username'] username = faker.profile(fields=["username"])["username"]
email = factory.Faker('email') email = factory.Faker("email")
hash_password = factory.Faker('password') hash_password = factory.Faker("password")
auth_token = factory.Faker('uuid4') auth_token = factory.Faker("uuid4")
class Meta: class Meta:
model = User model = User
sqlalchemy_get_or_create = ( sqlalchemy_get_or_create = ("username",)
'username',
)
class CoinModelFactory(BaseModelFactory): class CoinModelFactory(BaseModelFactory):
id = factory.Sequence(lambda n: n + 1) id = factory.Sequence(lambda n: n + 1)
name = factory.Faker('cryptocurrency_name') name = factory.Faker("cryptocurrency_name")
enabled = fuzzy.FuzzyChoice((0, 1)) enabled = fuzzy.FuzzyChoice((0, 1))
class Meta: class Meta:
model = Coin model = Coin
sqlalchemy_get_or_create = ( sqlalchemy_get_or_create = ("name",)
'name',
)
@factory.post_generation @factory.post_generation
def coin_type(obj, create: bool, extracted: Optional[Coin], *args, **kwargs) -> None: def coin_type(obj, create: bool, extracted: Optional[Coin], *args: Any, **kwargs: Any) -> None:
if create: if create:
CoinTypeFactory.create_batch(faker.random_int(min=3, max=7), coin_id=obj.id) CoinTypeFactory.create_batch(faker.random_int(min=3, max=7), coin_id=obj.id)
class CoinTypeFactory(BaseModelFactory): class CoinTypeFactory(BaseModelFactory):
id = factory.Sequence(lambda n: n + 1) id = factory.Sequence(lambda n: n + 1)
name = factory.Faker('cryptocurrency_code') name = factory.Faker("cryptocurrency_code")
class Meta: class Meta:
model = CoinType model = CoinType
sqlalchemy_get_or_create = ('id', sqlalchemy_get_or_create = ("id",)
)
class SkillFactory(BaseModelFactory): class SkillFactory(BaseModelFactory):
id = factory.Sequence(lambda n: n + 1) id = factory.Sequence(lambda n: n + 1)
name = factory.Faker('job', locale='ru_ru') name = factory.Faker("job", locale="ru_ru")
description = factory.Faker('text', max_nb_chars=160, locale='ru_RU') description = factory.Faker("text", max_nb_chars=160, locale="ru_RU")
updated_at = factory.LazyFunction(datetime.now) updated_at = factory.LazyFunction(datetime.now)
class Meta: class Meta:
model = Skill model = Skill
sqlalchemy_get_or_create = ('name', sqlalchemy_get_or_create = ("name",)
)
class EmployeeFactory(BaseModelFactory): class EmployeeFactory(BaseModelFactory):
id = factory.Sequence(lambda n: n + 1) id = factory.Sequence(lambda n: n + 1)
first_name = factory.Faker('first_name', locale='ru_RU') first_name = factory.Faker("first_name", locale="ru_RU")
last_name = factory.Faker('last_name', locale='ru_RU') last_name = factory.Faker("last_name", locale="ru_RU")
phone = factory.Faker('phone_number') phone = factory.Faker("phone_number")
description = factory.Faker('text', max_nb_chars=80, locale='ru_RU') description = factory.Faker("text", max_nb_chars=80, locale="ru_RU")
coin_id = factory.Faker('random_int') coin_id = factory.Faker("random_int")
class Meta: class Meta:
model = Employee model = Employee
sqlalchemy_get_or_create = ('id', sqlalchemy_get_or_create = ("id",)
)
class EmployeesSkillsFactory(BaseModelFactory): class EmployeesSkillsFactory(BaseModelFactory):
id = factory.Sequence(lambda n: n + 1) id = factory.Sequence(lambda n: n + 1)
employee_id = factory.Faker('random_int') employee_id = factory.Faker("random_int")
skill_id = factory.Faker('random_int') skill_id = factory.Faker("random_int")
updated_at = factory.Faker( updated_at = factory.Faker(
'date_time_between_dates', datetime_start=datetime.now() - timedelta(days=30), datetime_end=datetime.now() "date_time_between_dates", datetime_start=datetime.now() - timedelta(days=30), datetime_end=datetime.now()
) )
class Meta: class Meta:
model = EmployeesSkills model = EmployeesSkills
sqlalchemy_get_or_create = ( sqlalchemy_get_or_create = ("id", "employee_id", "skill_id")
'id',
'employee_id',
'skill_id'
)
class DepartmentFactory(BaseModelFactory): class DepartmentFactory(BaseModelFactory):
id = factory.Sequence(lambda n: n + 1) id = factory.Sequence(lambda n: n + 1)
name = factory.Faker('company') name = factory.Faker("company")
description = factory.Faker('bs') description = factory.Faker("bs")
updated_at = factory.Faker( updated_at = factory.Faker(
'date_time_between_dates', datetime_start=datetime.now() - timedelta(days=30), datetime_end=datetime.now() "date_time_between_dates", datetime_start=datetime.now() - timedelta(days=30), datetime_end=datetime.now()
) )
class Meta: class Meta:
model = Department model = Department
sqlalchemy_get_or_create = ( sqlalchemy_get_or_create = (
'id', "id",
'name', "name",
) )
class EmployeeDepartmentFactory(BaseModelFactory): class EmployeeDepartmentFactory(BaseModelFactory):
employee_id = factory.Faker("random_int")
employee_id = factory.Faker('random_int') department_id = factory.Faker("random_int")
department_id = factory.Faker('random_int')
created_at = factory.Faker( created_at = factory.Faker(
'date_time_between_dates', "date_time_between_dates",
datetime_start=datetime.now() - timedelta(days=30), datetime_start=datetime.now() - timedelta(days=30),
datetime_end=datetime.now() - timedelta(days=10) datetime_end=datetime.now() - timedelta(days=10),
) )
updated_at = factory.Faker( updated_at = factory.Faker(
'date_time_between_dates', "date_time_between_dates", datetime_start=datetime.now() - timedelta(days=10), datetime_end=datetime.now()
datetime_start=datetime.now() - timedelta(days=10),
datetime_end=datetime.now()
) )
class Meta: class Meta:

View File

@ -6,50 +6,48 @@ from factory import fuzzy
from faker import Faker from faker import Faker
from data.factories import ( from data.factories import (
UserFactory,
CoinModelFactory, CoinModelFactory,
DepartmentFactory,
EmployeeDepartmentFactory,
EmployeeFactory,
EmployeesSkillsFactory, EmployeesSkillsFactory,
SkillFactory, SkillFactory,
EmployeeFactory, UserFactory,
DepartmentFactory,
EmployeeDepartmentFactory
) )
from db.dependencies import get_async_db_session from db.dependencies import get_async_db_session
from db.models.user import User from db.models.user import User
from db.utils import drop_tables, run_migrations
from settings.logger import logger from settings.logger import logger
faker = Faker('ru_RU') faker = Faker("ru_RU")
async def add_users_data() -> None: async def add_users_data() -> None:
async with get_async_db_session() as session: async with get_async_db_session() as session:
users = [] users = [
for _ in range(10): User(
users.append(User(username=faker.profile(fields=['username'])['username'], username=faker.profile(fields=["username"])["username"],
hash_password=faker.password(), hash_password=faker.password(),
auth_token=str(uuid.uuid4()), auth_token=str(uuid.uuid4()),
) )
) for _ in range(10)
]
session.add_all(users) session.add_all(users)
def get_random_skill(skills: list[int]) -> list[int]: def get_random_skill(skills: list[int]) -> list[int]:
random_skills = random.sample(skills, random.randint(2, 9)) return random.sample(skills, random.randint(2, 9)) # noqa: S311
return random_skills
def fill_database() -> None: def fill_database() -> None:
# async add faker data # async add faker data
asyncio.run(add_users_data()) asyncio.run(add_users_data())
# sync factory boy add data # sync factory boy add data
coins = [coin.id for coin in CoinModelFactory.create_batch(42)] coins = [coin.id for coin in CoinModelFactory.create_batch(42)]
jonny = EmployeeFactory(first_name='Tony', last_name='Stark', coin_id=fuzzy.FuzzyChoice(coins)) jonny = EmployeeFactory(first_name="Tony", last_name="Stark", coin_id=fuzzy.FuzzyChoice(coins))
karl = EmployeeFactory(first_name='Karl', coin_id=fuzzy.FuzzyChoice(coins)) karl = EmployeeFactory(first_name="Karl", coin_id=fuzzy.FuzzyChoice(coins))
employees = EmployeeFactory.create_batch(40, coin_id=fuzzy.FuzzyChoice(coins)) employees = EmployeeFactory.create_batch(40, coin_id=fuzzy.FuzzyChoice(coins))
skills = [skill.id for skill in SkillFactory.create_batch(size=faker.random_int(min=20, max=42))] skills = [skill.id for skill in SkillFactory.create_batch(size=faker.random_int(min=20, max=42))]
@ -66,7 +64,7 @@ def fill_database() -> None:
# User data (first 20 rows if not exists) # User data (first 20 rows if not exists)
for user_id in range(20, 30): for user_id in range(20, 30):
UserFactory(id=user_id, username=faker.profile(fields=['username'])['username']) UserFactory(id=user_id, username=faker.profile(fields=["username"])["username"])
# Department data # Department data
departments = DepartmentFactory.create_batch(5) departments = DepartmentFactory.create_batch(5)
@ -75,10 +73,4 @@ def fill_database() -> None:
for employee in [jonny, karl, *employees]: for employee in [jonny, karl, *employees]:
EmployeeDepartmentFactory(employee_id=employee.id, department_id=fuzzy.FuzzyChoice(departments)) EmployeeDepartmentFactory(employee_id=employee.id, department_id=fuzzy.FuzzyChoice(departments))
logger.info('All data has been created. You can run data/get_data.py script') logger.info("All data has been created. You can run data/get_data.py script")
if __name__ == '__main__':
drop_tables()
run_migrations()
fill_database()

View File

@ -1,66 +1,75 @@
import asyncio import asyncio
from settings.logger import logger from sqlalchemy import select
from sqlalchemy_study.sqlalchemy import select from sqlalchemy.orm import contains_eager, joinedload, load_only
from sqlalchemy_study.sqlalchemy import load_only, contains_eager, joinedload
from db.dependencies import get_async_db_session from db.dependencies import get_async_db_session
from db.models.coin import Coin from db.models.coin import Coin
from db.models.department import EmployeeDepartments, Department from db.models.department import Department, EmployeeDepartments
from db.models.skills import Skill from db.models.skills import Skill
from db.models.user import Employee, User from db.models.user import Employee, User
from settings.logger import logger
async def get_data() -> list[Employee]: async def get_data() -> list[Employee]:
query = ( query = (
select(Employee) select(Employee)
.join(Employee.coin).options( .join(Employee.coin)
contains_eager(Employee.coin).options(load_only(Coin.name, .options(contains_eager(Employee.coin).options(load_only(Coin.name, Coin.enabled)))
Coin.enabled))) .join(Employee.skills)
.join(Employee.skills).options( .options(contains_eager(Employee.skills).load_only(Skill.name))
contains_eager(Employee.skills).load_only(Skill.name) .options(
).options(load_only(Employee.id, load_only(
Employee.first_name, Employee.id,
Employee.phone, Employee.first_name,
) Employee.phone,
)
.outerjoin(Employee.department).options(
contains_eager(Employee.department).options(
joinedload(EmployeeDepartments.department)
.options(load_only(Department.name,
Department.description, )
)
) )
) )
.outerjoin(Employee.user).options( .outerjoin(Employee.department)
contains_eager(Employee.user).options(load_only(User.username, .options(
) contains_eager(Employee.department).options(
) joinedload(EmployeeDepartments.department).options(
load_only(
Department.name,
Department.description,
)
)
)
)
.outerjoin(Employee.user)
.options(
contains_eager(Employee.user).options(
load_only(
User.username,
)
)
) )
).order_by(Employee.id, Skill.name) ).order_by(Employee.id, Skill.name)
async with get_async_db_session() as session: async with get_async_db_session() as session:
result = await session.execute(query) result = await session.execute(query)
data = result.unique().scalars().all() return result.unique().scalars().all()
return data
employees = asyncio.run(get_data()) employees = asyncio.run(get_data())
for employee in employees: for employee in employees:
print(''.center(40, '-'), '\nEmployee id: {0}\nFirst name: {1}\nPhone: {2}\nSkills: {3}\n' print(
'Coin name: {4}\nCoin enabled: {5}\nDepartment: {6} -> {7}\nUsername: {8}' "".center(40, "-"),
.format(employee.id, "\nEmployee id: {id}\nFirst name: {first_name}\nPhone: {phone}\nSkills: {skills}\n"
employee.first_name, "Coin name: {coin_name}\nCoin enabled: {coin_enabled}\nDepartment: {department_name} -> "
employee.phone, "{department_description}\nUsername: {user_username}".format(
', '.join([skill.name for skill in employee.skills[:5]]), id=employee.id,
employee.coin.name, first_name=employee.first_name,
employee.coin.enabled, phone=employee.phone,
employee.department.department.name, skills=", ".join([skill.name for skill in employee.skills[:5]]),
employee.department.department.description, coin_name=employee.coin.name,
employee.user.username if hasattr(employee.user, 'username') else None, coin_enabled=employee.coin.enabled,
) department_name=employee.department.department.name,
) department_description=employee.department.department.description,
user_username=employee.user.username if hasattr(employee.user, "username") else None,
),
)
logger.info(f'Total employees: {len(employees)}') logger.info(f"Total employees: {len(employees)}")

View File

View File

@ -1,31 +1,42 @@
from typing import Any, Tuple, Union, Type from datetime import datetime
from typing import Type, Union
from sqlalchemy_study.sqlalchemy import Table, Column, Integer, DATETIME, TIMESTAMP, func from sqlalchemy import DATETIME, INTEGER, TIMESTAMP, Table, func
from sqlalchemy_study.sqlalchemy import as_declarative from sqlalchemy.orm import Mapped, as_declarative, declared_attr, mapped_column
from db.meta import meta from db.meta import meta
from settings import settings from settings import settings
DB_TIME_FORMAT: Type[Union[DATETIME, TIMESTAMP]] = DATETIME if settings.USE_DATABASE == 'mysql' else TIMESTAMP DB_TIME_FORMAT: Type[Union[DATETIME, TIMESTAMP]] = DATETIME if settings.USE_DATABASE == "mysql" else TIMESTAMP
@as_declarative(metadata=meta) @as_declarative(metadata=meta)
class BaseModel: class BaseModel:
""" """
BaseModel for all models. Base for all models.
It has some type definitions to It has some type definitions to
enhance autocompletion. enhance autocompletion.
""" """
__tablename__: str # Generate __tablename__ automatically
@declared_attr
def __tablename__(self) -> str:
return self.__name__.lower()
__table__: Table __table__: Table
__table_args__: Tuple[Any, ...]
__abstract__ = True __abstract__ = True
id = Column(Integer, nullable=False, unique=True, primary_key=True, autoincrement=True) id: Mapped[int] = mapped_column(
created_at = Column(DB_TIME_FORMAT, default=func.now(), index=True) "id",
updated_at = Column(DB_TIME_FORMAT, nullable=True) INTEGER(),
primary_key=True,
autoincrement=True,
nullable=False,
unique=True,
)
created_at: Mapped[datetime] = mapped_column("created_at", DB_TIME_FORMAT, default=func.now(), index=True)
updated_at: Mapped[datetime | None] = mapped_column("updated_at", DB_TIME_FORMAT, nullable=True)
def __repr__(self): def __repr__(self) -> str:
return f"<{self.__class__.__name__}(id={self.id!r})>" return f"<{self.__class__.__name__}(id={self.id!r})>"

View File

@ -2,23 +2,28 @@ from asyncio import current_task
from contextlib import asynccontextmanager from contextlib import asynccontextmanager
from typing import AsyncGenerator from typing import AsyncGenerator
from sqlalchemy_study.sqlalchemy import create_engine from sqlalchemy import create_engine
from sqlalchemy_study.sqlalchemy import create_async_engine, AsyncSession, async_scoped_session, AsyncEngine from sqlalchemy.ext.asyncio import (
from sqlalchemy_study.sqlalchemy import sessionmaker, Session AsyncEngine,
AsyncSession,
async_scoped_session,
create_async_engine,
)
from sqlalchemy.orm import Session, sessionmaker
from settings import settings from settings import settings
async_engine: AsyncEngine = create_async_engine(str(settings.async_db_url), echo=settings.DB_ECHO) async_engine: AsyncEngine = create_async_engine(str(settings.async_db_url), echo=settings.DB_ECHO)
async_session_factory = async_scoped_session( async_session_factory = async_scoped_session(
sessionmaker( sessionmaker( # type: ignore
autocommit=False, autocommit=False,
autoflush=False, autoflush=False,
class_=AsyncSession, class_=AsyncSession,
expire_on_commit=False, expire_on_commit=False,
bind=async_engine, bind=async_engine,
), ),
scopefunc=current_task, scopefunc=current_task,
) )
sync_engine = create_engine(settings.sync_db_url, echo=settings.DB_ECHO) sync_engine = create_engine(settings.sync_db_url, echo=settings.DB_ECHO)
@ -29,9 +34,9 @@ def get_sync_db_session() -> Session:
session: Session = sync_session_factory() session: Session = sync_session_factory()
try: try:
return session return session
except Exception as err: except Exception:
session.rollback() session.rollback()
raise err raise
finally: finally:
session.commit() session.commit()
session.close() session.close()
@ -48,9 +53,9 @@ async def get_async_db_session() -> AsyncGenerator[AsyncSession, None]:
session = async_session_factory() session = async_session_factory()
try: try:
yield session yield session
except Exception as err: except Exception:
await session.rollback() await session.rollback()
raise err raise
finally: finally:
await session.commit() await session.commit()
await session.close() await session.close()

View File

@ -1,3 +1,3 @@
from sqlalchemy_study import sqlalchemy as sa import sqlalchemy as sa
meta = sa.MetaData() meta = sa.MetaData()

View File

@ -1,16 +1,22 @@
from sqlalchemy_study.sqlalchemy import Column, Integer, ForeignKey, VARCHAR from sqlalchemy import VARCHAR, ForeignKey, Integer
from sqlalchemy_study.sqlalchemy import relation from sqlalchemy.orm import Mapped, mapped_column, relationship
from db.base import BaseModel from db.base import BaseModel
from db.models.department import Department from db.models.department import Department
class CadreMovement(BaseModel): class CadreMovement(BaseModel):
__tablename__ = 'cadre_movements' __tablename__ = "cadre_movements"
employee = Column(Integer, ForeignKey('employees.id', ondelete='CASCADE'), nullable=False, index=True) employee: Mapped[int] = mapped_column(
old_department = Column(Integer, ForeignKey('departments.id', ondelete='CASCADE'), nullable=False, index=True) Integer, ForeignKey("employees.id", ondelete="CASCADE"), nullable=False, index=True
new_department = Column(Integer, ForeignKey('departments.id', ondelete='CASCADE'), nullable=False, index=True) )
reason = Column(VARCHAR(500), nullable=True) old_department: Mapped[int] = mapped_column(
Integer, ForeignKey("departments.id", ondelete="CASCADE"), nullable=False, index=True
)
new_department: Mapped[int] = mapped_column(
Integer, ForeignKey("departments.id", ondelete="CASCADE"), nullable=False, index=True
)
reason: Mapped[str | None] = mapped_column(VARCHAR(500), nullable=True)
department = relation(Department, foreign_keys=new_department, lazy='select') department = relationship(Department, foreign_keys=new_department, lazy="select")

View File

@ -1,8 +1,7 @@
from sqlalchemy_study.sqlalchemy import VARCHAR from sqlalchemy import VARCHAR
from sqlalchemy_study.sqlalchemy import relationship from sqlalchemy.orm import Mapped, mapped_column, relationship
from sqlalchemy_study.sqlalchemy import Column from sqlalchemy.sql.schema import ForeignKey
from sqlalchemy_study.sqlalchemy import ForeignKey from sqlalchemy.sql.sqltypes import BOOLEAN, Integer
from sqlalchemy_study.sqlalchemy import Integer, BOOLEAN
from db.base import BaseModel from db.base import BaseModel
@ -12,17 +11,18 @@ class Coin(BaseModel):
__tablename__ = "coins" __tablename__ = "coins"
name = Column('coin_name', VARCHAR(50), unique=True) name: Mapped[str] = mapped_column("coin_name", VARCHAR(50), unique=True)
enabled = Column('enabled', BOOLEAN) enabled: Mapped[bool] = mapped_column("enabled", BOOLEAN, default=True)
coin_type_id = relationship("CoinType", coin_type_id = relationship(
primaryjoin="Coin.id == CoinType.coin_id", "CoinType",
back_populates='coin', primaryjoin="Coin.id == CoinType.coin_id",
uselist=False, back_populates="coin",
viewonly=True, uselist=False,
lazy="raise", viewonly=True,
) lazy="raise",
employee = relationship('Employee', back_populates='coin') )
employee = relationship("Employee", back_populates="coin")
class CoinType(BaseModel): class CoinType(BaseModel):
@ -30,6 +30,6 @@ class CoinType(BaseModel):
__tablename__ = "coin_types" __tablename__ = "coin_types"
name = Column('coin_name', VARCHAR(50)) name: Mapped[str] = mapped_column("coin_name", VARCHAR(50))
coin_id = Column(Integer, ForeignKey('coins.id', ondelete='CASCADE')) coin_id: Mapped[int] = mapped_column(Integer, ForeignKey("coins.id", ondelete="CASCADE"))
coin = relationship(Coin, back_populates='coin_type_id') coin = relationship(Coin, back_populates="coin_type_id")

View File

@ -1,23 +1,28 @@
from sqlalchemy_study.sqlalchemy import Column, VARCHAR, Integer, ForeignKey from sqlalchemy import VARCHAR, ForeignKey, Integer
from sqlalchemy_study.sqlalchemy import relationship from sqlalchemy.orm import Mapped, mapped_column, relationship
from db.base import BaseModel from db.base import BaseModel
class Department(BaseModel): class Department(BaseModel):
__tablename__ = 'departments' __tablename__ = "departments"
name = Column(VARCHAR(255), nullable=False) name: Mapped[str] = mapped_column(VARCHAR(255), nullable=False)
description = Column(VARCHAR(255), nullable=False) description: Mapped[str] = mapped_column(VARCHAR(255), nullable=False)
class EmployeeDepartments(BaseModel): class EmployeeDepartments(BaseModel):
__tablename__ = 'employee_departments' __tablename__ = "employee_departments"
employee_id = Column(Integer, ForeignKey('employees.id', ondelete='CASCADE'), nullable=False, index=True) employee_id: Mapped[int] = mapped_column(
department_id = Column(Integer, ForeignKey('departments.id', ondelete='CASCADE'), nullable=False, index=True) Integer, ForeignKey("employees.id", ondelete="CASCADE"), nullable=False, index=True
)
department_id: Mapped[int] = mapped_column(
Integer, ForeignKey("departments.id", ondelete="CASCADE"), nullable=False, index=True
)
department = relationship(Department, department = relationship(
lazy='noload', Department,
backref='emp_depart', lazy="noload",
) backref="emp_depart",
)

View File

@ -1,19 +1,20 @@
from sqlalchemy_study.sqlalchemy import Column, ForeignKey, VARCHAR, Text, UniqueConstraint from sqlalchemy import VARCHAR, ForeignKey, Text, UniqueConstraint
from sqlalchemy.orm import Mapped, mapped_column
from db.base import BaseModel from db.base import BaseModel
from db.models.user import Employee from db.models.user import Employee
class Skill(BaseModel): class Skill(BaseModel):
__tablename__ = 'skills' __tablename__ = "skills"
name = Column(VARCHAR(255), nullable=False, unique=True) name: Mapped[str] = mapped_column("name", VARCHAR(255), nullable=False, unique=True)
description = Column(Text, nullable=True) description: Mapped[str | None] = mapped_column("description", Text, nullable=True)
class EmployeesSkills(BaseModel): class EmployeesSkills(BaseModel):
__tablename__ = 'employees_skills' __tablename__ = "employees_skills"
__table_args__ = (UniqueConstraint("employee_id", "skill_id"),) __table_args__ = (UniqueConstraint("employee_id", "skill_id"),)
employee_id = Column(ForeignKey(Employee.id, ondelete='CASCADE'), nullable=False, index=True) employee_id: Mapped[int] = mapped_column(ForeignKey(Employee.id, ondelete="CASCADE"), nullable=False, index=True)
skill_id = Column(ForeignKey(Skill.id, ondelete='CASCADE'), nullable=False, index=True) skill_id: Mapped[int] = mapped_column(ForeignKey(Skill.id, ondelete="CASCADE"), nullable=False, index=True)

View File

@ -1,62 +1,66 @@
import datetime from datetime import datetime
from sqlalchemy_study.sqlalchemy import Column, String, DateTime, ForeignKey from sqlalchemy import VARCHAR, DateTime, ForeignKey, String
from sqlalchemy_study.sqlalchemy import VARCHAR from sqlalchemy.orm import Mapped, mapped_column, relationship
from sqlalchemy_study.sqlalchemy import relationship
from db.base import BaseModel from db.base import BaseModel
from db.models.coin import Coin from db.models.coin import Coin
class User(BaseModel): class User(BaseModel):
__tablename__ = 'users' __tablename__ = "users"
username: str = Column(String(255), unique=True) username: Mapped[str] = mapped_column(String(255), unique=True)
email: str = Column(String(255), index=True, unique=True, nullable=True) email: Mapped[str] = mapped_column(String(255), index=True, unique=True, nullable=True)
hash_password: str = Column(String(255)) hash_password: Mapped[str] = mapped_column(String(255))
auth_token: str = Column(String(255)) auth_token: Mapped[str] = mapped_column(String(255))
last_login: datetime.datetime = Column(DateTime, default=datetime.datetime.now, index=True) last_login: Mapped[datetime] = mapped_column(DateTime, default=datetime.now, index=True)
def __repr__(self) -> str:
return f"User: id:{self.id}, name: {self.username}"
employee = relationship(
"Employee",
primaryjoin="foreign(User.id)==remote(Employee.id)",
lazy="noload",
backref="user_employee",
)
def __repr__(self):
return f'User: id:{self.id}, name: {self.username}'
employee = relationship('Employee',
primaryjoin='foreign(User.id)==remote(Employee.id)',
lazy='noload',
backref='user_employee',
)
class Employee(BaseModel): class Employee(BaseModel):
__tablename__ = 'employees' __tablename__ = "employees"
first_name = Column(VARCHAR(128), nullable=False) first_name: Mapped[str] = mapped_column("first_name", VARCHAR(128), nullable=False)
last_name = Column(VARCHAR(128), nullable=False) last_name: Mapped[str] = mapped_column("last_name", VARCHAR(128), nullable=False)
phone = Column(VARCHAR(30), unique=True, nullable=True) phone: Mapped[str | None] = mapped_column("phone", VARCHAR(30), unique=True, nullable=True)
description = Column(VARCHAR(255), nullable=True) description: Mapped[str | None] = mapped_column("description", VARCHAR(255), nullable=True)
coin_id = Column('coin_id', ForeignKey('coins.id', ondelete='SET NULL'), nullable=True) coin_id: Mapped[int | None] = mapped_column("coin_id", ForeignKey("coins.id", ondelete="SET NULL"), nullable=True)
coin = relationship(Coin, coin = relationship(
back_populates='employee', Coin,
primaryjoin='Employee.coin_id==Coin.id', back_populates="employee",
lazy='noload', primaryjoin="Employee.coin_id==Coin.id",
uselist=False, lazy="noload",
) uselist=False,
)
skills = relationship('Skill', skills = relationship(
secondary="employees_skills", "Skill",
lazy='noload', secondary="employees_skills",
uselist=True, lazy="noload",
) uselist=True,
)
department = relationship('EmployeeDepartments', department = relationship(
lazy='noload', "EmployeeDepartments",
backref='employee', lazy="noload",
uselist=False, backref="employee",
) uselist=False,
)
user = relationship('User', user = relationship(
primaryjoin='foreign(Employee.id)==remote(User.id)', "User",
lazy='raise', primaryjoin="foreign(Employee.id)==remote(User.id)",
backref='user_employee', lazy="raise",
) backref="user_employee",
)

View File

@ -1,10 +1,9 @@
from alembic import command, config as alembic_config from alembic import command, config as alembic_config
from sqlalchemy_study.sqlalchemy import MetaData, Table, ForeignKeyConstraint from sqlalchemy import ForeignKeyConstraint, MetaData, Table, inspect, text
from sqlalchemy_study.sqlalchemy import inspect from sqlalchemy.exc import NoSuchTableError
from sqlalchemy_study.sqlalchemy import NoSuchTableError from sqlalchemy.schema import DropConstraint
from sqlalchemy_study.sqlalchemy import DropConstraint
from db.dependencies import sync_engine from db.dependencies import get_sync_db_session, sync_engine
from db.meta import meta from db.meta import meta
from db.models import load_all_models from db.models import load_all_models
from settings import settings from settings import settings
@ -25,17 +24,17 @@ def remove_foreign_keys() -> None:
fks = [] fks = []
try: try:
for fk in inspector.get_foreign_keys(table_name): for fk in inspector.get_foreign_keys(table_name):
if fk['name']: if fk["name"]:
fks.append(ForeignKeyConstraint((), (), name=fk['name'])) fks.append(ForeignKeyConstraint((), (), name=fk["name"]))
except NoSuchTableError: except NoSuchTableError:
logger.error(f'Table {table_name} not exist') logger.error(f"Table {table_name} not exist")
t = Table(table_name, fake_metadata, *fks) table = Table(table_name, fake_metadata, *fks)
fake_tables.append(t) fake_tables.append(table)
all_fks.extend(fks) all_fks.extend(fks)
connection = sync_engine.connect() connection = sync_engine.connect()
transaction = connection.begin() transaction = connection.begin()
for fkc in all_fks: for fkc in all_fks:
connection.execute(DropConstraint(fkc)) connection.execute(DropConstraint(fkc)) # type: ignore
transaction.commit() transaction.commit()
@ -43,14 +42,14 @@ def drop_tables() -> None:
load_all_models() load_all_models()
remove_foreign_keys() remove_foreign_keys()
meta.drop_all(bind=sync_engine, checkfirst=True) meta.drop_all(bind=sync_engine, checkfirst=True)
sync_engine.execute('DROP TABLE IF EXISTS alembic_version') session = get_sync_db_session()
sync_engine.dispose() session.execute(text("DROP TABLE IF EXISTS alembic_version;"))
logger.info("All tables are dropped") logger.info("All tables are dropped")
def run_migrations() -> None: def run_migrations() -> None:
with sync_engine.begin() as connection: with sync_engine.begin() as connection:
alembic_cfg.attributes['connection'] = connection alembic_cfg.attributes["connection"] = connection
migration_dialect = 'mysql_init_migrations' if settings.USE_DATABASE == 'mysql' else 'postgres_init_migrations' migration_dialect = "mysql_init_migrations" if settings.USE_DATABASE == "mysql" else "postgres_init_migrations"
command.upgrade(alembic_cfg, migration_dialect) command.upgrade(alembic_cfg, migration_dialect)
logger.info('Tables recreated') logger.info("Tables recreated")

View File

@ -0,0 +1,7 @@
from data.fill_data import fill_database
from db.utils import drop_tables, run_migrations
if __name__ == "__main__":
drop_tables()
run_migrations()
fill_database()

View File

@ -2,8 +2,8 @@ import asyncio
from logging.config import fileConfig from logging.config import fileConfig
from alembic import context from alembic import context
from sqlalchemy_study.sqlalchemy import create_async_engine from sqlalchemy.ext.asyncio import create_async_engine
from sqlalchemy_study.sqlalchemy import Connection from sqlalchemy.future import Connection
from db.base import BaseModel from db.base import BaseModel
from db.models import load_all_models from db.models import load_all_models
@ -18,7 +18,7 @@ target_metadata = BaseModel.metadata
load_all_models() load_all_models()
async def run_migrations_offline(): async def run_migrations_offline() -> None:
"""Run migrations in 'offline' mode. """Run migrations in 'offline' mode.
This configures the context with just a URL This configures the context with just a URL
@ -54,7 +54,7 @@ def do_run_migrations(connection: Connection) -> None:
context.run_migrations() context.run_migrations()
async def run_migrations_online(): async def run_migrations_online() -> None:
"""Run migrations in 'online' mode. """Run migrations in 'online' mode.
In this scenario we need to create an Engine In this scenario we need to create an Engine

View File

@ -1,16 +1,16 @@
"""mysql init models """mysql init models
Revision ID: mysql_init_migrations Revision ID: mysql_init_migrations
Revises: Revises:
Create Date: 2022-05-29 19:26:09.995005 Create Date: 2022-05-29 19:26:09.995005
""" """
import sqlalchemy as sa
from alembic import op from alembic import op
from sqlalchemy_study import sqlalchemy as sa from sqlalchemy.dialects import mysql
from sqlalchemy_study.sqlalchemy import mysql
# revision identifiers, used by Alembic. # revision identifiers, used by Alembic.
revision = 'mysql_init_migrations' revision = "mysql_init_migrations"
down_revision = None down_revision = None
branch_labels = None branch_labels = None
depends_on = None depends_on = None
@ -18,157 +18,168 @@ depends_on = None
def upgrade(): def upgrade():
# ### commands auto generated by Alembic - please adjust! ### # ### commands auto generated by Alembic - please adjust! ###
op.create_table('coins', op.create_table(
sa.Column('id', sa.Integer(), autoincrement=True, nullable=False), "coins",
sa.Column('created_at', sa.DATETIME(), nullable=True), sa.Column("id", sa.Integer(), autoincrement=True, nullable=False),
sa.Column('updated_at', sa.DATETIME(), nullable=True), sa.Column("created_at", sa.DATETIME(), nullable=True),
sa.Column('coin_name', sa.VARCHAR(length=50), nullable=True), sa.Column("updated_at", sa.DATETIME(), nullable=True),
sa.Column('enabled', sa.BOOLEAN(), nullable=True), sa.Column("coin_name", sa.VARCHAR(length=50), nullable=True),
sa.PrimaryKeyConstraint('id'), sa.Column("enabled", sa.BOOLEAN(), nullable=True),
sa.UniqueConstraint('coin_name'), sa.PrimaryKeyConstraint("id"),
sa.UniqueConstraint('id') sa.UniqueConstraint("coin_name"),
sa.UniqueConstraint("id"),
) )
op.create_index(op.f('ix_coins_created_at'), 'coins', ['created_at'], unique=False) op.create_index(op.f("ix_coins_created_at"), "coins", ["created_at"], unique=False)
op.create_table('departments', op.create_table(
sa.Column('id', sa.Integer(), autoincrement=True, nullable=False), "departments",
sa.Column('created_at', sa.DATETIME(), nullable=True), sa.Column("id", sa.Integer(), autoincrement=True, nullable=False),
sa.Column('updated_at', sa.DATETIME(), nullable=True), sa.Column("created_at", sa.DATETIME(), nullable=True),
sa.Column('name', sa.VARCHAR(length=255), nullable=False), sa.Column("updated_at", sa.DATETIME(), nullable=True),
sa.Column('description', sa.VARCHAR(length=255), nullable=False), sa.Column("name", sa.VARCHAR(length=255), nullable=False),
sa.PrimaryKeyConstraint('id'), sa.Column("description", sa.VARCHAR(length=255), nullable=False),
sa.UniqueConstraint('id') sa.PrimaryKeyConstraint("id"),
sa.UniqueConstraint("id"),
) )
op.create_index(op.f('ix_departments_created_at'), 'departments', ['created_at'], unique=False) op.create_index(op.f("ix_departments_created_at"), "departments", ["created_at"], unique=False)
op.create_table('skills', op.create_table(
sa.Column('id', sa.Integer(), autoincrement=True, nullable=False), "skills",
sa.Column('created_at', sa.DATETIME(), nullable=True), sa.Column("id", sa.Integer(), autoincrement=True, nullable=False),
sa.Column('updated_at', sa.DATETIME(), nullable=True), sa.Column("created_at", sa.DATETIME(), nullable=True),
sa.Column('name', sa.VARCHAR(length=255), nullable=False), sa.Column("updated_at", sa.DATETIME(), nullable=True),
sa.Column('description', sa.Text(), nullable=True), sa.Column("name", sa.VARCHAR(length=255), nullable=False),
sa.PrimaryKeyConstraint('id'), sa.Column("description", sa.Text(), nullable=True),
sa.UniqueConstraint('id'), sa.PrimaryKeyConstraint("id"),
sa.UniqueConstraint('name') sa.UniqueConstraint("id"),
sa.UniqueConstraint("name"),
) )
op.create_index(op.f('ix_skills_created_at'), 'skills', ['created_at'], unique=False) op.create_index(op.f("ix_skills_created_at"), "skills", ["created_at"], unique=False)
op.create_table('users', op.create_table(
sa.Column('id', sa.Integer(), autoincrement=True, nullable=False), "users",
sa.Column('created_at', sa.DATETIME(), nullable=True), sa.Column("id", sa.Integer(), autoincrement=True, nullable=False),
sa.Column('updated_at', sa.DATETIME(), nullable=True), sa.Column("created_at", sa.DATETIME(), nullable=True),
sa.Column('username', sa.String(length=255), nullable=True), sa.Column("updated_at", sa.DATETIME(), nullable=True),
sa.Column('email', sa.String(length=255), nullable=True), sa.Column("username", sa.String(length=255), nullable=True),
sa.Column('hash_password', sa.String(length=255), nullable=True), sa.Column("email", sa.String(length=255), nullable=True),
sa.Column('auth_token', sa.String(length=255), nullable=True), sa.Column("hash_password", sa.String(length=255), nullable=True),
sa.Column('last_login', sa.DateTime(), nullable=True), sa.Column("auth_token", sa.String(length=255), nullable=True),
sa.PrimaryKeyConstraint('id'), sa.Column("last_login", sa.DateTime(), nullable=True),
sa.UniqueConstraint('id'), sa.PrimaryKeyConstraint("id"),
sa.UniqueConstraint('username') sa.UniqueConstraint("id"),
sa.UniqueConstraint("username"),
) )
op.create_index(op.f('ix_users_created_at'), 'users', ['created_at'], unique=False) op.create_index(op.f("ix_users_created_at"), "users", ["created_at"], unique=False)
op.create_index(op.f('ix_users_email'), 'users', ['email'], unique=True) op.create_index(op.f("ix_users_email"), "users", ["email"], unique=True)
op.create_index(op.f('ix_users_last_login'), 'users', ['last_login'], unique=False) op.create_index(op.f("ix_users_last_login"), "users", ["last_login"], unique=False)
op.create_table('coin_types', op.create_table(
sa.Column('id', sa.Integer(), autoincrement=True, nullable=False), "coin_types",
sa.Column('created_at', sa.DATETIME(), nullable=True), sa.Column("id", sa.Integer(), autoincrement=True, nullable=False),
sa.Column('updated_at', sa.DATETIME(), nullable=True), sa.Column("created_at", sa.DATETIME(), nullable=True),
sa.Column('coin_name', sa.VARCHAR(length=50), nullable=True), sa.Column("updated_at", sa.DATETIME(), nullable=True),
sa.Column('coin_id', sa.Integer(), nullable=True), sa.Column("coin_name", sa.VARCHAR(length=50), nullable=True),
sa.ForeignKeyConstraint(['coin_id'], ['coins.id'], ondelete='CASCADE'), sa.Column("coin_id", sa.Integer(), nullable=True),
sa.PrimaryKeyConstraint('id'), sa.ForeignKeyConstraint(["coin_id"], ["coins.id"], ondelete="CASCADE"),
sa.UniqueConstraint('id') sa.PrimaryKeyConstraint("id"),
sa.UniqueConstraint("id"),
) )
op.create_index(op.f('ix_coin_types_created_at'), 'coin_types', ['created_at'], unique=False) op.create_index(op.f("ix_coin_types_created_at"), "coin_types", ["created_at"], unique=False)
op.create_table('employees', op.create_table(
sa.Column('id', sa.Integer(), autoincrement=True, nullable=False), "employees",
sa.Column('created_at', sa.DATETIME(), nullable=True), sa.Column("id", sa.Integer(), autoincrement=True, nullable=False),
sa.Column('updated_at', sa.DATETIME(), nullable=True), sa.Column("created_at", sa.DATETIME(), nullable=True),
sa.Column('first_name', mysql.VARCHAR(length=128), nullable=False), sa.Column("updated_at", sa.DATETIME(), nullable=True),
sa.Column('last_name', mysql.VARCHAR(length=128), nullable=False), sa.Column("first_name", mysql.VARCHAR(length=128), nullable=False),
sa.Column('phone', mysql.VARCHAR(length=30), nullable=True), sa.Column("last_name", mysql.VARCHAR(length=128), nullable=False),
sa.Column('description', mysql.VARCHAR(length=255), nullable=True), sa.Column("phone", mysql.VARCHAR(length=30), nullable=True),
sa.Column('coin_id', sa.Integer(), nullable=True), sa.Column("description", mysql.VARCHAR(length=255), nullable=True),
sa.ForeignKeyConstraint(['coin_id'], ['coins.id'], ondelete='SET NULL'), sa.Column("coin_id", sa.Integer(), nullable=True),
sa.PrimaryKeyConstraint('id'), sa.ForeignKeyConstraint(["coin_id"], ["coins.id"], ondelete="SET NULL"),
sa.UniqueConstraint('id'), sa.PrimaryKeyConstraint("id"),
sa.UniqueConstraint('phone') sa.UniqueConstraint("id"),
sa.UniqueConstraint("phone"),
) )
op.create_index(op.f('ix_employees_created_at'), 'employees', ['created_at'], unique=False) op.create_index(op.f("ix_employees_created_at"), "employees", ["created_at"], unique=False)
op.create_table('cadre_movements', op.create_table(
sa.Column('id', sa.Integer(), autoincrement=True, nullable=False), "cadre_movements",
sa.Column('created_at', sa.DATETIME(), nullable=True), sa.Column("id", sa.Integer(), autoincrement=True, nullable=False),
sa.Column('updated_at', sa.DATETIME(), nullable=True), sa.Column("created_at", sa.DATETIME(), nullable=True),
sa.Column('employee', sa.Integer(), nullable=False), sa.Column("updated_at", sa.DATETIME(), nullable=True),
sa.Column('old_department', sa.Integer(), nullable=False), sa.Column("employee", sa.Integer(), nullable=False),
sa.Column('new_department', sa.Integer(), nullable=False), sa.Column("old_department", sa.Integer(), nullable=False),
sa.Column('reason', sa.VARCHAR(length=500), nullable=True), sa.Column("new_department", sa.Integer(), nullable=False),
sa.ForeignKeyConstraint(['employee'], ['employees.id'], ondelete='CASCADE'), sa.Column("reason", sa.VARCHAR(length=500), nullable=True),
sa.ForeignKeyConstraint(['new_department'], ['departments.id'], ondelete='CASCADE'), sa.ForeignKeyConstraint(["employee"], ["employees.id"], ondelete="CASCADE"),
sa.ForeignKeyConstraint(['old_department'], ['departments.id'], ondelete='CASCADE'), sa.ForeignKeyConstraint(["new_department"], ["departments.id"], ondelete="CASCADE"),
sa.PrimaryKeyConstraint('id'), sa.ForeignKeyConstraint(["old_department"], ["departments.id"], ondelete="CASCADE"),
sa.UniqueConstraint('id') sa.PrimaryKeyConstraint("id"),
sa.UniqueConstraint("id"),
) )
op.create_index(op.f('ix_cadre_movements_created_at'), 'cadre_movements', ['created_at'], unique=False) op.create_index(op.f("ix_cadre_movements_created_at"), "cadre_movements", ["created_at"], unique=False)
op.create_index(op.f('ix_cadre_movements_employee'), 'cadre_movements', ['employee'], unique=False) op.create_index(op.f("ix_cadre_movements_employee"), "cadre_movements", ["employee"], unique=False)
op.create_index(op.f('ix_cadre_movements_new_department'), 'cadre_movements', ['new_department'], unique=False) op.create_index(op.f("ix_cadre_movements_new_department"), "cadre_movements", ["new_department"], unique=False)
op.create_index(op.f('ix_cadre_movements_old_department'), 'cadre_movements', ['old_department'], unique=False) op.create_index(op.f("ix_cadre_movements_old_department"), "cadre_movements", ["old_department"], unique=False)
op.create_table('employee_departments', op.create_table(
sa.Column('id', sa.Integer(), autoincrement=True, nullable=False), "employee_departments",
sa.Column('created_at', sa.DATETIME(), nullable=True), sa.Column("id", sa.Integer(), autoincrement=True, nullable=False),
sa.Column('updated_at', sa.DATETIME(), nullable=True), sa.Column("created_at", sa.DATETIME(), nullable=True),
sa.Column('employee_id', sa.Integer(), nullable=False), sa.Column("updated_at", sa.DATETIME(), nullable=True),
sa.Column('department_id', sa.Integer(), nullable=False), sa.Column("employee_id", sa.Integer(), nullable=False),
sa.ForeignKeyConstraint(['department_id'], ['departments.id'], ondelete='CASCADE'), sa.Column("department_id", sa.Integer(), nullable=False),
sa.ForeignKeyConstraint(['employee_id'], ['employees.id'], ondelete='CASCADE'), sa.ForeignKeyConstraint(["department_id"], ["departments.id"], ondelete="CASCADE"),
sa.PrimaryKeyConstraint('id'), sa.ForeignKeyConstraint(["employee_id"], ["employees.id"], ondelete="CASCADE"),
sa.UniqueConstraint('id') sa.PrimaryKeyConstraint("id"),
sa.UniqueConstraint("id"),
) )
op.create_index(op.f('ix_employee_departments_created_at'), 'employee_departments', ['created_at'], unique=False) op.create_index(op.f("ix_employee_departments_created_at"), "employee_departments", ["created_at"], unique=False)
op.create_index(op.f('ix_employee_departments_department_id'), 'employee_departments', ['department_id'], unique=False) op.create_index(
op.create_index(op.f('ix_employee_departments_employee_id'), 'employee_departments', ['employee_id'], unique=False) op.f("ix_employee_departments_department_id"), "employee_departments", ["department_id"], unique=False
op.create_table('employees_skills',
sa.Column('id', sa.Integer(), autoincrement=True, nullable=False),
sa.Column('created_at', sa.DATETIME(), nullable=True),
sa.Column('updated_at', sa.DATETIME(), nullable=True),
sa.Column('employee_id', sa.Integer(), nullable=False),
sa.Column('skill_id', sa.Integer(), nullable=False),
sa.ForeignKeyConstraint(['employee_id'], ['employees.id'], ondelete='CASCADE'),
sa.ForeignKeyConstraint(['skill_id'], ['skills.id'], ondelete='CASCADE'),
sa.PrimaryKeyConstraint('id'),
sa.UniqueConstraint('employee_id', 'skill_id'),
sa.UniqueConstraint('id')
) )
op.create_index(op.f('ix_employees_skills_created_at'), 'employees_skills', ['created_at'], unique=False) op.create_index(op.f("ix_employee_departments_employee_id"), "employee_departments", ["employee_id"], unique=False)
op.create_index(op.f('ix_employees_skills_employee_id'), 'employees_skills', ['employee_id'], unique=False) op.create_table(
op.create_index(op.f('ix_employees_skills_skill_id'), 'employees_skills', ['skill_id'], unique=False) "employees_skills",
sa.Column("id", sa.Integer(), autoincrement=True, nullable=False),
sa.Column("created_at", sa.DATETIME(), nullable=True),
sa.Column("updated_at", sa.DATETIME(), nullable=True),
sa.Column("employee_id", sa.Integer(), nullable=False),
sa.Column("skill_id", sa.Integer(), nullable=False),
sa.ForeignKeyConstraint(["employee_id"], ["employees.id"], ondelete="CASCADE"),
sa.ForeignKeyConstraint(["skill_id"], ["skills.id"], ondelete="CASCADE"),
sa.PrimaryKeyConstraint("id"),
sa.UniqueConstraint("employee_id", "skill_id"),
sa.UniqueConstraint("id"),
)
op.create_index(op.f("ix_employees_skills_created_at"), "employees_skills", ["created_at"], unique=False)
op.create_index(op.f("ix_employees_skills_employee_id"), "employees_skills", ["employee_id"], unique=False)
op.create_index(op.f("ix_employees_skills_skill_id"), "employees_skills", ["skill_id"], unique=False)
# ### end Alembic commands ### # ### end Alembic commands ###
def downgrade():
    """Revert the initial MySQL schema: drop every index and table created by upgrade().

    Tables are dropped in reverse dependency order (association tables first,
    then tables referenced by foreign keys last) so FK constraints never block a drop.
    """
    # ### commands auto generated by Alembic - please adjust! ###
    op.drop_index(op.f("ix_employees_skills_skill_id"), table_name="employees_skills")
    op.drop_index(op.f("ix_employees_skills_employee_id"), table_name="employees_skills")
    op.drop_index(op.f("ix_employees_skills_created_at"), table_name="employees_skills")
    op.drop_table("employees_skills")
    op.drop_index(op.f("ix_employee_departments_employee_id"), table_name="employee_departments")
    op.drop_index(op.f("ix_employee_departments_department_id"), table_name="employee_departments")
    op.drop_index(op.f("ix_employee_departments_created_at"), table_name="employee_departments")
    op.drop_table("employee_departments")
    op.drop_index(op.f("ix_cadre_movements_old_department"), table_name="cadre_movements")
    op.drop_index(op.f("ix_cadre_movements_new_department"), table_name="cadre_movements")
    op.drop_index(op.f("ix_cadre_movements_employee"), table_name="cadre_movements")
    op.drop_index(op.f("ix_cadre_movements_created_at"), table_name="cadre_movements")
    op.drop_table("cadre_movements")
    op.drop_index(op.f("ix_employees_created_at"), table_name="employees")
    op.drop_table("employees")
    op.drop_index(op.f("ix_coin_types_created_at"), table_name="coin_types")
    op.drop_table("coin_types")
    op.drop_index(op.f("ix_users_last_login"), table_name="users")
    op.drop_index(op.f("ix_users_email"), table_name="users")
    op.drop_index(op.f("ix_users_created_at"), table_name="users")
    op.drop_table("users")
    op.drop_index(op.f("ix_skills_created_at"), table_name="skills")
    op.drop_table("skills")
    op.drop_index(op.f("ix_departments_created_at"), table_name="departments")
    op.drop_table("departments")
    op.drop_index(op.f("ix_coins_created_at"), table_name="coins")
    op.drop_table("coins")
    # ### end Alembic commands ###

View File

@ -1,16 +1,16 @@
"""postgres init migrations """postgres init migrations
Revision ID: postgres_init_migrations Revision ID: postgres_init_migrations
Revises: Revises:
Create Date: 2022-06-14 00:29:28.932954 Create Date: 2022-06-14 00:29:28.932954
""" """
import sqlalchemy as sa
from alembic import op from alembic import op
from sqlalchemy_study import sqlalchemy as sa from sqlalchemy.dialects import mysql
from sqlalchemy_study.sqlalchemy import mysql
# revision identifiers, used by Alembic. # revision identifiers, used by Alembic.
revision = 'postgres_init_migrations' revision = "postgres_init_migrations"
down_revision = None down_revision = None
branch_labels = None branch_labels = None
depends_on = None depends_on = None
@ -18,157 +18,168 @@ depends_on = None
def upgrade():
    """Create the initial schema for the postgres variant of the study project.

    Creates, in dependency order: coins, departments, skills, users, coin_types,
    employees, cadre_movements, employee_departments, employees_skills — plus the
    supporting indexes. ``created_at``/``updated_at`` audit columns use TIMESTAMP;
    some employee columns deliberately use ``mysql.VARCHAR`` (mirrors the original
    autogenerated migration — presumably kept for cross-dialect parity; TODO confirm).
    """
    # ### commands auto generated by Alembic - please adjust! ###
    op.create_table(
        "coins",
        sa.Column("id", sa.Integer(), autoincrement=True, nullable=False),
        sa.Column("created_at", sa.TIMESTAMP(), nullable=True),
        sa.Column("updated_at", sa.TIMESTAMP(), nullable=True),
        sa.Column("coin_name", sa.VARCHAR(length=50), nullable=True),
        sa.Column("enabled", sa.BOOLEAN(), nullable=True),
        sa.PrimaryKeyConstraint("id"),
        sa.UniqueConstraint("coin_name"),
        sa.UniqueConstraint("id"),
    )
    op.create_index(op.f("ix_coins_created_at"), "coins", ["created_at"], unique=False)
    op.create_table(
        "departments",
        sa.Column("id", sa.Integer(), autoincrement=True, nullable=False),
        sa.Column("created_at", sa.TIMESTAMP(), nullable=True),
        sa.Column("updated_at", sa.TIMESTAMP(), nullable=True),
        sa.Column("name", sa.VARCHAR(length=255), nullable=False),
        sa.Column("description", sa.VARCHAR(length=255), nullable=False),
        sa.PrimaryKeyConstraint("id"),
        sa.UniqueConstraint("id"),
    )
    op.create_index(op.f("ix_departments_created_at"), "departments", ["created_at"], unique=False)
    op.create_table(
        "skills",
        sa.Column("id", sa.Integer(), autoincrement=True, nullable=False),
        sa.Column("created_at", sa.TIMESTAMP(), nullable=True),
        sa.Column("updated_at", sa.TIMESTAMP(), nullable=True),
        sa.Column("name", sa.VARCHAR(length=255), nullable=False),
        sa.Column("description", sa.Text(), nullable=True),
        sa.PrimaryKeyConstraint("id"),
        sa.UniqueConstraint("id"),
        sa.UniqueConstraint("name"),
    )
    op.create_index(op.f("ix_skills_created_at"), "skills", ["created_at"], unique=False)
    op.create_table(
        "users",
        sa.Column("id", sa.Integer(), autoincrement=True, nullable=False),
        sa.Column("created_at", sa.TIMESTAMP(), nullable=True),
        sa.Column("updated_at", sa.TIMESTAMP(), nullable=True),
        sa.Column("username", sa.String(length=255), nullable=True),
        sa.Column("email", sa.String(length=255), nullable=True),
        sa.Column("hash_password", sa.String(length=255), nullable=True),
        sa.Column("auth_token", sa.String(length=255), nullable=True),
        sa.Column("last_login", sa.DateTime(), nullable=True),
        sa.PrimaryKeyConstraint("id"),
        sa.UniqueConstraint("id"),
        sa.UniqueConstraint("username"),
    )
    op.create_index(op.f("ix_users_created_at"), "users", ["created_at"], unique=False)
    # email is the only unique index here; username uniqueness is a table constraint above.
    op.create_index(op.f("ix_users_email"), "users", ["email"], unique=True)
    op.create_index(op.f("ix_users_last_login"), "users", ["last_login"], unique=False)
    op.create_table(
        "coin_types",
        sa.Column("id", sa.Integer(), autoincrement=True, nullable=False),
        sa.Column("created_at", sa.TIMESTAMP(), nullable=True),
        sa.Column("updated_at", sa.TIMESTAMP(), nullable=True),
        sa.Column("coin_name", sa.VARCHAR(length=50), nullable=True),
        sa.Column("coin_id", sa.Integer(), nullable=True),
        sa.ForeignKeyConstraint(["coin_id"], ["coins.id"], ondelete="CASCADE"),
        sa.PrimaryKeyConstraint("id"),
        sa.UniqueConstraint("id"),
    )
    op.create_index(op.f("ix_coin_types_created_at"), "coin_types", ["created_at"], unique=False)
    op.create_table(
        "employees",
        sa.Column("id", sa.Integer(), autoincrement=True, nullable=False),
        sa.Column("created_at", sa.TIMESTAMP(), nullable=True),
        sa.Column("updated_at", sa.TIMESTAMP(), nullable=True),
        sa.Column("first_name", mysql.VARCHAR(length=128), nullable=False),
        sa.Column("last_name", mysql.VARCHAR(length=128), nullable=False),
        sa.Column("phone", mysql.VARCHAR(length=30), nullable=True),
        sa.Column("description", mysql.VARCHAR(length=255), nullable=True),
        sa.Column("coin_id", sa.Integer(), nullable=True),
        # SET NULL (not CASCADE): deleting a coin must not delete its employees.
        sa.ForeignKeyConstraint(["coin_id"], ["coins.id"], ondelete="SET NULL"),
        sa.PrimaryKeyConstraint("id"),
        sa.UniqueConstraint("id"),
        sa.UniqueConstraint("phone"),
    )
    op.create_index(op.f("ix_employees_created_at"), "employees", ["created_at"], unique=False)
    op.create_table(
        "cadre_movements",
        sa.Column("id", sa.Integer(), autoincrement=True, nullable=False),
        sa.Column("created_at", sa.TIMESTAMP(), nullable=True),
        sa.Column("updated_at", sa.TIMESTAMP(), nullable=True),
        sa.Column("employee", sa.Integer(), nullable=False),
        sa.Column("old_department", sa.Integer(), nullable=False),
        sa.Column("new_department", sa.Integer(), nullable=False),
        sa.Column("reason", sa.VARCHAR(length=500), nullable=True),
        sa.ForeignKeyConstraint(["employee"], ["employees.id"], ondelete="CASCADE"),
        sa.ForeignKeyConstraint(["new_department"], ["departments.id"], ondelete="CASCADE"),
        sa.ForeignKeyConstraint(["old_department"], ["departments.id"], ondelete="CASCADE"),
        sa.PrimaryKeyConstraint("id"),
        sa.UniqueConstraint("id"),
    )
    op.create_index(op.f("ix_cadre_movements_created_at"), "cadre_movements", ["created_at"], unique=False)
    op.create_index(op.f("ix_cadre_movements_employee"), "cadre_movements", ["employee"], unique=False)
    op.create_index(op.f("ix_cadre_movements_new_department"), "cadre_movements", ["new_department"], unique=False)
    op.create_index(op.f("ix_cadre_movements_old_department"), "cadre_movements", ["old_department"], unique=False)
    op.create_table(
        "employee_departments",
        sa.Column("id", sa.Integer(), autoincrement=True, nullable=False),
        sa.Column("created_at", sa.TIMESTAMP(), nullable=True),
        sa.Column("updated_at", sa.TIMESTAMP(), nullable=True),
        sa.Column("employee_id", sa.Integer(), nullable=False),
        sa.Column("department_id", sa.Integer(), nullable=False),
        sa.ForeignKeyConstraint(["department_id"], ["departments.id"], ondelete="CASCADE"),
        sa.ForeignKeyConstraint(["employee_id"], ["employees.id"], ondelete="CASCADE"),
        sa.PrimaryKeyConstraint("id"),
        sa.UniqueConstraint("id"),
    )
    op.create_index(op.f("ix_employee_departments_created_at"), "employee_departments", ["created_at"], unique=False)
    op.create_index(
        op.f("ix_employee_departments_department_id"), "employee_departments", ["department_id"], unique=False
    )
    op.create_index(op.f("ix_employee_departments_employee_id"), "employee_departments", ["employee_id"], unique=False)
    op.create_table(
        "employees_skills",
        sa.Column("id", sa.Integer(), autoincrement=True, nullable=False),
        sa.Column("created_at", sa.TIMESTAMP(), nullable=True),
        sa.Column("updated_at", sa.TIMESTAMP(), nullable=True),
        sa.Column("employee_id", sa.Integer(), nullable=False),
        sa.Column("skill_id", sa.Integer(), nullable=False),
        sa.ForeignKeyConstraint(["employee_id"], ["employees.id"], ondelete="CASCADE"),
        sa.ForeignKeyConstraint(["skill_id"], ["skills.id"], ondelete="CASCADE"),
        sa.PrimaryKeyConstraint("id"),
        # An employee may hold a given skill at most once.
        sa.UniqueConstraint("employee_id", "skill_id"),
        sa.UniqueConstraint("id"),
    )
    op.create_index(op.f("ix_employees_skills_created_at"), "employees_skills", ["created_at"], unique=False)
    op.create_index(op.f("ix_employees_skills_employee_id"), "employees_skills", ["employee_id"], unique=False)
    op.create_index(op.f("ix_employees_skills_skill_id"), "employees_skills", ["skill_id"], unique=False)
    # ### end Alembic commands ###
def downgrade():
    """Revert the initial postgres schema: drop every index and table created by upgrade().

    Tables are dropped in reverse dependency order (association tables first,
    then tables referenced by foreign keys last) so FK constraints never block a drop.
    """
    # ### commands auto generated by Alembic - please adjust! ###
    op.drop_index(op.f("ix_employees_skills_skill_id"), table_name="employees_skills")
    op.drop_index(op.f("ix_employees_skills_employee_id"), table_name="employees_skills")
    op.drop_index(op.f("ix_employees_skills_created_at"), table_name="employees_skills")
    op.drop_table("employees_skills")
    op.drop_index(op.f("ix_employee_departments_employee_id"), table_name="employee_departments")
    op.drop_index(op.f("ix_employee_departments_department_id"), table_name="employee_departments")
    op.drop_index(op.f("ix_employee_departments_created_at"), table_name="employee_departments")
    op.drop_table("employee_departments")
    op.drop_index(op.f("ix_cadre_movements_old_department"), table_name="cadre_movements")
    op.drop_index(op.f("ix_cadre_movements_new_department"), table_name="cadre_movements")
    op.drop_index(op.f("ix_cadre_movements_employee"), table_name="cadre_movements")
    op.drop_index(op.f("ix_cadre_movements_created_at"), table_name="cadre_movements")
    op.drop_table("cadre_movements")
    op.drop_index(op.f("ix_employees_created_at"), table_name="employees")
    op.drop_table("employees")
    op.drop_index(op.f("ix_coin_types_created_at"), table_name="coin_types")
    op.drop_table("coin_types")
    op.drop_index(op.f("ix_users_last_login"), table_name="users")
    op.drop_index(op.f("ix_users_email"), table_name="users")
    op.drop_index(op.f("ix_users_created_at"), table_name="users")
    op.drop_table("users")
    op.drop_index(op.f("ix_skills_created_at"), table_name="skills")
    op.drop_table("skills")
    op.drop_index(op.f("ix_departments_created_at"), table_name="departments")
    op.drop_table("departments")
    op.drop_index(op.f("ix_coins_created_at"), table_name="coins")
    op.drop_table("coins")
    # ### end Alembic commands ###

View File

@ -1,4 +1,3 @@
from settings.settings import Settings

# Module-level singleton: import `settings` from this package rather than
# constructing Settings() (and re-reading config/.env) at every call site.
settings = Settings()

View File

@ -1,32 +1,32 @@
import os import os
from pathlib import Path from pathlib import Path
from pydantic import BaseSettings from pydantic_settings import BaseSettings
BASE_DIR = Path(__file__).parent.parent BASE_DIR = Path(__file__).parent.parent
SHARED_DIR = BASE_DIR.resolve().joinpath('shared') SHARED_DIR = BASE_DIR.resolve().joinpath("shared")
SHARED_DIR.joinpath('logs').mkdir(exist_ok=True) SHARED_DIR.joinpath("logs").mkdir(exist_ok=True)
DIR_LOGS = SHARED_DIR.joinpath('logs') DIR_LOGS = SHARED_DIR.joinpath("logs")
class Settings(BaseSettings): class Settings(BaseSettings):
"""Application settings.""" """Application settings."""
DB_HOST: str = 'db_host' DB_HOST: str = "db_host"
USE_DATABASE: str = 'mysql' USE_DATABASE: str = "mysql"
DB_ECHO: bool = False DB_ECHO: bool = False
# Postgres POSTGRES_DB_PORT: int = 5432
POSTGRES_DB_PORT: int POSTGRES_DB: str = "sqlalchemy_study"
POSTGRES_DB: str POSTGRES_USER: str = "user"
POSTGRES_USER: str POSTGRES_PASSWORD: str = "postgrespwd"
POSTGRES_PASSWORD: str
MYSQL_DB_PORT: int MYSQL_DB_PORT: int = 3307
MYSQL_DATABASE: str MYSQL_DATABASE: str = "sqlalchemy_study"
MYSQL_USER: str MYSQL_USER: str = "user"
MYSQL_PASSWORD: str MYSQL_PASSWORD: str = "mysqlpwd"
MYSQL_ROOT_PASSWORD: str = "mysqlpwd"
@property @property
def async_db_url(self) -> str: def async_db_url(self) -> str:
@ -35,14 +35,16 @@ class Settings(BaseSettings):
:return: database URL. :return: database URL.
""" """
async_postgres_url = (f'postgresql+asyncpg://{self.POSTGRES_USER}:{self.POSTGRES_PASSWORD}@' async_postgres_url = (
f'{self.DB_HOST}:{self.POSTGRES_DB_PORT}/{self.POSTGRES_DB}' f"postgresql+asyncpg://{self.POSTGRES_USER}:{self.POSTGRES_PASSWORD}@"
) f"{self.DB_HOST}:{self.POSTGRES_DB_PORT}/{self.POSTGRES_DB}"
)
async_mysql_url = (f'mysql+asyncmy://{self.MYSQL_USER}:{self.MYSQL_PASSWORD}@' async_mysql_url = (
f'{self.DB_HOST}:{self.MYSQL_DB_PORT}/{self.MYSQL_DATABASE}' f"mysql+asyncmy://{self.MYSQL_USER}:{self.MYSQL_PASSWORD}@"
) f"{self.DB_HOST}:{self.MYSQL_DB_PORT}/{self.MYSQL_DATABASE}"
if os.environ.get('USE_DATABASE', self.USE_DATABASE).lower() == 'postgres': )
if os.environ.get("USE_DATABASE", self.USE_DATABASE).lower() == "postgres":
return async_postgres_url return async_postgres_url
return async_mysql_url return async_mysql_url
@ -53,17 +55,19 @@ class Settings(BaseSettings):
:return: database URL. :return: database URL.
""" """
sync_postgres_url = (f'postgresql://{self.POSTGRES_USER}:{self.POSTGRES_PASSWORD}@' sync_postgres_url = (
f'{self.DB_HOST}:{self.POSTGRES_DB_PORT}/{self.POSTGRES_DB}' f"postgresql://{self.POSTGRES_USER}:{self.POSTGRES_PASSWORD}@"
) f"{self.DB_HOST}:{self.POSTGRES_DB_PORT}/{self.POSTGRES_DB}"
)
sync_mysql_url = (f'mysql+pymysql://{self.MYSQL_USER}:{self.MYSQL_PASSWORD}@' sync_mysql_url = (
f'{self.DB_HOST}:{self.MYSQL_DB_PORT}/{self.MYSQL_DATABASE}' f"mysql+pymysql://{self.MYSQL_USER}:{self.MYSQL_PASSWORD}@"
) f"{self.DB_HOST}:{self.MYSQL_DB_PORT}/{self.MYSQL_DATABASE}"
if os.environ.get('USE_DATABASE', self.USE_DATABASE).lower() == 'postgres': )
if os.environ.get("USE_DATABASE", self.USE_DATABASE).lower() == "postgres":
return sync_postgres_url return sync_postgres_url
return sync_mysql_url return sync_mysql_url
class Config: class Config:
env_file = 'config/.env' env_file = "config/.env"
env_file_encoding = "utf-8" env_file_encoding = "utf-8"