Mirror of https://github.com/Balshgit/different, synced 2025-12-11 02:00:41 +03:00
add some files from study directory
147 sqlalchemy_study/.gitignore vendored Normal file
@@ -0,0 +1,147 @@
### Python template

.idea/
.vscode/

# Byte-compiled / optimized / DLL files
__pycache__/
*.py[cod]
*$py.class

# C extensions
*.so

# Distribution / packaging
.Python
build/
develop-eggs/
dist/
downloads/
eggs/
.eggs/
lib/
lib64/
parts/
sdist/
var/
wheels/
share/python-wheels/
*.egg-info/
.installed.cfg
*.egg
MANIFEST

# PyInstaller
# Usually these files are written by a python script from a template
# before PyInstaller builds the exe, so as to inject date/other infos into it.
*.manifest
*.spec

# Installer logs
pip-log.txt
pip-delete-this-directory.txt

# Unit test / coverage reports
htmlcov/
.tox/
.nox/
.coverage
.coverage.*
.cache
nosetests.xml
coverage.xml
*.cover
*.py,cover
.hypothesis/
.pytest_cache/
cover/

# Translations
*.mo
*.pot

# Django stuff:
*.log
local_settings.py
db.sqlite3
db.sqlite3-journal
*.db

# Flask stuff:
instance/
.webassets-cache

# Scrapy stuff:
.scrapy

# Sphinx documentation
docs/_build/

# PyBuilder
.pybuilder/
target/

# Jupyter Notebook
.ipynb_checkpoints

# IPython
profile_default/
ipython_config.py

# pyenv
# For a library or package, you might want to ignore these files since the code is
# intended to run in multiple environments; otherwise, check them in:
# .python-version

# pipenv
# According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control.
# However, in case of collaboration, if having platform-specific dependencies or dependencies
# having no cross-platform support, pipenv may install dependencies that don't work, or not
# install all needed dependencies.
#Pipfile.lock

# PEP 582; used by e.g. github.com/David-OConnor/pyflow
__pypackages__/

# Celery stuff
celerybeat-schedule
celerybeat.pid

# SageMath parsed files
*.sage.py

# Environments
.env
.venv
env/
venv/
ENV/
env.bak/
venv.bak/

# Spyder project settings
.spyderproject
.spyproject

# Rope project settings
.ropeproject

# mkdocs documentation
/site

# mypy
.mypy_cache/
.dmypy.json
dmypy.json

# Pyre type checker
.pyre/

# pytype static type analyzer
.pytype/

# Cython debug symbols
cython_debug/

# my stuff
delete/
delete.py
103 sqlalchemy_study/README.md Normal file
@@ -0,0 +1,103 @@
# SQLALCHEMY STUDY

---

*Note: MySQL will start on port 3307*

*Note: Postgres will start on port 5433*

---

## Create environment:

```bash
cp ./src/config/.env.template ./src/config/.env
```

*Note: Set the USE_DATABASE variable to 'mysql' to work with MySQL or 'postgres' to work with Postgres.*

*The default is MySQL*
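For example, to switch the project to Postgres, edit the copied file (the variable and values come from the `.env.template` added in this commit):

```bash
# ./src/config/.env
USE_DATABASE=postgres
```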
## Run without the app in Docker:

Requires Python 3.11+ and Poetry 1.3.1

- **install poetry dependencies:**
```bash
poetry install
poetry shell
```

- **run for mysql:** ```docker-compose -f docker-compose.mysql.yaml up```

- **run for postgres:** ```docker-compose -f docker-compose.postgres.yaml up```

- **run initial data:** ```python ./src/data/fill_data.py```

## Run all in Docker:

**run for mysql:**
```bash
docker-compose -f docker-compose.mysql.yaml -f docker-compose.docker.yaml up
```
**run for postgres:**
```bash
docker-compose -f docker-compose.postgres.yaml -f docker-compose.docker.yaml up
```
*Note: Docker will run all migrations automatically, so you don't need the data creation step.*

## Help info:

### Create alembic migrations:

*Note: To generate migrations, run:*
```bash
# For automatic change detection.
alembic revision --autogenerate -m "migration message"

# For empty file generation.
alembic revision
```

*Note: To migrate your database, run the following commands:*
```bash
# To run all migrations up to and including the migration with revision_id.
alembic upgrade "<revision_id>"

# To perform all pending migrations.
alembic upgrade "head"
```

### Reverting alembic migrations:

*Note: To revert migrations, run:*
```bash
# Revert all migrations down to revision_id.
alembic downgrade <revision_id>

# Revert everything.
alembic downgrade base

# Revert the last N revisions (here, the last two).
alembic downgrade -2
```
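*Note: To inspect where the database currently is, you can also use Alembic's built-in `current` and `history` commands (standard Alembic, not specific to this repo):*
```bash
# Show the revision the database is currently at.
alembic current

# Show the full revision history.
alembic history
```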
### Database access:

Postgres:
```bash
docker exec -it sqlalchemy_study_db psql -d sqlalchemy_study -U balsh
```

- show help: ```\?```
- show all tables: ```\dt```
- describe table: ```\d {table name}```
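MySQL access is analogous; a minimal sketch, assuming the default credentials and port from `./src/config/.env.template` (user `user`, database `sqlalchemy_study`, TCP port 3307):

```bash
docker exec -it sqlalchemy_study_db mysql -h 127.0.0.1 -P 3307 -u user -p sqlalchemy_study
```

- show all tables: ```SHOW TABLES;```
- describe table: ```DESCRIBE {table name};```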
## Clean database:

```bash
docker-compose -f docker-compose.mysql.yaml down -v
```

## Known issues:
39 sqlalchemy_study/docker-compose.docker.yaml Normal file
@@ -0,0 +1,39 @@
version: '3.9'

networks:
  sqlalchemy_study_network:
    name: "sqlalchemy_study_network"
    ipam:
      config:
        - subnet: 200.20.0.0/24


services:
  db:
    networks:
      sqlalchemy_study_network:
        ipv4_address: 200.20.0.12

  app:
    container_name: "sqlalchemy_study_app"
    image: "sqlalchemy_study:latest"
    build:
      context: .
      dockerfile: ./docker/Dockerfile
      args:
        USER: root
    restart: unless-stopped
    networks:
      sqlalchemy_study_network:
        ipv4_address: 200.20.0.10
    env_file: ./src/config/.env
    environment:
      DB_HOST: db
    depends_on:
      - db
    command: >
      bash -c "/app/scripts/docker-entrypoint.sh
      && /app/scripts/alembic-init-migrate.sh && python data/fill_data.py
      && sleep infinity"
    volumes:
      - ./src:/app/src/
29 sqlalchemy_study/docker-compose.mysql.yaml Normal file
@@ -0,0 +1,29 @@
version: '3.9'


volumes:
  sqlalchemy_study_db_data:
    name: "sqlalchemy_study_db_data"

services:

  db:
    image: mysql:8.0.31
    platform: linux/amd64
    container_name: "sqlalchemy_study_db"
    hostname: 'db_host'
    volumes:
      - sqlalchemy_study_db_data:/var/lib/mysql
      - /etc/localtime:/etc/localtime:ro
    env_file: ./src/config/.env
    environment:
      MYSQL_TCP_PORT: 3307
    restart: unless-stopped
    expose:
      - '3307'
    ports:
      - '3307:3307'
    security_opt:
      - seccomp:unconfined
    cap_add:
      - SYS_NICE  # CAP_SYS_NICE
23 sqlalchemy_study/docker-compose.postgres.yaml Normal file
@@ -0,0 +1,23 @@
version: '3.9'


volumes:
  sqlalchemy_study_db_data:
    name: "sqlalchemy_study_db_data"

services:

  db:
    image: postgres:14.6
    container_name: "sqlalchemy_study_db"
    hostname: 'db_host'
    restart: unless-stopped
    volumes:
      - sqlalchemy_study_db_data:/var/lib/postgresql/data
      - /etc/localtime:/etc/localtime:ro
    env_file: ./src/config/.env
    expose:
      - '5433'
    ports:
      - '5433:5433'
    command: -p 5433
60 sqlalchemy_study/docker/Dockerfile Normal file
@@ -0,0 +1,60 @@
FROM --platform=linux/amd64 python:3.11.1

ARG USER

ENV SOURCE_DIR=/app/src/

ENV USER=${USER} \
    PYTHONFAULTHANDLER=1 \
    PYTHONUNBUFFERED=1 \
    PYTHONHASHSEED=random \
    PYTHONDONTWRITEBYTECODE=1 \
    PYTHONPATH="${PYTHONPATH}:${SOURCE_DIR}" \
    # pip:
    PIP_NO_CACHE_DIR=off \
    PIP_DISABLE_PIP_VERSION_CHECK=on \
    PIP_DEFAULT_TIMEOUT=100 \
    POETRY_VIRTUALENVS_CREATE=false \
    POETRY_CACHE_DIR='/var/cache/pypoetry' \
    PATH="$PATH:/root/.poetry/bin"

RUN printf "================\n\nStart build app. USER is: "${USER}"\n\n===============\n" \
    && apt-get update \
    && apt-get install --no-install-recommends -y \
        procps \
        bash \
        build-essential \
        curl \
        iputils-ping \
        gettext \
        git \
        libpq-dev \
        nano \
        sshpass \
    && pip install --upgrade pip \
    # Installing `poetry` package manager:
    && pip install poetry \
    # Cleaning cache:
    && apt-get purge -y --auto-remove -o APT::AutoRemove::RecommendsImportant=false \
    && apt-get clean -y && rm -rf /var/lib/apt/lists/*

WORKDIR ${SOURCE_DIR}

RUN if [ "$USER" != "root" ]; then \
    groupadd -r "$USER" && useradd -d /home/"$USER" -r -g "$USER" "$USER" \
    && chown "$USER":"$USER" -R /home/"$USER"; \
    fi

COPY --chown="$USER":"$USER" ./poetry.lock ./pyproject.toml ${SOURCE_DIR}

# Installing requirements
RUN poetry install && rm -rf "$POETRY_CACHE_DIR"

COPY ./docker/scripts/ /app/scripts/
RUN chmod +x /app/scripts/docker-entrypoint.sh /app/scripts/alembic-init-migrate.sh

USER "$USER"

# Copying actual application
COPY --chown="$USER":"$USER" . ${SOURCE_DIR}
16 sqlalchemy_study/docker/scripts/alembic-init-migrate.sh Normal file
@@ -0,0 +1,16 @@
#!/bin/bash

alembic_init_migrations(){
  echo "Chosen database is $USE_DATABASE"
  if [ "$USE_DATABASE" = "mysql" ];
  then
    echo "Start migrations for MySQL"
    alembic upgrade mysql_init_migrations;
  elif [ "$USE_DATABASE" = "postgres" ];
  then
    echo "Start migrations for Postgres"
    alembic upgrade postgres_init_migrations;
  fi
}

alembic_init_migrations
26 sqlalchemy_study/docker/scripts/docker-entrypoint.sh Executable file
@@ -0,0 +1,26 @@
#!/bin/bash

TIMEOUT=${TIMEOUT:-60}

DATABASE_HOST=${DB_HOST:-db_host}

POSTGRES_DATABASE_PORT=${POSTGRES_DB_PORT:-5432}
POSTGRES_DATABASE="$DATABASE_HOST:$POSTGRES_DATABASE_PORT"

MYSQL_DATABASE_PORT=${MYSQL_DB_PORT:-3306}
MYSQL_DATABASE="$DATABASE_HOST:$MYSQL_DATABASE_PORT"

wait_for_databases(){
  echo "Chosen database is $USE_DATABASE"
  if [ "$USE_DATABASE" = "mysql" ];
  then
    echo "Waiting for DB on: $MYSQL_DATABASE"
    /app/scripts/wait-for-it.sh -t $TIMEOUT -s $MYSQL_DATABASE -- echo 'MySQL database connected';
  elif [ "$USE_DATABASE" = "postgres" ];
  then
    echo "Waiting for DB on: $POSTGRES_DATABASE"
    /app/scripts/wait-for-it.sh -t $TIMEOUT -s $POSTGRES_DATABASE -- echo 'Postgres database connected';
  fi
}

wait_for_databases
182 sqlalchemy_study/docker/scripts/wait-for-it.sh Executable file
@@ -0,0 +1,182 @@
#!/usr/bin/env bash
# Use this script to test if a given TCP host/port are available

WAITFORIT_cmdname=${0##*/}

echoerr() { if [[ $WAITFORIT_QUIET -ne 1 ]]; then echo "$@" 1>&2; fi }

usage()
{
    cat << USAGE >&2
Usage:
    $WAITFORIT_cmdname host:port [-s] [-t timeout] [-- command args]
    -h HOST | --host=HOST       Host or IP under test
    -p PORT | --port=PORT       TCP port under test
                                Alternatively, you specify the host and port as host:port
    -s | --strict               Only execute subcommand if the test succeeds
    -q | --quiet                Don't output any status messages
    -t TIMEOUT | --timeout=TIMEOUT
                                Timeout in seconds, zero for no timeout
    -- COMMAND ARGS             Execute command with args after the test finishes
USAGE
    exit 1
}

wait_for()
{
    if [[ $WAITFORIT_TIMEOUT -gt 0 ]]; then
        echoerr "$WAITFORIT_cmdname: waiting $WAITFORIT_TIMEOUT seconds for $WAITFORIT_HOST:$WAITFORIT_PORT"
    else
        echoerr "$WAITFORIT_cmdname: waiting for $WAITFORIT_HOST:$WAITFORIT_PORT without a timeout"
    fi
    WAITFORIT_start_ts=$(date +%s)
    while :
    do
        if [[ $WAITFORIT_ISBUSY -eq 1 ]]; then
            nc -z $WAITFORIT_HOST $WAITFORIT_PORT
            WAITFORIT_result=$?
        else
            (echo -n > /dev/tcp/$WAITFORIT_HOST/$WAITFORIT_PORT) >/dev/null 2>&1
            WAITFORIT_result=$?
        fi
        if [[ $WAITFORIT_result -eq 0 ]]; then
            WAITFORIT_end_ts=$(date +%s)
            echoerr "$WAITFORIT_cmdname: $WAITFORIT_HOST:$WAITFORIT_PORT is available after $((WAITFORIT_end_ts - WAITFORIT_start_ts)) seconds"
            break
        fi
        sleep 1
    done
    return $WAITFORIT_result
}

wait_for_wrapper()
{
    # In order to support SIGINT during timeout: http://unix.stackexchange.com/a/57692
    if [[ $WAITFORIT_QUIET -eq 1 ]]; then
        timeout $WAITFORIT_BUSYTIMEFLAG $WAITFORIT_TIMEOUT $0 --quiet --child --host=$WAITFORIT_HOST --port=$WAITFORIT_PORT --timeout=$WAITFORIT_TIMEOUT &
    else
        timeout $WAITFORIT_BUSYTIMEFLAG $WAITFORIT_TIMEOUT $0 --child --host=$WAITFORIT_HOST --port=$WAITFORIT_PORT --timeout=$WAITFORIT_TIMEOUT &
    fi
    WAITFORIT_PID=$!
    trap "kill -INT -$WAITFORIT_PID" INT
    wait $WAITFORIT_PID
    WAITFORIT_RESULT=$?
    if [[ $WAITFORIT_RESULT -ne 0 ]]; then
        echoerr "$WAITFORIT_cmdname: timeout occurred after waiting $WAITFORIT_TIMEOUT seconds for $WAITFORIT_HOST:$WAITFORIT_PORT"
    fi
    return $WAITFORIT_RESULT
}

# process arguments
while [[ $# -gt 0 ]]
do
    case "$1" in
        *:* )
        WAITFORIT_hostport=(${1//:/ })
        WAITFORIT_HOST=${WAITFORIT_hostport[0]}
        WAITFORIT_PORT=${WAITFORIT_hostport[1]}
        shift 1
        ;;
        --child)
        WAITFORIT_CHILD=1
        shift 1
        ;;
        -q | --quiet)
        WAITFORIT_QUIET=1
        shift 1
        ;;
        -s | --strict)
        WAITFORIT_STRICT=1
        shift 1
        ;;
        -h)
        WAITFORIT_HOST="$2"
        if [[ $WAITFORIT_HOST == "" ]]; then break; fi
        shift 2
        ;;
        --host=*)
        WAITFORIT_HOST="${1#*=}"
        shift 1
        ;;
        -p)
        WAITFORIT_PORT="$2"
        if [[ $WAITFORIT_PORT == "" ]]; then break; fi
        shift 2
        ;;
        --port=*)
        WAITFORIT_PORT="${1#*=}"
        shift 1
        ;;
        -t)
        WAITFORIT_TIMEOUT="$2"
        if [[ $WAITFORIT_TIMEOUT == "" ]]; then break; fi
        shift 2
        ;;
        --timeout=*)
        WAITFORIT_TIMEOUT="${1#*=}"
        shift 1
        ;;
        --)
        shift
        WAITFORIT_CLI=("$@")
        break
        ;;
        --help)
        usage
        ;;
        *)
        echoerr "Unknown argument: $1"
        usage
        ;;
    esac
done

if [[ "$WAITFORIT_HOST" == "" || "$WAITFORIT_PORT" == "" ]]; then
    echoerr "Error: you need to provide a host and port to test."
    usage
fi

WAITFORIT_TIMEOUT=${WAITFORIT_TIMEOUT:-15}
WAITFORIT_STRICT=${WAITFORIT_STRICT:-0}
WAITFORIT_CHILD=${WAITFORIT_CHILD:-0}
WAITFORIT_QUIET=${WAITFORIT_QUIET:-0}

# Check to see if timeout is from busybox?
WAITFORIT_TIMEOUT_PATH=$(type -p timeout)
WAITFORIT_TIMEOUT_PATH=$(realpath $WAITFORIT_TIMEOUT_PATH 2>/dev/null || readlink -f $WAITFORIT_TIMEOUT_PATH)

WAITFORIT_BUSYTIMEFLAG=""
if [[ $WAITFORIT_TIMEOUT_PATH =~ "busybox" ]]; then
    WAITFORIT_ISBUSY=1
    # Check if busybox timeout uses -t flag
    # (recent Alpine versions don't support -t anymore)
    if timeout &>/dev/stdout | grep -q -e '-t '; then
        WAITFORIT_BUSYTIMEFLAG="-t"
    fi
else
    WAITFORIT_ISBUSY=0
fi

if [[ $WAITFORIT_CHILD -gt 0 ]]; then
    wait_for
    WAITFORIT_RESULT=$?
    exit $WAITFORIT_RESULT
else
    if [[ $WAITFORIT_TIMEOUT -gt 0 ]]; then
        wait_for_wrapper
        WAITFORIT_RESULT=$?
    else
        wait_for
        WAITFORIT_RESULT=$?
    fi
fi

if [[ $WAITFORIT_CLI != "" ]]; then
    if [[ $WAITFORIT_RESULT -ne 0 && $WAITFORIT_STRICT -eq 1 ]]; then
        echoerr "$WAITFORIT_cmdname: strict mode, refusing to execute subprocess"
        exit $WAITFORIT_RESULT
    fi
    exec "${WAITFORIT_CLI[@]}"
else
    exit $WAITFORIT_RESULT
fi
1104 sqlalchemy_study/poetry.lock generated Normal file
File diff suppressed because it is too large
28 sqlalchemy_study/pyproject.toml Normal file
@@ -0,0 +1,28 @@
[tool.poetry]
name = "sqlalchemy_study_project"
version = "1.0.1"
description = "for study sqlalchemy async models"
authors = ["Dmitry Afanasyev <Balshbox@gmail.com>"]

[tool.poetry.dependencies]
python = "^3.11"
SQLAlchemy = "^1.4"
SQLAlchemy-Utils = "^0.38.2"
pydantic = {version = "^1.9.1", extras = ["email"]}
factory-boy = "^3.2.1"
Faker = "^15.0.0"
loguru = "^0.6.0"
alembic = "^1.8.0"
python-dotenv = "^0.20.0"
asyncpg = "^0.27.0"
asyncmy = "^0.2.5"
PyMySQL = "^1.0.2"
cryptography = "^37.0.2"
psycopg2-binary = "^2.9.3"

[tool.poetry.dev-dependencies]
ipython = "^8.4.0"

[build-system]
requires = ["poetry-core>=1.0.0"]
build-backend = "poetry.core.masonry.api"
186 sqlalchemy_study/sqlalchemy.py Normal file
@@ -0,0 +1,186 @@
from sqlalchemy import create_engine
from sqlalchemy import Table, Column, String, MetaData, DATETIME, CHAR, INTEGER
from sqlalchemy.orm import Session, sessionmaker
from datetime import datetime, timezone, timedelta
from pathlib import Path
from decouple import AutoConfig


BASE_DIR = Path(__file__).parent.parent
config = AutoConfig(search_path=BASE_DIR.joinpath('config'))

DATABASE_USER = config('POSTGRES_USER')
DATABASE_NAME = config('POSTGRES_DB')
DATABASE_PASSWORD = config('POSTGRES_PASSWORD')
DATABASE_HOST = config('DATABASE_HOST')
DATABASE_PORT = config('DATABASE_PORT')


engine = create_engine(
    f'postgresql+psycopg2://{DATABASE_USER}:{DATABASE_PASSWORD}@'
    f'{DATABASE_HOST}:{DATABASE_PORT}/{DATABASE_NAME}')

session_factory = sessionmaker(engine)
session = session_factory()


meta = MetaData(engine)


def get_now(offset):
    """Return the current time in a fixed UTC offset (in hours)."""
    _offset = timezone(timedelta(hours=offset))
    now = datetime.now(_offset)
    return now


announce = Table('accounts_announce', meta,
                 Column('id', INTEGER, primary_key=True),
                 Column('announce', String, nullable=True, default=''),
                 Column('created', DATETIME),
                 Column('author', CHAR, nullable=False),
                 )


bot_users_table = Table('accounts_botusers', meta,
                        Column('id', INTEGER, primary_key=True),
                        Column('chat_id', CHAR, nullable=False),
                        Column('nickname', CHAR, nullable=True),
                        Column('name', CHAR, nullable=True),
                        Column('telephone', CHAR, nullable=True),
                        Column('location', CHAR, nullable=True, default=''),
                        Column('user_created', DATETIME)
                        )


users_messages = Table('accounts_usersmessages', meta,
                       Column('id', INTEGER, primary_key=True),
                       Column('chat_id_id', INTEGER, nullable=True),
                       Column('nickname', CHAR, nullable=True),
                       Column('name', CHAR, nullable=True),
                       Column('message', String, nullable=False),
                       Column('location', CHAR, nullable=True),
                       Column('message_time', DATETIME),
                       Column('status', CHAR, nullable=True, default='')
                       )

reply_messages = Table('accounts_messagesreplys', meta,
                       Column('id', INTEGER, primary_key=True),
                       Column('chat_id_id', INTEGER, nullable=True),
                       Column('nickname', CHAR, nullable=True),
                       Column('name', CHAR, nullable=True),
                       Column('message', String, nullable=False),
                       Column('message_time', DATETIME),
                       Column('status', CHAR, nullable=True, default='')
                       )


def db_insert_or_update(chat_id, nickname=None, name=None,
                        telephone=None, location=None,
                        ):
    # Try to insert the user; fall back to an update if the row already exists.
    with engine.connect() as conn:
        try:
            insert_statement = bot_users_table.insert().values(chat_id=chat_id,
                                                               nickname=nickname,
                                                               name=name,
                                                               telephone=telephone,
                                                               location=location,
                                                               user_created=get_now(3)
                                                               )
            conn.execute(insert_statement)
        except Exception:
            update_statement = bot_users_table.update().values(nickname=nickname,
                                                               name=name,
                                                               telephone=telephone
                                                               ).\
                where(bot_users_table.c.chat_id == chat_id)
            conn.execute(update_statement)


def db_get_contact_number(chat_id):
    try:
        user = session.query(bot_users_table)\
            .filter(bot_users_table.c.chat_id == chat_id).one()
        return user.telephone
    except Exception:
        pass


def db_get_location(chat_id):
    try:
        user = session.query(bot_users_table)\
            .filter(bot_users_table.c.chat_id == chat_id).one()
        return user.location
    except Exception:
        pass


def db_get_id(chat_id):
    try:
        user = session.query(bot_users_table) \
            .filter(bot_users_table.c.chat_id == chat_id).one()
        return user.id
    except Exception as e:
        print('ERROR chat ID', e)


def db_update_location(chat_id, location):
    with engine.connect() as conn:
        try:
            update_statement = bot_users_table.update().values(location=location). \
                where(bot_users_table.c.chat_id == chat_id)
            conn.execute(update_statement)
        except Exception as e:
            print('ERROR', e)


def db_insert_reply_message(chat_id_id, nickname=None, name=None, reply_message=None):
    with engine.connect() as conn:
        insert_statement = reply_messages.insert().values(chat_id_id=chat_id_id,
                                                          nickname=nickname,
                                                          name=name,
                                                          message=reply_message,
                                                          message_time=get_now(3)
                                                          )
        conn.execute(insert_statement)


def db_insert_user_message(chat_id_id, nickname=None, location=None,
                           name=None, message=None):
    with engine.connect() as conn:
        insert_statement = users_messages.insert().values(chat_id_id=chat_id_id,
                                                          nickname=nickname,
                                                          name=name,
                                                          message=message,
                                                          location=location,
                                                          message_time=get_now(3)
                                                          )
        conn.execute(insert_statement)


def db_insert_announce(author, bot_announce):
    with engine.connect() as conn:
        insert_statement = announce.insert().values(announce=bot_announce,
                                                    author=author,
                                                    created=get_now(3)
                                                    )
        conn.execute(insert_statement)


# usage:

# db_insert_or_update(chat_id='417070387', nickname='Balsh', name='Dmitry', telephone='23432432')
# print(db_get_contact_number('417070387'))
# db_insert_reply_message(chat_id_id='1660356916', reply_message='asdasd')
# db_update_location(chat_id='1660356916', location='lsdkjfldskj')
# print(db_get_id('417070387'))
43 sqlalchemy_study/src/alembic.ini Normal file
@@ -0,0 +1,43 @@
# A generic, single database configuration.

[alembic]
# path to migration scripts
script_location = migrations
file_template = %%(year)d-%%(month).2d-%%(day).2d-%%(hour).2d-%%(minute).2d_%%(rev)s
prepend_sys_path = .
output_encoding = utf-8

# Logging configuration
[loggers]
keys = root,sqlalchemy,alembic

[handlers]
keys = console

[formatters]
keys = generic

[logger_root]
level = WARN
handlers = console
qualname =

[logger_sqlalchemy]
level = WARN
handlers =
qualname = sqlalchemy.engine

[logger_alembic]
level = INFO
handlers =
qualname = alembic

[handler_console]
class = StreamHandler
args = (sys.stderr,)
level = NOTSET
formatter = generic

[formatter_generic]
format = %(levelname)-5.5s [%(name)s] %(message)s
datefmt = %H:%M:%S
25 sqlalchemy_study/src/config/.env.template Normal file
@@ -0,0 +1,25 @@
# --------------DATABASE-------------

# ==== DB provider ====: 'mysql' -> use MySQL | 'postgres' -> use Postgres

USE_DATABASE=mysql

# ==== DB common ====

DB_HOST=localhost
DB_ECHO=True

# ==== Postgres ====

POSTGRES_DB_PORT=5433
POSTGRES_DB=sqlalchemy_study
POSTGRES_USER=user
POSTGRES_PASSWORD=postgrespwd

# ==== MySQL ====

MYSQL_DB_PORT=3307
MYSQL_ROOT_PASSWORD=mysqlpwd
MYSQL_PASSWORD=mysqlpwd
MYSQL_DATABASE=sqlalchemy_study
MYSQL_USER=user
0 sqlalchemy_study/src/data/__init__.py Normal file
150 sqlalchemy_study/src/data/factories.py Normal file
@@ -0,0 +1,150 @@
from datetime import datetime, timedelta
from typing import Optional

import factory
from factory import fuzzy
from faker import Faker

from db.dependencies import get_sync_db_session
from db.models.coin import Coin, CoinType
from db.models.department import Department, EmployeeDepartments
from db.models.skills import Skill, EmployeesSkills
from db.models.user import User, Employee

faker = Faker('ru_RU')


Session = get_sync_db_session()


class BaseModelFactory(factory.alchemy.SQLAlchemyModelFactory):
    class Meta:
        abstract = True
        sqlalchemy_session_persistence = 'commit'
        sqlalchemy_session = Session


class UserFactory(BaseModelFactory):

    id = factory.Sequence(lambda n: n + 1)
    username = faker.profile(fields=['username'])['username']
    email = factory.Faker('email')
    hash_password = factory.Faker('password')
    auth_token = factory.Faker('uuid4')

    class Meta:
        model = User
        sqlalchemy_get_or_create = ('username',)


class CoinModelFactory(BaseModelFactory):

    id = factory.Sequence(lambda n: n + 1)
    name = factory.Faker('cryptocurrency_name')
    enabled = fuzzy.FuzzyChoice((0, 1))

    class Meta:
        model = Coin
        sqlalchemy_get_or_create = ('name',)

    @factory.post_generation
    def coin_type(obj, create: bool, extracted: Optional[Coin], *args, **kwargs) -> None:
        # Attach a random number of coin types to every created coin.
        if create:
            CoinTypeFactory.create_batch(faker.random_int(min=3, max=7), coin_id=obj.id)


class CoinTypeFactory(BaseModelFactory):

    id = factory.Sequence(lambda n: n + 1)
    name = factory.Faker('cryptocurrency_code')

    class Meta:
        model = CoinType
        sqlalchemy_get_or_create = ('id',)


class SkillFactory(BaseModelFactory):

    id = factory.Sequence(lambda n: n + 1)
    name = factory.Faker('job', locale='ru_RU')
    description = factory.Faker('text', max_nb_chars=160, locale='ru_RU')
    updated_at = factory.LazyFunction(datetime.now)

    class Meta:
        model = Skill
        sqlalchemy_get_or_create = ('name',)


class EmployeeFactory(BaseModelFactory):

    id = factory.Sequence(lambda n: n + 1)
    first_name = factory.Faker('first_name', locale='ru_RU')
    last_name = factory.Faker('last_name', locale='ru_RU')
    phone = factory.Faker('phone_number')
    description = factory.Faker('text', max_nb_chars=80, locale='ru_RU')
    coin_id = factory.Faker('random_int')

    class Meta:
        model = Employee
        sqlalchemy_get_or_create = ('id',)


class EmployeesSkillsFactory(BaseModelFactory):

    id = factory.Sequence(lambda n: n + 1)
    employee_id = factory.Faker('random_int')
    skill_id = factory.Faker('random_int')
    updated_at = factory.Faker(
        'date_time_between_dates', datetime_start=datetime.now() - timedelta(days=30), datetime_end=datetime.now()
    )

    class Meta:
        model = EmployeesSkills
        sqlalchemy_get_or_create = (
            'id',
            'employee_id',
            'skill_id',
        )


class DepartmentFactory(BaseModelFactory):

    id = factory.Sequence(lambda n: n + 1)
    name = factory.Faker('company')
    description = factory.Faker('bs')
    updated_at = factory.Faker(
        'date_time_between_dates', datetime_start=datetime.now() - timedelta(days=30), datetime_end=datetime.now()
    )

    class Meta:
        model = Department
        sqlalchemy_get_or_create = (
            'id',
            'name',
        )


class EmployeeDepartmentFactory(BaseModelFactory):

    employee_id = factory.Faker('random_int')
    department_id = factory.Faker('random_int')
    created_at = factory.Faker(
        'date_time_between_dates',
        datetime_start=datetime.now() - timedelta(days=30),
        datetime_end=datetime.now() - timedelta(days=10)
    )
    updated_at = factory.Faker(
        'date_time_between_dates',
        datetime_start=datetime.now() - timedelta(days=10),
        datetime_end=datetime.now()
    )

    class Meta:
        model = EmployeeDepartments
84 sqlalchemy_study/src/data/fill_data.py Normal file
@@ -0,0 +1,84 @@
import asyncio
import random
import uuid

from factory import fuzzy
from faker import Faker

from data.factories import (
    UserFactory,
    CoinModelFactory,
    EmployeesSkillsFactory,
    SkillFactory,
    EmployeeFactory,
    DepartmentFactory,
    EmployeeDepartmentFactory
)
from db.dependencies import get_async_db_session
from db.models.user import User
from db.utils import drop_tables, run_migrations
from settings.logger import logger

faker = Faker('ru_RU')


async def add_users_data() -> None:
    async with get_async_db_session() as session:
        users = []
        for _ in range(10):
            users.append(User(username=faker.profile(fields=['username'])['username'],
                              hash_password=faker.password(),
                              auth_token=str(uuid.uuid4()),
                              )
                         )
        session.add_all(users)


def get_random_skill(skills: list[int]) -> list[int]:
    random_skills = random.sample(skills, random.randint(2, 9))
    return random_skills


def fill_database() -> None:

    # async add faker data
    asyncio.run(add_users_data())

    # sync factory boy add data
    coins = [coin.id for coin in CoinModelFactory.create_batch(42)]

    jonny = EmployeeFactory(first_name='Tony', last_name='Stark', coin_id=fuzzy.FuzzyChoice(coins))
    karl = EmployeeFactory(first_name='Karl', coin_id=fuzzy.FuzzyChoice(coins))
    employees = EmployeeFactory.create_batch(40, coin_id=fuzzy.FuzzyChoice(coins))

    skills = [skill.id for skill in SkillFactory.create_batch(size=faker.random_int(min=20, max=42))]

    for skill in get_random_skill(skills):
        EmployeesSkillsFactory(employee_id=jonny.id, skill_id=skill)

    for skill in get_random_skill(skills):
        EmployeesSkillsFactory(employee_id=karl.id, skill_id=skill)

    for employee in employees:
        for skill in get_random_skill(skills):
            EmployeesSkillsFactory(employee_id=employee.id, skill_id=skill)

    # User data (ids 20-29, created only if they do not already exist)
    for user_id in range(20, 30):
        UserFactory(id=user_id, username=faker.profile(fields=['username'])['username'])

    # Department data
    departments = DepartmentFactory.create_batch(5)
    departments = [department.id for department in departments]

    for employee in [jonny, karl, *employees]:
        EmployeeDepartmentFactory(employee_id=employee.id, department_id=fuzzy.FuzzyChoice(departments))

    logger.info('All data has been created. You can run the data/get_data.py script')


if __name__ == '__main__':
    drop_tables()
    run_migrations()
    fill_database()
66 sqlalchemy_study/src/data/get_data.py Normal file
@@ -0,0 +1,66 @@
import asyncio

from settings.logger import logger
from sqlalchemy_study.sqlalchemy import select
from sqlalchemy_study.sqlalchemy import load_only, contains_eager, joinedload

from db.dependencies import get_async_db_session
from db.models.coin import Coin
from db.models.department import EmployeeDepartments, Department
from db.models.skills import Skill
from db.models.user import Employee, User


async def get_data() -> list[Employee]:
    query = (
        select(Employee)
        .join(Employee.coin).options(
            contains_eager(Employee.coin).options(load_only(Coin.name, Coin.enabled)))
        .join(Employee.skills).options(
            contains_eager(Employee.skills).load_only(Skill.name)
        ).options(load_only(Employee.id,
                            Employee.first_name,
                            Employee.phone,
                            )
                  )
        .outerjoin(Employee.department).options(
            contains_eager(Employee.department).options(
                joinedload(EmployeeDepartments.department)
                .options(load_only(Department.name,
                                   Department.description, )
                         )
            )
        )
        .outerjoin(Employee.user).options(
            contains_eager(Employee.user).options(load_only(User.username))
        )
    ).order_by(Employee.id, Skill.name)

    async with get_async_db_session() as session:
        result = await session.execute(query)
        data = result.unique().scalars().all()
        return data


employees = asyncio.run(get_data())


for employee in employees:
    print(''.center(40, '-'), '\nEmployee id: {0}\nFirst name: {1}\nPhone: {2}\nSkills: {3}\n'
          'Coin name: {4}\nCoin enabled: {5}\nDepartment: {6} -> {7}\nUsername: {8}'
          .format(employee.id,
                  employee.first_name,
                  employee.phone,
                  ', '.join([skill.name for skill in employee.skills[:5]]),
                  employee.coin.name,
                  employee.coin.enabled,
                  employee.department.department.name,
                  employee.department.department.description,
                  employee.user.username if hasattr(employee.user, 'username') else None,
                  )
          )

logger.info(f'Total employees: {len(employees)}')
31 sqlalchemy_study/src/db/base.py Normal file
@@ -0,0 +1,31 @@
from typing import Any, Tuple, Union, Type

from sqlalchemy_study.sqlalchemy import Table, Column, Integer, DATETIME, TIMESTAMP, func
from sqlalchemy_study.sqlalchemy import as_declarative

from db.meta import meta
from settings import settings

DB_TIME_FORMAT: Type[Union[DATETIME, TIMESTAMP]] = DATETIME if settings.USE_DATABASE == 'mysql' else TIMESTAMP


@as_declarative(metadata=meta)
class BaseModel:
    """
    BaseModel for all models.

    It has some type definitions to
    enhance autocompletion.
    """

    __tablename__: str
    __table__: Table
    __table_args__: Tuple[Any, ...]
    __abstract__ = True

    id = Column(Integer, nullable=False, unique=True, primary_key=True, autoincrement=True)
    created_at = Column(DB_TIME_FORMAT, default=func.now(), index=True)
    updated_at = Column(DB_TIME_FORMAT, nullable=True)

    def __repr__(self):
        return f"<{self.__class__.__name__}(id={self.id!r})>"
57 sqlalchemy_study/src/db/dependencies.py Normal file
@@ -0,0 +1,57 @@
from asyncio import current_task
from contextlib import asynccontextmanager
from typing import AsyncGenerator

from sqlalchemy_study.sqlalchemy import create_engine
from sqlalchemy_study.sqlalchemy import create_async_engine, AsyncSession, async_scoped_session, AsyncEngine
from sqlalchemy_study.sqlalchemy import sessionmaker, Session

from settings import settings

async_engine: AsyncEngine = create_async_engine(str(settings.async_db_url), echo=settings.DB_ECHO)
async_session_factory = async_scoped_session(
    sessionmaker(
        autocommit=False,
        autoflush=False,
        class_=AsyncSession,
        expire_on_commit=False,
        bind=async_engine,
    ),
    scopefunc=current_task,
)


sync_engine = create_engine(settings.sync_db_url, echo=settings.DB_ECHO)
sync_session_factory = sessionmaker(sync_engine)


def get_sync_db_session() -> Session:
    session: Session = sync_session_factory()
    try:
        return session
    except Exception as err:
        session.rollback()
        raise err
    finally:
        # Note: this finally block runs before the caller receives the session;
        # close() only releases the connection, so the session remains usable.
        session.commit()
        session.close()


@asynccontextmanager
async def get_async_db_session() -> AsyncGenerator[AsyncSession, None]:
    """
    Create and get a database session.

    :yield: database session.
    """
    session = async_session_factory()
    try:
        yield session
    except Exception as err:
        await session.rollback()
        raise err
    finally:
        await session.commit()
        await session.close()
        await async_session_factory.remove()
3 sqlalchemy_study/src/db/meta.py Normal file
@@ -0,0 +1,3 @@
from sqlalchemy_study import sqlalchemy as sa

meta = sa.MetaData()
13 sqlalchemy_study/src/db/models/__init__.py Normal file
@@ -0,0 +1,13 @@
import pkgutil
from pathlib import Path


def load_all_models() -> None:
    """Load all models from this folder."""
    root_dir = Path(__file__).resolve().parent
    modules = pkgutil.walk_packages(
        path=[str(root_dir)],
        prefix="db.models.",
    )
    for module in modules:
        __import__(module.name)
16 sqlalchemy_study/src/db/models/cadre_movements.py Executable file
@@ -0,0 +1,16 @@
from sqlalchemy_study.sqlalchemy import Column, Integer, ForeignKey, VARCHAR
from sqlalchemy_study.sqlalchemy import relation

from db.base import BaseModel
from db.models.department import Department


class CadreMovement(BaseModel):
    __tablename__ = 'cadre_movements'

    employee = Column(Integer, ForeignKey('employees.id', ondelete='CASCADE'), nullable=False, index=True)
    old_department = Column(Integer, ForeignKey('departments.id', ondelete='CASCADE'), nullable=False, index=True)
    new_department = Column(Integer, ForeignKey('departments.id', ondelete='CASCADE'), nullable=False, index=True)
    reason = Column(VARCHAR(500), nullable=True)

    department = relation(Department, foreign_keys=new_department, lazy='select')
35 sqlalchemy_study/src/db/models/coin.py Normal file
@@ -0,0 +1,35 @@
from sqlalchemy_study.sqlalchemy import VARCHAR
from sqlalchemy_study.sqlalchemy import relationship
from sqlalchemy_study.sqlalchemy import Column
from sqlalchemy_study.sqlalchemy import ForeignKey
from sqlalchemy_study.sqlalchemy import Integer, BOOLEAN

from db.base import BaseModel


class Coin(BaseModel):
    """Model for coin."""

    __tablename__ = "coins"

    name = Column('coin_name', VARCHAR(50), unique=True)
    enabled = Column('enabled', BOOLEAN)

    coin_type_id = relationship("CoinType",
                                primaryjoin="Coin.id == CoinType.coin_id",
                                back_populates='coin',
                                uselist=False,
                                viewonly=True,
                                lazy="raise",
                                )
    employee = relationship('Employee', back_populates='coin')


class CoinType(BaseModel):
    """Model for coin type."""

    __tablename__ = "coin_types"

    name = Column('coin_name', VARCHAR(50))
    coin_id = Column(Integer, ForeignKey('coins.id', ondelete='CASCADE'))
    coin = relationship(Coin, back_populates='coin_type_id')
23 sqlalchemy_study/src/db/models/department.py Executable file
@@ -0,0 +1,23 @@
from sqlalchemy_study.sqlalchemy import Column, VARCHAR, Integer, ForeignKey
from sqlalchemy_study.sqlalchemy import relationship

from db.base import BaseModel


class Department(BaseModel):
    __tablename__ = 'departments'

    name = Column(VARCHAR(255), nullable=False)
    description = Column(VARCHAR(255), nullable=False)


class EmployeeDepartments(BaseModel):
    __tablename__ = 'employee_departments'

    employee_id = Column(Integer, ForeignKey('employees.id', ondelete='CASCADE'), nullable=False, index=True)
    department_id = Column(Integer, ForeignKey('departments.id', ondelete='CASCADE'), nullable=False, index=True)

    department = relationship(Department,
                              lazy='noload',
                              backref='emp_depart',
                              )
19 sqlalchemy_study/src/db/models/skills.py Normal file
@@ -0,0 +1,19 @@
from sqlalchemy_study.sqlalchemy import Column, ForeignKey, VARCHAR, Text, UniqueConstraint

from db.base import BaseModel
from db.models.user import Employee


class Skill(BaseModel):
    __tablename__ = 'skills'

    name = Column(VARCHAR(255), nullable=False, unique=True)
    description = Column(Text, nullable=True)


class EmployeesSkills(BaseModel):
    __tablename__ = 'employees_skills'
    __table_args__ = (UniqueConstraint("employee_id", "skill_id"),)

    employee_id = Column(ForeignKey(Employee.id, ondelete='CASCADE'), nullable=False, index=True)
    skill_id = Column(ForeignKey(Skill.id, ondelete='CASCADE'), nullable=False, index=True)
62 sqlalchemy_study/src/db/models/user.py Normal file
@@ -0,0 +1,62 @@
import datetime

from sqlalchemy_study.sqlalchemy import Column, String, DateTime, ForeignKey
from sqlalchemy_study.sqlalchemy import VARCHAR
from sqlalchemy_study.sqlalchemy import relationship

from db.base import BaseModel
from db.models.coin import Coin


class User(BaseModel):
    __tablename__ = 'users'

    username: str = Column(String(255), unique=True)
    email: str = Column(String(255), index=True, unique=True, nullable=True)
    hash_password: str = Column(String(255))
    auth_token: str = Column(String(255))
    last_login: datetime.datetime = Column(DateTime, default=datetime.datetime.now, index=True)

    def __repr__(self):
        return f'User: id:{self.id}, name: {self.username}'

    employee = relationship('Employee',
                            primaryjoin='foreign(User.id)==remote(Employee.id)',
                            lazy='noload',
                            backref='user_employee',
                            )


class Employee(BaseModel):
    __tablename__ = 'employees'

    first_name = Column(VARCHAR(128), nullable=False)
    last_name = Column(VARCHAR(128), nullable=False)
    phone = Column(VARCHAR(30), unique=True, nullable=True)
    description = Column(VARCHAR(255), nullable=True)
    coin_id = Column('coin_id', ForeignKey('coins.id', ondelete='SET NULL'), nullable=True)

    coin = relationship(Coin,
                        back_populates='employee',
                        primaryjoin='Employee.coin_id==Coin.id',
                        lazy='noload',
                        uselist=False,
                        )

    skills = relationship('Skill',
                          secondary="employees_skills",
                          lazy='noload',
                          uselist=True,
                          )

    department = relationship('EmployeeDepartments',
                              lazy='noload',
                              backref='employee',
                              uselist=False,
                              )

    user = relationship('User',
                        primaryjoin='foreign(Employee.id)==remote(User.id)',
                        lazy='raise',
                        backref='user_employee',
                        )
56 sqlalchemy_study/src/db/utils.py Normal file
@@ -0,0 +1,56 @@
from alembic import command, config as alembic_config
from sqlalchemy_study.sqlalchemy import MetaData, Table, ForeignKeyConstraint
from sqlalchemy_study.sqlalchemy import inspect
from sqlalchemy_study.sqlalchemy import NoSuchTableError
from sqlalchemy_study.sqlalchemy import DropConstraint

from db.dependencies import sync_engine
from db.meta import meta
from db.models import load_all_models
from settings import settings
from settings.logger import logger

alembic_cfg = alembic_config.Config("alembic.ini")


def remove_foreign_keys() -> None:
    logger.info("Dropping all foreign key constraints from the database")

    inspector = inspect(sync_engine)
    fake_metadata = MetaData()

    fake_tables = []
    all_fks = []
    for table_name in meta.tables:
        fks = []
        try:
            for fk in inspector.get_foreign_keys(table_name):
                if fk['name']:
                    fks.append(ForeignKeyConstraint((), (), name=fk['name']))
        except NoSuchTableError:
            logger.error(f'Table {table_name} does not exist')
        t = Table(table_name, fake_metadata, *fks)
        fake_tables.append(t)
        all_fks.extend(fks)
    connection = sync_engine.connect()
    transaction = connection.begin()
    for fkc in all_fks:
        connection.execute(DropConstraint(fkc))
    transaction.commit()


def drop_tables() -> None:
    load_all_models()
    remove_foreign_keys()
    meta.drop_all(bind=sync_engine, checkfirst=True)
    sync_engine.execute('DROP TABLE IF EXISTS alembic_version')
    sync_engine.dispose()
    logger.info("All tables are dropped")


def run_migrations() -> None:
    with sync_engine.begin() as connection:
        alembic_cfg.attributes['connection'] = connection
        migration_dialect = 'mysql_init_migrations' if settings.USE_DATABASE == 'mysql' else 'postgres_init_migrations'
        command.upgrade(alembic_cfg, migration_dialect)
    logger.info('Tables recreated')
1 sqlalchemy_study/src/migrations/README Normal file
@@ -0,0 +1 @@
Generic single-database configuration.
0 sqlalchemy_study/src/migrations/__init__.py Normal file
73 sqlalchemy_study/src/migrations/env.py Normal file
@@ -0,0 +1,73 @@
import asyncio
from logging.config import fileConfig

from alembic import context
from sqlalchemy_study.sqlalchemy import create_async_engine
from sqlalchemy_study.sqlalchemy import Connection

from db.base import BaseModel
from db.models import load_all_models
from settings import settings

config = context.config

if config.config_file_name is not None:
    fileConfig(config.config_file_name)

target_metadata = BaseModel.metadata
load_all_models()


async def run_migrations_offline():
    """Run migrations in 'offline' mode.

    This configures the context with just a URL
    and not an Engine, though an Engine is acceptable
    here as well. By skipping the Engine creation
    we don't even need a DBAPI to be available.

    Calls to context.execute() here emit the given string to the
    script output.

    """
    context.configure(
        url=settings.async_db_url,
        target_metadata=target_metadata,
        literal_binds=True,
        dialect_opts={"paramstyle": "named"},
    )

    with context.begin_transaction():
        context.run_migrations()


def do_run_migrations(connection: Connection) -> None:
    """
    Run actual sync migrations.

    :param connection: connection to the database.
    """
    context.configure(connection=connection, target_metadata=target_metadata)

    with context.begin_transaction():
        context.run_migrations()


async def run_migrations_online():
    """Run migrations in 'online' mode.

    In this scenario we need to create an Engine
    and associate a connection with the context.

    """
    connectable = create_async_engine(settings.async_db_url)

    async with connectable.connect() as connection:
        await connection.run_sync(do_run_migrations)


if context.is_offline_mode():
    asyncio.run(run_migrations_offline())
else:
    asyncio.run(run_migrations_online())
24 sqlalchemy_study/src/migrations/script.py.mako Normal file
@@ -0,0 +1,24 @@
"""${message}

Revision ID: ${up_revision}
Revises: ${down_revision | comma,n}
Create Date: ${create_date}

"""
from alembic import op
import sqlalchemy as sa
${imports if imports else ""}

# revision identifiers, used by Alembic.
revision = ${repr(up_revision)}
down_revision = ${repr(down_revision)}
branch_labels = ${repr(branch_labels)}
depends_on = ${repr(depends_on)}


def upgrade():
    ${upgrades if upgrades else "pass"}


def downgrade():
    ${downgrades if downgrades else "pass"}
@@ -0,0 +1,174 @@
|
||||
"""mysql init models
|
||||
|
||||
Revision ID: mysql_init_migrations
|
||||
Revises:
|
||||
Create Date: 2022-05-29 19:26:09.995005
|
||||
|
||||
"""
|
||||
from alembic import op
|
||||
from sqlalchemy_study import sqlalchemy as sa
|
||||
from sqlalchemy_study.sqlalchemy import mysql
|
||||
|
||||
# revision identifiers, used by Alembic.
|
||||
revision = 'mysql_init_migrations'
|
||||
down_revision = None
|
||||
branch_labels = None
|
||||
depends_on = None
|
||||
|
||||
|
||||
def upgrade():
    # ### commands auto generated by Alembic - please adjust! ###
    op.create_table('coins',
    sa.Column('id', sa.Integer(), autoincrement=True, nullable=False),
    sa.Column('created_at', sa.DATETIME(), nullable=True),
    sa.Column('updated_at', sa.DATETIME(), nullable=True),
    sa.Column('coin_name', sa.VARCHAR(length=50), nullable=True),
    sa.Column('enabled', sa.BOOLEAN(), nullable=True),
    sa.PrimaryKeyConstraint('id'),
    sa.UniqueConstraint('coin_name'),
    sa.UniqueConstraint('id')
    )
    op.create_index(op.f('ix_coins_created_at'), 'coins', ['created_at'], unique=False)
    op.create_table('departments',
    sa.Column('id', sa.Integer(), autoincrement=True, nullable=False),
    sa.Column('created_at', sa.DATETIME(), nullable=True),
    sa.Column('updated_at', sa.DATETIME(), nullable=True),
    sa.Column('name', sa.VARCHAR(length=255), nullable=False),
    sa.Column('description', sa.VARCHAR(length=255), nullable=False),
    sa.PrimaryKeyConstraint('id'),
    sa.UniqueConstraint('id')
    )
    op.create_index(op.f('ix_departments_created_at'), 'departments', ['created_at'], unique=False)
    op.create_table('skills',
    sa.Column('id', sa.Integer(), autoincrement=True, nullable=False),
    sa.Column('created_at', sa.DATETIME(), nullable=True),
    sa.Column('updated_at', sa.DATETIME(), nullable=True),
    sa.Column('name', sa.VARCHAR(length=255), nullable=False),
    sa.Column('description', sa.Text(), nullable=True),
    sa.PrimaryKeyConstraint('id'),
    sa.UniqueConstraint('id'),
    sa.UniqueConstraint('name')
    )
    op.create_index(op.f('ix_skills_created_at'), 'skills', ['created_at'], unique=False)
    op.create_table('users',
    sa.Column('id', sa.Integer(), autoincrement=True, nullable=False),
    sa.Column('created_at', sa.DATETIME(), nullable=True),
    sa.Column('updated_at', sa.DATETIME(), nullable=True),
    sa.Column('username', sa.String(length=255), nullable=True),
    sa.Column('email', sa.String(length=255), nullable=True),
    sa.Column('hash_password', sa.String(length=255), nullable=True),
    sa.Column('auth_token', sa.String(length=255), nullable=True),
    sa.Column('last_login', sa.DateTime(), nullable=True),
    sa.PrimaryKeyConstraint('id'),
    sa.UniqueConstraint('id'),
    sa.UniqueConstraint('username')
    )
    op.create_index(op.f('ix_users_created_at'), 'users', ['created_at'], unique=False)
    op.create_index(op.f('ix_users_email'), 'users', ['email'], unique=True)
    op.create_index(op.f('ix_users_last_login'), 'users', ['last_login'], unique=False)
    op.create_table('coin_types',
    sa.Column('id', sa.Integer(), autoincrement=True, nullable=False),
    sa.Column('created_at', sa.DATETIME(), nullable=True),
    sa.Column('updated_at', sa.DATETIME(), nullable=True),
    sa.Column('coin_name', sa.VARCHAR(length=50), nullable=True),
    sa.Column('coin_id', sa.Integer(), nullable=True),
    sa.ForeignKeyConstraint(['coin_id'], ['coins.id'], ondelete='CASCADE'),
    sa.PrimaryKeyConstraint('id'),
    sa.UniqueConstraint('id')
    )
    op.create_index(op.f('ix_coin_types_created_at'), 'coin_types', ['created_at'], unique=False)
    op.create_table('employees',
    sa.Column('id', sa.Integer(), autoincrement=True, nullable=False),
    sa.Column('created_at', sa.DATETIME(), nullable=True),
    sa.Column('updated_at', sa.DATETIME(), nullable=True),
    sa.Column('first_name', mysql.VARCHAR(length=128), nullable=False),
    sa.Column('last_name', mysql.VARCHAR(length=128), nullable=False),
    sa.Column('phone', mysql.VARCHAR(length=30), nullable=True),
    sa.Column('description', mysql.VARCHAR(length=255), nullable=True),
    sa.Column('coin_id', sa.Integer(), nullable=True),
    sa.ForeignKeyConstraint(['coin_id'], ['coins.id'], ondelete='SET NULL'),
    sa.PrimaryKeyConstraint('id'),
    sa.UniqueConstraint('id'),
    sa.UniqueConstraint('phone')
    )
    op.create_index(op.f('ix_employees_created_at'), 'employees', ['created_at'], unique=False)
    op.create_table('cadre_movements',
    sa.Column('id', sa.Integer(), autoincrement=True, nullable=False),
    sa.Column('created_at', sa.DATETIME(), nullable=True),
    sa.Column('updated_at', sa.DATETIME(), nullable=True),
    sa.Column('employee', sa.Integer(), nullable=False),
    sa.Column('old_department', sa.Integer(), nullable=False),
    sa.Column('new_department', sa.Integer(), nullable=False),
    sa.Column('reason', sa.VARCHAR(length=500), nullable=True),
    sa.ForeignKeyConstraint(['employee'], ['employees.id'], ondelete='CASCADE'),
    sa.ForeignKeyConstraint(['new_department'], ['departments.id'], ondelete='CASCADE'),
    sa.ForeignKeyConstraint(['old_department'], ['departments.id'], ondelete='CASCADE'),
    sa.PrimaryKeyConstraint('id'),
    sa.UniqueConstraint('id')
    )
    op.create_index(op.f('ix_cadre_movements_created_at'), 'cadre_movements', ['created_at'], unique=False)
    op.create_index(op.f('ix_cadre_movements_employee'), 'cadre_movements', ['employee'], unique=False)
    op.create_index(op.f('ix_cadre_movements_new_department'), 'cadre_movements', ['new_department'], unique=False)
    op.create_index(op.f('ix_cadre_movements_old_department'), 'cadre_movements', ['old_department'], unique=False)
    op.create_table('employee_departments',
    sa.Column('id', sa.Integer(), autoincrement=True, nullable=False),
    sa.Column('created_at', sa.DATETIME(), nullable=True),
    sa.Column('updated_at', sa.DATETIME(), nullable=True),
    sa.Column('employee_id', sa.Integer(), nullable=False),
    sa.Column('department_id', sa.Integer(), nullable=False),
    sa.ForeignKeyConstraint(['department_id'], ['departments.id'], ondelete='CASCADE'),
    sa.ForeignKeyConstraint(['employee_id'], ['employees.id'], ondelete='CASCADE'),
    sa.PrimaryKeyConstraint('id'),
    sa.UniqueConstraint('id')
    )
    op.create_index(op.f('ix_employee_departments_created_at'), 'employee_departments', ['created_at'], unique=False)
    op.create_index(op.f('ix_employee_departments_department_id'), 'employee_departments', ['department_id'], unique=False)
    op.create_index(op.f('ix_employee_departments_employee_id'), 'employee_departments', ['employee_id'], unique=False)
    op.create_table('employees_skills',
    sa.Column('id', sa.Integer(), autoincrement=True, nullable=False),
    sa.Column('created_at', sa.DATETIME(), nullable=True),
    sa.Column('updated_at', sa.DATETIME(), nullable=True),
    sa.Column('employee_id', sa.Integer(), nullable=False),
    sa.Column('skill_id', sa.Integer(), nullable=False),
    sa.ForeignKeyConstraint(['employee_id'], ['employees.id'], ondelete='CASCADE'),
    sa.ForeignKeyConstraint(['skill_id'], ['skills.id'], ondelete='CASCADE'),
    sa.PrimaryKeyConstraint('id'),
    sa.UniqueConstraint('employee_id', 'skill_id'),
    sa.UniqueConstraint('id')
    )
    op.create_index(op.f('ix_employees_skills_created_at'), 'employees_skills', ['created_at'], unique=False)
    op.create_index(op.f('ix_employees_skills_employee_id'), 'employees_skills', ['employee_id'], unique=False)
    op.create_index(op.f('ix_employees_skills_skill_id'), 'employees_skills', ['skill_id'], unique=False)
    # ### end Alembic commands ###


def downgrade():
    # ### commands auto generated by Alembic - please adjust! ###
    op.drop_index(op.f('ix_employees_skills_skill_id'), table_name='employees_skills')
    op.drop_index(op.f('ix_employees_skills_employee_id'), table_name='employees_skills')
    op.drop_index(op.f('ix_employees_skills_created_at'), table_name='employees_skills')
    op.drop_table('employees_skills')
    op.drop_index(op.f('ix_employee_departments_employee_id'), table_name='employee_departments')
    op.drop_index(op.f('ix_employee_departments_department_id'), table_name='employee_departments')
    op.drop_index(op.f('ix_employee_departments_created_at'), table_name='employee_departments')
    op.drop_table('employee_departments')
    op.drop_index(op.f('ix_cadre_movements_old_department'), table_name='cadre_movements')
    op.drop_index(op.f('ix_cadre_movements_new_department'), table_name='cadre_movements')
    op.drop_index(op.f('ix_cadre_movements_employee'), table_name='cadre_movements')
    op.drop_index(op.f('ix_cadre_movements_created_at'), table_name='cadre_movements')
    op.drop_table('cadre_movements')
    op.drop_index(op.f('ix_employees_created_at'), table_name='employees')
    op.drop_table('employees')
    op.drop_index(op.f('ix_coin_types_created_at'), table_name='coin_types')
    op.drop_table('coin_types')
    op.drop_index(op.f('ix_users_last_login'), table_name='users')
    op.drop_index(op.f('ix_users_email'), table_name='users')
    op.drop_index(op.f('ix_users_created_at'), table_name='users')
    op.drop_table('users')
    op.drop_index(op.f('ix_skills_created_at'), table_name='skills')
    op.drop_table('skills')
    op.drop_index(op.f('ix_departments_created_at'), table_name='departments')
    op.drop_table('departments')
    op.drop_index(op.f('ix_coins_created_at'), table_name='coins')
    op.drop_table('coins')
    # ### end Alembic commands ###
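
This revision and the PostgreSQL one below are alternative roots: both set down_revision = None, one per backend. A minimal sketch of applying either, assuming the usual Alembic setup where alembic.ini points at the versions directory for the selected backend:

    alembic upgrade head      # apply the initial schema for the configured backend
    alembic downgrade base    # roll it back

Both are standard Alembic CLI commands; which init revision actually runs presumably depends on how the project wires USE_DATABASE (see settings.py below) into its Alembic environment.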
@@ -0,0 +1,174 @@
"""postgres init migrations

Revision ID: postgres_init_migrations
Revises:
Create Date: 2022-06-14 00:29:28.932954

"""
from alembic import op

from sqlalchemy_study import sqlalchemy as sa
from sqlalchemy_study.sqlalchemy import mysql

# revision identifiers, used by Alembic.
revision = 'postgres_init_migrations'
down_revision = None
branch_labels = None
depends_on = None


def upgrade():
    # ### commands auto generated by Alembic - please adjust! ###
    op.create_table('coins',
    sa.Column('id', sa.Integer(), autoincrement=True, nullable=False),
    sa.Column('created_at', sa.TIMESTAMP(), nullable=True),
    sa.Column('updated_at', sa.TIMESTAMP(), nullable=True),
    sa.Column('coin_name', sa.VARCHAR(length=50), nullable=True),
    sa.Column('enabled', sa.BOOLEAN(), nullable=True),
    sa.PrimaryKeyConstraint('id'),
    sa.UniqueConstraint('coin_name'),
    sa.UniqueConstraint('id')
    )
    op.create_index(op.f('ix_coins_created_at'), 'coins', ['created_at'], unique=False)
    op.create_table('departments',
    sa.Column('id', sa.Integer(), autoincrement=True, nullable=False),
    sa.Column('created_at', sa.TIMESTAMP(), nullable=True),
    sa.Column('updated_at', sa.TIMESTAMP(), nullable=True),
    sa.Column('name', sa.VARCHAR(length=255), nullable=False),
    sa.Column('description', sa.VARCHAR(length=255), nullable=False),
    sa.PrimaryKeyConstraint('id'),
    sa.UniqueConstraint('id')
    )
    op.create_index(op.f('ix_departments_created_at'), 'departments', ['created_at'], unique=False)
    op.create_table('skills',
    sa.Column('id', sa.Integer(), autoincrement=True, nullable=False),
    sa.Column('created_at', sa.TIMESTAMP(), nullable=True),
    sa.Column('updated_at', sa.TIMESTAMP(), nullable=True),
    sa.Column('name', sa.VARCHAR(length=255), nullable=False),
    sa.Column('description', sa.Text(), nullable=True),
    sa.PrimaryKeyConstraint('id'),
    sa.UniqueConstraint('id'),
    sa.UniqueConstraint('name')
    )
    op.create_index(op.f('ix_skills_created_at'), 'skills', ['created_at'], unique=False)
    op.create_table('users',
    sa.Column('id', sa.Integer(), autoincrement=True, nullable=False),
    sa.Column('created_at', sa.TIMESTAMP(), nullable=True),
    sa.Column('updated_at', sa.TIMESTAMP(), nullable=True),
    sa.Column('username', sa.String(length=255), nullable=True),
    sa.Column('email', sa.String(length=255), nullable=True),
    sa.Column('hash_password', sa.String(length=255), nullable=True),
    sa.Column('auth_token', sa.String(length=255), nullable=True),
    sa.Column('last_login', sa.DateTime(), nullable=True),
    sa.PrimaryKeyConstraint('id'),
    sa.UniqueConstraint('id'),
    sa.UniqueConstraint('username')
    )
    op.create_index(op.f('ix_users_created_at'), 'users', ['created_at'], unique=False)
    op.create_index(op.f('ix_users_email'), 'users', ['email'], unique=True)
    op.create_index(op.f('ix_users_last_login'), 'users', ['last_login'], unique=False)
    op.create_table('coin_types',
    sa.Column('id', sa.Integer(), autoincrement=True, nullable=False),
    sa.Column('created_at', sa.TIMESTAMP(), nullable=True),
    sa.Column('updated_at', sa.TIMESTAMP(), nullable=True),
    sa.Column('coin_name', sa.VARCHAR(length=50), nullable=True),
    sa.Column('coin_id', sa.Integer(), nullable=True),
    sa.ForeignKeyConstraint(['coin_id'], ['coins.id'], ondelete='CASCADE'),
    sa.PrimaryKeyConstraint('id'),
    sa.UniqueConstraint('id')
    )
    op.create_index(op.f('ix_coin_types_created_at'), 'coin_types', ['created_at'], unique=False)
    op.create_table('employees',
    sa.Column('id', sa.Integer(), autoincrement=True, nullable=False),
    sa.Column('created_at', sa.TIMESTAMP(), nullable=True),
    sa.Column('updated_at', sa.TIMESTAMP(), nullable=True),
    sa.Column('first_name', mysql.VARCHAR(length=128), nullable=False),
    sa.Column('last_name', mysql.VARCHAR(length=128), nullable=False),
    sa.Column('phone', mysql.VARCHAR(length=30), nullable=True),
    sa.Column('description', mysql.VARCHAR(length=255), nullable=True),
    sa.Column('coin_id', sa.Integer(), nullable=True),
    sa.ForeignKeyConstraint(['coin_id'], ['coins.id'], ondelete='SET NULL'),
    sa.PrimaryKeyConstraint('id'),
    sa.UniqueConstraint('id'),
    sa.UniqueConstraint('phone')
    )
    op.create_index(op.f('ix_employees_created_at'), 'employees', ['created_at'], unique=False)
    op.create_table('cadre_movements',
    sa.Column('id', sa.Integer(), autoincrement=True, nullable=False),
    sa.Column('created_at', sa.TIMESTAMP(), nullable=True),
    sa.Column('updated_at', sa.TIMESTAMP(), nullable=True),
    sa.Column('employee', sa.Integer(), nullable=False),
    sa.Column('old_department', sa.Integer(), nullable=False),
    sa.Column('new_department', sa.Integer(), nullable=False),
    sa.Column('reason', sa.VARCHAR(length=500), nullable=True),
    sa.ForeignKeyConstraint(['employee'], ['employees.id'], ondelete='CASCADE'),
    sa.ForeignKeyConstraint(['new_department'], ['departments.id'], ondelete='CASCADE'),
    sa.ForeignKeyConstraint(['old_department'], ['departments.id'], ondelete='CASCADE'),
    sa.PrimaryKeyConstraint('id'),
    sa.UniqueConstraint('id')
    )
    op.create_index(op.f('ix_cadre_movements_created_at'), 'cadre_movements', ['created_at'], unique=False)
    op.create_index(op.f('ix_cadre_movements_employee'), 'cadre_movements', ['employee'], unique=False)
    op.create_index(op.f('ix_cadre_movements_new_department'), 'cadre_movements', ['new_department'], unique=False)
    op.create_index(op.f('ix_cadre_movements_old_department'), 'cadre_movements', ['old_department'], unique=False)
    op.create_table('employee_departments',
    sa.Column('id', sa.Integer(), autoincrement=True, nullable=False),
    sa.Column('created_at', sa.TIMESTAMP(), nullable=True),
    sa.Column('updated_at', sa.TIMESTAMP(), nullable=True),
    sa.Column('employee_id', sa.Integer(), nullable=False),
    sa.Column('department_id', sa.Integer(), nullable=False),
    sa.ForeignKeyConstraint(['department_id'], ['departments.id'], ondelete='CASCADE'),
    sa.ForeignKeyConstraint(['employee_id'], ['employees.id'], ondelete='CASCADE'),
    sa.PrimaryKeyConstraint('id'),
    sa.UniqueConstraint('id')
    )
    op.create_index(op.f('ix_employee_departments_created_at'), 'employee_departments', ['created_at'], unique=False)
    op.create_index(op.f('ix_employee_departments_department_id'), 'employee_departments', ['department_id'], unique=False)
    op.create_index(op.f('ix_employee_departments_employee_id'), 'employee_departments', ['employee_id'], unique=False)
    op.create_table('employees_skills',
    sa.Column('id', sa.Integer(), autoincrement=True, nullable=False),
    sa.Column('created_at', sa.TIMESTAMP(), nullable=True),
    sa.Column('updated_at', sa.TIMESTAMP(), nullable=True),
    sa.Column('employee_id', sa.Integer(), nullable=False),
    sa.Column('skill_id', sa.Integer(), nullable=False),
    sa.ForeignKeyConstraint(['employee_id'], ['employees.id'], ondelete='CASCADE'),
    sa.ForeignKeyConstraint(['skill_id'], ['skills.id'], ondelete='CASCADE'),
    sa.PrimaryKeyConstraint('id'),
    sa.UniqueConstraint('employee_id', 'skill_id'),
    sa.UniqueConstraint('id')
    )
    op.create_index(op.f('ix_employees_skills_created_at'), 'employees_skills', ['created_at'], unique=False)
    op.create_index(op.f('ix_employees_skills_employee_id'), 'employees_skills', ['employee_id'], unique=False)
    op.create_index(op.f('ix_employees_skills_skill_id'), 'employees_skills', ['skill_id'], unique=False)
    # ### end Alembic commands ###


def downgrade():
    # ### commands auto generated by Alembic - please adjust! ###
    op.drop_index(op.f('ix_employees_skills_skill_id'), table_name='employees_skills')
    op.drop_index(op.f('ix_employees_skills_employee_id'), table_name='employees_skills')
    op.drop_index(op.f('ix_employees_skills_created_at'), table_name='employees_skills')
    op.drop_table('employees_skills')
    op.drop_index(op.f('ix_employee_departments_employee_id'), table_name='employee_departments')
    op.drop_index(op.f('ix_employee_departments_department_id'), table_name='employee_departments')
    op.drop_index(op.f('ix_employee_departments_created_at'), table_name='employee_departments')
    op.drop_table('employee_departments')
    op.drop_index(op.f('ix_cadre_movements_old_department'), table_name='cadre_movements')
    op.drop_index(op.f('ix_cadre_movements_new_department'), table_name='cadre_movements')
    op.drop_index(op.f('ix_cadre_movements_employee'), table_name='cadre_movements')
    op.drop_index(op.f('ix_cadre_movements_created_at'), table_name='cadre_movements')
    op.drop_table('cadre_movements')
    op.drop_index(op.f('ix_employees_created_at'), table_name='employees')
    op.drop_table('employees')
    op.drop_index(op.f('ix_coin_types_created_at'), table_name='coin_types')
    op.drop_table('coin_types')
    op.drop_index(op.f('ix_users_last_login'), table_name='users')
    op.drop_index(op.f('ix_users_email'), table_name='users')
    op.drop_index(op.f('ix_users_created_at'), table_name='users')
    op.drop_table('users')
    op.drop_index(op.f('ix_skills_created_at'), table_name='skills')
    op.drop_table('skills')
    op.drop_index(op.f('ix_departments_created_at'), table_name='departments')
    op.drop_table('departments')
    op.drop_index(op.f('ix_coins_created_at'), table_name='coins')
    op.drop_table('coins')
    # ### end Alembic commands ###
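
Note that this PostgreSQL revision reuses the MySQL dialect types (mysql.VARCHAR) for the employees columns. That works because SQLAlchemy's mysql.VARCHAR subclasses the generic VARCHAR and so still compiles under the PostgreSQL dialect, but a dialect-neutral type would be the more conventional spelling — a possible adjustment, not what the file currently does:

    sa.Column('first_name', sa.String(length=128), nullable=False)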
4
sqlalchemy_study/src/settings/__init__.py
Normal file
4
sqlalchemy_study/src/settings/__init__.py
Normal file
@@ -0,0 +1,4 @@
from settings.settings import Settings


settings = Settings()
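
With this re-export, the rest of the project can import the shared, already-validated instance (assuming src/ is on the import path, as these absolute imports suggest):

    from settings import settings  # single Settings() instance, built at import time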
11
sqlalchemy_study/src/settings/logger.py
Normal file
11
sqlalchemy_study/src/settings/logger.py
Normal file
@@ -0,0 +1,11 @@
import logging
import sys

from loguru import logger

logger.remove()

formatter = "<cyan>{time}</cyan> | <level>{level}</level> | <magenta>{message}</magenta>"
sink = sys.stdout

logger.add(sink=sink, colorize=True, level=logging.INFO, format=formatter)
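
The module removes loguru's default handler and installs a single colorized stdout sink at INFO level. A quick usage sketch (the message is illustrative):

    from settings.logger import logger

    logger.info('database engine ready')   # rendered with the time/level/message format above
    logger.debug('not shown')              # below the INFO threshold, so dropped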
69
sqlalchemy_study/src/settings/settings.py
Normal file
69
sqlalchemy_study/src/settings/settings.py
Normal file
@@ -0,0 +1,69 @@
import os
from pathlib import Path

from pydantic import BaseSettings

BASE_DIR = Path(__file__).parent.parent

SHARED_DIR = BASE_DIR.resolve().joinpath('shared')
SHARED_DIR.joinpath('logs').mkdir(exist_ok=True)
DIR_LOGS = SHARED_DIR.joinpath('logs')


class Settings(BaseSettings):
    """Application settings."""

    DB_HOST: str = 'db_host'
    USE_DATABASE: str = 'mysql'
    DB_ECHO: bool = False

    # Postgres
    POSTGRES_DB_PORT: int
    POSTGRES_DB: str
    POSTGRES_USER: str
    POSTGRES_PASSWORD: str

    # MySQL
    MYSQL_DB_PORT: int
    MYSQL_DATABASE: str
    MYSQL_USER: str
    MYSQL_PASSWORD: str

    @property
    def async_db_url(self) -> str:
        """
        Assemble database URL from settings.

        :return: database URL.
        """
        async_postgres_url = (f'postgresql+asyncpg://{self.POSTGRES_USER}:{self.POSTGRES_PASSWORD}@'
                              f'{self.DB_HOST}:{self.POSTGRES_DB_PORT}/{self.POSTGRES_DB}'
                              )
        async_mysql_url = (f'mysql+asyncmy://{self.MYSQL_USER}:{self.MYSQL_PASSWORD}@'
                           f'{self.DB_HOST}:{self.MYSQL_DB_PORT}/{self.MYSQL_DATABASE}'
                           )
        if os.environ.get('USE_DATABASE', self.USE_DATABASE).lower() == 'postgres':
            return async_postgres_url
        return async_mysql_url

    @property
    def sync_db_url(self) -> str:
        """
        Assemble database URL from settings.

        :return: database URL.
        """
        sync_postgres_url = (f'postgresql://{self.POSTGRES_USER}:{self.POSTGRES_PASSWORD}@'
                             f'{self.DB_HOST}:{self.POSTGRES_DB_PORT}/{self.POSTGRES_DB}'
                             )
        sync_mysql_url = (f'mysql+pymysql://{self.MYSQL_USER}:{self.MYSQL_PASSWORD}@'
                          f'{self.DB_HOST}:{self.MYSQL_DB_PORT}/{self.MYSQL_DATABASE}'
                          )
        if os.environ.get('USE_DATABASE', self.USE_DATABASE).lower() == 'postgres':
            return sync_postgres_url
        return sync_mysql_url

    class Config:
        env_file = 'config/.env'
        env_file_encoding = "utf-8"
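
Since Config reads config/.env, the fields without defaults must be present there. A hypothetical sample (field names come from the class above; the values are invented and should be adjusted to your environment):

    # config/.env — sample values only
    DB_HOST=localhost
    USE_DATABASE=mysql
    POSTGRES_DB_PORT=5432
    POSTGRES_DB=sqlalchemy_study
    POSTGRES_USER=postgres
    POSTGRES_PASSWORD=secret
    MYSQL_DB_PORT=3307
    MYSQL_DATABASE=sqlalchemy_study
    MYSQL_USER=user
    MYSQL_PASSWORD=secret

The two URL properties plug straight into SQLAlchemy's engine factories — a minimal sketch, assuming SQLAlchemy 1.4+ with the pymysql/asyncmy (or psycopg2/asyncpg) drivers installed:

    from sqlalchemy import create_engine
    from sqlalchemy.ext.asyncio import create_async_engine

    from settings import settings

    engine = create_engine(settings.sync_db_url, echo=settings.DB_ECHO)          # blocking engine
    async_engine = create_async_engine(settings.async_db_url, echo=settings.DB_ECHO)  # asyncio engine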