add some files from study directory

Dmitry Afanasyev 2023-03-09 02:27:19 +03:00
parent d2a43ab6b6
commit 9cb85e8be8
48 changed files with 3676 additions and 224 deletions

dockerhub.py (new file, 164 lines)

@ -0,0 +1,164 @@
import asyncio
import re
import sys
from logging import Logger
from multiprocessing import Process
from typing import Any
import httpx
from httpx import AsyncHTTPTransport, AsyncClient
from packaging.version import parse as parse_version
from termcolor import colored
SERVICES = {
'nextcloud': '25.0.4',
'gitea/gitea': '1.18.5',
'caddy': '2.6.4',
'mediawiki': '1.39.2',
'bitwarden/server': '2023.2.0',
'redis': '7.0.8',
'nginx': '1.23.3',
'mariadb': '10.11.2',
'postgres': '15.2',
'mysql': '8.0.32',
'selenoid/firefox': '110.0',
'python': '3.11.1',
}
def configure_logger() -> Logger:
try:
from loguru import logger as loguru_logger
loguru_logger.remove()
loguru_logger.add(
sink=sys.stdout,
colorize=True,
level='DEBUG',
format='<cyan>{time:DD.MM.YYYY HH:mm:ss}</cyan> | <level>{level}</level> | <magenta>{message}</magenta>',
)
return loguru_logger # type: ignore
except ImportError:
import logging
logging_logger = logging.getLogger('main_logger')
formatter = logging.Formatter(
datefmt='%Y.%m.%d %H:%M:%S',
fmt='%(asctime)s | %(levelname)s | func name: %(funcName)s | message: %(message)s',
)
handler = logging.StreamHandler(sys.stdout)
handler.setFormatter(formatter)
logging_logger.setLevel(logging.INFO)
logging_logger.addHandler(handler)
return logging_logger
logger = configure_logger()
class DockerHubScanner:
# bitwarden/server
# https://hub.docker.com/v2/namespaces/bitwarden/repositories/server/tags?page=2
# caddy
# https://registry.hub.docker.com/v2/repositories/library/caddy/tags?page=1
DOCKERHUB_REGISTRY_API = 'https://registry.hub.docker.com/v2/repositories/library'
DOCKERHUB_API = 'https://hub.docker.com/v2/namespaces'
def _docker_hub_api_url(self, service_name: str) -> str:
if '/' in service_name:
namespace, name = service_name.split('/')
url = f'{self.DOCKERHUB_API}/{namespace}/repositories/{name}/tags'
else:
url = f'{self.DOCKERHUB_REGISTRY_API}/{service_name}/tags'
return url
@staticmethod
async def _async_request(client: AsyncClient, url: str) -> dict[str, Any] | None:
response = await client.get(url)
status = response.status_code
if status == httpx.codes.OK:
return response.json()
return None
@staticmethod
def _get_next_page_and_tags_from_payload(payload: dict[str, Any]) -> tuple[str | None, list[str]]:
next_page = payload['next']
names = [release['name'] for release in payload['results']]
return next_page, names
async def get_tags(self, service_name: str) -> dict[str, list[str]]:
"""
To make this method truly asynchronous it would have to fetch pages concurrently
instead of following the "next" link one page at a time. However, Docker Hub
throttles bursts of requests, so fetching the next page each time is safer here.
"""
tags = []
url = self._docker_hub_api_url(service_name)
transport = AsyncHTTPTransport(retries=1)
async with AsyncClient(transport=transport) as client:
payload = await self._async_request(client=client, url=url)
if not payload:
return {service_name: tags}
next_page, names = self._get_next_page_and_tags_from_payload(payload)
tags.extend(names)
while SERVICES[service_name] not in tags:
payload = await self._async_request(client=client, url=next_page)
next_page, names = self._get_next_page_and_tags_from_payload(payload)
tags.extend(names)
# Keep tags that look like plain versions (e.g. 1.18.3), drop variants containing letters (e.g. 1.18.3-fpm-alpine), then sort by version number
tags = sorted(
list(filter(lambda t: re.search(r'\d+\.\d', t) and not re.search(r'[a-z]', t), tags)),
reverse=True,
key=parse_version,
)
# Drop tags that are older than the current version
tags = tags[:tags.index(SERVICES[service_name]) + 1]
return {service_name: tags}
def get_data(self, service_name: str) -> dict[str, list[str]]:
loop = asyncio.new_event_loop()
asyncio.set_event_loop(loop)
services_tags = loop.run_until_complete(self.get_tags(service_name))
return services_tags
def print_data(self, service_name: str) -> None:
data = self.get_data(service_name)
print(
f"Service: {colored(service_name, color='light_grey')}",
f"\nTags: {colored(str(data[service_name]), color='magenta')}",
f"\nCurrent version: {colored(SERVICES[service_name], color='cyan')}"
)
if parse_version(data[service_name][0]) > parse_version(SERVICES[service_name]):
print(f"New version of {service_name}: {colored(data[service_name][0], color='yellow')}")
print()
if __name__ == '__main__':
print('Services'.center(50, '-'), '\n')
dockerhub_scanner = DockerHubScanner()
processes = []
for service in SERVICES:
process = Process(target=dockerhub_scanner.print_data, kwargs={'service_name': service})
processes.append(process)
process.start()
for process in processes:
process.join()
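The `get_tags` docstring above notes that a truly asynchronous version would fetch pages concurrently instead of walking the "next" links one by one. A minimal sketch of that idea, assuming Docker Hub's `?page=N` paging and a fixed page count (the function name and page count are illustrative, not part of this file):

```python
import asyncio
from typing import Any

from httpx import AsyncClient, AsyncHTTPTransport


async def fetch_tag_pages(tags_url: str, pages: int = 5) -> list[dict[str, Any]]:
    """Fetch several tag pages concurrently instead of following 'next' links."""
    transport = AsyncHTTPTransport(retries=1)
    async with AsyncClient(transport=transport) as client:
        responses = await asyncio.gather(
            *[client.get(f'{tags_url}?page={page}') for page in range(1, pages + 1)]
        )
    # Keep only successful payloads; Docker Hub may throttle bursts of requests like this.
    return [response.json() for response in responses if response.status_code == 200]
```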


@ -1,4 +0,0 @@
import requests
from requests.packages.urllib3.exceptions import InsecureRequestWarning
requests.packages.urllib3.disable_warnings(InsecureRequestWarning)


@ -1,27 +0,0 @@
import importlib.util
import logging
import sys
from pathlib import Path
current_dir = Path(__file__).parent.parent
# use loguru if it is possible for color output
if importlib.util.find_spec('loguru') is not None:
from loguru import logger
logger.remove()
logger.add(sink=sys.stdout, colorize=True, level='DEBUG',
format="<cyan>{time:DD.MM.YYYY HH:mm:ss}</cyan> | <level>{level}</level> | "
"<magenta>{message}</magenta>")
# use standard logging
else:
logger = logging.getLogger()
logger.setLevel(logging.INFO)
console_handler = logging.StreamHandler()
console_handler.setLevel(logging.INFO)
log_formatter = logging.Formatter("%(asctime)s | %(levelname)s | %(message)s")
console_handler.setFormatter(log_formatter)
logger.addHandler(console_handler)


@ -1,3 +0,0 @@
#!/usr/bin/bash
git pull --all


@ -1,51 +0,0 @@
import json # noqa # pylint: disable=unused-import
import subprocess
import sys
import time
import requests
from get_project_core.settings import current_dir, logger
GITLAB_TOKEN = ''
headers = {'PRIVATE-TOKEN': GITLAB_TOKEN}
def create_repositories(group_id: int):
"""
Create submodules from gitlab group
:param group_id: Can be find under group name
"""
request = requests.get(f'https://scm.x5.ru/api/v4/groups/{group_id}/projects', headers=headers, verify=False)
# logger.info(f'{json.dumps(request.json(), indent=4, separators=(",", ":"))}')
repos = request.json()
for repo in repos:
name = str(repo.get("ssh_url_to_repo", None)).strip()
subprocess.Popen(['git', 'submodule', 'add', name])
logger.info(f'Created: {name}')
time.sleep(15)
def update_submodules():
"""
Update all submodules
"""
subprocess.Popen(['git', 'submodule', 'foreach', f'{current_dir}/get-project-core/update-repos.sh'])
if __name__ == '__main__':
args = sys.argv[1:]
try:
group = args[0]
logger.info(group)
create_repositories(group_id=int(group))
update_submodules()
except IndexError:
logger.error('Gitlab group id must be set')
except ValueError:
logger.error('Gitlab group id must be integer')


@ -1 +0,0 @@
requests

goodgame.py (new file, 200 lines)

@ -0,0 +1,200 @@
import asyncio
import sys
import time
from logging import Logger
from multiprocessing import Process
from typing import Any
import aiohttp
import requests
def configure_logger() -> Logger:
try:
from loguru import logger as loguru_logger
loguru_logger.remove()
loguru_logger.add(
sink=sys.stdout,
colorize=True,
level='DEBUG',
format="<cyan>{time:DD.MM.YYYY HH:mm:ss}</cyan> | <level>{level}</level> | <magenta>{message}</magenta>",
)
return loguru_logger # type: ignore
except ImportError:
import logging
logging_logger = logging.getLogger('main_logger')
formatter = logging.Formatter(
datefmt="%Y.%m.%d %H:%M:%S",
fmt='%(asctime)s | %(levelname)s | func name: %(funcName)s | message: %(message)s',
)
handler = logging.StreamHandler(sys.stdout)
handler.setFormatter(formatter)
logging_logger.setLevel(logging.INFO)
logging_logger.addHandler(handler)
return logging_logger
logger = configure_logger()
class GoodGame:
BASE_URL = 'https://goodgame.ru/api/4/streams'
PAGES_FOR_ASYNC_SCAN = 25
CURRENT_WATCHERS_FILTER = 1
def __init__(self) -> None:
self.all_streams: dict[int, dict[str, Any]] = dict()
@staticmethod
def _show_time_and_result(message: str) -> Any:
def wrapper(func: Any) -> Any:
def new_func(*args: Any, **kwargs: Any) -> None:
begin = time.time()
result = func(*args, **kwargs)
end = time.time()
logger.info(f'{message} execution time, sec: {round(end - begin, 2)}')
print(result)
return new_func
return wrapper
def get_last_page_number(self) -> int:
"""
Deprecated
"""
last_page = 1
for page in range(20, 0, -1):
response = requests.get(f'{self.BASE_URL}?page={page}')
if response.json()["streams"]:
last_page = page
break
return last_page
def get_max_current_viewers_count(self) -> int | None:
"""
Deprecated
"""
response = requests.get(f'{self.BASE_URL}?page=1')
max_current_viewers = response.json()['streams'][0].get('viewers', None)
return max_current_viewers
def _sort_trim_dict(self, data: dict[str, int]) -> dict[str, int]:
sorted_data = dict(sorted(data.items(), key=lambda x: x[1], reverse=True))
new_data = {
stream: viewers_count
for stream, viewers_count in sorted_data.items()
if int(viewers_count) >= self.CURRENT_WATCHERS_FILTER
}
return new_data
def __count_streams_with_watchers(self, current_watchers: list[int]) -> int:
return len(
list(
filter(
lambda stream: stream['viewers'] in current_watchers,
self.all_streams.values(),
)
)
)
def __prepare_result(self, max_current_viewers: int) -> str:
total_viewers: dict[str, int] = dict()
for stream in self.all_streams.values():
if (
max_current_viewers
and int(stream.get('viewers', 0)) <= max_current_viewers
):
total_viewers[
f'{stream["streamer"]["username"]} [{stream["game"]["url"]}]'
] = int(stream['viewers'])
watchers_0 = self.__count_streams_with_watchers(current_watchers=[0])
watchers_1 = self.__count_streams_with_watchers(current_watchers=[1])
minimal_watchers = self.__count_streams_with_watchers(current_watchers=[0, 1])
return (
f'Total streams: {len(self.all_streams)} -> '
f'with minimal watchers {round(minimal_watchers / len(self.all_streams) * 100)}%\n'
f'Total streams with 0 viewers: {watchers_0} -> {round(watchers_0/len(self.all_streams) * 100)}%\n'
f'Total streams with 1 viewer: {watchers_1} -> {round(watchers_1/len(self.all_streams) * 100)}%\n'
f'Total viewers: {sum(total_viewers.values())}\n'
f'Streams: {self._sort_trim_dict(total_viewers)}\n'
f'{"-"*76}'
)
async def _async_request(self, session: aiohttp.ClientSession, url: str) -> None:
async with asyncio.Semaphore(500):
counter = 0
while True:
try:
counter += 1
resp = await session.get(url)
async with resp:
if resp.status == 200:
data = await resp.json()
for stream in data['streams']:
self.all_streams.update({stream['id']: stream})
return data['streams']
except Exception as connection_error:
if counter < 5:
await asyncio.sleep(10)
else:
raise connection_error
async def _async_data_scrapper(self) -> int:
async with aiohttp.ClientSession() as session:
streams = await asyncio.gather(
*[
self._async_request(session, f'{self.BASE_URL}?page={page}')
for page in range(1, self.PAGES_FOR_ASYNC_SCAN + 1)
],
return_exceptions=True,
)
max_current_viewers = streams[0][0]['viewers']
return max_current_viewers
@_show_time_and_result(message='Async counter')
def async_counter(self) -> str:
loop = asyncio.new_event_loop()
asyncio.set_event_loop(loop)
max_current_viewers = loop.run_until_complete(self._async_data_scrapper())
return self.__prepare_result(max_current_viewers)
@_show_time_and_result(message='Sync counter')
def sync_counter(self) -> str:
page = 1
resp = requests.get(f'{self.BASE_URL}?page={page}')
streams = resp.json()['streams']
for stream in streams:
self.all_streams.update({stream['id']: stream})
max_current_viewers = streams[0]['viewers']
while streams:
page += 1
resp = requests.get(f'{self.BASE_URL}?page={page}')
streams = resp.json()['streams']
for stream in streams:
self.all_streams.update({stream['id']: stream})
return self.__prepare_result(max_current_viewers)
if __name__ == '__main__':
print("-" * 76)
good_game = GoodGame()
start = time.time()
async_process = Process(
target=good_game.async_counter, args=(), kwargs={}, name='async_process'
)
sync_process = Process(
target=good_game.sync_counter, args=(), kwargs={}, name='sync_process'
)
async_process.start()
sync_process.start()
async_process.join()
sync_process.join()
stop = time.time()
logger.info(f'End all processes. Execution time: {round(stop-start, 2)} seconds')
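One detail worth noting in `_async_request` above: the `asyncio.Semaphore(500)` is created inside the coroutine, so each call gets its own semaphore and the limit is never shared between requests. A minimal sketch of a shared limiter, purely as a suggestion (the names and the limit value are illustrative, not part of this file):

```python
import asyncio
from typing import Any

import aiohttp

# A single semaphore shared by all requests, so at most 50 of them run concurrently.
REQUEST_LIMIT = asyncio.Semaphore(50)


async def limited_get(session: aiohttp.ClientSession, url: str) -> dict[str, Any]:
    async with REQUEST_LIMIT:
        async with session.get(url) as response:
            response.raise_for_status()
            return await response.json()
```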

linked_list.py (new file, 123 lines)

@ -0,0 +1,123 @@
# Python3 program: merge sort on a singly linked list
# Node of a singly linked list
class Node:
def __init__(self, data):
self.data = data
self.next = None
def __repr__(self):
return f'{self.data}'
class LinkedList:
def __init__(self):
self.head = None
# Append a new value to the end of the linked list
def append(self, new_value):
# Allocate new node
new_node = Node(new_value)
# if head is None, initialize it to new node
if self.head is None:
self.head = new_node
return
curr_node = self.head
while curr_node.next is not None:
curr_node = curr_node.next
# Append the new node at the end
# of the linked list
curr_node.next = new_node
def sorted_merge(self, node_a, node_b):
# Base cases
if node_a is None:
return node_b
if node_b is None:
return node_a
# pick either a or b and recur..
if node_a.data <= node_b.data:
result = node_a
result.next = self.sorted_merge(node_a.next, node_b)
else:
result = node_b
result.next = self.sorted_merge(node_a, node_b.next)
return result
def merge_sort(self, head):
# Base case if head is None
if head is None or head.next is None:
return head
# get the middle of the list
middle = self.get_middle(head)
next_to_middle = middle.next
# set the next of middle node to None
middle.next = None
# Apply mergeSort on left list
left = self.merge_sort(head)
# Apply mergeSort on right list
right = self.merge_sort(next_to_middle)
# Merge the left and right lists
sorted_list = self.sorted_merge(left, right)
return sorted_list
# Utility function to get the middle
# of the linked list
@staticmethod
def get_middle(head):
if head is None:
return head
slow = head
fast = head
while fast.next is not None and fast.next.next is not None:
slow = slow.next
fast = fast.next.next
return slow
def __repr__(self):
# Utility function to build a printable representation of the linked list
represent = ''
if self.head is None:
return ''
curr_node = self.head
while curr_node:
represent += f'{curr_node.data} -> '
curr_node = curr_node.next
return represent[:-4]
# Driver Code
if __name__ == '__main__':
li = LinkedList()
li.append(15)
li.append(10)
li.append(5)
li.append(20)
li.append(3)
li.append(2)
print(li)
# Apply merge Sort
li.head = li.merge_sort(li.head)
print("Sorted Linked List is:")
print(li)
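For reference, the driver above prints the list before and after sorting, along these lines:

```
15 -> 10 -> 5 -> 20 -> 3 -> 2
Sorted Linked List is:
2 -> 3 -> 5 -> 10 -> 15 -> 20
```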


@ -1,159 +1,177 @@
aiohttp==3.8.1 aiohttp==3.8.4
aiosignal==1.2.0 aiosignal==1.3.1
alembic==1.7.6 alembic==1.9.4
altgraph==0.17.2 altgraph==0.17.3
anyio==3.5.0 amqp==5.1.1
arrow==1.2.2 anyio==3.6.2
asgiref==3.5.0 arrow==1.2.3
asttokens==2.0.5 asgiref==3.6.0
asttokens==2.2.1
async-generator==1.10 async-generator==1.10
async-timeout==4.0.2 async-timeout==4.0.2
attrs==21.4.0 attrs==22.2.0
Babel==2.9.1
backcall==0.2.0 backcall==0.2.0
backports.entry-points-selectable==1.1.1 bcrypt==4.0.1
bcrypt==3.2.0 billiard==3.6.4.0
bidict==0.21.4
binaryornot==0.4.4 binaryornot==0.4.4
black==22.1.0 black==22.12.0
blinker==1.4 CacheControl==0.12.11
Brotli==1.0.9
CacheControl==0.12.10
cachy==0.3.0 cachy==0.3.0
certifi==2021.10.8 celery==5.2.7
cffi==1.15.0 certifi==2022.12.7
chardet==4.0.0 cffi==1.15.1
charset-normalizer==2.0.12 cfgv==3.3.1
cleo==0.8.1 chardet==5.1.0
click==8.0.4 charset-normalizer==3.0.1
cleo==2.0.1
click==8.1.3
click-didyoumean==0.3.0
click-plugins==1.1.1
click-repl==0.2.0
clikit==0.6.2 clikit==0.6.2
cookiecutter==1.7.3 cookiecutter==2.1.1
coverage==6.3.2 coverage==7.2.1
crashtest==0.3.1 crashtest==0.4.1
cryptography==36.0.1 cryptography==39.0.0
cyclonedx-python-lib==3.1.5
decorator==5.1.1 decorator==5.1.1
distlib==0.3.4 distlib==0.3.6
Django==4.0.3 Django==4.1.7
dnspython==2.2.0 dparse==0.6.2
email-validator==1.1.3 dulwich==0.20.50
executing==0.8.3 executing==1.2.0
fastapi==0.74.1 factory-boy==3.2.1
filelock==3.6.0 Faker==16.9.0
Flask==2.0.3 fastapi==0.89.1
Flask-Login==0.5.0 filelock==3.9.0
Flask-Principal==0.4.0 flake8==6.0.0
Flask-SQLAlchemy==2.5.1 frozenlist==1.3.3
Flask-WTF==1.0.0 greenlet==2.0.2
frozenlist==1.3.0 gunicorn==20.1.0
greenlet==1.1.2 h11==0.14.0
h11==0.13.0
html5lib==1.1 html5lib==1.1
idna==3.3 httpcore==0.16.3
importlib-metadata==4.11.2 httpx==0.23.3
iniconfig==1.1.1 identify==2.5.18
ipython==8.1.0 idna==3.4
itsdangerous==2.1.0 importlib-metadata==6.0.0
jedi==0.18.1 iniconfig==2.0.0
jeepney==0.7.1 ipython==8.11.0
Jinja2==3.0.3 jaraco.classes==3.2.3
jedi==0.18.2
jeepney==0.8.0
Jinja2==3.1.2
jinja2-time==0.2.0 jinja2-time==0.2.0
keyring==23.5.0 jsonschema==4.17.3
keyring==23.13.1
kombu==5.2.4
lockfile==0.12.2 lockfile==0.12.2
loguru==0.6.0 loguru==0.6.0
Mako==1.1.6 Mako==1.2.4
MarkupSafe==2.1.0 markdown-it-py==2.1.0
matplotlib-inline==0.1.3 MarkupSafe==2.1.2
MouseInfo==0.1.3 matplotlib-inline==0.1.6
msgpack==1.0.3 mccabe==0.7.0
multidict==6.0.2 mdurl==0.1.2
mypy==0.931 more-itertools==9.0.0
mypy-extensions==0.4.3 MouseInfo==0.1.0
outcome==1.1.0 msgpack==1.0.4
multidict==6.0.4
mypy==0.991
mypy-extensions==1.0.0
nodeenv==1.7.0
numpy==1.24.2
orjson==3.8.7
outcome==1.2.0
packageurl-python==0.10.4
packaging==21.3 packaging==21.3
paramiko==2.9.2
parso==0.8.3 parso==0.8.3
passlib==1.7.4
pastel==0.2.1 pastel==0.2.1
pathspec==0.9.0 pathspec==0.11.0
pexpect==4.8.0 pexpect==4.8.0
pickleshare==0.7.5 pickleshare==0.7.5
Pillow==9.0.1 Pillow==9.4.0
pkginfo==1.8.2 pip-api==0.0.30
platformdirs==2.5.1 pip-requirements-parser==32.0.1
pip_audit==2.4.14
pkginfo==1.9.6
platformdirs==3.0.0
pluggy==1.0.0 pluggy==1.0.0
poetry==1.1.13 poetry==1.3.2
poetry-core==1.0.8 poetry-core==1.4.0
poyo==0.5.0 poetry-plugin-export==1.2.0
prompt-toolkit==3.0.28 pre-commit==2.21.0
psycopg2-binary==2.9.3 prompt-toolkit==3.0.38
psycopg2-binary==2.9.5
ptyprocess==0.7.0 ptyprocess==0.7.0
pure-eval==0.2.2 pure-eval==0.2.2
py==1.11.0
pyasn1==0.4.8
PyAutoGUI==0.9.53 PyAutoGUI==0.9.53
pycodestyle==2.10.0
pycparser==2.21 pycparser==2.21
pydantic==1.9.0 pydantic==1.10.5
pyflakes==3.0.1
PyGetWindow==0.0.9 PyGetWindow==0.0.9
Pygments==2.11.2 Pygments==2.14.0
pyinstaller==4.9 pyinstaller==5.8.0
pyinstaller-hooks-contrib==2022.2 pyinstaller-hooks-contrib==2023.0
pylev==1.4.0 pylev==1.4.0
PyMsgBox==1.0.9 PyMsgBox==1.0.9
PyNaCl==1.5.0 pyparsing==3.0.9
pyOpenSSL==22.0.0
pyparsing==3.0.7
pyperclip==1.8.2 pyperclip==1.8.2
PyQt6==6.2.3 PyQt6==6.4.2
PyQt6-Qt6==6.2.3 PyQt6-Qt6==6.4.2
PyQt6-sip==13.2.1 PyQt6-sip==13.4.1
PyRect==0.1.4 PyRect==0.2.0
pyrsistent==0.19.3
PyScreeze==0.1.28 PyScreeze==0.1.28
PySocks==1.7.1 PySocks==1.7.1
pytest==7.0.1 pytest==7.2.1
pytest-cov==3.0.0 pytest-cov==4.0.0
python-dateutil==2.8.2 python-dateutil==2.8.2
python-decouple==3.6 python-decouple==3.8
python-dotenv==0.19.2 python-slugify==8.0.1
python-engineio==4.3.1
python-slugify==6.1.1
python-socketio==5.5.2
python3-xlib==0.15 python3-xlib==0.15
pytweening==1.0.4 pytweening==1.0.4
pytz==2021.3 pytz==2022.7.1
qt6-applications==6.1.0.2.2 PyYAML==6.0
qt6-tools==6.1.0.1.2 rapidfuzz==2.13.7
requests==2.27.1 redis==4.5.1
requests==2.28.2
requests-toolbelt==0.9.1 requests-toolbelt==0.9.1
SecretStorage==3.3.1 resolvelib==0.9.0
selenium==4.1.2 rfc3986==1.5.0
shellingham==1.4.0 rich==13.3.1
simplejson==3.17.6 ruamel.yaml==0.17.21
safety==2.3.5
SecretStorage==3.3.3
selenium==4.8.2
shellingham==1.5.0.post1
simple-term-menu==1.6.1
six==1.16.0 six==1.16.0
sniffio==1.2.0 sniffio==1.3.0
sortedcontainers==2.4.0 sortedcontainers==2.4.0
speaklater==1.3 SQLAlchemy==1.4.46
speaklater3==1.4 SQLAlchemy-Utils==0.38.3
SQLAlchemy==1.4.31 sqlparse==0.4.3
sqlparse==0.4.2 stack-data==0.6.2
sshtunnel==0.4.0 starlette==0.22.0
stack-data==0.2.0 termcolor==2.2.0
starlette==0.18.0
style==1.1.6
text-unidecode==1.3 text-unidecode==1.3
tomli==2.0.1 toml==0.10.2
tomlkit==0.10.0 tomlkit==0.11.6
traitlets==5.1.1 traitlets==5.9.0
trio==0.20.0 trio==0.22.0
trio-websocket==0.9.2 trio-websocket==0.9.2
typing_extensions==4.1.1 trove-classifiers==2023.1.20
ua-parser==0.10.0 typing_extensions==4.5.0
urllib3==1.26.8 urllib3==1.26.14
user-agents==2.2.0 uvicorn==0.20.0
virtualenv==20.13.2 validators==0.20.0
wcwidth==0.2.5 vine==5.0.0
virtualenv==20.20.0
wcwidth==0.2.6
webencodings==0.5.1 webencodings==0.5.1
Werkzeug==2.0.3 wget==3.2
wsproto==1.1.0 wsproto==1.2.0
WTForms==3.0.1 yarl==1.8.2
yarl==1.7.2 zipp==3.15.0
zipp==3.7.0

snake.py (new file, 72 lines)

@ -0,0 +1,72 @@
import math
from itertools import cycle
class Snake:
def __init__(self):
self.x = 0
self.y = 0
self.move = self.move_right
def move_right(self) -> None:
self.x += 1
def move_left(self) -> None:
self.x -= 1
def move_down(self) -> None:
self.y += 1
def move_up(self) -> None:
self.y -= 1
def move_direction(self) -> cycle:
return cycle([self.move_right, self.move_down, self.move_left, self.move_up])
def move_back(self) -> None:
match self.move:
case self.move_right:
self.x -= 1
case self.move_left:
self.x += 1
case self.move_down:
self.y -= 1
case self.move_up:
self.y += 1
def get_current_element_or_none(self, board: dict[int, list[str]]) -> str | None:
try:
return board.get(self.y)[self.x]
except IndexError:
return None
except TypeError:
return None
def snake(n: int) -> None:
board: dict[int, list[str]] = {row: ['0' for column in range(n)] for row in range(n)}
python = Snake()
move_direction = python.move_direction()
next(move_direction)
python.move_back()  # step back to position -1 so that the first move lands on position zero
for number in range(n ** 2):
python.move()
element = python.get_current_element_or_none(board)
if not element or element != '0':
python.move_back()
python.move = next(move_direction)
python.move()
board[python.y][python.x] = f'{number + 1}'.rjust(int(math.log10(n**2)) + 1, ' ')
for line in board.values():
print(*line)
if __name__ == '__main__':
snake(7)
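As a quick illustration of the result, `snake(3)` fills the board in a clockwise spiral and prints:

```
1 2 3
8 9 4
7 6 5
```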


@ -1,3 +1,7 @@
### Python template
.idea/
.vscode/
# Byte-compiled / optimized / DLL files # Byte-compiled / optimized / DLL files
__pycache__/ __pycache__/
*.py[cod] *.py[cod]
@ -20,6 +24,7 @@ parts/
sdist/ sdist/
var/ var/
wheels/ wheels/
share/python-wheels/
*.egg-info/ *.egg-info/
.installed.cfg .installed.cfg
*.egg *.egg
@ -38,14 +43,17 @@ pip-delete-this-directory.txt
# Unit test / coverage reports # Unit test / coverage reports
htmlcov/ htmlcov/
.tox/ .tox/
.nox/
.coverage .coverage
.coverage.* .coverage.*
.cache .cache
nosetests.xml nosetests.xml
coverage.xml coverage.xml
*.cover *.cover
*.py,cover
.hypothesis/ .hypothesis/
.pytest_cache/ .pytest_cache/
cover/
# Translations # Translations
*.mo *.mo
@ -55,6 +63,8 @@ coverage.xml
*.log *.log
local_settings.py local_settings.py
db.sqlite3 db.sqlite3
db.sqlite3-journal
*.db
# Flask stuff: # Flask stuff:
instance/ instance/
@ -67,16 +77,34 @@ instance/
docs/_build/ docs/_build/
# PyBuilder # PyBuilder
.pybuilder/
target/ target/
# Jupyter Notebook # Jupyter Notebook
.ipynb_checkpoints .ipynb_checkpoints
# pyenv # IPython
.python-version profile_default/
ipython_config.py
# celery beat schedule file # pyenv
# For a library or package, you might want to ignore these files since the code is
# intended to run in multiple environments; otherwise, check them in:
# .python-version
# pipenv
# According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control.
# However, in case of collaboration, if having platform-specific dependencies or dependencies
# having no cross-platform support, pipenv may install dependencies that don't work, or not
# install all needed dependencies.
#Pipfile.lock
# PEP 582; used by e.g. github.com/David-OConnor/pyflow
__pypackages__/
# Celery stuff
celerybeat-schedule celerybeat-schedule
celerybeat.pid
# SageMath parsed files # SageMath parsed files
*.sage.py *.sage.py
@ -102,6 +130,18 @@ venv.bak/
# mypy # mypy
.mypy_cache/ .mypy_cache/
.dmypy.json
dmypy.json
.idea/ # Pyre type checker
.vscode/ .pyre/
# pytype static type analyzer
.pytype/
# Cython debug symbols
cython_debug/
# my staff
delete/
delete.py

sqlalchemy_study/README.md (new file, 103 lines)

@ -0,0 +1,103 @@
# SQLALCHEMY STUDY
---
*Note: MySQL will start on port 3307*
*Note: Postgres will start on port 5433*
---
## Create environment:
```bash
cp ./src/config/.env.template ./src/config/.env
```
*Note: Set the USE_DATABASE variable to 'mysql' to use MySQL or to 'postgres' to use Postgres.*
*Default is MySQL*
## Run without app in docker:
Requires Python 3.11+ and Poetry 1.3.1
- **install poetry dependencies:**
```bash
poetry install
poetry shell
```
- **run for mysql:** ```docker-compose -f docker-compose.mysql.yaml up```
- **run for postgres:** ```docker-compose -f docker-compose.postgres.yaml up```
- **load initial data:** ```python ./src/data/fill_data.py```
## Run all in docker:
**run for mysql:**
```bash
docker-compose -f docker-compose.mysql.yaml -f docker-compose.docker.yaml up
```
**run for postgres:**
```bash
docker-compose -f docker-compose.postgres.yaml -f docker-compose.docker.yaml up
```
*Note: Docker runs all migrations automatically, so the separate data-creation step is not needed.*
## Help info:
### Create alembic migrations:
*Note: To generate migrations you should run:*
```bash
# For automatic change detection.
alembic revision --autogenerate -m "migration message"
# For empty file generation.
alembic revision
```
*Note: If you want to migrate your database, you should run the following commands:*
```bash
# Run all migrations up to and including the migration with revision_id.
alembic upgrade "<revision_id>"
# To perform all pending migrations.
alembic upgrade "head"
```
### Reverting alembic migrations:
*Note: If you want to revert migrations, you should run:*
```bash
# revert all migrations up to: revision_id.
alembic downgrade <revision_id>
# Revert everything.
alembic downgrade base
# Revert N revisions.
alembic downgrade -2
```
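To check where the database currently is before upgrading or downgrading, the standard Alembic inspection commands can be used (these are generic Alembic commands, not project-specific scripts):
```bash
# Show the revision the database is currently at.
alembic current
# Show the revision history with messages.
alembic history --verbose
```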
### Database access:
Postgres:
```bash
docker exec -it sqlalchemy_study_db psql -d sqlalchemy_study -U balsh
```
- show help ```\?```
- show all tables: ```\dt```
- describe table ```\d {table name}```
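MySQL (a suggested equivalent; the container name and credentials assume the defaults from ./src/config/.env.template):
```bash
docker exec -it sqlalchemy_study_db mysql -u user -p sqlalchemy_study
```
- show all tables: ```SHOW TABLES;```
- describe table ```DESCRIBE {table name};```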
## Clean database
```bash
docker-compose -f docker-compose.mysql.yaml down -v
```
## Known issues:


@ -0,0 +1,39 @@
version: '3.9'
networks:
sqlalchemy_study_network:
name: "sqlalchemy_study_network"
ipam:
config:
- subnet: 200.20.0.0/24
services:
db:
networks:
sqlalchemy_study_network:
ipv4_address: 200.20.0.12
app:
container_name: "sqlalchemy_study_app"
image: "sqlalchemy_study:latest"
build:
context: .
dockerfile: ./docker/Dockerfile
args:
USER: root
restart: unless-stopped
networks:
sqlalchemy_study_network:
ipv4_address: 200.20.0.10
env_file: ./src/config/.env
environment:
DB_HOST: db
depends_on:
- db
command: >
bash -c "/app/scripts/docker-entrypoint.sh
&& /app/scripts/alembic-init-migrate.sh && python data/fill_data.py
&& sleep infinity"
volumes:
- ./src:/app/src/


@ -0,0 +1,29 @@
version: '3.9'
volumes:
sqlalchemy_study_db_data:
name: "sqlalchemy_study_db_data"
services:
db:
image: mysql:8.0.31
platform: linux/amd64
container_name: "sqlalchemy_study_db"
hostname: 'db_host'
volumes:
- sqlalchemy_study_db_data:/var/lib/mysql
- /etc/localtime:/etc/localtime:ro
env_file: ./src/config/.env
environment:
MYSQL_TCP_PORT: 3307
restart: unless-stopped
expose:
- '3307'
ports:
- '3307:3307'
security_opt:
- seccomp:unconfined
cap_add:
- SYS_NICE # CAP_SYS_NICE


@ -0,0 +1,23 @@
version: '3.9'
volumes:
sqlalchemy_study_db_data:
name: "sqlalchemy_study_db_data"
services:
db:
image: postgres:14.6
container_name: "sqlalchemy_study_db"
hostname: 'db_host'
restart: unless-stopped
volumes:
- sqlalchemy_study_db_data:/var/lib/postgresql/data
- /etc/localtime:/etc/localtime:ro
env_file: ./src/config/.env
expose:
- '5433'
ports:
- '5433:5433'
command: -p 5433


@ -0,0 +1,60 @@
FROM --platform=linux/amd64 python:3.11.1
ARG USER
ENV SOURCE_DIR=/app/src/
ENV USER=${USER} \
PYTHONFAULTHANDLER=1 \
PYTHONUNBUFFERED=1 \
PYTHONHASHSEED=random \
PYTHONDONTWRITEBYTECODE=1 \
PYTHONPATH="${PYTHONPATH}:${SOURCE_DIR}" \
# pip:
PIP_NO_CACHE_DIR=off \
PIP_DISABLE_PIP_VERSION_CHECK=on \
PIP_DEFAULT_TIMEOUT=100 \
POETRY_VIRTUALENVS_CREATE=false \
POETRY_CACHE_DIR='/var/cache/pypoetry' \
PATH="$PATH:/root/.poetry/bin"
RUN printf "================\n\nStart build app. USER is: "${USER}"\n\n===============\n" \
&& apt-get update \
&& apt-get install --no-install-recommends -y \
procps \
bash \
build-essential \
curl \
iputils-ping \
gettext \
git \
libpq-dev \
nano \
sshpass \
&& pip install --upgrade pip \
# Installing `poetry` package manager:
&& pip install poetry \
# Cleaning cache:
&& apt-get purge -y --auto-remove -o APT::AutoRemove::RecommendsImportant=false \
&& apt-get clean -y && rm -rf /var/lib/apt/lists/*
WORKDIR ${SOURCE_DIR}
RUN if [ "$USER" != "root" ]; then \
groupadd -r "$USER" && useradd -d /home/"$USER" -r -g "$USER" "$USER" \
&& chown "$USER":"$USER" -R /home/"$USER"; \
fi
COPY --chown="$USER":"$USER" ./poetry.lock ./pyproject.toml ${SOURCE_DIR}
# Installing requirements
RUN poetry install && rm -rf "$POETRY_CACHE_DIR"
COPY ./docker/scripts/ /app/scripts/
RUN chmod +x /app/scripts/docker-entrypoint.sh /app/scripts/alembic-init-migrate.sh
USER "$USER"
# Copy the actual application code
COPY --chown="$USER":"$USER" . ${SOURCE_DIR}


@ -0,0 +1,16 @@
#!/bin/bash
alembic_init_migrations(){
echo "Chosen database IS $USE_DATABASE"
if [ "$USE_DATABASE" = "mysql" ];
then
echo "Start migrations for MySQL"
alembic upgrade mysql_init_migrations;
elif [ "$USE_DATABASE" = "postgres" ];
then
echo "Start migrations for Postgres"
alembic upgrade postgres_init_migrations;
fi
}
alembic_init_migrations


@ -0,0 +1,26 @@
#!/bin/bash
TIMEOUT=${TIMEOUT:-60}
DATABASE_HOST=${DB_HOST:-db_host}
POSTGRES_DATABASE_PORT=${POSTGRES_DB_PORT:-5432}
POSTGRES_DATABASE="$DATABASE_HOST:$POSTGRES_DATABASE_PORT"
MYSQL_DATABASE_PORT=${MYSQL_DB_PORT:-3306}
MYSQL_DATABASE="$DATABASE_HOST:$MYSQL_DATABASE_PORT"
wait_for_databases(){
echo "Chosen database IS $USE_DATABASE"
if [ "$USE_DATABASE" = "mysql" ];
then
echo "Waiting for DB on: $MYSQL_DATABASE"
/app/scripts/wait-for-it.sh -t $TIMEOUT -s $MYSQL_DATABASE -- echo 'MySQL database connected';
elif [ "$USE_DATABASE" = "postgres" ];
then
echo "Waiting for DB on: $POSTGRES_DATABASE"
/app/scripts/wait-for-it.sh -t $TIMEOUT -s $POSTGRES_DATABASE -- echo 'Postgres database connected';
fi
}
wait_for_databases


@ -0,0 +1,182 @@
#!/usr/bin/env bash
# Use this script to test if a given TCP host/port are available
WAITFORIT_cmdname=${0##*/}
echoerr() { if [[ $WAITFORIT_QUIET -ne 1 ]]; then echo "$@" 1>&2; fi }
usage()
{
cat << USAGE >&2
Usage:
$WAITFORIT_cmdname host:port [-s] [-t timeout] [-- command args]
-h HOST | --host=HOST Host or IP under test
-p PORT | --port=PORT TCP port under test
Alternatively, you specify the host and port as host:port
-s | --strict Only execute subcommand if the test succeeds
-q | --quiet Don't output any status messages
-t TIMEOUT | --timeout=TIMEOUT
Timeout in seconds, zero for no timeout
-- COMMAND ARGS Execute command with args after the test finishes
USAGE
exit 1
}
wait_for()
{
if [[ $WAITFORIT_TIMEOUT -gt 0 ]]; then
echoerr "$WAITFORIT_cmdname: waiting $WAITFORIT_TIMEOUT seconds for $WAITFORIT_HOST:$WAITFORIT_PORT"
else
echoerr "$WAITFORIT_cmdname: waiting for $WAITFORIT_HOST:$WAITFORIT_PORT without a timeout"
fi
WAITFORIT_start_ts=$(date +%s)
while :
do
if [[ $WAITFORIT_ISBUSY -eq 1 ]]; then
nc -z $WAITFORIT_HOST $WAITFORIT_PORT
WAITFORIT_result=$?
else
(echo -n > /dev/tcp/$WAITFORIT_HOST/$WAITFORIT_PORT) >/dev/null 2>&1
WAITFORIT_result=$?
fi
if [[ $WAITFORIT_result -eq 0 ]]; then
WAITFORIT_end_ts=$(date +%s)
echoerr "$WAITFORIT_cmdname: $WAITFORIT_HOST:$WAITFORIT_PORT is available after $((WAITFORIT_end_ts - WAITFORIT_start_ts)) seconds"
break
fi
sleep 1
done
return $WAITFORIT_result
}
wait_for_wrapper()
{
# In order to support SIGINT during timeout: http://unix.stackexchange.com/a/57692
if [[ $WAITFORIT_QUIET -eq 1 ]]; then
timeout $WAITFORIT_BUSYTIMEFLAG $WAITFORIT_TIMEOUT $0 --quiet --child --host=$WAITFORIT_HOST --port=$WAITFORIT_PORT --timeout=$WAITFORIT_TIMEOUT &
else
timeout $WAITFORIT_BUSYTIMEFLAG $WAITFORIT_TIMEOUT $0 --child --host=$WAITFORIT_HOST --port=$WAITFORIT_PORT --timeout=$WAITFORIT_TIMEOUT &
fi
WAITFORIT_PID=$!
trap "kill -INT -$WAITFORIT_PID" INT
wait $WAITFORIT_PID
WAITFORIT_RESULT=$?
if [[ $WAITFORIT_RESULT -ne 0 ]]; then
echoerr "$WAITFORIT_cmdname: timeout occurred after waiting $WAITFORIT_TIMEOUT seconds for $WAITFORIT_HOST:$WAITFORIT_PORT"
fi
return $WAITFORIT_RESULT
}
# process arguments
while [[ $# -gt 0 ]]
do
case "$1" in
*:* )
WAITFORIT_hostport=(${1//:/ })
WAITFORIT_HOST=${WAITFORIT_hostport[0]}
WAITFORIT_PORT=${WAITFORIT_hostport[1]}
shift 1
;;
--child)
WAITFORIT_CHILD=1
shift 1
;;
-q | --quiet)
WAITFORIT_QUIET=1
shift 1
;;
-s | --strict)
WAITFORIT_STRICT=1
shift 1
;;
-h)
WAITFORIT_HOST="$2"
if [[ $WAITFORIT_HOST == "" ]]; then break; fi
shift 2
;;
--host=*)
WAITFORIT_HOST="${1#*=}"
shift 1
;;
-p)
WAITFORIT_PORT="$2"
if [[ $WAITFORIT_PORT == "" ]]; then break; fi
shift 2
;;
--port=*)
WAITFORIT_PORT="${1#*=}"
shift 1
;;
-t)
WAITFORIT_TIMEOUT="$2"
if [[ $WAITFORIT_TIMEOUT == "" ]]; then break; fi
shift 2
;;
--timeout=*)
WAITFORIT_TIMEOUT="${1#*=}"
shift 1
;;
--)
shift
WAITFORIT_CLI=("$@")
break
;;
--help)
usage
;;
*)
echoerr "Unknown argument: $1"
usage
;;
esac
done
if [[ "$WAITFORIT_HOST" == "" || "$WAITFORIT_PORT" == "" ]]; then
echoerr "Error: you need to provide a host and port to test."
usage
fi
WAITFORIT_TIMEOUT=${WAITFORIT_TIMEOUT:-15}
WAITFORIT_STRICT=${WAITFORIT_STRICT:-0}
WAITFORIT_CHILD=${WAITFORIT_CHILD:-0}
WAITFORIT_QUIET=${WAITFORIT_QUIET:-0}
# Check to see if timeout is from busybox?
WAITFORIT_TIMEOUT_PATH=$(type -p timeout)
WAITFORIT_TIMEOUT_PATH=$(realpath $WAITFORIT_TIMEOUT_PATH 2>/dev/null || readlink -f $WAITFORIT_TIMEOUT_PATH)
WAITFORIT_BUSYTIMEFLAG=""
if [[ $WAITFORIT_TIMEOUT_PATH =~ "busybox" ]]; then
WAITFORIT_ISBUSY=1
# Check if busybox timeout uses -t flag
# (recent Alpine versions don't support -t anymore)
if timeout &>/dev/stdout | grep -q -e '-t '; then
WAITFORIT_BUSYTIMEFLAG="-t"
fi
else
WAITFORIT_ISBUSY=0
fi
if [[ $WAITFORIT_CHILD -gt 0 ]]; then
wait_for
WAITFORIT_RESULT=$?
exit $WAITFORIT_RESULT
else
if [[ $WAITFORIT_TIMEOUT -gt 0 ]]; then
wait_for_wrapper
WAITFORIT_RESULT=$?
else
wait_for
WAITFORIT_RESULT=$?
fi
fi
if [[ $WAITFORIT_CLI != "" ]]; then
if [[ $WAITFORIT_RESULT -ne 0 && $WAITFORIT_STRICT -eq 1 ]]; then
echoerr "$WAITFORIT_cmdname: strict mode, refusing to execute subprocess"
exit $WAITFORIT_RESULT
fi
exec "${WAITFORIT_CLI[@]}"
else
exit $WAITFORIT_RESULT
fi

sqlalchemy_study/poetry.lock (generated, new file, 1104 lines): diff suppressed because it is too large


@ -0,0 +1,28 @@
[tool.poetry]
name = "sqlalchemy_study_project"
version = "1.0.1"
description = "for study sqlalchemy async models"
authors = ["Dmitry Afanasyev <Balshbox@gmail.com>"]
[tool.poetry.dependencies]
python = "^3.11"
SQLAlchemy = "^1.4"
SQLAlchemy-Utils = "^0.38.2"
pydantic = {version = "^1.9.1", extras = ["email"]}
factory-boy = "^3.2.1"
Faker = "^15.0.0"
loguru = "^0.6.0"
alembic = "^1.8.0"
python-dotenv = "^0.20.0"
asyncpg = "^0.27.0"
asyncmy = "^0.2.5"
PyMySQL = "^1.0.2"
cryptography = "^37.0.2"
psycopg2-binary = "^2.9.3"
[tool.poetry.dev-dependencies]
ipython = "^8.4.0"
[build-system]
requires = ["poetry-core>=1.0.0"]
build-backend = "poetry.core.masonry.api"


@ -0,0 +1,43 @@
# A generic, single database configuration.
[alembic]
# path to migration scripts
script_location = migrations
file_template = %%(year)d-%%(month).2d-%%(day).2d-%%(hour).2d-%%(minute).2d_%%(rev)s
prepend_sys_path = .
output_encoding = utf-8
# Logging configuration
[loggers]
keys = root,sqlalchemy,alembic
[handlers]
keys = console
[formatters]
keys = generic
[logger_root]
level = WARN
handlers = console
qualname =
[logger_sqlalchemy]
level = WARN
handlers =
qualname = sqlalchemy.engine
[logger_alembic]
level = INFO
handlers =
qualname = alembic
[handler_console]
class = StreamHandler
args = (sys.stderr,)
level = NOTSET
formatter = generic
[formatter_generic]
format = %(levelname)-5.5s [%(name)s] %(message)s
datefmt = %H:%M:%S


@ -0,0 +1,25 @@
# --------------DATABASE-------------
# ==== DB provider ====: 'mysql' -> MySQL use | 'postgres' -> Postgres use
USE_DATABASE=mysql
# ==== DB common ====
DB_HOST=localhost
DB_ECHO=True
# ==== Postgres ====
POSTGRES_DB_PORT=5433
POSTGRES_DB=sqlalchemy_study
POSTGRES_USER=user
POSTGRES_PASSWORD=postgrespwd
# ==== MySQL ====
MYSQL_DB_PORT=3307
MYSQL_ROOT_PASSWORD=mysqlpwd
MYSQL_PASSWORD=mysqlpwd
MYSQL_DATABASE=sqlalchemy_study
MYSQL_USER=user



@ -0,0 +1,150 @@
from datetime import datetime, timedelta
from typing import Optional
import factory
from factory import fuzzy
from faker import Faker
from db.dependencies import get_sync_db_session
from db.models.coin import Coin, CoinType
from db.models.department import Department, EmployeeDepartments
from db.models.skills import Skill, EmployeesSkills
from db.models.user import User, Employee
faker = Faker('ru_RU')
Session = get_sync_db_session()
class BaseModelFactory(factory.alchemy.SQLAlchemyModelFactory):
class Meta:
abstract = True
sqlalchemy_session_persistence = 'commit'
sqlalchemy_session = Session
class UserFactory(BaseModelFactory):
id = factory.Sequence(lambda n: n + 1)
username = faker.profile(fields=['username'])['username']
email = factory.Faker('email')
hash_password = factory.Faker('password')
auth_token = factory.Faker('uuid4')
class Meta:
model = User
sqlalchemy_get_or_create = (
'username',
)
class CoinModelFactory(BaseModelFactory):
id = factory.Sequence(lambda n: n + 1)
name = factory.Faker('cryptocurrency_name')
enabled = fuzzy.FuzzyChoice((0, 1))
class Meta:
model = Coin
sqlalchemy_get_or_create = (
'name',
)
@factory.post_generation
def coin_type(obj, create: bool, extracted: Optional[Coin], *args, **kwargs) -> None:
if create:
CoinTypeFactory.create_batch(faker.random_int(min=3, max=7), coin_id=obj.id)
class CoinTypeFactory(BaseModelFactory):
id = factory.Sequence(lambda n: n + 1)
name = factory.Faker('cryptocurrency_code')
class Meta:
model = CoinType
sqlalchemy_get_or_create = ('id',
)
class SkillFactory(BaseModelFactory):
id = factory.Sequence(lambda n: n + 1)
name = factory.Faker('job', locale='ru_ru')
description = factory.Faker('text', max_nb_chars=160, locale='ru_RU')
updated_at = factory.LazyFunction(datetime.now)
class Meta:
model = Skill
sqlalchemy_get_or_create = ('name',
)
class EmployeeFactory(BaseModelFactory):
id = factory.Sequence(lambda n: n + 1)
first_name = factory.Faker('first_name', locale='ru_RU')
last_name = factory.Faker('last_name', locale='ru_RU')
phone = factory.Faker('phone_number')
description = factory.Faker('text', max_nb_chars=80, locale='ru_RU')
coin_id = factory.Faker('random_int')
class Meta:
model = Employee
sqlalchemy_get_or_create = ('id',
)
class EmployeesSkillsFactory(BaseModelFactory):
id = factory.Sequence(lambda n: n + 1)
employee_id = factory.Faker('random_int')
skill_id = factory.Faker('random_int')
updated_at = factory.Faker(
'date_time_between_dates', datetime_start=datetime.now() - timedelta(days=30), datetime_end=datetime.now()
)
class Meta:
model = EmployeesSkills
sqlalchemy_get_or_create = (
'id',
'employee_id',
'skill_id'
)
class DepartmentFactory(BaseModelFactory):
id = factory.Sequence(lambda n: n + 1)
name = factory.Faker('company')
description = factory.Faker('bs')
updated_at = factory.Faker(
'date_time_between_dates', datetime_start=datetime.now() - timedelta(days=30), datetime_end=datetime.now()
)
class Meta:
model = Department
sqlalchemy_get_or_create = (
'id',
'name',
)
class EmployeeDepartmentFactory(BaseModelFactory):
employee_id = factory.Faker('random_int')
department_id = factory.Faker('random_int')
created_at = factory.Faker(
'date_time_between_dates',
datetime_start=datetime.now() - timedelta(days=30),
datetime_end=datetime.now() - timedelta(days=10)
)
updated_at = factory.Faker(
'date_time_between_dates',
datetime_start=datetime.now() - timedelta(days=10),
datetime_end=datetime.now()
)
class Meta:
model = EmployeeDepartments


@ -0,0 +1,84 @@
import asyncio
import random
import uuid
from factory import fuzzy
from faker import Faker
from data.factories import (
UserFactory,
CoinModelFactory,
EmployeesSkillsFactory,
SkillFactory,
EmployeeFactory,
DepartmentFactory,
EmployeeDepartmentFactory
)
from db.dependencies import get_async_db_session
from db.models.user import User
from db.utils import drop_tables, run_migrations
from settings.logger import logger
faker = Faker('ru_RU')
async def add_users_data() -> None:
async with get_async_db_session() as session:
users = []
for _ in range(10):
users.append(User(username=faker.profile(fields=['username'])['username'],
hash_password=faker.password(),
auth_token=str(uuid.uuid4()),
)
)
session.add_all(users)
def get_random_skill(skills: list[int]) -> list[int]:
random_skills = random.sample(skills, random.randint(2, 9))
return random_skills
def fill_database() -> None:
# Add user rows asynchronously with Faker data
asyncio.run(add_users_data())
# Add the remaining data synchronously with factory_boy factories
coins = [coin.id for coin in CoinModelFactory.create_batch(42)]
jonny = EmployeeFactory(first_name='Tony', last_name='Stark', coin_id=fuzzy.FuzzyChoice(coins))
karl = EmployeeFactory(first_name='Karl', coin_id=fuzzy.FuzzyChoice(coins))
employees = EmployeeFactory.create_batch(40, coin_id=fuzzy.FuzzyChoice(coins))
skills = [skill.id for skill in SkillFactory.create_batch(size=faker.random_int(min=20, max=42))]
for skill in get_random_skill(skills):
EmployeesSkillsFactory(employee_id=jonny.id, skill_id=skill)
for skill in get_random_skill(skills):
EmployeesSkillsFactory(employee_id=karl.id, skill_id=skill)
for employee in employees:
for skill in get_random_skill(skills):
EmployeesSkillsFactory(employee_id=employee.id, skill_id=skill)
# Extra user rows (ids 20-29) if they do not already exist
for user_id in range(20, 30):
UserFactory(id=user_id, username=faker.profile(fields=['username'])['username'])
# Department data
departments = DepartmentFactory.create_batch(5)
departments = [department.id for department in departments]
for employee in [jonny, karl, *employees]:
EmployeeDepartmentFactory(employee_id=employee.id, department_id=fuzzy.FuzzyChoice(departments))
logger.info('All data has been created. You can run data/get_data.py script')
if __name__ == '__main__':
drop_tables()
run_migrations()
fill_database()


@ -0,0 +1,66 @@
import asyncio
from settings.logger import logger
from sqlalchemy_study.sqlalchemy import select
from sqlalchemy_study.sqlalchemy import load_only, contains_eager, joinedload
from db.dependencies import get_async_db_session
from db.models.coin import Coin
from db.models.department import EmployeeDepartments, Department
from db.models.skills import Skill
from db.models.user import Employee, User
async def get_data() -> list[Employee]:
query = (
select(Employee)
.join(Employee.coin).options(
contains_eager(Employee.coin).options(load_only(Coin.name,
Coin.enabled)))
.join(Employee.skills).options(
contains_eager(Employee.skills).load_only(Skill.name)
).options(load_only(Employee.id,
Employee.first_name,
Employee.phone,
)
)
.outerjoin(Employee.department).options(
contains_eager(Employee.department).options(
joinedload(EmployeeDepartments.department)
.options(load_only(Department.name,
Department.description, )
)
)
)
.outerjoin(Employee.user).options(
contains_eager(Employee.user).options(load_only(User.username,
)
)
)
).order_by(Employee.id, Skill.name)
async with get_async_db_session() as session:
result = await session.execute(query)
data = result.unique().scalars().all()
return data
employees = asyncio.run(get_data())
for employee in employees:
print(''.center(40, '-'), '\nEmployee id: {0}\nFirst name: {1}\nPhone: {2}\nSkills: {3}\n'
'Coin name: {4}\nCoin enabled: {5}\nDepartment: {6} -> {7}\nUsername: {8}'
.format(employee.id,
employee.first_name,
employee.phone,
', '.join([skill.name for skill in employee.skills[:5]]),
employee.coin.name,
employee.coin.enabled,
employee.department.department.name,
employee.department.department.description,
employee.user.username if hasattr(employee.user, 'username') else None,
)
)
logger.info(f'Total employees: {len(employees)}')


@ -0,0 +1,31 @@
from typing import Any, Tuple, Union, Type
from sqlalchemy_study.sqlalchemy import Table, Column, Integer, DATETIME, TIMESTAMP, func
from sqlalchemy_study.sqlalchemy import as_declarative
from db.meta import meta
from settings import settings
DB_TIME_FORMAT: Type[Union[DATETIME, TIMESTAMP]] = DATETIME if settings.USE_DATABASE == 'mysql' else TIMESTAMP
@as_declarative(metadata=meta)
class BaseModel:
"""
BaseModel for all models.
It has some type definitions to
enhance autocompletion.
"""
__tablename__: str
__table__: Table
__table_args__: Tuple[Any, ...]
__abstract__ = True
id = Column(Integer, nullable=False, unique=True, primary_key=True, autoincrement=True)
created_at = Column(DB_TIME_FORMAT, default=func.now(), index=True)
updated_at = Column(DB_TIME_FORMAT, nullable=True)
def __repr__(self):
return f"<{self.__class__.__name__}(id={self.id!r})>"


@ -0,0 +1,57 @@
from asyncio import current_task
from contextlib import asynccontextmanager
from typing import AsyncGenerator
from sqlalchemy_study.sqlalchemy import create_engine
from sqlalchemy_study.sqlalchemy import create_async_engine, AsyncSession, async_scoped_session, AsyncEngine
from sqlalchemy_study.sqlalchemy import sessionmaker, Session
from settings import settings
async_engine: AsyncEngine = create_async_engine(str(settings.async_db_url), echo=settings.DB_ECHO)
async_session_factory = async_scoped_session(
sessionmaker(
autocommit=False,
autoflush=False,
class_=AsyncSession,
expire_on_commit=False,
bind=async_engine,
),
scopefunc=current_task,
)
sync_engine = create_engine(settings.sync_db_url, echo=settings.DB_ECHO)
sync_session_factory = sessionmaker(sync_engine)
def get_sync_db_session() -> Session:
session: Session = sync_session_factory()
try:
return session
except Exception as err:
session.rollback()
raise err
finally:
session.commit()
session.close()
@asynccontextmanager
async def get_async_db_session() -> AsyncGenerator[AsyncSession, None]:
"""
Create and get database session.
:yield: database session.
"""
session = async_session_factory()
try:
yield session
except Exception as err:
await session.rollback()
raise err
finally:
await session.commit()
await session.close()
await async_session_factory.remove()
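A minimal usage sketch of the async session context manager above (it assumes the project's `sqlalchemy_study.sqlalchemy` wrapper re-exports `text`, as it does for the other SQLAlchemy names used in this commit):

```python
import asyncio

from db.dependencies import get_async_db_session
from sqlalchemy_study.sqlalchemy import text


async def ping_database() -> None:
    # Open a session and run a trivial query; commit/close are handled by the context manager.
    async with get_async_db_session() as session:
        result = await session.execute(text('SELECT 1'))
        print(result.scalar())


asyncio.run(ping_database())
```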


@ -0,0 +1,3 @@
from sqlalchemy_study import sqlalchemy as sa
meta = sa.MetaData()


@ -0,0 +1,13 @@
import pkgutil
from pathlib import Path
def load_all_models() -> None:
"""Load all models from this folder."""
root_dir = Path(__file__).resolve().parent
modules = pkgutil.walk_packages(
path=[str(root_dir)],
prefix="db.models.",
)
for module in modules:
__import__(module.name)


@ -0,0 +1,16 @@
from sqlalchemy_study.sqlalchemy import Column, Integer, ForeignKey, VARCHAR
from sqlalchemy_study.sqlalchemy import relation
from db.base import BaseModel
from db.models.department import Department
class CadreMovement(BaseModel):
__tablename__ = 'cadre_movements'
employee = Column(Integer, ForeignKey('employees.id', ondelete='CASCADE'), nullable=False, index=True)
old_department = Column(Integer, ForeignKey('departments.id', ondelete='CASCADE'), nullable=False, index=True)
new_department = Column(Integer, ForeignKey('departments.id', ondelete='CASCADE'), nullable=False, index=True)
reason = Column(VARCHAR(500), nullable=True)
department = relation(Department, foreign_keys=new_department, lazy='select')


@ -0,0 +1,35 @@
from sqlalchemy_study.sqlalchemy import VARCHAR
from sqlalchemy_study.sqlalchemy import relationship
from sqlalchemy_study.sqlalchemy import Column
from sqlalchemy_study.sqlalchemy import ForeignKey
from sqlalchemy_study.sqlalchemy import Integer, BOOLEAN
from db.base import BaseModel
class Coin(BaseModel):
"""Model for coin."""
__tablename__ = "coins"
name = Column('coin_name', VARCHAR(50), unique=True)
enabled = Column('enabled', BOOLEAN)
coin_type_id = relationship("CoinType",
primaryjoin="Coin.id == CoinType.coin_id",
back_populates='coin',
uselist=False,
viewonly=True,
lazy="raise",
)
employee = relationship('Employee', back_populates='coin')
class CoinType(BaseModel):
"""Model for coin type."""
__tablename__ = "coin_types"
name = Column('coin_name', VARCHAR(50))
coin_id = Column(Integer, ForeignKey('coins.id', ondelete='CASCADE'))
coin = relationship(Coin, back_populates='coin_type_id')


@ -0,0 +1,23 @@
from sqlalchemy_study.sqlalchemy import Column, VARCHAR, Integer, ForeignKey
from sqlalchemy_study.sqlalchemy import relationship
from db.base import BaseModel
class Department(BaseModel):
__tablename__ = 'departments'
name = Column(VARCHAR(255), nullable=False)
description = Column(VARCHAR(255), nullable=False)
class EmployeeDepartments(BaseModel):
__tablename__ = 'employee_departments'
employee_id = Column(Integer, ForeignKey('employees.id', ondelete='CASCADE'), nullable=False, index=True)
department_id = Column(Integer, ForeignKey('departments.id', ondelete='CASCADE'), nullable=False, index=True)
department = relationship(Department,
lazy='noload',
backref='emp_depart',
)


@ -0,0 +1,19 @@
from sqlalchemy_study.sqlalchemy import Column, ForeignKey, VARCHAR, Text, UniqueConstraint
from db.base import BaseModel
from db.models.user import Employee
class Skill(BaseModel):
__tablename__ = 'skills'
name = Column(VARCHAR(255), nullable=False, unique=True)
description = Column(Text, nullable=True)
class EmployeesSkills(BaseModel):
__tablename__ = 'employees_skills'
__table_args__ = (UniqueConstraint("employee_id", "skill_id"),)
employee_id = Column(ForeignKey(Employee.id, ondelete='CASCADE'), nullable=False, index=True)
skill_id = Column(ForeignKey(Skill.id, ondelete='CASCADE'), nullable=False, index=True)


@ -0,0 +1,62 @@
import datetime
from sqlalchemy_study.sqlalchemy import Column, String, DateTime, ForeignKey
from sqlalchemy_study.sqlalchemy import VARCHAR
from sqlalchemy_study.sqlalchemy import relationship
from db.base import BaseModel
from db.models.coin import Coin
class User(BaseModel):
__tablename__ = 'users'
username: str = Column(String(255), unique=True)
email: str = Column(String(255), index=True, unique=True, nullable=True)
hash_password: str = Column(String(255))
auth_token: str = Column(String(255))
last_login: datetime.datetime = Column(DateTime, default=datetime.datetime.now, index=True)
def __repr__(self):
return f'User: id:{self.id}, name: {self.username}'
employee = relationship('Employee',
primaryjoin='foreign(User.id)==remote(Employee.id)',
lazy='noload',
backref='user_employee',
)
class Employee(BaseModel):
__tablename__ = 'employees'
first_name = Column(VARCHAR(128), nullable=False)
last_name = Column(VARCHAR(128), nullable=False)
phone = Column(VARCHAR(30), unique=True, nullable=True)
description = Column(VARCHAR(255), nullable=True)
coin_id = Column('coin_id', ForeignKey('coins.id', ondelete='SET NULL'), nullable=True)
coin = relationship(Coin,
back_populates='employee',
primaryjoin='Employee.coin_id==Coin.id',
lazy='noload',
uselist=False,
)
skills = relationship('Skill',
secondary="employees_skills",
lazy='noload',
uselist=True,
)
department = relationship('EmployeeDepartments',
lazy='noload',
backref='employee',
uselist=False,
)
user = relationship('User',
primaryjoin='foreign(Employee.id)==remote(User.id)',
lazy='raise',
backref='user_employee',
)


@ -0,0 +1,56 @@
from alembic import command, config as alembic_config
from sqlalchemy_study.sqlalchemy import MetaData, Table, ForeignKeyConstraint
from sqlalchemy_study.sqlalchemy import inspect
from sqlalchemy_study.sqlalchemy import NoSuchTableError
from sqlalchemy_study.sqlalchemy import DropConstraint
from db.dependencies import sync_engine
from db.meta import meta
from db.models import load_all_models
from settings import settings
from settings.logger import logger
alembic_cfg = alembic_config.Config("alembic.ini")
def remove_foreign_keys() -> None:
logger.info("Dropping all foreign key constraints from archive database")
inspector = inspect(sync_engine)
fake_metadata = MetaData()
fake_tables = []
all_fks = []
for table_name in meta.tables:
fks = []
try:
for fk in inspector.get_foreign_keys(table_name):
if fk['name']:
fks.append(ForeignKeyConstraint((), (), name=fk['name']))
except NoSuchTableError:
logger.error(f'Table {table_name} does not exist')
t = Table(table_name, fake_metadata, *fks)
fake_tables.append(t)
all_fks.extend(fks)
connection = sync_engine.connect()
transaction = connection.begin()
for fkc in all_fks:
connection.execute(DropConstraint(fkc))
transaction.commit()
def drop_tables() -> None:
load_all_models()
remove_foreign_keys()
meta.drop_all(bind=sync_engine, checkfirst=True)
sync_engine.execute('DROP TABLE IF EXISTS alembic_version')
sync_engine.dispose()
logger.info("All tables are dropped")
def run_migrations() -> None:
with sync_engine.begin() as connection:
alembic_cfg.attributes['connection'] = connection
migration_dialect = 'mysql_init_migrations' if settings.USE_DATABASE == 'mysql' else 'postgres_init_migrations'
command.upgrade(alembic_cfg, migration_dialect)
logger.info('Tables recreated')


@ -0,0 +1 @@
Generic single-database configuration.


@ -0,0 +1,73 @@
import asyncio
from logging.config import fileConfig
from alembic import context
from sqlalchemy_study.sqlalchemy import create_async_engine
from sqlalchemy_study.sqlalchemy import Connection
from db.base import BaseModel
from db.models import load_all_models
from settings import settings
config = context.config
if config.config_file_name is not None:
fileConfig(config.config_file_name)
target_metadata = BaseModel.metadata
load_all_models()
async def run_migrations_offline():
"""Run migrations in 'offline' mode.
This configures the context with just a URL
and not an Engine, though an Engine is acceptable
here as well. By skipping the Engine creation
we don't even need a DBAPI to be available.
Calls to context.execute() here emit the given string to the
script output.
"""
context.configure(
url=settings.async_db_url,
target_metadata=target_metadata,
literal_binds=True,
dialect_opts={"paramstyle": "named"},
)
with context.begin_transaction():
context.run_migrations()
def do_run_migrations(connection: Connection) -> None:
"""
Run actual sync migrations.
:param connection: connection to the database.
"""
context.configure(connection=connection, target_metadata=target_metadata)
with context.begin_transaction():
context.run_migrations()
async def run_migrations_online():
"""Run migrations in 'online' mode.
In this scenario we need to create an Engine
and associate a connection with the context.
"""
connectable = create_async_engine(settings.async_db_url)
async with connectable.connect() as connection:
await connection.run_sync(do_run_migrations)
if context.is_offline_mode():
asyncio.run(run_migrations_offline())
else:
asyncio.run(run_migrations_online())


@ -0,0 +1,24 @@
"""${message}
Revision ID: ${up_revision}
Revises: ${down_revision | comma,n}
Create Date: ${create_date}
"""
from alembic import op
import sqlalchemy as sa
${imports if imports else ""}
# revision identifiers, used by Alembic.
revision = ${repr(up_revision)}
down_revision = ${repr(down_revision)}
branch_labels = ${repr(branch_labels)}
depends_on = ${repr(depends_on)}
def upgrade():
${upgrades if upgrades else "pass"}
def downgrade():
${downgrades if downgrades else "pass"}


@ -0,0 +1,174 @@
"""mysql init models
Revision ID: mysql_init_migrations
Revises:
Create Date: 2022-05-29 19:26:09.995005
"""
from alembic import op
from sqlalchemy_study import sqlalchemy as sa
from sqlalchemy_study.sqlalchemy import mysql
# revision identifiers, used by Alembic.
revision = 'mysql_init_migrations'
down_revision = None
branch_labels = None
depends_on = None
def upgrade():
# ### commands auto generated by Alembic - please adjust! ###
op.create_table('coins',
sa.Column('id', sa.Integer(), autoincrement=True, nullable=False),
sa.Column('created_at', sa.DATETIME(), nullable=True),
sa.Column('updated_at', sa.DATETIME(), nullable=True),
sa.Column('coin_name', sa.VARCHAR(length=50), nullable=True),
sa.Column('enabled', sa.BOOLEAN(), nullable=True),
sa.PrimaryKeyConstraint('id'),
sa.UniqueConstraint('coin_name'),
sa.UniqueConstraint('id')
)
op.create_index(op.f('ix_coins_created_at'), 'coins', ['created_at'], unique=False)
op.create_table('departments',
sa.Column('id', sa.Integer(), autoincrement=True, nullable=False),
sa.Column('created_at', sa.DATETIME(), nullable=True),
sa.Column('updated_at', sa.DATETIME(), nullable=True),
sa.Column('name', sa.VARCHAR(length=255), nullable=False),
sa.Column('description', sa.VARCHAR(length=255), nullable=False),
sa.PrimaryKeyConstraint('id'),
sa.UniqueConstraint('id')
)
op.create_index(op.f('ix_departments_created_at'), 'departments', ['created_at'], unique=False)
op.create_table('skills',
sa.Column('id', sa.Integer(), autoincrement=True, nullable=False),
sa.Column('created_at', sa.DATETIME(), nullable=True),
sa.Column('updated_at', sa.DATETIME(), nullable=True),
sa.Column('name', sa.VARCHAR(length=255), nullable=False),
sa.Column('description', sa.Text(), nullable=True),
sa.PrimaryKeyConstraint('id'),
sa.UniqueConstraint('id'),
sa.UniqueConstraint('name')
)
op.create_index(op.f('ix_skills_created_at'), 'skills', ['created_at'], unique=False)
op.create_table('users',
sa.Column('id', sa.Integer(), autoincrement=True, nullable=False),
sa.Column('created_at', sa.DATETIME(), nullable=True),
sa.Column('updated_at', sa.DATETIME(), nullable=True),
sa.Column('username', sa.String(length=255), nullable=True),
sa.Column('email', sa.String(length=255), nullable=True),
sa.Column('hash_password', sa.String(length=255), nullable=True),
sa.Column('auth_token', sa.String(length=255), nullable=True),
sa.Column('last_login', sa.DateTime(), nullable=True),
sa.PrimaryKeyConstraint('id'),
sa.UniqueConstraint('id'),
sa.UniqueConstraint('username')
)
op.create_index(op.f('ix_users_created_at'), 'users', ['created_at'], unique=False)
op.create_index(op.f('ix_users_email'), 'users', ['email'], unique=True)
op.create_index(op.f('ix_users_last_login'), 'users', ['last_login'], unique=False)
op.create_table('coin_types',
sa.Column('id', sa.Integer(), autoincrement=True, nullable=False),
sa.Column('created_at', sa.DATETIME(), nullable=True),
sa.Column('updated_at', sa.DATETIME(), nullable=True),
sa.Column('coin_name', sa.VARCHAR(length=50), nullable=True),
sa.Column('coin_id', sa.Integer(), nullable=True),
sa.ForeignKeyConstraint(['coin_id'], ['coins.id'], ondelete='CASCADE'),
sa.PrimaryKeyConstraint('id'),
sa.UniqueConstraint('id')
)
op.create_index(op.f('ix_coin_types_created_at'), 'coin_types', ['created_at'], unique=False)
op.create_table('employees',
sa.Column('id', sa.Integer(), autoincrement=True, nullable=False),
sa.Column('created_at', sa.DATETIME(), nullable=True),
sa.Column('updated_at', sa.DATETIME(), nullable=True),
sa.Column('first_name', mysql.VARCHAR(length=128), nullable=False),
sa.Column('last_name', mysql.VARCHAR(length=128), nullable=False),
sa.Column('phone', mysql.VARCHAR(length=30), nullable=True),
sa.Column('description', mysql.VARCHAR(length=255), nullable=True),
sa.Column('coin_id', sa.Integer(), nullable=True),
sa.ForeignKeyConstraint(['coin_id'], ['coins.id'], ondelete='SET NULL'),
sa.PrimaryKeyConstraint('id'),
sa.UniqueConstraint('id'),
sa.UniqueConstraint('phone')
)
op.create_index(op.f('ix_employees_created_at'), 'employees', ['created_at'], unique=False)
op.create_table('cadre_movements',
sa.Column('id', sa.Integer(), autoincrement=True, nullable=False),
sa.Column('created_at', sa.DATETIME(), nullable=True),
sa.Column('updated_at', sa.DATETIME(), nullable=True),
sa.Column('employee', sa.Integer(), nullable=False),
sa.Column('old_department', sa.Integer(), nullable=False),
sa.Column('new_department', sa.Integer(), nullable=False),
sa.Column('reason', sa.VARCHAR(length=500), nullable=True),
sa.ForeignKeyConstraint(['employee'], ['employees.id'], ondelete='CASCADE'),
sa.ForeignKeyConstraint(['new_department'], ['departments.id'], ondelete='CASCADE'),
sa.ForeignKeyConstraint(['old_department'], ['departments.id'], ondelete='CASCADE'),
sa.PrimaryKeyConstraint('id'),
sa.UniqueConstraint('id')
)
op.create_index(op.f('ix_cadre_movements_created_at'), 'cadre_movements', ['created_at'], unique=False)
op.create_index(op.f('ix_cadre_movements_employee'), 'cadre_movements', ['employee'], unique=False)
op.create_index(op.f('ix_cadre_movements_new_department'), 'cadre_movements', ['new_department'], unique=False)
op.create_index(op.f('ix_cadre_movements_old_department'), 'cadre_movements', ['old_department'], unique=False)
op.create_table('employee_departments',
sa.Column('id', sa.Integer(), autoincrement=True, nullable=False),
sa.Column('created_at', sa.DATETIME(), nullable=True),
sa.Column('updated_at', sa.DATETIME(), nullable=True),
sa.Column('employee_id', sa.Integer(), nullable=False),
sa.Column('department_id', sa.Integer(), nullable=False),
sa.ForeignKeyConstraint(['department_id'], ['departments.id'], ondelete='CASCADE'),
sa.ForeignKeyConstraint(['employee_id'], ['employees.id'], ondelete='CASCADE'),
sa.PrimaryKeyConstraint('id'),
sa.UniqueConstraint('id')
)
op.create_index(op.f('ix_employee_departments_created_at'), 'employee_departments', ['created_at'], unique=False)
op.create_index(op.f('ix_employee_departments_department_id'), 'employee_departments', ['department_id'], unique=False)
op.create_index(op.f('ix_employee_departments_employee_id'), 'employee_departments', ['employee_id'], unique=False)
op.create_table('employees_skills',
sa.Column('id', sa.Integer(), autoincrement=True, nullable=False),
sa.Column('created_at', sa.DATETIME(), nullable=True),
sa.Column('updated_at', sa.DATETIME(), nullable=True),
sa.Column('employee_id', sa.Integer(), nullable=False),
sa.Column('skill_id', sa.Integer(), nullable=False),
sa.ForeignKeyConstraint(['employee_id'], ['employees.id'], ondelete='CASCADE'),
sa.ForeignKeyConstraint(['skill_id'], ['skills.id'], ondelete='CASCADE'),
sa.PrimaryKeyConstraint('id'),
sa.UniqueConstraint('employee_id', 'skill_id'),
sa.UniqueConstraint('id')
)
op.create_index(op.f('ix_employees_skills_created_at'), 'employees_skills', ['created_at'], unique=False)
op.create_index(op.f('ix_employees_skills_employee_id'), 'employees_skills', ['employee_id'], unique=False)
op.create_index(op.f('ix_employees_skills_skill_id'), 'employees_skills', ['skill_id'], unique=False)
# ### end Alembic commands ###
def downgrade():
# ### commands auto generated by Alembic - please adjust! ###
op.drop_index(op.f('ix_employees_skills_skill_id'), table_name='employees_skills')
op.drop_index(op.f('ix_employees_skills_employee_id'), table_name='employees_skills')
op.drop_index(op.f('ix_employees_skills_created_at'), table_name='employees_skills')
op.drop_table('employees_skills')
op.drop_index(op.f('ix_employee_departments_employee_id'), table_name='employee_departments')
op.drop_index(op.f('ix_employee_departments_department_id'), table_name='employee_departments')
op.drop_index(op.f('ix_employee_departments_created_at'), table_name='employee_departments')
op.drop_table('employee_departments')
op.drop_index(op.f('ix_cadre_movements_old_department'), table_name='cadre_movements')
op.drop_index(op.f('ix_cadre_movements_new_department'), table_name='cadre_movements')
op.drop_index(op.f('ix_cadre_movements_employee'), table_name='cadre_movements')
op.drop_index(op.f('ix_cadre_movements_created_at'), table_name='cadre_movements')
op.drop_table('cadre_movements')
op.drop_index(op.f('ix_employees_created_at'), table_name='employees')
op.drop_table('employees')
op.drop_index(op.f('ix_coin_types_created_at'), table_name='coin_types')
op.drop_table('coin_types')
op.drop_index(op.f('ix_users_last_login'), table_name='users')
op.drop_index(op.f('ix_users_email'), table_name='users')
op.drop_index(op.f('ix_users_created_at'), table_name='users')
op.drop_table('users')
op.drop_index(op.f('ix_skills_created_at'), table_name='skills')
op.drop_table('skills')
op.drop_index(op.f('ix_departments_created_at'), table_name='departments')
op.drop_table('departments')
op.drop_index(op.f('ix_coins_created_at'), table_name='coins')
op.drop_table('coins')
# ### end Alembic commands ###

View File

@ -0,0 +1,174 @@
"""postgres init migrations
Revision ID: postgres_init_migrations
Revises:
Create Date: 2022-06-14 00:29:28.932954
"""
from alembic import op
from sqlalchemy_study import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = 'postgres_init_migrations'
down_revision = None
branch_labels = None
depends_on = None
def upgrade():
# ### commands auto generated by Alembic - please adjust! ###
op.create_table('coins',
sa.Column('id', sa.Integer(), autoincrement=True, nullable=False),
sa.Column('created_at', sa.TIMESTAMP(), nullable=True),
sa.Column('updated_at', sa.TIMESTAMP(), nullable=True),
sa.Column('coin_name', sa.VARCHAR(length=50), nullable=True),
sa.Column('enabled', sa.BOOLEAN(), nullable=True),
sa.PrimaryKeyConstraint('id'),
sa.UniqueConstraint('coin_name'),
sa.UniqueConstraint('id')
)
op.create_index(op.f('ix_coins_created_at'), 'coins', ['created_at'], unique=False)
op.create_table('departments',
sa.Column('id', sa.Integer(), autoincrement=True, nullable=False),
sa.Column('created_at', sa.TIMESTAMP(), nullable=True),
sa.Column('updated_at', sa.TIMESTAMP(), nullable=True),
sa.Column('name', sa.VARCHAR(length=255), nullable=False),
sa.Column('description', sa.VARCHAR(length=255), nullable=False),
sa.PrimaryKeyConstraint('id'),
sa.UniqueConstraint('id')
)
op.create_index(op.f('ix_departments_created_at'), 'departments', ['created_at'], unique=False)
op.create_table('skills',
sa.Column('id', sa.Integer(), autoincrement=True, nullable=False),
sa.Column('created_at', sa.TIMESTAMP(), nullable=True),
sa.Column('updated_at', sa.TIMESTAMP(), nullable=True),
sa.Column('name', sa.VARCHAR(length=255), nullable=False),
sa.Column('description', sa.Text(), nullable=True),
sa.PrimaryKeyConstraint('id'),
sa.UniqueConstraint('id'),
sa.UniqueConstraint('name')
)
op.create_index(op.f('ix_skills_created_at'), 'skills', ['created_at'], unique=False)
op.create_table('users',
sa.Column('id', sa.Integer(), autoincrement=True, nullable=False),
sa.Column('created_at', sa.TIMESTAMP(), nullable=True),
sa.Column('updated_at', sa.TIMESTAMP(), nullable=True),
sa.Column('username', sa.String(length=255), nullable=True),
sa.Column('email', sa.String(length=255), nullable=True),
sa.Column('hash_password', sa.String(length=255), nullable=True),
sa.Column('auth_token', sa.String(length=255), nullable=True),
sa.Column('last_login', sa.DateTime(), nullable=True),
sa.PrimaryKeyConstraint('id'),
sa.UniqueConstraint('id'),
sa.UniqueConstraint('username')
)
op.create_index(op.f('ix_users_created_at'), 'users', ['created_at'], unique=False)
op.create_index(op.f('ix_users_email'), 'users', ['email'], unique=True)
op.create_index(op.f('ix_users_last_login'), 'users', ['last_login'], unique=False)
op.create_table('coin_types',
sa.Column('id', sa.Integer(), autoincrement=True, nullable=False),
sa.Column('created_at', sa.TIMESTAMP(), nullable=True),
sa.Column('updated_at', sa.TIMESTAMP(), nullable=True),
sa.Column('coin_name', sa.VARCHAR(length=50), nullable=True),
sa.Column('coin_id', sa.Integer(), nullable=True),
sa.ForeignKeyConstraint(['coin_id'], ['coins.id'], ondelete='CASCADE'),
sa.PrimaryKeyConstraint('id'),
sa.UniqueConstraint('id')
)
op.create_index(op.f('ix_coin_types_created_at'), 'coin_types', ['created_at'], unique=False)
op.create_table('employees',
sa.Column('id', sa.Integer(), autoincrement=True, nullable=False),
sa.Column('created_at', sa.TIMESTAMP(), nullable=True),
sa.Column('updated_at', sa.TIMESTAMP(), nullable=True),
sa.Column('first_name', sa.VARCHAR(length=128), nullable=False),
sa.Column('last_name', sa.VARCHAR(length=128), nullable=False),
sa.Column('phone', sa.VARCHAR(length=30), nullable=True),
sa.Column('description', sa.VARCHAR(length=255), nullable=True),
sa.Column('coin_id', sa.Integer(), nullable=True),
sa.ForeignKeyConstraint(['coin_id'], ['coins.id'], ondelete='SET NULL'),
sa.PrimaryKeyConstraint('id'),
sa.UniqueConstraint('id'),
sa.UniqueConstraint('phone')
)
op.create_index(op.f('ix_employees_created_at'), 'employees', ['created_at'], unique=False)
op.create_table('cadre_movements',
sa.Column('id', sa.Integer(), autoincrement=True, nullable=False),
sa.Column('created_at', sa.TIMESTAMP(), nullable=True),
sa.Column('updated_at', sa.TIMESTAMP(), nullable=True),
sa.Column('employee', sa.Integer(), nullable=False),
sa.Column('old_department', sa.Integer(), nullable=False),
sa.Column('new_department', sa.Integer(), nullable=False),
sa.Column('reason', sa.VARCHAR(length=500), nullable=True),
sa.ForeignKeyConstraint(['employee'], ['employees.id'], ondelete='CASCADE'),
sa.ForeignKeyConstraint(['new_department'], ['departments.id'], ondelete='CASCADE'),
sa.ForeignKeyConstraint(['old_department'], ['departments.id'], ondelete='CASCADE'),
sa.PrimaryKeyConstraint('id'),
sa.UniqueConstraint('id')
)
op.create_index(op.f('ix_cadre_movements_created_at'), 'cadre_movements', ['created_at'], unique=False)
op.create_index(op.f('ix_cadre_movements_employee'), 'cadre_movements', ['employee'], unique=False)
op.create_index(op.f('ix_cadre_movements_new_department'), 'cadre_movements', ['new_department'], unique=False)
op.create_index(op.f('ix_cadre_movements_old_department'), 'cadre_movements', ['old_department'], unique=False)
op.create_table('employee_departments',
sa.Column('id', sa.Integer(), autoincrement=True, nullable=False),
sa.Column('created_at', sa.TIMESTAMP(), nullable=True),
sa.Column('updated_at', sa.TIMESTAMP(), nullable=True),
sa.Column('employee_id', sa.Integer(), nullable=False),
sa.Column('department_id', sa.Integer(), nullable=False),
sa.ForeignKeyConstraint(['department_id'], ['departments.id'], ondelete='CASCADE'),
sa.ForeignKeyConstraint(['employee_id'], ['employees.id'], ondelete='CASCADE'),
sa.PrimaryKeyConstraint('id'),
sa.UniqueConstraint('id')
)
op.create_index(op.f('ix_employee_departments_created_at'), 'employee_departments', ['created_at'], unique=False)
op.create_index(op.f('ix_employee_departments_department_id'), 'employee_departments', ['department_id'], unique=False)
op.create_index(op.f('ix_employee_departments_employee_id'), 'employee_departments', ['employee_id'], unique=False)
op.create_table('employees_skills',
sa.Column('id', sa.Integer(), autoincrement=True, nullable=False),
sa.Column('created_at', sa.TIMESTAMP(), nullable=True),
sa.Column('updated_at', sa.TIMESTAMP(), nullable=True),
sa.Column('employee_id', sa.Integer(), nullable=False),
sa.Column('skill_id', sa.Integer(), nullable=False),
sa.ForeignKeyConstraint(['employee_id'], ['employees.id'], ondelete='CASCADE'),
sa.ForeignKeyConstraint(['skill_id'], ['skills.id'], ondelete='CASCADE'),
sa.PrimaryKeyConstraint('id'),
sa.UniqueConstraint('employee_id', 'skill_id'),
sa.UniqueConstraint('id')
)
op.create_index(op.f('ix_employees_skills_created_at'), 'employees_skills', ['created_at'], unique=False)
op.create_index(op.f('ix_employees_skills_employee_id'), 'employees_skills', ['employee_id'], unique=False)
op.create_index(op.f('ix_employees_skills_skill_id'), 'employees_skills', ['skill_id'], unique=False)
# ### end Alembic commands ###
def downgrade():
# ### commands auto generated by Alembic - please adjust! ###
op.drop_index(op.f('ix_employees_skills_skill_id'), table_name='employees_skills')
op.drop_index(op.f('ix_employees_skills_employee_id'), table_name='employees_skills')
op.drop_index(op.f('ix_employees_skills_created_at'), table_name='employees_skills')
op.drop_table('employees_skills')
op.drop_index(op.f('ix_employee_departments_employee_id'), table_name='employee_departments')
op.drop_index(op.f('ix_employee_departments_department_id'), table_name='employee_departments')
op.drop_index(op.f('ix_employee_departments_created_at'), table_name='employee_departments')
op.drop_table('employee_departments')
op.drop_index(op.f('ix_cadre_movements_old_department'), table_name='cadre_movements')
op.drop_index(op.f('ix_cadre_movements_new_department'), table_name='cadre_movements')
op.drop_index(op.f('ix_cadre_movements_employee'), table_name='cadre_movements')
op.drop_index(op.f('ix_cadre_movements_created_at'), table_name='cadre_movements')
op.drop_table('cadre_movements')
op.drop_index(op.f('ix_employees_created_at'), table_name='employees')
op.drop_table('employees')
op.drop_index(op.f('ix_coin_types_created_at'), table_name='coin_types')
op.drop_table('coin_types')
op.drop_index(op.f('ix_users_last_login'), table_name='users')
op.drop_index(op.f('ix_users_email'), table_name='users')
op.drop_index(op.f('ix_users_created_at'), table_name='users')
op.drop_table('users')
op.drop_index(op.f('ix_skills_created_at'), table_name='skills')
op.drop_table('skills')
op.drop_index(op.f('ix_departments_created_at'), table_name='departments')
op.drop_table('departments')
op.drop_index(op.f('ix_coins_created_at'), table_name='coins')
op.drop_table('coins')
# ### end Alembic commands ###

View File

@ -0,0 +1,4 @@
from settings.settings import Settings
settings = Settings()

View File

@ -0,0 +1,11 @@
import logging
import sys
from loguru import logger
logger.remove()
formatter = "<cyan>{time}</cyan> | <level>{level}</level> | <magenta>{message}</magenta>"
sink = sys.stdout
logger.add(sink=sink, colorize=True, level=logging.INFO, format=formatter)
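A small usage sketch (the import path settings.logger is an assumption based on the surrounding files):

from settings.logger import logger  # assumed module path

logger.info('service started')
logger.warning('cache miss, falling back to the database')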

View File

@ -0,0 +1,69 @@
import os
from pathlib import Path
from pydantic import BaseSettings
BASE_DIR = Path(__file__).parent.parent
SHARED_DIR = BASE_DIR.resolve().joinpath('shared')
SHARED_DIR.joinpath('logs').mkdir(exist_ok=True)
DIR_LOGS = SHARED_DIR.joinpath('logs')
class Settings(BaseSettings):
"""Application settings."""
DB_HOST: str = 'db_host'
USE_DATABASE: str = 'mysql'
DB_ECHO: bool = False
# Postgres
POSTGRES_DB_PORT: int
POSTGRES_DB: str
POSTGRES_USER: str
POSTGRES_PASSWORD: str
MYSQL_DB_PORT: int
MYSQL_DATABASE: str
MYSQL_USER: str
MYSQL_PASSWORD: str
@property
def async_db_url(self) -> str:
"""
Assemble database URL from settings.
:return: database URL.
"""
async_postgres_url = (f'postgresql+asyncpg://{self.POSTGRES_USER}:{self.POSTGRES_PASSWORD}@'
f'{self.DB_HOST}:{self.POSTGRES_DB_PORT}/{self.POSTGRES_DB}'
)
async_mysql_url = (f'mysql+asyncmy://{self.MYSQL_USER}:{self.MYSQL_PASSWORD}@'
f'{self.DB_HOST}:{self.MYSQL_DB_PORT}/{self.MYSQL_DATABASE}'
)
if os.environ.get('USE_DATABASE', self.USE_DATABASE).lower() == 'postgres':
return async_postgres_url
return async_mysql_url
@property
def sync_db_url(self) -> str:
"""
Assemble database URL from settings.
:return: database URL.
"""
sync_postgres_url = (f'postgresql://{self.POSTGRES_USER}:{self.POSTGRES_PASSWORD}@'
f'{self.DB_HOST}:{self.POSTGRES_DB_PORT}/{self.POSTGRES_DB}'
)
sync_mysql_url = (f'mysql+pymysql://{self.MYSQL_USER}:{self.MYSQL_PASSWORD}@'
f'{self.DB_HOST}:{self.MYSQL_DB_PORT}/{self.MYSQL_DATABASE}'
)
if os.environ.get('USE_DATABASE', self.USE_DATABASE).lower() == 'postgres':
return sync_postgres_url
return sync_mysql_url
class Config:
env_file = 'config/.env'
env_file_encoding = "utf-8"
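A short usage sketch (the environment values are illustrative, not taken from the repository) showing which URL each property assembles:

import os

# Illustrative values only; in the project they come from config/.env.
os.environ.update({
    'USE_DATABASE': 'postgres',
    'POSTGRES_DB_PORT': '5432',
    'POSTGRES_DB': 'study_db',
    'POSTGRES_USER': 'app',
    'POSTGRES_PASSWORD': 'secret',
    'MYSQL_DB_PORT': '3306',
    'MYSQL_DATABASE': 'study_db',
    'MYSQL_USER': 'app',
    'MYSQL_PASSWORD': 'secret',
})

settings = Settings()
print(settings.async_db_url)  # postgresql+asyncpg://app:secret@db_host:5432/study_db
print(settings.sync_db_url)   # postgresql://app:secret@db_host:5432/study_db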

View File

@ -1,13 +1,21 @@
import argparse
import atexit
import os
import sys
import tarfile
import time
from pathlib import Path
from typing import Optional
import validators
import wget
from loguru import logger
from selenium import webdriver
from selenium.common.exceptions import NoSuchElementException, ElementClickInterceptedException
from selenium.webdriver.common.keys import Keys
from selenium.webdriver.firefox import options
from selenium.webdriver.firefox.service import Service
from selenium.webdriver.firefox.webdriver import WebDriver
from urllib3.exceptions import MaxRetryError
logger.remove()
@ -15,10 +23,50 @@ logger.add(sink=sys.stdout, colorize=True, level='DEBUG',
format="<cyan>{time:DD.MM.YYYY HH:mm:ss}</cyan> | <level>{level}</level> | "
"<magenta>{message}</magenta>")
GECKO_DRIVER_VERSION = '0.31.0'
BASE_DIR = Path(__file__).parent.resolve().as_posix()
TWITCH_USERNAME = os.environ.get('TWITCH_USERNAME')
TWITCH_PASSWORD = os.environ.get('TWITCH_PASSWORD')
if not all([TWITCH_USERNAME, TWITCH_PASSWORD]):
raise Exception('Username and password must be set')
def download_gecko_driver():
logger.info(f'Downloading geckodriver v {GECKO_DRIVER_VERSION}...')
gecko_driver = f'https://github.com/mozilla/geckodriver/releases/download/v{GECKO_DRIVER_VERSION}/' \
f'geckodriver-v{GECKO_DRIVER_VERSION}-linux64.tar.gz'
geckodriver_file = wget.download(url=gecko_driver, out=BASE_DIR)
with tarfile.open(geckodriver_file) as tar:
tar.extractall(BASE_DIR)
os.remove(f'{BASE_DIR}/geckodriver-v{GECKO_DRIVER_VERSION}-linux64.tar.gz')
print(f'\ngeckodriver has been downloaded to folder {BASE_DIR}')
def configure_firefox_driver(private_window: bool = False) -> WebDriver:
opt = options.Options()
opt.headless = False
opt.add_argument('-profile')
opt.add_argument(f'{Path.home()}/snap/firefox/common/.mozilla/firefox')
if private_window:
opt.set_preference("browser.privatebrowsing.autostart", True)
service = Service(executable_path=f'{BASE_DIR}/geckodriver')
firefox_driver = webdriver.Firefox(service=service, options=opt)
return firefox_driver
def validate_stream_url(twitch_url: str) -> Optional[str]:
twitch_url_valid = validators.url(twitch_url)
if twitch_url_valid is not True:
logger.error(f'Url {twitch_url} is invalid. Please provide correct one.')
sys.exit(1)
return twitch_url
class UserExitException(Exception):
@ -29,20 +77,45 @@ def exit_log(message: str):
try:
logger.info(message)
driver.close()
os.remove(f'{os.getcwd()}/geckodriver.log')
sys.exit(0)
except MaxRetryError:
pass
except SystemExit:
os.abort()
def main(twitch_url: str):
try:
try:
driver.get(twitch_url)
time.sleep(4)
try:
elem = driver.find_element(by='css selector', value='[data-a-target="login-button"]')
elem.click()
logger.info('you have 60 seconds to login')
time.sleep(2)
login = driver.find_element(by='css selector', value='[aria-label="Enter your username"]')
login.clear()
login.send_keys(f'{TWITCH_USERNAME}')
password = driver.find_element(by='css selector', value='[aria-label="Enter your password"]')
password.clear()
password.send_keys(f'{TWITCH_PASSWORD}')
time.sleep(1)
password.send_keys(Keys.ENTER)
time.sleep(53)
logger.info('time for login is up')
except NoSuchElementException:
logger.info('Login button not found. Probably you are already logged in')
try:
security_button = driver.find_element(
by='css selector',
value='[data-a-target="account-checkup-generic-modal-secondary-button"]'
)
security_button.click()
except NoSuchElementException:
logger.info('Security button not found, continue...')
except Exception as e:
logger.error(f'Open page exception: {e}')
@ -57,7 +130,32 @@ if __name__ == '__main__':
time.sleep(60 * 15 - 2)
except NoSuchElementException:
time.sleep(1)
except ElementClickInterceptedException:
logger.error('Security button must be clicked')
time.sleep(15 * 60)
except UserExitException:
break
except KeyboardInterrupt as e:
atexit.register(exit_log, 'Exit script')
if __name__ == '__main__':
parser = argparse.ArgumentParser('Twitch clicker', add_help=True)
parser.add_argument('-u', '--twitch_url', required=False, default='https://www.twitch.tv/lol4to22',
help='Please provide twitch stream url')
args = parser.parse_args(sys.argv[1:])
url = 'https://www.twitch.tv/lol4to22'
stream_url = args.twitch_url
if stream_url:
url = validate_stream_url(stream_url)
logger.info(f'Stream url is: {url}')
download_gecko_driver()
driver = configure_firefox_driver()
main(url)
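A minimal reuse sketch as it would look appended to this script (the channel URL is a placeholder; assumes download_gecko_driver() has already fetched the driver into this directory):

driver = configure_firefox_driver(private_window=True)  # main() reads this module-level driver
try:
    main(validate_stream_url('https://www.twitch.tv/some_channel'))
finally:
    driver.quit()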