Mirror of https://github.com/Balshgit/different, synced 2025-09-11 02:50:41 +03:00

Commit 9cb85e8be8 (parent d2a43ab6b6): add some files from study directory

dockerhub.py (new file, +164 lines)
@ -0,0 +1,164 @@
import asyncio
import re
import sys
from logging import Logger
from multiprocessing import Process
from typing import Any

import httpx
from httpx import AsyncHTTPTransport, AsyncClient
from packaging.version import parse as parse_version
from termcolor import colored

SERVICES = {
    'nextcloud': '25.0.4',
    'gitea/gitea': '1.18.5',
    'caddy': '2.6.4',
    'mediawiki': '1.39.2',
    'bitwarden/server': '2023.2.0',
    'redis': '7.0.8',
    'nginx': '1.23.3',
    'mariadb': '10.11.2',
    'postgres': '15.2',
    'mysql': '8.0.32',
    'selenoid/firefox': '110.0',
    'python': '3.11.1',
}


def configure_logger() -> Logger:
    try:
        from loguru import logger as loguru_logger

        loguru_logger.remove()
        loguru_logger.add(
            sink=sys.stdout,
            colorize=True,
            level='DEBUG',
            format='<cyan>{time:DD.MM.YYYY HH:mm:ss}</cyan> | <level>{level}</level> | <magenta>{message}</magenta>',
        )
        return loguru_logger  # type: ignore
    except ImportError:
        import logging

        logging_logger = logging.getLogger('main_logger')
        formatter = logging.Formatter(
            datefmt='%Y.%m.%d %H:%M:%S',
            fmt='%(asctime)s | %(levelname)s | func name: %(funcName)s | message: %(message)s',
        )
        handler = logging.StreamHandler(sys.stdout)
        handler.setFormatter(formatter)
        logging_logger.setLevel(logging.INFO)
        logging_logger.addHandler(handler)
        return logging_logger


logger = configure_logger()


class DockerHubScanner:

    # bitwarden/server
    # https://hub.docker.com/v2/namespaces/bitwarden/repositories/server/tags?page=2

    # caddy
    # https://registry.hub.docker.com/v2/repositories/library/caddy/tags?page=1

    DOCKERHUB_REGISTRY_API = 'https://registry.hub.docker.com/v2/repositories/library'
    DOCKERHUB_API = 'https://hub.docker.com/v2/namespaces'

    def _docker_hub_api_url(self, service_name: str) -> str:
        if '/' in service_name:
            namespace, name = service_name.split('/')
            url = f'{self.DOCKERHUB_API}/{namespace}/repositories/{name}/tags'
        else:
            url = f'{self.DOCKERHUB_REGISTRY_API}/{service_name}/tags'
        return url

    @staticmethod
    async def _async_request(client: AsyncClient, url: str) -> dict[str, Any] | None:
        response = await client.get(url)
        status = response.status_code
        if status == httpx.codes.OK:
            return response.json()
        return None

    @staticmethod
    def _get_next_page_and_tags_from_payload(payload: dict[str, Any]) -> tuple[str | None, list[str]]:
        next_page = payload['next']
        names = [release['name'] for release in payload['results']]
        return next_page, names

    async def get_tags(self, service_name: str) -> dict[str, list[str]]:
        """
        To make this method truly asynchronous it should be rewritten to fetch pages
        concurrently instead of requesting the next page one at a time. However,
        Docker Hub throttles brute-force request bursts, so walking the next page
        sequentially works better here.
        """

        tags = []
        url = self._docker_hub_api_url(service_name)
        transport = AsyncHTTPTransport(retries=1)
        async with AsyncClient(transport=transport) as client:
            payload = await self._async_request(client=client, url=url)

            if not payload:
                return {service_name: tags}

            next_page, names = self._get_next_page_and_tags_from_payload(payload)

            tags.extend(names)

            while SERVICES[service_name] not in tags:
                payload = await self._async_request(client=client, url=next_page)
                next_page, names = self._get_next_page_and_tags_from_payload(payload)
                tags.extend(names)

        # Keep only tags that look like plain versions (e.g. 1.18.3) and drop tags
        # containing letters (e.g. 1.18.3-fpm-alpine), then sort by version number.
        tags = sorted(
            list(filter(lambda t: re.search(r'\d+\.\d', t) and not re.search(r'[a-z]', t), tags)),
            reverse=True,
            key=parse_version,
        )

        # Do not show versions older than the current one in tags.
        tags = tags[:tags.index(SERVICES[service_name]) + 1]
        return {service_name: tags}

    def get_data(self, service_name: str) -> dict[str, list[str]]:
        loop = asyncio.new_event_loop()
        asyncio.set_event_loop(loop)
        services_tags = loop.run_until_complete(self.get_tags(service_name))

        return services_tags

    def print_data(self, service_name: str) -> None:
        data = self.get_data(service_name)
        print(
            f"Service: {colored(service_name, color='light_grey')}",
            f"\nTags: {colored(str(data[service_name]), color='magenta')}",
            f"\nCurrent version: {colored(SERVICES[service_name], color='cyan')}"
        )

        if data[service_name][0] > SERVICES[service_name]:
            print(f"New version of {service_name}: {colored(data[service_name][0], color='yellow')}")
        print()


if __name__ == '__main__':

    print('Services'.center(50, '-'), '\n')

    dockerhub_scanner = DockerHubScanner()
    processes = []

    for service in SERVICES:
        process = Process(target=dockerhub_scanner.print_data, kwargs={'service_name': service})
        processes.append(process)
        process.start()

    for process in processes:
        process.join()
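The get_tags docstring above notes that the scan would only be truly asynchronous if pages were fetched concurrently instead of following the next-page link one request at a time, and that Docker Hub throttles bursts of requests. Purely as an illustration of the concurrent variant (not the author's approach), a minimal sketch follows; it assumes the tags endpoint accepts a `page` query parameter and that a fixed number of pages covers the versions of interest.

```python
# Hypothetical sketch of page-based concurrent fetching; not part of the repository.
# Assumes the tags endpoint accepts ?page=N and that `pages` pages are sufficient.
import asyncio

import httpx


async def fetch_tag_pages(tags_url: str, pages: int = 5) -> list[str]:
    async with httpx.AsyncClient() as client:
        responses = await asyncio.gather(
            *[client.get(f'{tags_url}?page={page}') for page in range(1, pages + 1)]
        )
    names: list[str] = []
    for response in responses:
        if response.status_code == httpx.codes.OK:
            names.extend(release['name'] for release in response.json()['results'])
    return names
```

This trades politeness for speed: with Docker Hub's rate limiting, the sequential next-page walk used in the file above remains the safer default.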
@ -1,4 +0,0 @@
import requests
from requests.packages.urllib3.exceptions import InsecureRequestWarning

requests.packages.urllib3.disable_warnings(InsecureRequestWarning)
@ -1,27 +0,0 @@
import importlib.util
import logging
import sys
from pathlib import Path


current_dir = Path(__file__).parent.parent

# use loguru, if available, for colored output
if importlib.util.find_spec('loguru') is not None:
    from loguru import logger
    logger.remove()
    logger.add(sink=sys.stdout, colorize=True, level='DEBUG',
               format="<cyan>{time:DD.MM.YYYY HH:mm:ss}</cyan> | <level>{level}</level> | "
                      "<magenta>{message}</magenta>")

# use standard logging
else:
    logger = logging.getLogger()
    logger.setLevel(logging.INFO)

    console_handler = logging.StreamHandler()
    console_handler.setLevel(logging.INFO)
    log_formatter = logging.Formatter("%(asctime)s | %(levelname)s | %(message)s")
    console_handler.setFormatter(log_formatter)

    logger.addHandler(console_handler)
@ -1,3 +0,0 @@
#!/usr/bin/bash

git pull --all
@ -1,51 +0,0 @@
import json  # noqa # pylint: disable=unused-import
import subprocess
import sys
import time

import requests

from get_project_core.settings import current_dir, logger

GITLAB_TOKEN = ''

headers = {'PRIVATE-TOKEN': GITLAB_TOKEN}


def create_repositories(group_id: int):
    """
    Create submodules from a gitlab group.

    :param group_id: can be found under the group name
    """
    request = requests.get(f'https://scm.x5.ru/api/v4/groups/{group_id}/projects', headers=headers, verify=False)
    # logger.info(f'{json.dumps(request.json(), indent=4, separators=(",", ":"))}')

    repos = request.json()

    for repo in repos:
        name = str(repo.get("ssh_url_to_repo", None)).strip()
        subprocess.Popen(['git', 'submodule', 'add', name])
        logger.info(f'Created: {name}')
        time.sleep(15)


def update_submodules():
    """
    Update all submodules.
    """
    subprocess.Popen(['git', 'submodule', 'foreach', f'{current_dir}/get-project-core/update-repos.sh'])


if __name__ == '__main__':
    args = sys.argv[1:]
    try:
        group = args[0]
        logger.info(group)
        create_repositories(group_id=int(group))
        update_submodules()
    except IndexError:
        logger.error('Gitlab group id must be set')
    except ValueError:
        logger.error('Gitlab group id must be integer')
@ -1 +0,0 @@
requests
goodgame.py (new file, +200 lines)
@ -0,0 +1,200 @@
import asyncio
import sys
import time
from logging import Logger
from multiprocessing import Process
from typing import Any

import aiohttp
import requests


def configure_logger() -> Logger:
    try:
        from loguru import logger as loguru_logger

        loguru_logger.remove()
        loguru_logger.add(
            sink=sys.stdout,
            colorize=True,
            level='DEBUG',
            format="<cyan>{time:DD.MM.YYYY HH:mm:ss}</cyan> | <level>{level}</level> | <magenta>{message}</magenta>",
        )
        return loguru_logger  # type: ignore
    except ImportError:
        import logging

        logging_logger = logging.getLogger('main_logger')
        formatter = logging.Formatter(
            datefmt="%Y.%m.%d %H:%M:%S",
            fmt='%(asctime)s | %(levelname)s | func name: %(funcName)s | message: %(message)s',
        )
        handler = logging.StreamHandler(sys.stdout)
        handler.setFormatter(formatter)
        logging_logger.setLevel(logging.INFO)
        logging_logger.addHandler(handler)
        return logging_logger


logger = configure_logger()


class GoodGame:
    BASE_URL = 'https://goodgame.ru/api/4/streams'
    PAGES_FOR_ASYNC_SCAN = 25
    CURRENT_WATCHERS_FILTER = 1

    def __init__(self) -> None:
        self.all_streams: dict[int, dict[str, Any]] = dict()

    @staticmethod
    def _show_time_and_result(message: str) -> Any:
        def wrapper(func: Any) -> Any:
            def new_func(*args: Any, **kwargs: Any) -> None:
                begin = time.time()
                result = func(*args, **kwargs)
                end = time.time()
                logger.info(f'{message} execution time, sec: {round(end - begin, 2)}')
                print(result)

            return new_func

        return wrapper

    def get_last_page_number(self) -> int:
        """
        Deprecated
        """
        last_page = 1
        for page in range(20, 0, -1):
            response = requests.get(f'{self.BASE_URL}?page={page}')
            if response.json()["streams"]:
                last_page = page
                break
        return last_page

    def get_max_current_viewers_count(self) -> int | None:
        """
        Deprecated
        """
        response = requests.get(f'{self.BASE_URL}?page=1')
        max_current_viewers = response.json()['streams'][0].get('viewers', None)
        return max_current_viewers

    def _sort_trim_dict(self, data: dict[str, int]) -> dict[str, int]:
        sorted_data = dict(sorted(data.items(), key=lambda x: x[1], reverse=True))
        new_data = {
            stream: viewers_count
            for stream, viewers_count in sorted_data.items()
            if int(viewers_count) >= self.CURRENT_WATCHERS_FILTER
        }
        return new_data

    def __count_streams_with_watchers(self, current_watchers: list[int]) -> int:
        return len(
            list(
                filter(
                    lambda stream: stream['viewers'] in current_watchers,
                    self.all_streams.values(),
                )
            )
        )

    def __prepare_result(self, max_current_viewers: int) -> str:
        total_viewers: dict[str, int] = dict()
        for stream in self.all_streams.values():
            if (
                max_current_viewers
                and int(stream.get('viewers', 0)) <= max_current_viewers
            ):
                total_viewers[
                    f'{stream["streamer"]["username"]} [{stream["game"]["url"]}]'
                ] = int(stream['viewers'])
        watchers_0 = self.__count_streams_with_watchers(current_watchers=[0])
        watchers_1 = self.__count_streams_with_watchers(current_watchers=[1])
        minimal_watchers = self.__count_streams_with_watchers(current_watchers=[0, 1])
        return (
            f'Total streams: {len(self.all_streams)} -> '
            f'with minimal watchers {round(minimal_watchers / len(self.all_streams) * 100)}%\n'
            f'Total streams with 0 viewers: {watchers_0} -> {round(watchers_0/len(self.all_streams) * 100)}%\n'
            f'Total streams with 1 viewer: {watchers_1} -> {round(watchers_1/len(self.all_streams) * 100)}%\n'
            f'Total viewers: {sum(total_viewers.values())}\n'
            f'Streams: {self._sort_trim_dict(total_viewers)}\n'
            f'{"-"*76}'
        )

    async def _async_request(self, session: aiohttp.ClientSession, url: str) -> None:
        async with asyncio.Semaphore(500):
            counter = 0
            while True:
                try:
                    counter += 1
                    resp = await session.get(url)
                    async with resp:
                        if resp.status == 200:
                            data = await resp.json()
                            for stream in data['streams']:
                                self.all_streams.update({stream['id']: stream})
                            return data['streams']
                except Exception as connection_error:
                    if counter < 5:
                        await asyncio.sleep(10)
                    else:
                        raise connection_error

    async def _async_data_scrapper(self) -> int:
        async with aiohttp.ClientSession() as session:

            streams = await asyncio.gather(
                *[
                    self._async_request(session, f'{self.BASE_URL}?page={page}')
                    for page in range(1, self.PAGES_FOR_ASYNC_SCAN + 1)
                ],
                return_exceptions=True,
            )
        max_current_viewers = streams[0][0]['viewers']
        return max_current_viewers

    @_show_time_and_result(message='Async counter')
    def async_counter(self) -> str:
        loop = asyncio.new_event_loop()
        asyncio.set_event_loop(loop)
        max_current_viewers = loop.run_until_complete(self._async_data_scrapper())
        return self.__prepare_result(max_current_viewers)

    @_show_time_and_result(message='Sync counter')
    def sync_counter(self) -> str:
        page = 1

        resp = requests.get(f'{self.BASE_URL}?page={page}')
        streams = resp.json()['streams']
        for stream in streams:
            self.all_streams.update({stream['id']: stream})
        max_current_viewers = streams[0]['viewers']
        while streams:
            page += 1
            resp = requests.get(f'{self.BASE_URL}?page={page}')
            streams = resp.json()['streams']
            for stream in streams:
                self.all_streams.update({stream['id']: stream})
        return self.__prepare_result(max_current_viewers)


if __name__ == '__main__':
    print("-" * 76)
    good_game = GoodGame()
    start = time.time()
    async_process = Process(
        target=good_game.async_counter, args=(), kwargs={}, name='async_process'
    )
    sync_process = Process(
        target=good_game.sync_counter, args=(), kwargs={}, name='sync_process'
    )

    async_process.start()
    sync_process.start()

    async_process.join()
    sync_process.join()
    stop = time.time()
    logger.info(f'End all processes. Execution time: {round(stop-start, 2)} seconds')
linked_list.py (new file, +123 lines)
@ -0,0 +1,123 @@
# Python3 program to merge sort a linked list


# create a Node using class Node.
class Node:
    def __init__(self, data):
        self.data = data
        self.next = None

    def __repr__(self):
        return f'{self.data}'


class LinkedList:
    def __init__(self):
        self.head = None

    # push a new value to the linked list
    # using the append method
    def append(self, new_value):

        # Allocate a new node
        new_node = Node(new_value)

        # if head is None, initialize it to the new node
        if self.head is None:
            self.head = new_node
            return
        curr_node = self.head
        while curr_node.next is not None:
            curr_node = curr_node.next

        # Append the new node at the end
        # of the linked list
        curr_node.next = new_node

    def sorted_merge(self, node_a, node_b):

        # Base cases
        if node_a is None:
            return node_b
        if node_b is None:
            return node_a

        # pick either a or b and recur
        if node_a.data <= node_b.data:
            result = node_a
            result.next = self.sorted_merge(node_a.next, node_b)
        else:
            result = node_b
            result.next = self.sorted_merge(node_a, node_b.next)
        return result

    def merge_sort(self, head):

        # Base case if head is None
        if head is None or head.next is None:
            return head

        # get the middle of the list
        middle = self.get_middle(head)
        next_to_middle = middle.next

        # set the next of the middle node to None
        middle.next = None

        # Apply merge_sort on the left list
        left = self.merge_sort(head)

        # Apply merge_sort on the right list
        right = self.merge_sort(next_to_middle)

        # Merge the left and right lists
        sorted_list = self.sorted_merge(left, right)
        return sorted_list

    # Utility function to get the middle
    # of the linked list
    @staticmethod
    def get_middle(head):
        if head is None:
            return head

        slow = head
        fast = head

        while fast.next is not None and fast.next.next is not None:
            slow = slow.next
            fast = fast.next.next

        return slow

    def __repr__(self):
        # Utility function to print the linked list
        represent = ''
        if self.head is None:
            print(' ')
            return
        curr_node = self.head
        while curr_node:
            represent += f'{curr_node.data} -> '
            curr_node = curr_node.next
        return represent[:-4]


# Driver Code
if __name__ == '__main__':
    li = LinkedList()

    li.append(15)
    li.append(10)
    li.append(5)
    li.append(20)
    li.append(3)
    li.append(2)

    print(li)

    # Apply merge sort
    li.head = li.merge_sort(li.head)
    print("Sorted Linked List is:")
    print(li)
requiremetns.txt (264 changed lines)
@ -1,159 +1,177 @@
|
||||
aiohttp==3.8.1
|
||||
aiosignal==1.2.0
|
||||
alembic==1.7.6
|
||||
altgraph==0.17.2
|
||||
anyio==3.5.0
|
||||
arrow==1.2.2
|
||||
asgiref==3.5.0
|
||||
asttokens==2.0.5
|
||||
aiohttp==3.8.4
|
||||
aiosignal==1.3.1
|
||||
alembic==1.9.4
|
||||
altgraph==0.17.3
|
||||
amqp==5.1.1
|
||||
anyio==3.6.2
|
||||
arrow==1.2.3
|
||||
asgiref==3.6.0
|
||||
asttokens==2.2.1
|
||||
async-generator==1.10
|
||||
async-timeout==4.0.2
|
||||
attrs==21.4.0
|
||||
Babel==2.9.1
|
||||
attrs==22.2.0
|
||||
backcall==0.2.0
|
||||
backports.entry-points-selectable==1.1.1
|
||||
bcrypt==3.2.0
|
||||
bidict==0.21.4
|
||||
bcrypt==4.0.1
|
||||
billiard==3.6.4.0
|
||||
binaryornot==0.4.4
|
||||
black==22.1.0
|
||||
blinker==1.4
|
||||
Brotli==1.0.9
|
||||
CacheControl==0.12.10
|
||||
black==22.12.0
|
||||
CacheControl==0.12.11
|
||||
cachy==0.3.0
|
||||
certifi==2021.10.8
|
||||
cffi==1.15.0
|
||||
chardet==4.0.0
|
||||
charset-normalizer==2.0.12
|
||||
cleo==0.8.1
|
||||
click==8.0.4
|
||||
celery==5.2.7
|
||||
certifi==2022.12.7
|
||||
cffi==1.15.1
|
||||
cfgv==3.3.1
|
||||
chardet==5.1.0
|
||||
charset-normalizer==3.0.1
|
||||
cleo==2.0.1
|
||||
click==8.1.3
|
||||
click-didyoumean==0.3.0
|
||||
click-plugins==1.1.1
|
||||
click-repl==0.2.0
|
||||
clikit==0.6.2
|
||||
cookiecutter==1.7.3
|
||||
coverage==6.3.2
|
||||
crashtest==0.3.1
|
||||
cryptography==36.0.1
|
||||
cookiecutter==2.1.1
|
||||
coverage==7.2.1
|
||||
crashtest==0.4.1
|
||||
cryptography==39.0.0
|
||||
cyclonedx-python-lib==3.1.5
|
||||
decorator==5.1.1
|
||||
distlib==0.3.4
|
||||
Django==4.0.3
|
||||
dnspython==2.2.0
|
||||
email-validator==1.1.3
|
||||
executing==0.8.3
|
||||
fastapi==0.74.1
|
||||
filelock==3.6.0
|
||||
Flask==2.0.3
|
||||
Flask-Login==0.5.0
|
||||
Flask-Principal==0.4.0
|
||||
Flask-SQLAlchemy==2.5.1
|
||||
Flask-WTF==1.0.0
|
||||
frozenlist==1.3.0
|
||||
greenlet==1.1.2
|
||||
h11==0.13.0
|
||||
distlib==0.3.6
|
||||
Django==4.1.7
|
||||
dparse==0.6.2
|
||||
dulwich==0.20.50
|
||||
executing==1.2.0
|
||||
factory-boy==3.2.1
|
||||
Faker==16.9.0
|
||||
fastapi==0.89.1
|
||||
filelock==3.9.0
|
||||
flake8==6.0.0
|
||||
frozenlist==1.3.3
|
||||
greenlet==2.0.2
|
||||
gunicorn==20.1.0
|
||||
h11==0.14.0
|
||||
html5lib==1.1
|
||||
idna==3.3
|
||||
importlib-metadata==4.11.2
|
||||
iniconfig==1.1.1
|
||||
ipython==8.1.0
|
||||
itsdangerous==2.1.0
|
||||
jedi==0.18.1
|
||||
jeepney==0.7.1
|
||||
Jinja2==3.0.3
|
||||
httpcore==0.16.3
|
||||
httpx==0.23.3
|
||||
identify==2.5.18
|
||||
idna==3.4
|
||||
importlib-metadata==6.0.0
|
||||
iniconfig==2.0.0
|
||||
ipython==8.11.0
|
||||
jaraco.classes==3.2.3
|
||||
jedi==0.18.2
|
||||
jeepney==0.8.0
|
||||
Jinja2==3.1.2
|
||||
jinja2-time==0.2.0
|
||||
keyring==23.5.0
|
||||
jsonschema==4.17.3
|
||||
keyring==23.13.1
|
||||
kombu==5.2.4
|
||||
lockfile==0.12.2
|
||||
loguru==0.6.0
|
||||
Mako==1.1.6
|
||||
MarkupSafe==2.1.0
|
||||
matplotlib-inline==0.1.3
|
||||
MouseInfo==0.1.3
|
||||
msgpack==1.0.3
|
||||
multidict==6.0.2
|
||||
mypy==0.931
|
||||
mypy-extensions==0.4.3
|
||||
outcome==1.1.0
|
||||
Mako==1.2.4
|
||||
markdown-it-py==2.1.0
|
||||
MarkupSafe==2.1.2
|
||||
matplotlib-inline==0.1.6
|
||||
mccabe==0.7.0
|
||||
mdurl==0.1.2
|
||||
more-itertools==9.0.0
|
||||
MouseInfo==0.1.0
|
||||
msgpack==1.0.4
|
||||
multidict==6.0.4
|
||||
mypy==0.991
|
||||
mypy-extensions==1.0.0
|
||||
nodeenv==1.7.0
|
||||
numpy==1.24.2
|
||||
orjson==3.8.7
|
||||
outcome==1.2.0
|
||||
packageurl-python==0.10.4
|
||||
packaging==21.3
|
||||
paramiko==2.9.2
|
||||
parso==0.8.3
|
||||
passlib==1.7.4
|
||||
pastel==0.2.1
|
||||
pathspec==0.9.0
|
||||
pathspec==0.11.0
|
||||
pexpect==4.8.0
|
||||
pickleshare==0.7.5
|
||||
Pillow==9.0.1
|
||||
pkginfo==1.8.2
|
||||
platformdirs==2.5.1
|
||||
Pillow==9.4.0
|
||||
pip-api==0.0.30
|
||||
pip-requirements-parser==32.0.1
|
||||
pip_audit==2.4.14
|
||||
pkginfo==1.9.6
|
||||
platformdirs==3.0.0
|
||||
pluggy==1.0.0
|
||||
poetry==1.1.13
|
||||
poetry-core==1.0.8
|
||||
poyo==0.5.0
|
||||
prompt-toolkit==3.0.28
|
||||
psycopg2-binary==2.9.3
|
||||
poetry==1.3.2
|
||||
poetry-core==1.4.0
|
||||
poetry-plugin-export==1.2.0
|
||||
pre-commit==2.21.0
|
||||
prompt-toolkit==3.0.38
|
||||
psycopg2-binary==2.9.5
|
||||
ptyprocess==0.7.0
|
||||
pure-eval==0.2.2
|
||||
py==1.11.0
|
||||
pyasn1==0.4.8
|
||||
PyAutoGUI==0.9.53
|
||||
pycodestyle==2.10.0
|
||||
pycparser==2.21
|
||||
pydantic==1.9.0
|
||||
pydantic==1.10.5
|
||||
pyflakes==3.0.1
|
||||
PyGetWindow==0.0.9
|
||||
Pygments==2.11.2
|
||||
pyinstaller==4.9
|
||||
pyinstaller-hooks-contrib==2022.2
|
||||
Pygments==2.14.0
|
||||
pyinstaller==5.8.0
|
||||
pyinstaller-hooks-contrib==2023.0
|
||||
pylev==1.4.0
|
||||
PyMsgBox==1.0.9
|
||||
PyNaCl==1.5.0
|
||||
pyOpenSSL==22.0.0
|
||||
pyparsing==3.0.7
|
||||
pyparsing==3.0.9
|
||||
pyperclip==1.8.2
|
||||
PyQt6==6.2.3
|
||||
PyQt6-Qt6==6.2.3
|
||||
PyQt6-sip==13.2.1
|
||||
PyRect==0.1.4
|
||||
PyQt6==6.4.2
|
||||
PyQt6-Qt6==6.4.2
|
||||
PyQt6-sip==13.4.1
|
||||
PyRect==0.2.0
|
||||
pyrsistent==0.19.3
|
||||
PyScreeze==0.1.28
|
||||
PySocks==1.7.1
|
||||
pytest==7.0.1
|
||||
pytest-cov==3.0.0
|
||||
pytest==7.2.1
|
||||
pytest-cov==4.0.0
|
||||
python-dateutil==2.8.2
|
||||
python-decouple==3.6
|
||||
python-dotenv==0.19.2
|
||||
python-engineio==4.3.1
|
||||
python-slugify==6.1.1
|
||||
python-socketio==5.5.2
|
||||
python-decouple==3.8
|
||||
python-slugify==8.0.1
|
||||
python3-xlib==0.15
|
||||
pytweening==1.0.4
|
||||
pytz==2021.3
|
||||
qt6-applications==6.1.0.2.2
|
||||
qt6-tools==6.1.0.1.2
|
||||
requests==2.27.1
|
||||
pytz==2022.7.1
|
||||
PyYAML==6.0
|
||||
rapidfuzz==2.13.7
|
||||
redis==4.5.1
|
||||
requests==2.28.2
|
||||
requests-toolbelt==0.9.1
|
||||
SecretStorage==3.3.1
|
||||
selenium==4.1.2
|
||||
shellingham==1.4.0
|
||||
simplejson==3.17.6
|
||||
resolvelib==0.9.0
|
||||
rfc3986==1.5.0
|
||||
rich==13.3.1
|
||||
ruamel.yaml==0.17.21
|
||||
safety==2.3.5
|
||||
SecretStorage==3.3.3
|
||||
selenium==4.8.2
|
||||
shellingham==1.5.0.post1
|
||||
simple-term-menu==1.6.1
|
||||
six==1.16.0
|
||||
sniffio==1.2.0
|
||||
sniffio==1.3.0
|
||||
sortedcontainers==2.4.0
|
||||
speaklater==1.3
|
||||
speaklater3==1.4
|
||||
SQLAlchemy==1.4.31
|
||||
sqlparse==0.4.2
|
||||
sshtunnel==0.4.0
|
||||
stack-data==0.2.0
|
||||
starlette==0.18.0
|
||||
style==1.1.6
|
||||
SQLAlchemy==1.4.46
|
||||
SQLAlchemy-Utils==0.38.3
|
||||
sqlparse==0.4.3
|
||||
stack-data==0.6.2
|
||||
starlette==0.22.0
|
||||
termcolor==2.2.0
|
||||
text-unidecode==1.3
|
||||
tomli==2.0.1
|
||||
tomlkit==0.10.0
|
||||
traitlets==5.1.1
|
||||
trio==0.20.0
|
||||
toml==0.10.2
|
||||
tomlkit==0.11.6
|
||||
traitlets==5.9.0
|
||||
trio==0.22.0
|
||||
trio-websocket==0.9.2
|
||||
typing_extensions==4.1.1
|
||||
ua-parser==0.10.0
|
||||
urllib3==1.26.8
|
||||
user-agents==2.2.0
|
||||
virtualenv==20.13.2
|
||||
wcwidth==0.2.5
|
||||
trove-classifiers==2023.1.20
|
||||
typing_extensions==4.5.0
|
||||
urllib3==1.26.14
|
||||
uvicorn==0.20.0
|
||||
validators==0.20.0
|
||||
vine==5.0.0
|
||||
virtualenv==20.20.0
|
||||
wcwidth==0.2.6
|
||||
webencodings==0.5.1
|
||||
Werkzeug==2.0.3
|
||||
wsproto==1.1.0
|
||||
WTForms==3.0.1
|
||||
yarl==1.7.2
|
||||
zipp==3.7.0
|
||||
wget==3.2
|
||||
wsproto==1.2.0
|
||||
yarl==1.8.2
|
||||
zipp==3.15.0
|
snake.py (new file, +72 lines)
@ -0,0 +1,72 @@
import math
from itertools import cycle


class Snake:

    def __init__(self):
        self.x = 0
        self.y = 0
        self.move = self.move_right

    def move_right(self) -> None:
        self.x += 1

    def move_left(self) -> None:
        self.x -= 1

    def move_down(self) -> None:
        self.y += 1

    def move_up(self) -> None:
        self.y -= 1

    def move_direction(self) -> cycle:
        return cycle([self.move_right, self.move_down, self.move_left, self.move_up])

    def move_back(self) -> None:
        match self.move:
            case self.move_right:
                self.x -= 1
            case self.move_left:
                self.x += 1
            case self.move_down:
                self.y -= 1
            case self.move_up:
                self.y += 1

    def get_current_element_or_none(self, board: dict[int, list[str]]) -> str | None:
        try:
            return board.get(self.y)[self.x]
        except IndexError:
            return None
        except TypeError:
            return None


def snake(n: int) -> None:
    board: dict[int, list[str]] = {row: ['0' for column in range(n)] for row in range(n)}

    python = Snake()

    move_direction = python.move_direction()
    next(move_direction)
    python.move_back()  # step to position -1 so that the next move lands on position zero

    for number in range(n ** 2):
        python.move()
        element = python.get_current_element_or_none(board)

        if not element or element != '0':
            python.move_back()
            python.move = next(move_direction)
            python.move()

        board[python.y][python.x] = f'{number + 1}'.rjust(int(math.log10(n**2)) + 1, ' ')

    for line in board.values():
        print(*line)


if __name__ == '__main__':
    snake(7)
@ -1,3 +1,7 @@
|
||||
### Python template
|
||||
|
||||
.idea/
|
||||
.vscode/
|
||||
# Byte-compiled / optimized / DLL files
|
||||
__pycache__/
|
||||
*.py[cod]
|
||||
@ -20,6 +24,7 @@ parts/
|
||||
sdist/
|
||||
var/
|
||||
wheels/
|
||||
share/python-wheels/
|
||||
*.egg-info/
|
||||
.installed.cfg
|
||||
*.egg
|
||||
@ -38,14 +43,17 @@ pip-delete-this-directory.txt
|
||||
# Unit test / coverage reports
|
||||
htmlcov/
|
||||
.tox/
|
||||
.nox/
|
||||
.coverage
|
||||
.coverage.*
|
||||
.cache
|
||||
nosetests.xml
|
||||
coverage.xml
|
||||
*.cover
|
||||
*.py,cover
|
||||
.hypothesis/
|
||||
.pytest_cache/
|
||||
cover/
|
||||
|
||||
# Translations
|
||||
*.mo
|
||||
@ -55,6 +63,8 @@ coverage.xml
|
||||
*.log
|
||||
local_settings.py
|
||||
db.sqlite3
|
||||
db.sqlite3-journal
|
||||
*.db
|
||||
|
||||
# Flask stuff:
|
||||
instance/
|
||||
@ -67,16 +77,34 @@ instance/
|
||||
docs/_build/
|
||||
|
||||
# PyBuilder
|
||||
.pybuilder/
|
||||
target/
|
||||
|
||||
# Jupyter Notebook
|
||||
.ipynb_checkpoints
|
||||
|
||||
# pyenv
|
||||
.python-version
|
||||
# IPython
|
||||
profile_default/
|
||||
ipython_config.py
|
||||
|
||||
# celery beat schedule file
|
||||
# pyenv
|
||||
# For a library or package, you might want to ignore these files since the code is
|
||||
# intended to run in multiple environments; otherwise, check them in:
|
||||
# .python-version
|
||||
|
||||
# pipenv
|
||||
# According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control.
|
||||
# However, in case of collaboration, if having platform-specific dependencies or dependencies
|
||||
# having no cross-platform support, pipenv may install dependencies that don't work, or not
|
||||
# install all needed dependencies.
|
||||
#Pipfile.lock
|
||||
|
||||
# PEP 582; used by e.g. github.com/David-OConnor/pyflow
|
||||
__pypackages__/
|
||||
|
||||
# Celery stuff
|
||||
celerybeat-schedule
|
||||
celerybeat.pid
|
||||
|
||||
# SageMath parsed files
|
||||
*.sage.py
|
||||
@ -102,6 +130,18 @@ venv.bak/
|
||||
|
||||
# mypy
|
||||
.mypy_cache/
|
||||
.dmypy.json
|
||||
dmypy.json
|
||||
|
||||
.idea/
|
||||
.vscode/
|
||||
# Pyre type checker
|
||||
.pyre/
|
||||
|
||||
# pytype static type analyzer
|
||||
.pytype/
|
||||
|
||||
# Cython debug symbols
|
||||
cython_debug/
|
||||
|
||||
# my stuff
|
||||
delete/
|
||||
delete.py
|
sqlalchemy_study/README.md (new file, +103 lines)
@ -0,0 +1,103 @@
# SQLALCHEMY STUDY

---

*Note: MySQL will start on port 3307*

*Note: Postgres will start on port 5433*

---

## Create environment:

```bash
cp ./src/config/.env.template ./src/config/.env
```

*Note: Change the USE_DATABASE variable to 'mysql' for MySQL training or 'postgres' for Postgres use.*

*Default is MySQL*

## Run without the app in docker:

Requires Python >= 3.11 and Poetry 1.3.1

- **install poetry dependencies:**
```bash
poetry install
poetry shell
```

- **run for mysql:** ```docker-compose -f docker-compose.mysql.yaml up```

- **run for postgres:** ```docker-compose -f docker-compose.postgres.yaml up```

- **run initial data:** ```python ./src/data/fill_data.py```

## Run all in docker:

**run for mysql:**
```bash
docker-compose -f docker-compose.mysql.yaml -f docker-compose.docker.yaml up
```
**run for postgres:**
```bash
docker-compose -f docker-compose.postgres.yaml -f docker-compose.docker.yaml up
```
*Note: Docker will run all migrations automatically, so you don't need the data creation step.*

## Help info:

### Create alembic migrations:

*Note: To generate migrations you should run:*
```bash
# For automatic change detection.
alembic revision --autogenerate -m "migration message"

# For empty file generation.
alembic revision
```

*Note: If you want to migrate your database, you should run the following commands:*
```bash
# To run all migrations until the migration with revision_id.
alembic upgrade "<revision_id>"

# To perform all pending migrations.
alembic upgrade "head"
```

### Reverting alembic migrations:

*Note: If you want to revert migrations, you should run:*
```bash
# Revert all migrations up to: revision_id.
alembic downgrade <revision_id>

# Revert everything.
alembic downgrade base

# Revert N revisions.
alembic downgrade -2
```

### Database access:

Postgres:
```bash
docker exec -it sqlalchemy_study_db psql -d sqlalchemy_study -U balsh
```

- show help ```\?```
- show all tables: ```\dt```
- describe table ```\d {table name}```
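The commands above only cover the Postgres client. A rough MySQL equivalent, assuming the container name, user and database defined in `docker-compose.mysql.yaml` and `src/config/.env.template`, would be:

```bash
# Hypothetical MySQL equivalent; the password prompt expects MYSQL_PASSWORD from your .env file.
docker exec -it sqlalchemy_study_db mysql -u user -p sqlalchemy_study
```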

## Clean database
```bash
docker-compose -f docker-compose.mysql.yaml down -v
```

## Known issues:
sqlalchemy_study/docker-compose.docker.yaml (new file, +39 lines)
@ -0,0 +1,39 @@
|
||||
version: '3.9'
|
||||
|
||||
networks:
|
||||
sqlalchemy_study_network:
|
||||
name: "sqlalchemy_study_network"
|
||||
ipam:
|
||||
config:
|
||||
- subnet: 200.20.0.0/24
|
||||
|
||||
|
||||
services:
|
||||
db:
|
||||
networks:
|
||||
sqlalchemy_study_network:
|
||||
ipv4_address: 200.20.0.12
|
||||
|
||||
app:
|
||||
container_name: "sqlalchemy_study_app"
|
||||
image: "sqlalchemy_study:latest"
|
||||
build:
|
||||
context: .
|
||||
dockerfile: ./docker/Dockerfile
|
||||
args:
|
||||
USER: root
|
||||
restart: unless-stopped
|
||||
networks:
|
||||
sqlalchemy_study_network:
|
||||
ipv4_address: 200.20.0.10
|
||||
env_file: ./src/config/.env
|
||||
environment:
|
||||
DB_HOST: db
|
||||
depends_on:
|
||||
- db
|
||||
command: >
|
||||
bash -c "/app/scripts/docker-entrypoint.sh
|
||||
&& /app/scripts/alembic-init-migrate.sh && python data/fill_data.py
|
||||
&& sleep infinity"
|
||||
volumes:
|
||||
- ./src:/app/src/
|
sqlalchemy_study/docker-compose.mysql.yaml (new file, +29 lines)
@ -0,0 +1,29 @@
|
||||
version: '3.9'
|
||||
|
||||
|
||||
volumes:
|
||||
sqlalchemy_study_db_data:
|
||||
name: "sqlalchemy_study_db_data"
|
||||
|
||||
services:
|
||||
|
||||
db:
|
||||
image: mysql:8.0.31
|
||||
platform: linux/amd64
|
||||
container_name: "sqlalchemy_study_db"
|
||||
hostname: 'db_host'
|
||||
volumes:
|
||||
- sqlalchemy_study_db_data:/var/lib/mysql
|
||||
- /etc/localtime:/etc/localtime:ro
|
||||
env_file: ./src/config/.env
|
||||
environment:
|
||||
MYSQL_TCP_PORT: 3307
|
||||
restart: unless-stopped
|
||||
expose:
|
||||
- '3307'
|
||||
ports:
|
||||
- '3307:3307'
|
||||
security_opt:
|
||||
- seccomp:unconfined
|
||||
cap_add:
|
||||
- SYS_NICE # CAP_SYS_NICE
|
sqlalchemy_study/docker-compose.postgres.yaml (new file, +23 lines)
@ -0,0 +1,23 @@
|
||||
version: '3.9'
|
||||
|
||||
|
||||
volumes:
|
||||
sqlalchemy_study_db_data:
|
||||
name: "sqlalchemy_study_db_data"
|
||||
|
||||
services:
|
||||
|
||||
db:
|
||||
image: postgres:14.6
|
||||
container_name: "sqlalchemy_study_db"
|
||||
hostname: 'db_host'
|
||||
restart: unless-stopped
|
||||
volumes:
|
||||
- sqlalchemy_study_db_data:/var/lib/postgresql/data
|
||||
- /etc/localtime:/etc/localtime:ro
|
||||
env_file: ./src/config/.env
|
||||
expose:
|
||||
- '5433'
|
||||
ports:
|
||||
- '5433:5433'
|
||||
command: -p 5433
|
sqlalchemy_study/docker/Dockerfile (new file, +60 lines)
@ -0,0 +1,60 @@

FROM --platform=linux/amd64 python:3.11.1

ARG USER

ENV SOURCE_DIR=/app/src/

ENV USER=${USER} \
    PYTHONFAULTHANDLER=1 \
    PYTHONUNBUFFERED=1 \
    PYTHONHASHSEED=random \
    PYTHONDONTWRITEBYTECODE=1 \
    PYTHONPATH="${PYTHONPATH}:${SOURCE_DIR}" \
    # pip:
    PIP_NO_CACHE_DIR=off \
    PIP_DISABLE_PIP_VERSION_CHECK=on \
    PIP_DEFAULT_TIMEOUT=100 \
    POETRY_VIRTUALENVS_CREATE=false \
    POETRY_CACHE_DIR='/var/cache/pypoetry' \
    PATH="$PATH:/root/.poetry/bin"

RUN printf "================\n\nStart build app. USER is: "${USER}"\n\n===============\n" \
    && apt-get update \
    && apt-get install --no-install-recommends -y \
        procps \
        bash \
        build-essential \
        curl \
        iputils-ping \
        gettext \
        git \
        libpq-dev \
        nano \
        sshpass \
    && pip install --upgrade pip \
    # Installing `poetry` package manager:
    && pip install poetry \
    # Cleaning cache:
    && apt-get purge -y --auto-remove -o APT::AutoRemove::RecommendsImportant=false \
    && apt-get clean -y && rm -rf /var/lib/apt/lists/*

WORKDIR ${SOURCE_DIR}

RUN if [ "$USER" != "root" ]; then \
        groupadd -r "$USER" && useradd -d /home/"$USER" -r -g "$USER" "$USER" \
        && chown "$USER":"$USER" -R /home/"$USER"; \
    fi

COPY --chown="$USER":"$USER" ./poetry.lock ./pyproject.toml ${SOURCE_DIR}

# Installing requirements
RUN poetry install && rm -rf "$POETRY_CACHE_DIR"

COPY ./docker/scripts/ /app/scripts/
RUN chmod +x /app/scripts/docker-entrypoint.sh /app/scripts/alembic-init-migrate.sh

USER "$USER"

# Copying the actual application
COPY --chown="$USER":"$USER" . ${SOURCE_DIR}
sqlalchemy_study/docker/scripts/alembic-init-migrate.sh (new file, +16 lines)
@ -0,0 +1,16 @@
#!/bin/bash

alembic_init_migrations(){
  echo "Chosen database IS $USE_DATABASE"
  if [ "$USE_DATABASE" = "mysql" ];
  then
    echo "Start migrations for MySQL"
    alembic upgrade mysql_init_migrations;
  elif [ "$USE_DATABASE" = "postgres" ];
  then
    echo "Start migrations for Postgres"
    alembic upgrade postgres_init_migrations;
  fi
}

alembic_init_migrations
sqlalchemy_study/docker/scripts/docker-entrypoint.sh (new executable file, +26 lines)
@ -0,0 +1,26 @@
#!/bin/bash

TIMEOUT=${TIMEOUT:-60}

DATABASE_HOST=${DB_HOST:-db_host}

POSTGRES_DATABASE_PORT=${POSTGRES_DB_PORT:-5432}
POSTGRES_DATABASE="$DATABASE_HOST:$POSTGRES_DATABASE_PORT"

MYSQL_DATABASE_PORT=${MYSQL_DB_PORT:-3306}
MYSQL_DATABASE="$DATABASE_HOST:$MYSQL_DATABASE_PORT"

wait_for_databases(){
  echo "Chosen database IS $USE_DATABASE"
  if [ "$USE_DATABASE" = "mysql" ];
  then
    echo "Waiting for DB on: $MYSQL_DATABASE"
    /app/scripts/wait-for-it.sh -t $TIMEOUT -s $MYSQL_DATABASE -- echo 'MySQL database connected';
  elif [ "$USE_DATABASE" = "postgres" ];
  then
    echo "Waiting for DB on: $POSTGRES_DATABASE"
    /app/scripts/wait-for-it.sh -t $TIMEOUT -s $POSTGRES_DATABASE -- echo 'Postgres database connected';
  fi
}

wait_for_databases
sqlalchemy_study/docker/scripts/wait-for-it.sh (new executable file, +182 lines)
@ -0,0 +1,182 @@
|
||||
#!/usr/bin/env bash
|
||||
# Use this script to test if a given TCP host/port are available
|
||||
|
||||
WAITFORIT_cmdname=${0##*/}
|
||||
|
||||
echoerr() { if [[ $WAITFORIT_QUIET -ne 1 ]]; then echo "$@" 1>&2; fi }
|
||||
|
||||
usage()
|
||||
{
|
||||
cat << USAGE >&2
|
||||
Usage:
|
||||
$WAITFORIT_cmdname host:port [-s] [-t timeout] [-- command args]
|
||||
-h HOST | --host=HOST Host or IP under test
|
||||
-p PORT | --port=PORT TCP port under test
|
||||
Alternatively, you specify the host and port as host:port
|
||||
-s | --strict Only execute subcommand if the test succeeds
|
||||
-q | --quiet Don't output any status messages
|
||||
-t TIMEOUT | --timeout=TIMEOUT
|
||||
Timeout in seconds, zero for no timeout
|
||||
-- COMMAND ARGS Execute command with args after the test finishes
|
||||
USAGE
|
||||
exit 1
|
||||
}
|
||||
|
||||
wait_for()
|
||||
{
|
||||
if [[ $WAITFORIT_TIMEOUT -gt 0 ]]; then
|
||||
echoerr "$WAITFORIT_cmdname: waiting $WAITFORIT_TIMEOUT seconds for $WAITFORIT_HOST:$WAITFORIT_PORT"
|
||||
else
|
||||
echoerr "$WAITFORIT_cmdname: waiting for $WAITFORIT_HOST:$WAITFORIT_PORT without a timeout"
|
||||
fi
|
||||
WAITFORIT_start_ts=$(date +%s)
|
||||
while :
|
||||
do
|
||||
if [[ $WAITFORIT_ISBUSY -eq 1 ]]; then
|
||||
nc -z $WAITFORIT_HOST $WAITFORIT_PORT
|
||||
WAITFORIT_result=$?
|
||||
else
|
||||
(echo -n > /dev/tcp/$WAITFORIT_HOST/$WAITFORIT_PORT) >/dev/null 2>&1
|
||||
WAITFORIT_result=$?
|
||||
fi
|
||||
if [[ $WAITFORIT_result -eq 0 ]]; then
|
||||
WAITFORIT_end_ts=$(date +%s)
|
||||
echoerr "$WAITFORIT_cmdname: $WAITFORIT_HOST:$WAITFORIT_PORT is available after $((WAITFORIT_end_ts - WAITFORIT_start_ts)) seconds"
|
||||
break
|
||||
fi
|
||||
sleep 1
|
||||
done
|
||||
return $WAITFORIT_result
|
||||
}
|
||||
|
||||
wait_for_wrapper()
|
||||
{
|
||||
# In order to support SIGINT during timeout: http://unix.stackexchange.com/a/57692
|
||||
if [[ $WAITFORIT_QUIET -eq 1 ]]; then
|
||||
timeout $WAITFORIT_BUSYTIMEFLAG $WAITFORIT_TIMEOUT $0 --quiet --child --host=$WAITFORIT_HOST --port=$WAITFORIT_PORT --timeout=$WAITFORIT_TIMEOUT &
|
||||
else
|
||||
timeout $WAITFORIT_BUSYTIMEFLAG $WAITFORIT_TIMEOUT $0 --child --host=$WAITFORIT_HOST --port=$WAITFORIT_PORT --timeout=$WAITFORIT_TIMEOUT &
|
||||
fi
|
||||
WAITFORIT_PID=$!
|
||||
trap "kill -INT -$WAITFORIT_PID" INT
|
||||
wait $WAITFORIT_PID
|
||||
WAITFORIT_RESULT=$?
|
||||
if [[ $WAITFORIT_RESULT -ne 0 ]]; then
|
||||
echoerr "$WAITFORIT_cmdname: timeout occurred after waiting $WAITFORIT_TIMEOUT seconds for $WAITFORIT_HOST:$WAITFORIT_PORT"
|
||||
fi
|
||||
return $WAITFORIT_RESULT
|
||||
}
|
||||
|
||||
# process arguments
|
||||
while [[ $# -gt 0 ]]
|
||||
do
|
||||
case "$1" in
|
||||
*:* )
|
||||
WAITFORIT_hostport=(${1//:/ })
|
||||
WAITFORIT_HOST=${WAITFORIT_hostport[0]}
|
||||
WAITFORIT_PORT=${WAITFORIT_hostport[1]}
|
||||
shift 1
|
||||
;;
|
||||
--child)
|
||||
WAITFORIT_CHILD=1
|
||||
shift 1
|
||||
;;
|
||||
-q | --quiet)
|
||||
WAITFORIT_QUIET=1
|
||||
shift 1
|
||||
;;
|
||||
-s | --strict)
|
||||
WAITFORIT_STRICT=1
|
||||
shift 1
|
||||
;;
|
||||
-h)
|
||||
WAITFORIT_HOST="$2"
|
||||
if [[ $WAITFORIT_HOST == "" ]]; then break; fi
|
||||
shift 2
|
||||
;;
|
||||
--host=*)
|
||||
WAITFORIT_HOST="${1#*=}"
|
||||
shift 1
|
||||
;;
|
||||
-p)
|
||||
WAITFORIT_PORT="$2"
|
||||
if [[ $WAITFORIT_PORT == "" ]]; then break; fi
|
||||
shift 2
|
||||
;;
|
||||
--port=*)
|
||||
WAITFORIT_PORT="${1#*=}"
|
||||
shift 1
|
||||
;;
|
||||
-t)
|
||||
WAITFORIT_TIMEOUT="$2"
|
||||
if [[ $WAITFORIT_TIMEOUT == "" ]]; then break; fi
|
||||
shift 2
|
||||
;;
|
||||
--timeout=*)
|
||||
WAITFORIT_TIMEOUT="${1#*=}"
|
||||
shift 1
|
||||
;;
|
||||
--)
|
||||
shift
|
||||
WAITFORIT_CLI=("$@")
|
||||
break
|
||||
;;
|
||||
--help)
|
||||
usage
|
||||
;;
|
||||
*)
|
||||
echoerr "Unknown argument: $1"
|
||||
usage
|
||||
;;
|
||||
esac
|
||||
done
|
||||
|
||||
if [[ "$WAITFORIT_HOST" == "" || "$WAITFORIT_PORT" == "" ]]; then
|
||||
echoerr "Error: you need to provide a host and port to test."
|
||||
usage
|
||||
fi
|
||||
|
||||
WAITFORIT_TIMEOUT=${WAITFORIT_TIMEOUT:-15}
|
||||
WAITFORIT_STRICT=${WAITFORIT_STRICT:-0}
|
||||
WAITFORIT_CHILD=${WAITFORIT_CHILD:-0}
|
||||
WAITFORIT_QUIET=${WAITFORIT_QUIET:-0}
|
||||
|
||||
# Check to see if timeout is from busybox?
|
||||
WAITFORIT_TIMEOUT_PATH=$(type -p timeout)
|
||||
WAITFORIT_TIMEOUT_PATH=$(realpath $WAITFORIT_TIMEOUT_PATH 2>/dev/null || readlink -f $WAITFORIT_TIMEOUT_PATH)
|
||||
|
||||
WAITFORIT_BUSYTIMEFLAG=""
|
||||
if [[ $WAITFORIT_TIMEOUT_PATH =~ "busybox" ]]; then
|
||||
WAITFORIT_ISBUSY=1
|
||||
# Check if busybox timeout uses -t flag
|
||||
# (recent Alpine versions don't support -t anymore)
|
||||
if timeout &>/dev/stdout | grep -q -e '-t '; then
|
||||
WAITFORIT_BUSYTIMEFLAG="-t"
|
||||
fi
|
||||
else
|
||||
WAITFORIT_ISBUSY=0
|
||||
fi
|
||||
|
||||
if [[ $WAITFORIT_CHILD -gt 0 ]]; then
|
||||
wait_for
|
||||
WAITFORIT_RESULT=$?
|
||||
exit $WAITFORIT_RESULT
|
||||
else
|
||||
if [[ $WAITFORIT_TIMEOUT -gt 0 ]]; then
|
||||
wait_for_wrapper
|
||||
WAITFORIT_RESULT=$?
|
||||
else
|
||||
wait_for
|
||||
WAITFORIT_RESULT=$?
|
||||
fi
|
||||
fi
|
||||
|
||||
if [[ $WAITFORIT_CLI != "" ]]; then
|
||||
if [[ $WAITFORIT_RESULT -ne 0 && $WAITFORIT_STRICT -eq 1 ]]; then
|
||||
echoerr "$WAITFORIT_cmdname: strict mode, refusing to execute subprocess"
|
||||
exit $WAITFORIT_RESULT
|
||||
fi
|
||||
exec "${WAITFORIT_CLI[@]}"
|
||||
else
|
||||
exit $WAITFORIT_RESULT
|
||||
fi
|
sqlalchemy_study/poetry.lock (generated, new file, +1104 lines). File diff suppressed because it is too large.
sqlalchemy_study/pyproject.toml (new file, +28 lines)
@ -0,0 +1,28 @@
[tool.poetry]
name = "sqlalchemy_study_project"
version = "1.0.1"
description = "for study sqlalchemy async models"
authors = ["Dmitry Afanasyev <Balshbox@gmail.com>"]

[tool.poetry.dependencies]
python = "^3.11"
SQLAlchemy = "^1.4"
SQLAlchemy-Utils = "^0.38.2"
pydantic = {version = "^1.9.1", extras = ["email"]}
factory-boy = "^3.2.1"
Faker = "^15.0.0"
loguru = "^0.6.0"
alembic = "^1.8.0"
python-dotenv = "^0.20.0"
asyncpg = "^0.27.0"
asyncmy = "^0.2.5"
PyMySQL = "^1.0.2"
cryptography = "^37.0.2"
psycopg2-binary = "^2.9.3"

[tool.poetry.dev-dependencies]
ipython = "^8.4.0"

[build-system]
requires = ["poetry-core>=1.0.0"]
build-backend = "poetry.core.masonry.api"
sqlalchemy_study/src/alembic.ini (new file, +43 lines)
@ -0,0 +1,43 @@
|
||||
# A generic, single database configuration.
|
||||
|
||||
[alembic]
|
||||
# path to migration scripts
|
||||
script_location = migrations
|
||||
file_template = %%(year)d-%%(month).2d-%%(day).2d-%%(hour).2d-%%(minute).2d_%%(rev)s
|
||||
prepend_sys_path = .
|
||||
output_encoding = utf-8
|
||||
|
||||
# Logging configuration
|
||||
[loggers]
|
||||
keys = root,sqlalchemy,alembic
|
||||
|
||||
[handlers]
|
||||
keys = console
|
||||
|
||||
[formatters]
|
||||
keys = generic
|
||||
|
||||
[logger_root]
|
||||
level = WARN
|
||||
handlers = console
|
||||
qualname =
|
||||
|
||||
[logger_sqlalchemy]
|
||||
level = WARN
|
||||
handlers =
|
||||
qualname = sqlalchemy.engine
|
||||
|
||||
[logger_alembic]
|
||||
level = INFO
|
||||
handlers =
|
||||
qualname = alembic
|
||||
|
||||
[handler_console]
|
||||
class = StreamHandler
|
||||
args = (sys.stderr,)
|
||||
level = NOTSET
|
||||
formatter = generic
|
||||
|
||||
[formatter_generic]
|
||||
format = %(levelname)-5.5s [%(name)s] %(message)s
|
||||
datefmt = %H:%M:%S
|
sqlalchemy_study/src/config/.env.template (new file, +25 lines)
@ -0,0 +1,25 @@
# --------------DATABASE-------------

# ==== DB provider ====: 'mysql' -> MySQL use | 'postgres' -> Postgres use

USE_DATABASE=mysql

# ==== DB common ====

DB_HOST=localhost
DB_ECHO=True

# ==== Postgres ====

POSTGRES_DB_PORT=5433
POSTGRES_DB=sqlalchemy_study
POSTGRES_USER=user
POSTGRES_PASSWORD=postgrespwd

# ==== MySQL ====

MYSQL_DB_PORT=3307
MYSQL_ROOT_PASSWORD=mysqlpwd
MYSQL_PASSWORD=mysqlpwd
MYSQL_DATABASE=sqlalchemy_study
MYSQL_USER=user
sqlalchemy_study/src/data/__init__.py (new empty file)
sqlalchemy_study/src/data/factories.py (new file, +150 lines)
@ -0,0 +1,150 @@
|
||||
from datetime import datetime, timedelta
|
||||
from typing import Optional
|
||||
|
||||
import factory
|
||||
from factory import fuzzy
|
||||
from faker import Faker
|
||||
|
||||
from db.dependencies import get_sync_db_session
|
||||
from db.models.coin import Coin, CoinType
|
||||
from db.models.department import Department, EmployeeDepartments
|
||||
from db.models.skills import Skill, EmployeesSkills
|
||||
from db.models.user import User, Employee
|
||||
|
||||
faker = Faker('ru_RU')
|
||||
|
||||
|
||||
Session = get_sync_db_session()
|
||||
|
||||
|
||||
class BaseModelFactory(factory.alchemy.SQLAlchemyModelFactory):
|
||||
class Meta:
|
||||
abstract = True
|
||||
sqlalchemy_session_persistence = 'commit'
|
||||
sqlalchemy_session = Session
|
||||
|
||||
|
||||
class UserFactory(BaseModelFactory):
|
||||
|
||||
id = factory.Sequence(lambda n: n + 1)
|
||||
username = faker.profile(fields=['username'])['username']
|
||||
email = factory.Faker('email')
|
||||
hash_password = factory.Faker('password')
|
||||
auth_token = factory.Faker('uuid4')
|
||||
|
||||
class Meta:
|
||||
model = User
|
||||
sqlalchemy_get_or_create = (
|
||||
'username',
|
||||
)
|
||||
|
||||
|
||||
class CoinModelFactory(BaseModelFactory):
|
||||
|
||||
id = factory.Sequence(lambda n: n + 1)
|
||||
name = factory.Faker('cryptocurrency_name')
|
||||
enabled = fuzzy.FuzzyChoice((0, 1))
|
||||
|
||||
class Meta:
|
||||
model = Coin
|
||||
sqlalchemy_get_or_create = (
|
||||
'name',
|
||||
)
|
||||
|
||||
@factory.post_generation
|
||||
def coin_type(obj, create: bool, extracted: Optional[Coin], *args, **kwargs) -> None:
|
||||
if create:
|
||||
CoinTypeFactory.create_batch(faker.random_int(min=3, max=7), coin_id=obj.id)
|
||||
|
||||
|
||||
class CoinTypeFactory(BaseModelFactory):
|
||||
|
||||
id = factory.Sequence(lambda n: n + 1)
|
||||
name = factory.Faker('cryptocurrency_code')
|
||||
|
||||
class Meta:
|
||||
model = CoinType
|
||||
sqlalchemy_get_or_create = ('id',
|
||||
)
|
||||
|
||||
|
||||
class SkillFactory(BaseModelFactory):
|
||||
|
||||
id = factory.Sequence(lambda n: n + 1)
|
||||
name = factory.Faker('job', locale='ru_ru')
|
||||
description = factory.Faker('text', max_nb_chars=160, locale='ru_RU')
|
||||
updated_at = factory.LazyFunction(datetime.now)
|
||||
|
||||
class Meta:
|
||||
model = Skill
|
||||
sqlalchemy_get_or_create = ('name',
|
||||
)
|
||||
|
||||
|
||||
class EmployeeFactory(BaseModelFactory):
|
||||
|
||||
id = factory.Sequence(lambda n: n + 1)
|
||||
first_name = factory.Faker('first_name', locale='ru_RU')
|
||||
last_name = factory.Faker('last_name', locale='ru_RU')
|
||||
phone = factory.Faker('phone_number')
|
||||
description = factory.Faker('text', max_nb_chars=80, locale='ru_RU')
|
||||
coin_id = factory.Faker('random_int')
|
||||
|
||||
class Meta:
|
||||
model = Employee
|
||||
sqlalchemy_get_or_create = ('id',
|
||||
)
|
||||
|
||||
|
||||
class EmployeesSkillsFactory(BaseModelFactory):
|
||||
|
||||
id = factory.Sequence(lambda n: n + 1)
|
||||
employee_id = factory.Faker('random_int')
|
||||
skill_id = factory.Faker('random_int')
|
||||
updated_at = factory.Faker(
|
||||
'date_time_between_dates', datetime_start=datetime.now() - timedelta(days=30), datetime_end=datetime.now()
|
||||
)
|
||||
|
||||
class Meta:
|
||||
model = EmployeesSkills
|
||||
sqlalchemy_get_or_create = (
|
||||
'id',
|
||||
'employee_id',
|
||||
'skill_id'
|
||||
)
|
||||
|
||||
|
||||
class DepartmentFactory(BaseModelFactory):
|
||||
|
||||
id = factory.Sequence(lambda n: n + 1)
|
||||
name = factory.Faker('company')
|
||||
description = factory.Faker('bs')
|
||||
updated_at = factory.Faker(
|
||||
'date_time_between_dates', datetime_start=datetime.now() - timedelta(days=30), datetime_end=datetime.now()
|
||||
)
|
||||
|
||||
class Meta:
|
||||
model = Department
|
||||
sqlalchemy_get_or_create = (
|
||||
'id',
|
||||
'name',
|
||||
)
|
||||
|
||||
|
||||
class EmployeeDepartmentFactory(BaseModelFactory):
|
||||
|
||||
employee_id = factory.Faker('random_int')
|
||||
department_id = factory.Faker('random_int')
|
||||
created_at = factory.Faker(
|
||||
'date_time_between_dates',
|
||||
datetime_start=datetime.now() - timedelta(days=30),
|
||||
datetime_end=datetime.now() - timedelta(days=10)
|
||||
)
|
||||
updated_at = factory.Faker(
|
||||
'date_time_between_dates',
|
||||
datetime_start=datetime.now() - timedelta(days=10),
|
||||
datetime_end=datetime.now()
|
||||
)
|
||||
|
||||
class Meta:
|
||||
model = EmployeeDepartments
|
sqlalchemy_study/src/data/fill_data.py (new file, +84 lines)
@ -0,0 +1,84 @@
|
||||
import asyncio
|
||||
import random
|
||||
import uuid
|
||||
|
||||
from factory import fuzzy
|
||||
from faker import Faker
|
||||
|
||||
from data.factories import (
|
||||
UserFactory,
|
||||
CoinModelFactory,
|
||||
EmployeesSkillsFactory,
|
||||
SkillFactory,
|
||||
EmployeeFactory,
|
||||
DepartmentFactory,
|
||||
EmployeeDepartmentFactory
|
||||
)
|
||||
from db.dependencies import get_async_db_session
|
||||
from db.models.user import User
|
||||
from db.utils import drop_tables, run_migrations
|
||||
from settings.logger import logger
|
||||
|
||||
faker = Faker('ru_RU')
|
||||
|
||||
|
||||
async def add_users_data() -> None:
|
||||
|
||||
async with get_async_db_session() as session:
|
||||
users = []
|
||||
for _ in range(10):
|
||||
users.append(User(username=faker.profile(fields=['username'])['username'],
|
||||
hash_password=faker.password(),
|
||||
auth_token=str(uuid.uuid4()),
|
||||
)
|
||||
)
|
||||
session.add_all(users)
|
||||
|
||||
|
||||
def get_random_skill(skills: list[int]) -> list[int]:
|
||||
random_skills = random.sample(skills, random.randint(2, 9))
|
||||
return random_skills
|
||||
|
||||
|
||||
def fill_database() -> None:
|
||||
|
||||
# async add faker data
|
||||
asyncio.run(add_users_data())
|
||||
|
||||
# sync factory boy add data
|
||||
coins = [coin.id for coin in CoinModelFactory.create_batch(42)]
|
||||
|
||||
jonny = EmployeeFactory(first_name='Tony', last_name='Stark', coin_id=fuzzy.FuzzyChoice(coins))
|
||||
karl = EmployeeFactory(first_name='Karl', coin_id=fuzzy.FuzzyChoice(coins))
|
||||
employees = EmployeeFactory.create_batch(40, coin_id=fuzzy.FuzzyChoice(coins))
|
||||
|
||||
skills = [skill.id for skill in SkillFactory.create_batch(size=faker.random_int(min=20, max=42))]
|
||||
|
||||
for skill in get_random_skill(skills):
|
||||
EmployeesSkillsFactory(employee_id=jonny.id, skill_id=skill)
|
||||
|
||||
for skill in get_random_skill(skills):
|
||||
EmployeesSkillsFactory(employee_id=karl.id, skill_id=skill)
|
||||
|
||||
for employee in employees:
|
||||
for skill in get_random_skill(skills):
|
||||
EmployeesSkillsFactory(employee_id=employee.id, skill_id=skill)
|
||||
|
||||
# User data (first 20 rows if not exists)
|
||||
for user_id in range(20, 30):
|
||||
UserFactory(id=user_id, username=faker.profile(fields=['username'])['username'])
|
||||
|
||||
# Department data
|
||||
departments = DepartmentFactory.create_batch(5)
|
||||
departments = [department.id for department in departments]
|
||||
|
||||
for employee in [jonny, karl, *employees]:
|
||||
EmployeeDepartmentFactory(employee_id=employee.id, department_id=fuzzy.FuzzyChoice(departments))
|
||||
|
||||
logger.info('All data has been created. You can run data/get_data.py script')
|
||||
|
||||
|
||||
if __name__ == '__main__':
|
||||
drop_tables()
|
||||
run_migrations()
|
||||
fill_database()
|
sqlalchemy_study/src/data/get_data.py (new file, +66 lines)
@ -0,0 +1,66 @@
|
||||
import asyncio
|
||||
|
||||
from settings.logger import logger
|
||||
from sqlalchemy_study.sqlalchemy import select
|
||||
from sqlalchemy_study.sqlalchemy import load_only, contains_eager, joinedload
|
||||
|
||||
from db.dependencies import get_async_db_session
|
||||
from db.models.coin import Coin
|
||||
from db.models.department import EmployeeDepartments, Department
|
||||
from db.models.skills import Skill
|
||||
from db.models.user import Employee, User
|
||||
|
||||
|
||||
async def get_data() -> list[Employee]:
|
||||
|
||||
query = (
|
||||
select(Employee)
|
||||
.join(Employee.coin).options(
|
||||
contains_eager(Employee.coin).options(load_only(Coin.name,
|
||||
Coin.enabled)))
|
||||
.join(Employee.skills).options(
|
||||
contains_eager(Employee.skills).load_only(Skill.name)
|
||||
).options(load_only(Employee.id,
|
||||
Employee.first_name,
|
||||
Employee.phone,
|
||||
)
|
||||
)
|
||||
.outerjoin(Employee.department).options(
|
||||
contains_eager(Employee.department).options(
|
||||
joinedload(EmployeeDepartments.department)
|
||||
.options(load_only(Department.name,
|
||||
Department.description, )
|
||||
)
|
||||
)
|
||||
)
|
||||
.outerjoin(Employee.user).options(
|
||||
contains_eager(Employee.user).options(load_only(User.username,
|
||||
)
|
||||
)
|
||||
)
|
||||
).order_by(Employee.id, Skill.name)
|
||||
|
||||
async with get_async_db_session() as session:
|
||||
result = await session.execute(query)
|
||||
data = result.unique().scalars().all()
|
||||
return data
|
||||
|
||||
employees = asyncio.run(get_data())
|
||||
|
||||
|
||||
for employee in employees:
|
||||
print(''.center(40, '-'), '\nEmployee id: {0}\nFirst name: {1}\nPhone: {2}\nSkills: {3}\n'
|
||||
'Coin name: {4}\nCoin enabled: {5}\nDepartment: {6} -> {7}\nUsername: {8}'
|
||||
.format(employee.id,
|
||||
employee.first_name,
|
||||
employee.phone,
|
||||
', '.join([skill.name for skill in employee.skills[:5]]),
|
||||
employee.coin.name,
|
||||
employee.coin.enabled,
|
||||
employee.department.department.name,
|
||||
employee.department.department.description,
|
||||
employee.user.username if hasattr(employee.user, 'username') else None,
|
||||
)
|
||||
)
|
||||
|
||||
logger.info(f'Total employees: {len(employees)}')
|
31
sqlalchemy_study/src/db/base.py
Normal file
@ -0,0 +1,31 @@
|
||||
from typing import Any, Tuple, Union, Type
|
||||
|
||||
from sqlalchemy_study.sqlalchemy import Table, Column, Integer, DATETIME, TIMESTAMP, func
|
||||
from sqlalchemy_study.sqlalchemy import as_declarative
|
||||
|
||||
from db.meta import meta
|
||||
from settings import settings
|
||||
|
||||
DB_TIME_FORMAT: Type[Union[DATETIME, TIMESTAMP]] = DATETIME if settings.USE_DATABASE == 'mysql' else TIMESTAMP
|
||||
|
||||
|
||||
@as_declarative(metadata=meta)
|
||||
class BaseModel:
|
||||
"""
|
||||
BaseModel for all models.
|
||||
|
||||
It has some type definitions to
|
||||
enhance autocompletion.
|
||||
"""
|
||||
|
||||
__tablename__: str
|
||||
__table__: Table
|
||||
__table_args__: Tuple[Any, ...]
|
||||
__abstract__ = True
|
||||
|
||||
id = Column(Integer, nullable=False, unique=True, primary_key=True, autoincrement=True)
|
||||
created_at = Column(DB_TIME_FORMAT, default=func.now(), index=True)
|
||||
updated_at = Column(DB_TIME_FORMAT, nullable=True)
|
||||
|
||||
def __repr__(self):
|
||||
return f"<{self.__class__.__name__}(id={self.id!r})>"
|
57
sqlalchemy_study/src/db/dependencies.py
Normal file
@ -0,0 +1,57 @@
|
||||
from asyncio import current_task
|
||||
from contextlib import asynccontextmanager
|
||||
from typing import AsyncGenerator
|
||||
|
||||
from sqlalchemy_study.sqlalchemy import create_engine
|
||||
from sqlalchemy_study.sqlalchemy import create_async_engine, AsyncSession, async_scoped_session, AsyncEngine
|
||||
from sqlalchemy_study.sqlalchemy import sessionmaker, Session
|
||||
|
||||
from settings import settings
|
||||
|
||||
async_engine: AsyncEngine = create_async_engine(str(settings.async_db_url), echo=settings.DB_ECHO)
|
||||
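# async_scoped_session keyed on current_task gives each asyncio task its own AsyncSession.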
async_session_factory = async_scoped_session(
|
||||
sessionmaker(
|
||||
autocommit=False,
|
||||
autoflush=False,
|
||||
class_=AsyncSession,
|
||||
expire_on_commit=False,
|
||||
bind=async_engine,
|
||||
),
|
||||
scopefunc=current_task,
|
||||
)
|
||||
|
||||
|
||||
sync_engine = create_engine(settings.sync_db_url, echo=settings.DB_ECHO)
|
||||
sync_session_factory = sessionmaker(sync_engine)
|
||||
|
||||
|
||||
def get_sync_db_session() -> Session:
|
||||
session: Session = sync_session_factory()
|
||||
try:
|
||||
return session
|
||||
except Exception as err:
|
||||
session.rollback()
|
||||
raise err
|
||||
finally:
|
||||
session.commit()
|
||||
session.close()
|
||||
|
||||
|
||||
@asynccontextmanager
|
||||
async def get_async_db_session() -> AsyncGenerator[AsyncSession, None]:
|
||||
"""
|
||||
Create and get database session.
|
||||
|
||||
:yield: database session.
|
||||
"""
|
||||
session = async_session_factory()
|
||||
try:
|
||||
yield session
|
||||
except Exception as err:
|
||||
await session.rollback()
|
||||
raise err
|
||||
finally:
|
||||
await session.commit()
|
||||
await session.close()
|
||||
await async_session_factory.remove()
|
3
sqlalchemy_study/src/db/meta.py
Normal file
@ -0,0 +1,3 @@
|
||||
from sqlalchemy_study import sqlalchemy as sa
|
||||
|
||||
meta = sa.MetaData()
|
13
sqlalchemy_study/src/db/models/__init__.py
Normal file
@ -0,0 +1,13 @@
|
||||
import pkgutil
|
||||
from pathlib import Path
|
||||
|
||||
|
||||
def load_all_models() -> None:
|
||||
"""Load all models from this folder."""
|
||||
root_dir = Path(__file__).resolve().parent
|
||||
modules = pkgutil.walk_packages(
|
||||
path=[str(root_dir)],
|
||||
prefix="db.models.",
|
||||
)
|
||||
for module in modules:
|
||||
__import__(module.name)
|
16
sqlalchemy_study/src/db/models/cadre_movements.py
Executable file
@ -0,0 +1,16 @@
|
||||
from sqlalchemy_study.sqlalchemy import Column, Integer, ForeignKey, VARCHAR
|
||||
from sqlalchemy_study.sqlalchemy import relation
|
||||
|
||||
from db.base import BaseModel
|
||||
from db.models.department import Department
|
||||
|
||||
|
||||
class CadreMovement(BaseModel):
|
||||
__tablename__ = 'cadre_movements'
|
||||
|
||||
employee = Column(Integer, ForeignKey('employees.id', ondelete='CASCADE'), nullable=False, index=True)
|
||||
old_department = Column(Integer, ForeignKey('departments.id', ondelete='CASCADE'), nullable=False, index=True)
|
||||
new_department = Column(Integer, ForeignKey('departments.id', ondelete='CASCADE'), nullable=False, index=True)
|
||||
reason = Column(VARCHAR(500), nullable=True)
|
||||
|
||||
department = relation(Department, foreign_keys=new_department, lazy='select')
|
35
sqlalchemy_study/src/db/models/coin.py
Normal file
@ -0,0 +1,35 @@
|
||||
from sqlalchemy_study.sqlalchemy import VARCHAR
|
||||
from sqlalchemy_study.sqlalchemy import relationship
|
||||
from sqlalchemy_study.sqlalchemy import Column
|
||||
from sqlalchemy_study.sqlalchemy import ForeignKey
|
||||
from sqlalchemy_study.sqlalchemy import Integer, BOOLEAN
|
||||
|
||||
from db.base import BaseModel
|
||||
|
||||
|
||||
class Coin(BaseModel):
|
||||
"""Model for coin."""
|
||||
|
||||
__tablename__ = "coins"
|
||||
|
||||
name = Column('coin_name', VARCHAR(50), unique=True)
|
||||
enabled = Column('enabled', BOOLEAN)
|
||||
|
||||
coin_type_id = relationship("CoinType",
|
||||
primaryjoin="Coin.id == CoinType.coin_id",
|
||||
back_populates='coin',
|
||||
uselist=False,
|
||||
viewonly=True,
|
||||
lazy="raise",
|
||||
)
|
||||
employee = relationship('Employee', back_populates='coin')
|
||||
|
||||
|
||||
class CoinType(BaseModel):
|
||||
"""Model for coin type."""
|
||||
|
||||
__tablename__ = "coin_types"
|
||||
|
||||
name = Column('coin_name', VARCHAR(50))
|
||||
coin_id = Column(Integer, ForeignKey('coins.id', ondelete='CASCADE'))
|
||||
coin = relationship(Coin, back_populates='coin_type_id')
|
23
sqlalchemy_study/src/db/models/department.py
Executable file
@ -0,0 +1,23 @@
|
||||
from sqlalchemy_study.sqlalchemy import Column, VARCHAR, Integer, ForeignKey
|
||||
from sqlalchemy_study.sqlalchemy import relationship
|
||||
|
||||
from db.base import BaseModel
|
||||
|
||||
|
||||
class Department(BaseModel):
|
||||
__tablename__ = 'departments'
|
||||
|
||||
name = Column(VARCHAR(255), nullable=False)
|
||||
description = Column(VARCHAR(255), nullable=False)
|
||||
|
||||
|
||||
class EmployeeDepartments(BaseModel):
|
||||
__tablename__ = 'employee_departments'
|
||||
|
||||
employee_id = Column(Integer, ForeignKey('employees.id', ondelete='CASCADE'), nullable=False, index=True)
|
||||
department_id = Column(Integer, ForeignKey('departments.id', ondelete='CASCADE'), nullable=False, index=True)
|
||||
|
||||
department = relationship(Department,
|
||||
lazy='noload',
|
||||
backref='emp_depart',
|
||||
)
|
19
sqlalchemy_study/src/db/models/skills.py
Normal file
@ -0,0 +1,19 @@
|
||||
from sqlalchemy_study.sqlalchemy import Column, ForeignKey, VARCHAR, Text, UniqueConstraint
|
||||
|
||||
from db.base import BaseModel
|
||||
from db.models.user import Employee
|
||||
|
||||
|
||||
class Skill(BaseModel):
|
||||
__tablename__ = 'skills'
|
||||
|
||||
name = Column(VARCHAR(255), nullable=False, unique=True)
|
||||
description = Column(Text, nullable=True)
|
||||
|
||||
|
||||
class EmployeesSkills(BaseModel):
|
||||
__tablename__ = 'employees_skills'
|
||||
__table_args__ = (UniqueConstraint("employee_id", "skill_id"),)
|
||||
|
||||
employee_id = Column(ForeignKey(Employee.id, ondelete='CASCADE'), nullable=False, index=True)
|
||||
skill_id = Column(ForeignKey(Skill.id, ondelete='CASCADE'), nullable=False, index=True)
|
62
sqlalchemy_study/src/db/models/user.py
Normal file
@ -0,0 +1,62 @@
|
||||
import datetime
|
||||
|
||||
from sqlalchemy_study.sqlalchemy import Column, String, DateTime, ForeignKey
|
||||
from sqlalchemy_study.sqlalchemy import VARCHAR
|
||||
from sqlalchemy_study.sqlalchemy import relationship
|
||||
|
||||
from db.base import BaseModel
|
||||
from db.models.coin import Coin
|
||||
|
||||
|
||||
class User(BaseModel):
|
||||
__tablename__ = 'users'
|
||||
|
||||
username: str = Column(String(255), unique=True)
|
||||
email: str = Column(String(255), index=True, unique=True, nullable=True)
|
||||
hash_password: str = Column(String(255))
|
||||
auth_token: str = Column(String(255))
|
||||
last_login: datetime.datetime = Column(DateTime, default=datetime.datetime.now, index=True)
|
||||
|
||||
def __repr__(self):
|
||||
return f'User: id:{self.id}, name: {self.username}'
|
||||
|
||||
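# No FK column backs this link: foreign()/remote() in the primaryjoin pair users and employees purely by matching ids.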
employee = relationship('Employee',
|
||||
primaryjoin='foreign(User.id)==remote(Employee.id)',
|
||||
lazy='noload',
|
||||
backref='user_employee',
|
||||
)
|
||||
|
||||
|
||||
class Employee(BaseModel):
|
||||
__tablename__ = 'employees'
|
||||
|
||||
first_name = Column(VARCHAR(128), nullable=False)
|
||||
last_name = Column(VARCHAR(128), nullable=False)
|
||||
phone = Column(VARCHAR(30), unique=True, nullable=True)
|
||||
description = Column(VARCHAR(255), nullable=True)
|
||||
coin_id = Column('coin_id', ForeignKey('coins.id', ondelete='SET NULL'), nullable=True)
|
||||
|
||||
coin = relationship(Coin,
|
||||
back_populates='employee',
|
||||
primaryjoin='Employee.coin_id==Coin.id',
|
||||
lazy='noload',
|
||||
uselist=False,
|
||||
)
|
||||
|
||||
skills = relationship('Skill',
|
||||
secondary="employees_skills",
|
||||
lazy='noload',
|
||||
uselist=True,
|
||||
)
|
||||
|
||||
department = relationship('EmployeeDepartments',
|
||||
lazy='noload',
|
||||
backref='employee',
|
||||
uselist=False,
|
||||
)
|
||||
|
||||
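# lazy='raise' forbids implicit loading, so queries must fetch this relationship explicitly (data/get_data.py does it with contains_eager).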
user = relationship('User',
|
||||
primaryjoin='foreign(Employee.id)==remote(User.id)',
|
||||
lazy='raise',
|
||||
backref='user_employee',
|
||||
)
|
56
sqlalchemy_study/src/db/utils.py
Normal file
@ -0,0 +1,56 @@
|
||||
from alembic import command, config as alembic_config
|
||||
from sqlalchemy_study.sqlalchemy import MetaData, Table, ForeignKeyConstraint
|
||||
from sqlalchemy_study.sqlalchemy import inspect
|
||||
from sqlalchemy_study.sqlalchemy import NoSuchTableError
|
||||
from sqlalchemy_study.sqlalchemy import DropConstraint
|
||||
|
||||
from db.dependencies import sync_engine
|
||||
from db.meta import meta
|
||||
from db.models import load_all_models
|
||||
from settings import settings
|
||||
from settings.logger import logger
|
||||
|
||||
alembic_cfg = alembic_config.Config("alembic.ini")
|
||||
|
||||
|
||||
def remove_foreign_keys() -> None:
|
||||
logger.info("Dropping all foreign key constraints from archive database")
|
||||
|
||||
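# Collect and drop FK constraints first so meta.drop_all() can remove tables regardless of dependency order.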
inspector = inspect(sync_engine)
|
||||
fake_metadata = MetaData()
|
||||
|
||||
fake_tables = []
|
||||
all_fks = []
|
||||
for table_name in meta.tables:
|
||||
fks = []
|
||||
try:
|
||||
for fk in inspector.get_foreign_keys(table_name):
|
||||
if fk['name']:
|
||||
fks.append(ForeignKeyConstraint((), (), name=fk['name']))
|
||||
except NoSuchTableError:
|
||||
logger.error(f'Table {table_name} does not exist')
|
||||
t = Table(table_name, fake_metadata, *fks)
|
||||
fake_tables.append(t)
|
||||
all_fks.extend(fks)
|
||||
connection = sync_engine.connect()
|
||||
transaction = connection.begin()
|
||||
for fkc in all_fks:
|
||||
connection.execute(DropConstraint(fkc))
|
||||
transaction.commit()
|
||||
|
||||
|
||||
def drop_tables() -> None:
|
||||
load_all_models()
|
||||
remove_foreign_keys()
|
||||
meta.drop_all(bind=sync_engine, checkfirst=True)
|
||||
sync_engine.execute('DROP TABLE IF EXISTS alembic_version')
|
||||
sync_engine.dispose()
|
||||
logger.info("All tables are dropped")
|
||||
|
||||
|
||||
def run_migrations() -> None:
|
||||
with sync_engine.begin() as connection:
|
||||
alembic_cfg.attributes['connection'] = connection
|
||||
migration_dialect = 'mysql_init_migrations' if settings.USE_DATABASE == 'mysql' else 'postgres_init_migrations'
|
||||
command.upgrade(alembic_cfg, migration_dialect)
|
||||
logger.info('Tables recreated')
|
1
sqlalchemy_study/src/migrations/README
Normal file
@ -0,0 +1 @@
|
||||
Generic single-database configuration.
|
0
sqlalchemy_study/src/migrations/__init__.py
Normal file
73
sqlalchemy_study/src/migrations/env.py
Normal file
@ -0,0 +1,73 @@
|
||||
import asyncio
|
||||
from logging.config import fileConfig
|
||||
|
||||
from alembic import context
|
||||
from sqlalchemy_study.sqlalchemy import create_async_engine
|
||||
from sqlalchemy_study.sqlalchemy import Connection
|
||||
|
||||
from db.base import BaseModel
|
||||
from db.models import load_all_models
|
||||
from settings import settings
|
||||
|
||||
config = context.config
|
||||
|
||||
if config.config_file_name is not None:
|
||||
fileConfig(config.config_file_name)
|
||||
|
||||
target_metadata = BaseModel.metadata
|
||||
load_all_models()
|
||||
|
||||
|
||||
async def run_migrations_offline():
|
||||
"""Run migrations in 'offline' mode.
|
||||
|
||||
This configures the context with just a URL
|
||||
and not an Engine, though an Engine is acceptable
|
||||
here as well. By skipping the Engine creation
|
||||
we don't even need a DBAPI to be available.
|
||||
|
||||
Calls to context.execute() here emit the given string to the
|
||||
script output.
|
||||
|
||||
"""
|
||||
|
||||
context.configure(
|
||||
url=settings.async_db_url,
|
||||
target_metadata=target_metadata,
|
||||
literal_binds=True,
|
||||
dialect_opts={"paramstyle": "named"},
|
||||
)
|
||||
|
||||
with context.begin_transaction():
|
||||
context.run_migrations()
|
||||
|
||||
|
||||
def do_run_migrations(connection: Connection) -> None:
|
||||
"""
|
||||
Run actual sync migrations.
|
||||
|
||||
:param connection: connection to the database.
|
||||
"""
|
||||
context.configure(connection=connection, target_metadata=target_metadata)
|
||||
|
||||
with context.begin_transaction():
|
||||
context.run_migrations()
|
||||
|
||||
|
||||
async def run_migrations_online():
|
||||
"""Run migrations in 'online' mode.
|
||||
|
||||
In this scenario we need to create an Engine
|
||||
and associate a connection with the context.
|
||||
|
||||
"""
|
||||
connectable = create_async_engine(settings.async_db_url)
|
||||
|
||||
async with connectable.connect() as connection:
|
||||
await connection.run_sync(do_run_migrations)
|
||||
|
||||
|
||||
if context.is_offline_mode():
|
||||
asyncio.run(run_migrations_offline())
|
||||
else:
|
||||
asyncio.run(run_migrations_online())
|
24
sqlalchemy_study/src/migrations/script.py.mako
Normal file
@ -0,0 +1,24 @@
|
||||
"""${message}
|
||||
|
||||
Revision ID: ${up_revision}
|
||||
Revises: ${down_revision | comma,n}
|
||||
Create Date: ${create_date}
|
||||
|
||||
"""
|
||||
from alembic import op
|
||||
import sqlalchemy as sa
|
||||
${imports if imports else ""}
|
||||
|
||||
# revision identifiers, used by Alembic.
|
||||
revision = ${repr(up_revision)}
|
||||
down_revision = ${repr(down_revision)}
|
||||
branch_labels = ${repr(branch_labels)}
|
||||
depends_on = ${repr(depends_on)}
|
||||
|
||||
|
||||
def upgrade():
|
||||
${upgrades if upgrades else "pass"}
|
||||
|
||||
|
||||
def downgrade():
|
||||
${downgrades if downgrades else "pass"}
|
@ -0,0 +1,174 @@
|
||||
"""mysql init models
|
||||
|
||||
Revision ID: mysql_init_migrations
|
||||
Revises:
|
||||
Create Date: 2022-05-29 19:26:09.995005
|
||||
|
||||
"""
|
||||
from alembic import op
|
||||
from sqlalchemy_study import sqlalchemy as sa
|
||||
from sqlalchemy_study.sqlalchemy import mysql
|
||||
|
||||
# revision identifiers, used by Alembic.
|
||||
revision = 'mysql_init_migrations'
|
||||
down_revision = None
|
||||
branch_labels = None
|
||||
depends_on = None
|
||||
|
||||
|
||||
def upgrade():
|
||||
# ### commands auto generated by Alembic - please adjust! ###
|
||||
op.create_table('coins',
|
||||
sa.Column('id', sa.Integer(), autoincrement=True, nullable=False),
|
||||
sa.Column('created_at', sa.DATETIME(), nullable=True),
|
||||
sa.Column('updated_at', sa.DATETIME(), nullable=True),
|
||||
sa.Column('coin_name', sa.VARCHAR(length=50), nullable=True),
|
||||
sa.Column('enabled', sa.BOOLEAN(), nullable=True),
|
||||
sa.PrimaryKeyConstraint('id'),
|
||||
sa.UniqueConstraint('coin_name'),
|
||||
sa.UniqueConstraint('id')
|
||||
)
|
||||
op.create_index(op.f('ix_coins_created_at'), 'coins', ['created_at'], unique=False)
|
||||
op.create_table('departments',
|
||||
sa.Column('id', sa.Integer(), autoincrement=True, nullable=False),
|
||||
sa.Column('created_at', sa.DATETIME(), nullable=True),
|
||||
sa.Column('updated_at', sa.DATETIME(), nullable=True),
|
||||
sa.Column('name', sa.VARCHAR(length=255), nullable=False),
|
||||
sa.Column('description', sa.VARCHAR(length=255), nullable=False),
|
||||
sa.PrimaryKeyConstraint('id'),
|
||||
sa.UniqueConstraint('id')
|
||||
)
|
||||
op.create_index(op.f('ix_departments_created_at'), 'departments', ['created_at'], unique=False)
|
||||
op.create_table('skills',
|
||||
sa.Column('id', sa.Integer(), autoincrement=True, nullable=False),
|
||||
sa.Column('created_at', sa.DATETIME(), nullable=True),
|
||||
sa.Column('updated_at', sa.DATETIME(), nullable=True),
|
||||
sa.Column('name', sa.VARCHAR(length=255), nullable=False),
|
||||
sa.Column('description', sa.Text(), nullable=True),
|
||||
sa.PrimaryKeyConstraint('id'),
|
||||
sa.UniqueConstraint('id'),
|
||||
sa.UniqueConstraint('name')
|
||||
)
|
||||
op.create_index(op.f('ix_skills_created_at'), 'skills', ['created_at'], unique=False)
|
||||
op.create_table('users',
|
||||
sa.Column('id', sa.Integer(), autoincrement=True, nullable=False),
|
||||
sa.Column('created_at', sa.DATETIME(), nullable=True),
|
||||
sa.Column('updated_at', sa.DATETIME(), nullable=True),
|
||||
sa.Column('username', sa.String(length=255), nullable=True),
|
||||
sa.Column('email', sa.String(length=255), nullable=True),
|
||||
sa.Column('hash_password', sa.String(length=255), nullable=True),
|
||||
sa.Column('auth_token', sa.String(length=255), nullable=True),
|
||||
sa.Column('last_login', sa.DateTime(), nullable=True),
|
||||
sa.PrimaryKeyConstraint('id'),
|
||||
sa.UniqueConstraint('id'),
|
||||
sa.UniqueConstraint('username')
|
||||
)
|
||||
op.create_index(op.f('ix_users_created_at'), 'users', ['created_at'], unique=False)
|
||||
op.create_index(op.f('ix_users_email'), 'users', ['email'], unique=True)
|
||||
op.create_index(op.f('ix_users_last_login'), 'users', ['last_login'], unique=False)
|
||||
op.create_table('coin_types',
|
||||
sa.Column('id', sa.Integer(), autoincrement=True, nullable=False),
|
||||
sa.Column('created_at', sa.DATETIME(), nullable=True),
|
||||
sa.Column('updated_at', sa.DATETIME(), nullable=True),
|
||||
sa.Column('coin_name', sa.VARCHAR(length=50), nullable=True),
|
||||
sa.Column('coin_id', sa.Integer(), nullable=True),
|
||||
sa.ForeignKeyConstraint(['coin_id'], ['coins.id'], ondelete='CASCADE'),
|
||||
sa.PrimaryKeyConstraint('id'),
|
||||
sa.UniqueConstraint('id')
|
||||
)
|
||||
op.create_index(op.f('ix_coin_types_created_at'), 'coin_types', ['created_at'], unique=False)
|
||||
op.create_table('employees',
|
||||
sa.Column('id', sa.Integer(), autoincrement=True, nullable=False),
|
||||
sa.Column('created_at', sa.DATETIME(), nullable=True),
|
||||
sa.Column('updated_at', sa.DATETIME(), nullable=True),
|
||||
sa.Column('first_name', mysql.VARCHAR(length=128), nullable=False),
|
||||
sa.Column('last_name', mysql.VARCHAR(length=128), nullable=False),
|
||||
sa.Column('phone', mysql.VARCHAR(length=30), nullable=True),
|
||||
sa.Column('description', mysql.VARCHAR(length=255), nullable=True),
|
||||
sa.Column('coin_id', sa.Integer(), nullable=True),
|
||||
sa.ForeignKeyConstraint(['coin_id'], ['coins.id'], ondelete='SET NULL'),
|
||||
sa.PrimaryKeyConstraint('id'),
|
||||
sa.UniqueConstraint('id'),
|
||||
sa.UniqueConstraint('phone')
|
||||
)
|
||||
op.create_index(op.f('ix_employees_created_at'), 'employees', ['created_at'], unique=False)
|
||||
op.create_table('cadre_movements',
|
||||
sa.Column('id', sa.Integer(), autoincrement=True, nullable=False),
|
||||
sa.Column('created_at', sa.DATETIME(), nullable=True),
|
||||
sa.Column('updated_at', sa.DATETIME(), nullable=True),
|
||||
sa.Column('employee', sa.Integer(), nullable=False),
|
||||
sa.Column('old_department', sa.Integer(), nullable=False),
|
||||
sa.Column('new_department', sa.Integer(), nullable=False),
|
||||
sa.Column('reason', sa.VARCHAR(length=500), nullable=True),
|
||||
sa.ForeignKeyConstraint(['employee'], ['employees.id'], ondelete='CASCADE'),
|
||||
sa.ForeignKeyConstraint(['new_department'], ['departments.id'], ondelete='CASCADE'),
|
||||
sa.ForeignKeyConstraint(['old_department'], ['departments.id'], ondelete='CASCADE'),
|
||||
sa.PrimaryKeyConstraint('id'),
|
||||
sa.UniqueConstraint('id')
|
||||
)
|
||||
op.create_index(op.f('ix_cadre_movements_created_at'), 'cadre_movements', ['created_at'], unique=False)
|
||||
op.create_index(op.f('ix_cadre_movements_employee'), 'cadre_movements', ['employee'], unique=False)
|
||||
op.create_index(op.f('ix_cadre_movements_new_department'), 'cadre_movements', ['new_department'], unique=False)
|
||||
op.create_index(op.f('ix_cadre_movements_old_department'), 'cadre_movements', ['old_department'], unique=False)
|
||||
op.create_table('employee_departments',
|
||||
sa.Column('id', sa.Integer(), autoincrement=True, nullable=False),
|
||||
sa.Column('created_at', sa.DATETIME(), nullable=True),
|
||||
sa.Column('updated_at', sa.DATETIME(), nullable=True),
|
||||
sa.Column('employee_id', sa.Integer(), nullable=False),
|
||||
sa.Column('department_id', sa.Integer(), nullable=False),
|
||||
sa.ForeignKeyConstraint(['department_id'], ['departments.id'], ondelete='CASCADE'),
|
||||
sa.ForeignKeyConstraint(['employee_id'], ['employees.id'], ondelete='CASCADE'),
|
||||
sa.PrimaryKeyConstraint('id'),
|
||||
sa.UniqueConstraint('id')
|
||||
)
|
||||
op.create_index(op.f('ix_employee_departments_created_at'), 'employee_departments', ['created_at'], unique=False)
|
||||
op.create_index(op.f('ix_employee_departments_department_id'), 'employee_departments', ['department_id'], unique=False)
|
||||
op.create_index(op.f('ix_employee_departments_employee_id'), 'employee_departments', ['employee_id'], unique=False)
|
||||
op.create_table('employees_skills',
|
||||
sa.Column('id', sa.Integer(), autoincrement=True, nullable=False),
|
||||
sa.Column('created_at', sa.DATETIME(), nullable=True),
|
||||
sa.Column('updated_at', sa.DATETIME(), nullable=True),
|
||||
sa.Column('employee_id', sa.Integer(), nullable=False),
|
||||
sa.Column('skill_id', sa.Integer(), nullable=False),
|
||||
sa.ForeignKeyConstraint(['employee_id'], ['employees.id'], ondelete='CASCADE'),
|
||||
sa.ForeignKeyConstraint(['skill_id'], ['skills.id'], ondelete='CASCADE'),
|
||||
sa.PrimaryKeyConstraint('id'),
|
||||
sa.UniqueConstraint('employee_id', 'skill_id'),
|
||||
sa.UniqueConstraint('id')
|
||||
)
|
||||
op.create_index(op.f('ix_employees_skills_created_at'), 'employees_skills', ['created_at'], unique=False)
|
||||
op.create_index(op.f('ix_employees_skills_employee_id'), 'employees_skills', ['employee_id'], unique=False)
|
||||
op.create_index(op.f('ix_employees_skills_skill_id'), 'employees_skills', ['skill_id'], unique=False)
|
||||
# ### end Alembic commands ###
|
||||
|
||||
|
||||
def downgrade():
|
||||
# ### commands auto generated by Alembic - please adjust! ###
|
||||
op.drop_index(op.f('ix_employees_skills_skill_id'), table_name='employees_skills')
|
||||
op.drop_index(op.f('ix_employees_skills_employee_id'), table_name='employees_skills')
|
||||
op.drop_index(op.f('ix_employees_skills_created_at'), table_name='employees_skills')
|
||||
op.drop_table('employees_skills')
|
||||
op.drop_index(op.f('ix_employee_departments_employee_id'), table_name='employee_departments')
|
||||
op.drop_index(op.f('ix_employee_departments_department_id'), table_name='employee_departments')
|
||||
op.drop_index(op.f('ix_employee_departments_created_at'), table_name='employee_departments')
|
||||
op.drop_table('employee_departments')
|
||||
op.drop_index(op.f('ix_cadre_movements_old_department'), table_name='cadre_movements')
|
||||
op.drop_index(op.f('ix_cadre_movements_new_department'), table_name='cadre_movements')
|
||||
op.drop_index(op.f('ix_cadre_movements_employee'), table_name='cadre_movements')
|
||||
op.drop_index(op.f('ix_cadre_movements_created_at'), table_name='cadre_movements')
|
||||
op.drop_table('cadre_movements')
|
||||
op.drop_index(op.f('ix_employees_created_at'), table_name='employees')
|
||||
op.drop_table('employees')
|
||||
op.drop_index(op.f('ix_coin_types_created_at'), table_name='coin_types')
|
||||
op.drop_table('coin_types')
|
||||
op.drop_index(op.f('ix_users_last_login'), table_name='users')
|
||||
op.drop_index(op.f('ix_users_email'), table_name='users')
|
||||
op.drop_index(op.f('ix_users_created_at'), table_name='users')
|
||||
op.drop_table('users')
|
||||
op.drop_index(op.f('ix_skills_created_at'), table_name='skills')
|
||||
op.drop_table('skills')
|
||||
op.drop_index(op.f('ix_departments_created_at'), table_name='departments')
|
||||
op.drop_table('departments')
|
||||
op.drop_index(op.f('ix_coins_created_at'), table_name='coins')
|
||||
op.drop_table('coins')
|
||||
# ### end Alembic commands ###
|
@ -0,0 +1,174 @@
|
||||
"""postgres init migrations
|
||||
|
||||
Revision ID: postgres_init_migrations
|
||||
Revises:
|
||||
Create Date: 2022-06-14 00:29:28.932954
|
||||
|
||||
"""
|
||||
from alembic import op
|
||||
from sqlalchemy_study import sqlalchemy as sa
|
||||
from sqlalchemy_study.sqlalchemy import mysql
|
||||
|
||||
# revision identifiers, used by Alembic.
|
||||
revision = 'postgres_init_migrations'
|
||||
down_revision = None
|
||||
branch_labels = None
|
||||
depends_on = None
|
||||
|
||||
|
||||
def upgrade():
|
||||
# ### commands auto generated by Alembic - please adjust! ###
|
||||
op.create_table('coins',
|
||||
sa.Column('id', sa.Integer(), autoincrement=True, nullable=False),
|
||||
sa.Column('created_at', sa.TIMESTAMP(), nullable=True),
|
||||
sa.Column('updated_at', sa.TIMESTAMP(), nullable=True),
|
||||
sa.Column('coin_name', sa.VARCHAR(length=50), nullable=True),
|
||||
sa.Column('enabled', sa.BOOLEAN(), nullable=True),
|
||||
sa.PrimaryKeyConstraint('id'),
|
||||
sa.UniqueConstraint('coin_name'),
|
||||
sa.UniqueConstraint('id')
|
||||
)
|
||||
op.create_index(op.f('ix_coins_created_at'), 'coins', ['created_at'], unique=False)
|
||||
op.create_table('departments',
|
||||
sa.Column('id', sa.Integer(), autoincrement=True, nullable=False),
|
||||
sa.Column('created_at', sa.TIMESTAMP(), nullable=True),
|
||||
sa.Column('updated_at', sa.TIMESTAMP(), nullable=True),
|
||||
sa.Column('name', sa.VARCHAR(length=255), nullable=False),
|
||||
sa.Column('description', sa.VARCHAR(length=255), nullable=False),
|
||||
sa.PrimaryKeyConstraint('id'),
|
||||
sa.UniqueConstraint('id')
|
||||
)
|
||||
op.create_index(op.f('ix_departments_created_at'), 'departments', ['created_at'], unique=False)
|
||||
op.create_table('skills',
|
||||
sa.Column('id', sa.Integer(), autoincrement=True, nullable=False),
|
||||
sa.Column('created_at', sa.TIMESTAMP(), nullable=True),
|
||||
sa.Column('updated_at', sa.TIMESTAMP(), nullable=True),
|
||||
sa.Column('name', sa.VARCHAR(length=255), nullable=False),
|
||||
sa.Column('description', sa.Text(), nullable=True),
|
||||
sa.PrimaryKeyConstraint('id'),
|
||||
sa.UniqueConstraint('id'),
|
||||
sa.UniqueConstraint('name')
|
||||
)
|
||||
op.create_index(op.f('ix_skills_created_at'), 'skills', ['created_at'], unique=False)
|
||||
op.create_table('users',
|
||||
sa.Column('id', sa.Integer(), autoincrement=True, nullable=False),
|
||||
sa.Column('created_at', sa.TIMESTAMP(), nullable=True),
|
||||
sa.Column('updated_at', sa.TIMESTAMP(), nullable=True),
|
||||
sa.Column('username', sa.String(length=255), nullable=True),
|
||||
sa.Column('email', sa.String(length=255), nullable=True),
|
||||
sa.Column('hash_password', sa.String(length=255), nullable=True),
|
||||
sa.Column('auth_token', sa.String(length=255), nullable=True),
|
||||
sa.Column('last_login', sa.DateTime(), nullable=True),
|
||||
sa.PrimaryKeyConstraint('id'),
|
||||
sa.UniqueConstraint('id'),
|
||||
sa.UniqueConstraint('username')
|
||||
)
|
||||
op.create_index(op.f('ix_users_created_at'), 'users', ['created_at'], unique=False)
|
||||
op.create_index(op.f('ix_users_email'), 'users', ['email'], unique=True)
|
||||
op.create_index(op.f('ix_users_last_login'), 'users', ['last_login'], unique=False)
|
||||
op.create_table('coin_types',
|
||||
sa.Column('id', sa.Integer(), autoincrement=True, nullable=False),
|
||||
sa.Column('created_at', sa.TIMESTAMP(), nullable=True),
|
||||
sa.Column('updated_at', sa.TIMESTAMP(), nullable=True),
|
||||
sa.Column('coin_name', sa.VARCHAR(length=50), nullable=True),
|
||||
sa.Column('coin_id', sa.Integer(), nullable=True),
|
||||
sa.ForeignKeyConstraint(['coin_id'], ['coins.id'], ondelete='CASCADE'),
|
||||
sa.PrimaryKeyConstraint('id'),
|
||||
sa.UniqueConstraint('id')
|
||||
)
|
||||
op.create_index(op.f('ix_coin_types_created_at'), 'coin_types', ['created_at'], unique=False)
|
||||
op.create_table('employees',
|
||||
sa.Column('id', sa.Integer(), autoincrement=True, nullable=False),
|
||||
sa.Column('created_at', sa.TIMESTAMP(), nullable=True),
|
||||
sa.Column('updated_at', sa.TIMESTAMP(), nullable=True),
|
||||
sa.Column('first_name', mysql.VARCHAR(length=128), nullable=False),
|
||||
sa.Column('last_name', mysql.VARCHAR(length=128), nullable=False),
|
||||
sa.Column('phone', mysql.VARCHAR(length=30), nullable=True),
|
||||
sa.Column('description', mysql.VARCHAR(length=255), nullable=True),
|
||||
sa.Column('coin_id', sa.Integer(), nullable=True),
|
||||
sa.ForeignKeyConstraint(['coin_id'], ['coins.id'], ondelete='SET NULL'),
|
||||
sa.PrimaryKeyConstraint('id'),
|
||||
sa.UniqueConstraint('id'),
|
||||
sa.UniqueConstraint('phone')
|
||||
)
|
||||
op.create_index(op.f('ix_employees_created_at'), 'employees', ['created_at'], unique=False)
|
||||
op.create_table('cadre_movements',
|
||||
sa.Column('id', sa.Integer(), autoincrement=True, nullable=False),
|
||||
sa.Column('created_at', sa.TIMESTAMP(), nullable=True),
|
||||
sa.Column('updated_at', sa.TIMESTAMP(), nullable=True),
|
||||
sa.Column('employee', sa.Integer(), nullable=False),
|
||||
sa.Column('old_department', sa.Integer(), nullable=False),
|
||||
sa.Column('new_department', sa.Integer(), nullable=False),
|
||||
sa.Column('reason', sa.VARCHAR(length=500), nullable=True),
|
||||
sa.ForeignKeyConstraint(['employee'], ['employees.id'], ondelete='CASCADE'),
|
||||
sa.ForeignKeyConstraint(['new_department'], ['departments.id'], ondelete='CASCADE'),
|
||||
sa.ForeignKeyConstraint(['old_department'], ['departments.id'], ondelete='CASCADE'),
|
||||
sa.PrimaryKeyConstraint('id'),
|
||||
sa.UniqueConstraint('id')
|
||||
)
|
||||
op.create_index(op.f('ix_cadre_movements_created_at'), 'cadre_movements', ['created_at'], unique=False)
|
||||
op.create_index(op.f('ix_cadre_movements_employee'), 'cadre_movements', ['employee'], unique=False)
|
||||
op.create_index(op.f('ix_cadre_movements_new_department'), 'cadre_movements', ['new_department'], unique=False)
|
||||
op.create_index(op.f('ix_cadre_movements_old_department'), 'cadre_movements', ['old_department'], unique=False)
|
||||
op.create_table('employee_departments',
|
||||
sa.Column('id', sa.Integer(), autoincrement=True, nullable=False),
|
||||
sa.Column('created_at', sa.TIMESTAMP(), nullable=True),
|
||||
sa.Column('updated_at', sa.TIMESTAMP(), nullable=True),
|
||||
sa.Column('employee_id', sa.Integer(), nullable=False),
|
||||
sa.Column('department_id', sa.Integer(), nullable=False),
|
||||
sa.ForeignKeyConstraint(['department_id'], ['departments.id'], ondelete='CASCADE'),
|
||||
sa.ForeignKeyConstraint(['employee_id'], ['employees.id'], ondelete='CASCADE'),
|
||||
sa.PrimaryKeyConstraint('id'),
|
||||
sa.UniqueConstraint('id')
|
||||
)
|
||||
op.create_index(op.f('ix_employee_departments_created_at'), 'employee_departments', ['created_at'], unique=False)
|
||||
op.create_index(op.f('ix_employee_departments_department_id'), 'employee_departments', ['department_id'], unique=False)
|
||||
op.create_index(op.f('ix_employee_departments_employee_id'), 'employee_departments', ['employee_id'], unique=False)
|
||||
op.create_table('employees_skills',
|
||||
sa.Column('id', sa.Integer(), autoincrement=True, nullable=False),
|
||||
sa.Column('created_at', sa.TIMESTAMP(), nullable=True),
|
||||
sa.Column('updated_at', sa.TIMESTAMP(), nullable=True),
|
||||
sa.Column('employee_id', sa.Integer(), nullable=False),
|
||||
sa.Column('skill_id', sa.Integer(), nullable=False),
|
||||
sa.ForeignKeyConstraint(['employee_id'], ['employees.id'], ondelete='CASCADE'),
|
||||
sa.ForeignKeyConstraint(['skill_id'], ['skills.id'], ondelete='CASCADE'),
|
||||
sa.PrimaryKeyConstraint('id'),
|
||||
sa.UniqueConstraint('employee_id', 'skill_id'),
|
||||
sa.UniqueConstraint('id')
|
||||
)
|
||||
op.create_index(op.f('ix_employees_skills_created_at'), 'employees_skills', ['created_at'], unique=False)
|
||||
op.create_index(op.f('ix_employees_skills_employee_id'), 'employees_skills', ['employee_id'], unique=False)
|
||||
op.create_index(op.f('ix_employees_skills_skill_id'), 'employees_skills', ['skill_id'], unique=False)
|
||||
# ### end Alembic commands ###
|
||||
|
||||
|
||||
def downgrade():
|
||||
# ### commands auto generated by Alembic - please adjust! ###
|
||||
op.drop_index(op.f('ix_employees_skills_skill_id'), table_name='employees_skills')
|
||||
op.drop_index(op.f('ix_employees_skills_employee_id'), table_name='employees_skills')
|
||||
op.drop_index(op.f('ix_employees_skills_created_at'), table_name='employees_skills')
|
||||
op.drop_table('employees_skills')
|
||||
op.drop_index(op.f('ix_employee_departments_employee_id'), table_name='employee_departments')
|
||||
op.drop_index(op.f('ix_employee_departments_department_id'), table_name='employee_departments')
|
||||
op.drop_index(op.f('ix_employee_departments_created_at'), table_name='employee_departments')
|
||||
op.drop_table('employee_departments')
|
||||
op.drop_index(op.f('ix_cadre_movements_old_department'), table_name='cadre_movements')
|
||||
op.drop_index(op.f('ix_cadre_movements_new_department'), table_name='cadre_movements')
|
||||
op.drop_index(op.f('ix_cadre_movements_employee'), table_name='cadre_movements')
|
||||
op.drop_index(op.f('ix_cadre_movements_created_at'), table_name='cadre_movements')
|
||||
op.drop_table('cadre_movements')
|
||||
op.drop_index(op.f('ix_employees_created_at'), table_name='employees')
|
||||
op.drop_table('employees')
|
||||
op.drop_index(op.f('ix_coin_types_created_at'), table_name='coin_types')
|
||||
op.drop_table('coin_types')
|
||||
op.drop_index(op.f('ix_users_last_login'), table_name='users')
|
||||
op.drop_index(op.f('ix_users_email'), table_name='users')
|
||||
op.drop_index(op.f('ix_users_created_at'), table_name='users')
|
||||
op.drop_table('users')
|
||||
op.drop_index(op.f('ix_skills_created_at'), table_name='skills')
|
||||
op.drop_table('skills')
|
||||
op.drop_index(op.f('ix_departments_created_at'), table_name='departments')
|
||||
op.drop_table('departments')
|
||||
op.drop_index(op.f('ix_coins_created_at'), table_name='coins')
|
||||
op.drop_table('coins')
|
||||
# ### end Alembic commands ###
|
4
sqlalchemy_study/src/settings/__init__.py
Normal file
@ -0,0 +1,4 @@
|
||||
from settings.settings import Settings
|
||||
|
||||
|
||||
settings = Settings()
|
11
sqlalchemy_study/src/settings/logger.py
Normal file
@ -0,0 +1,11 @@
|
||||
import logging
|
||||
import sys
|
||||
|
||||
from loguru import logger
|
||||
|
||||
logger.remove()
|
||||
|
||||
formatter = "<cyan>{time}</cyan> | <level>{level}</level> | <magenta>{message}</magenta>"
|
||||
sink = sys.stdout
|
||||
|
||||
logger.add(sink=sink, colorize=True, level=logging.INFO, format=formatter)
|
69
sqlalchemy_study/src/settings/settings.py
Normal file
@ -0,0 +1,69 @@
|
||||
import os
|
||||
from pathlib import Path
|
||||
|
||||
from pydantic import BaseSettings
|
||||
|
||||
BASE_DIR = Path(__file__).parent.parent
|
||||
|
||||
SHARED_DIR = BASE_DIR.resolve().joinpath('shared')
|
||||
SHARED_DIR.joinpath('logs').mkdir(exist_ok=True)
|
||||
DIR_LOGS = SHARED_DIR.joinpath('logs')
|
||||
|
||||
|
||||
class Settings(BaseSettings):
|
||||
"""Application settings."""
|
||||
|
||||
DB_HOST: str = 'db_host'
|
||||
USE_DATABASE: str = 'mysql'
|
||||
DB_ECHO: bool = False
|
||||
|
||||
# Postgres
|
||||
POSTGRES_DB_PORT: int
|
||||
POSTGRES_DB: str
|
||||
POSTGRES_USER: str
|
||||
POSTGRES_PASSWORD: str
|
||||
|
||||
MYSQL_DB_PORT: int
|
||||
MYSQL_DATABASE: str
|
||||
MYSQL_USER: str
|
||||
MYSQL_PASSWORD: str
|
||||
|
||||
@property
|
||||
def async_db_url(self) -> str:
|
||||
"""
|
||||
Assemble database URL from settings.
|
||||
|
||||
:return: database URL.
|
||||
"""
|
||||
async_postgres_url = (f'postgresql+asyncpg://{self.POSTGRES_USER}:{self.POSTGRES_PASSWORD}@'
|
||||
f'{self.DB_HOST}:{self.POSTGRES_DB_PORT}/{self.POSTGRES_DB}'
|
||||
)
|
||||
|
||||
async_mysql_url = (f'mysql+asyncmy://{self.MYSQL_USER}:{self.MYSQL_PASSWORD}@'
|
||||
f'{self.DB_HOST}:{self.MYSQL_DB_PORT}/{self.MYSQL_DATABASE}'
|
||||
)
|
||||
if os.environ.get('USE_DATABASE', self.USE_DATABASE).lower() == 'postgres':
|
||||
return async_postgres_url
|
||||
return async_mysql_url
|
||||
|
||||
@property
|
||||
def sync_db_url(self) -> str:
|
||||
"""
|
||||
Assemble database URL from settings.
|
||||
|
||||
:return: database URL.
|
||||
"""
|
||||
sync_postgres_url = (f'postgresql://{self.POSTGRES_USER}:{self.POSTGRES_PASSWORD}@'
|
||||
f'{self.DB_HOST}:{self.POSTGRES_DB_PORT}/{self.POSTGRES_DB}'
|
||||
)
|
||||
|
||||
sync_mysql_url = (f'mysql+pymysql://{self.MYSQL_USER}:{self.MYSQL_PASSWORD}@'
|
||||
f'{self.DB_HOST}:{self.MYSQL_DB_PORT}/{self.MYSQL_DATABASE}'
|
||||
)
|
||||
if os.environ.get('USE_DATABASE', self.USE_DATABASE).lower() == 'postgres':
|
||||
return sync_postgres_url
|
||||
return sync_mysql_url
|
||||
|
||||
class Config:
|
||||
env_file = 'config/.env'
|
||||
env_file_encoding = "utf-8"
|
114
twitch_bonus.py
@ -1,13 +1,21 @@
|
||||
import argparse
|
||||
import atexit
|
||||
import os
|
||||
import sys
|
||||
import tarfile
|
||||
import time
|
||||
from pathlib import Path
|
||||
from typing import Optional
|
||||
|
||||
import validators
|
||||
import wget
|
||||
from loguru import logger
|
||||
from selenium import webdriver
|
||||
from selenium.common.exceptions import NoSuchElementException
|
||||
from selenium.common.exceptions import NoSuchElementException, ElementClickInterceptedException
|
||||
from selenium.webdriver.common.keys import Keys
|
||||
from selenium.webdriver.firefox import options
|
||||
from selenium.webdriver.firefox.service import Service
|
||||
from selenium.webdriver.firefox.webdriver import WebDriver
|
||||
from urllib3.exceptions import MaxRetryError
|
||||
|
||||
logger.remove()
|
||||
@ -15,10 +23,50 @@ logger.add(sink=sys.stdout, colorize=True, level='DEBUG',
|
||||
format="<cyan>{time:DD.MM.YYYY HH:mm:ss}</cyan> | <level>{level}</level> | "
|
||||
"<magenta>{message}</magenta>")
|
||||
|
||||
opt = options.Options()
|
||||
opt.headless = False
|
||||
service = Service(executable_path=r'./geckodriver')
|
||||
driver = webdriver.Firefox(service=service, options=opt)
|
||||
|
||||
GECKO_DRIVER_VERSION = '0.31.0'
|
||||
BASE_DIR = Path(__file__).parent.resolve().as_posix()
|
||||
|
||||
TWITCH_USERNAME = os.environ.get('TWITCH_USERNAME')
|
||||
TWITCH_PASSWORD = os.environ.get('TWITCH_PASSWORD')
|
||||
if not all([TWITCH_USERNAME, TWITCH_PASSWORD]):
|
||||
raise Exception('Username and password must be set')
|
||||
|
||||
|
||||
def download_gecko_driver():
|
||||
logger.info(f'Downloading geckodriver v{GECKO_DRIVER_VERSION}...')
|
||||
|
||||
gecko_driver = f'https://github.com/mozilla/geckodriver/releases/download/v{GECKO_DRIVER_VERSION}/' \
|
||||
f'geckodriver-v{GECKO_DRIVER_VERSION}-linux64.tar.gz'
|
||||
|
||||
geckodriver_file = wget.download(url=gecko_driver, out=BASE_DIR)
|
||||
|
||||
with tarfile.open(geckodriver_file) as tar:
|
||||
tar.extractall(BASE_DIR)
|
||||
os.remove(f'{BASE_DIR}/geckodriver-v{GECKO_DRIVER_VERSION}-linux64.tar.gz')
|
||||
print(f'\ngeckodriver has been downloaded to folder {BASE_DIR}')
|
||||
|
||||
|
||||
def configure_firefox_driver(private_window: bool = False) -> WebDriver:
|
||||
opt = options.Options()
|
||||
opt.headless = False
|
||||
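# Reuses the existing snap Firefox profile so a saved Twitch login can persist between runs (assumed intent; path is the Ubuntu snap default).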
opt.add_argument('-profile')
|
||||
opt.add_argument(f'{Path.home()}/snap/firefox/common/.mozilla/firefox')
|
||||
if private_window:
|
||||
opt.set_preference("browser.privatebrowsing.autostart", True)
|
||||
service = Service(executable_path=f'{BASE_DIR}/geckodriver')
|
||||
firefox_driver = webdriver.Firefox(service=service, options=opt)
|
||||
|
||||
return firefox_driver
|
||||
|
||||
|
||||
def validate_stream_url(twitch_url: str) -> Optional[str]:
|
||||
|
||||
twitch_url_valid = validators.url(twitch_url)
|
||||
if twitch_url_valid is not True:
|
||||
logger.error(f'Url {twitch_url} is invalid. Please provide correct one.')
|
||||
sys.exit(1)
|
||||
return twitch_url
|
||||
|
||||
|
||||
class UserExitException(Exception):
|
||||
@ -29,20 +77,45 @@ def exit_log(message: str):
|
||||
try:
|
||||
logger.info(message)
|
||||
driver.close()
|
||||
os.remove(f'{os.getcwd()}/geckodriver.log')
|
||||
sys.exit(0)
|
||||
except MaxRetryError:
|
||||
|
||||
pass
|
||||
except SystemExit:
|
||||
os.abort()
|
||||
|
||||
|
||||
if __name__ == '__main__':
|
||||
def main(twitch_url: str):
|
||||
try:
|
||||
try:
|
||||
driver.get("https://www.twitch.tv/lol4to22")
|
||||
driver.get(twitch_url)
|
||||
time.sleep(4)
|
||||
try:
|
||||
elem = driver.find_element(by='css selector', value='[data-a-target="login-button"]')
|
||||
elem.click()
|
||||
logger.info('you have 60 seconds to login')
|
||||
time.sleep(60)
|
||||
time.sleep(2)
|
||||
login = driver.find_element(by='css selector', value='[aria-label="Enter your username"]')
|
||||
login.clear()
|
||||
login.send_keys(f'{TWITCH_USERNAME}')
|
||||
password = driver.find_element(by='css selector', value='[aria-label="Enter your password"]')
|
||||
password.clear()
|
||||
password.send_keys(f'{TWITCH_PASSWORD}')
|
||||
time.sleep(1)
|
||||
password.send_keys(Keys.ENTER)
|
||||
time.sleep(53)
|
||||
logger.info('time for login is up')
|
||||
except NoSuchElementException:
|
||||
logger.info('Login button not found. Probably you are already logged in')
|
||||
try:
|
||||
security_button = driver.find_element(
|
||||
by='css selector',
|
||||
value='[data-a-target="account-checkup-generic-modal-secondary-button"]'
|
||||
)
|
||||
security_button.click()
|
||||
except NoSuchElementException:
|
||||
logger.info('Security button not found, continue...')
|
||||
except Exception as e:
|
||||
logger.error(f'Open page exception: {e}')
|
||||
|
||||
@ -57,7 +130,32 @@ if __name__ == '__main__':
|
||||
time.sleep(60 * 15 - 2)
|
||||
except NoSuchElementException:
|
||||
time.sleep(1)
|
||||
except ElementClickInterceptedException:
|
||||
logger.error('Security button must be clicked')
|
||||
time.sleep(15 * 60)
|
||||
except UserExitException:
|
||||
break
|
||||
|
||||
except KeyboardInterrupt as e:
|
||||
atexit.register(exit_log, 'Exit script')
|
||||
|
||||
|
||||
if __name__ == '__main__':
|
||||
|
||||
parser = argparse.ArgumentParser('Twitch clicker', add_help=True)
|
||||
parser.add_argument('-u', '--twitch_url', required=False, default='https://www.twitch.tv/lol4to22',
|
||||
help='Please provide twitch stream url')
|
||||
|
||||
args = parser.parse_args(sys.argv[1:])
|
||||
|
||||
url = 'https://www.twitch.tv/lol4to22'
|
||||
|
||||
stream_url = args.twitch_url
|
||||
if stream_url:
|
||||
url = validate_stream_url(stream_url)
|
||||
logger.info(f'Stream url is: {url}')
|
||||
|
||||
download_gecko_driver()
|
||||
driver = configure_firefox_driver()
|
||||
|
||||
main(url)
|
||||
|