update goodgame and github scripts

Dmitry Afanasyev 2024-01-06 01:38:49 +03:00
parent 66e8f9e757
commit 2687794443
2 changed files with 22 additions and 12 deletions

View File

@@ -47,8 +47,8 @@ SERVICES: dict[str: dict[str, Any]] = {
     'mosgortrans': {
         'deprecated': False,
         'components': [
-            {'name': 'selenoid/chrome', 'version': '119.0'},
-            {'name': 'aerokube/selenoid', 'version': '1.11.0'},
+            {'name': 'selenoid/chrome', 'version': '120.0'},
+            {'name': 'aerokube/selenoid', 'version': '1.11.1'},
         ],
     },
 }
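Not part of the commit, just a sketch of how a registry shaped like SERVICES might be consumed; the iteration and output format below are assumptions rather than code from either script:

from typing import Any

# Hypothetical excerpt mirroring the structure in the hunk above.
SERVICES: dict[str, dict[str, Any]] = {
    'mosgortrans': {
        'deprecated': False,
        'components': [
            {'name': 'selenoid/chrome', 'version': '120.0'},
            {'name': 'aerokube/selenoid', 'version': '1.11.1'},
        ],
    },
}

# Walk every non-deprecated service and report its pinned component versions.
for service, meta in SERVICES.items():
    if meta.get('deprecated'):
        continue
    for component in meta.get('components', []):
        print(f"{service}: {component['name']} {component['version']}")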

View File

@@ -41,9 +41,11 @@ logger = configure_logger()
 class GoodGame:
-    BASE_URL = 'https://goodgame.ru/api/4/streams'
+    BASE_URL = 'https://goodgame.ru'
+    API_URL = BASE_URL + '/api/4/streams'
     PAGES_FOR_ASYNC_SCAN = 25
     CURRENT_WATCHERS_FILTER = 1
+    INTERESTING_STREAMERS = ('snowboy', 'hell_girl', )

     def __init__(self) -> None:
         self.all_streams: dict[int, dict[str, Any]] = dict()
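The BASE_URL / API_URL split keeps the site root and the streams endpoint separate. A minimal sketch of one paged request against that endpoint, assuming the same requests usage as the rest of the script (the page number and timeout here are illustrative):

import requests

BASE_URL = 'https://goodgame.ru'
API_URL = BASE_URL + '/api/4/streams'

# Fetch one page of streams and count the entries returned.
response = requests.get(f'{API_URL}?page=1', timeout=2)
streams = response.json().get('streams', [])
print(f'page 1 -> {len(streams)} streams')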
@@ -67,8 +69,8 @@ class GoodGame:
         Deprecated
         """
         last_page = 1
-        for page in range(20, 0, -1):
-            response = requests.get(f'{self.BASE_URL}?page={page}')
+        for page in range(self.PAGES_FOR_ASYNC_SCAN, 0, -1):
+            response = requests.get(f'{self.API_URL}?page={page}')
             if response.json()["streams"]:
                 last_page = page
                 break
@@ -78,7 +80,7 @@ class GoodGame:
         """
         Deprecated
         """
-        response = requests.get(f'{self.BASE_URL}?page=1')
+        response = requests.get(f'{self.API_URL}?page=1')
         max_current_viewers = response.json()['streams'][0].get('viewers', None)
         return max_current_viewers
@@ -114,13 +116,21 @@ class GoodGame:
         watchers_0 = self.__count_streams_with_watchers(current_watchers=[0])
         watchers_1 = self.__count_streams_with_watchers(current_watchers=[1])
         minimal_watchers = self.__count_streams_with_watchers(current_watchers=[0, 1])
+        trimmed_streams = self._sort_trim_dict(total_viewers)
         return (
             f'Total streams: {len(self.all_streams)} -> '
             f'with minimal watchers {round(minimal_watchers / len(self.all_streams) * 100)}%\n'
             f'Total streams with 0 viewers: {watchers_0} -> {round(watchers_0/len(self.all_streams) * 100)}%\n'
             f'Total streams with 1 viewer: {watchers_1} -> {round(watchers_1/len(self.all_streams) * 100)}%\n'
             f'Total viewers: {sum(total_viewers.values())}\n'
-            f'Streams: {self._sort_trim_dict(total_viewers)}\n'
+            f'Streams: {trimmed_streams}\n'
+            f'Interesting streams: '
+            f'{
+                {
+                    stream: viewers for stream, viewers in trimmed_streams.items()
+                    if any([True for streamer in self.INTERESTING_STREAMERS if streamer in stream.lower()])
+                }
+            }\n'
             f'{"-" * 76}'
         )
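The new report block builds the 'Interesting streams' dict inline, inside an f-string whose expression spans several lines. A sketch of the same substring filter computed up front, with stand-in data (INTERESTING_STREAMERS matches the constant above; trimmed_streams here is example data, not real _sort_trim_dict output):

# Sketch: the same filter, done before formatting the report line.
INTERESTING_STREAMERS = ('snowboy', 'hell_girl')
trimmed_streams = {'Snowboy speedruns': 12, 'random stream': 3}  # example data

interesting = {
    stream: viewers
    for stream, viewers in trimmed_streams.items()
    if any(streamer in stream.lower() for streamer in INTERESTING_STREAMERS)
}
print(f'Interesting streams: {interesting}')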
@@ -148,7 +158,7 @@ class GoodGame:
         streams = await asyncio.gather(
             *[
-                self._async_request(session, f'{self.BASE_URL}?page={page}')
+                self._async_request(session, f'{self.API_URL}?page={page}')
                 for page in range(1, self.PAGES_FOR_ASYNC_SCAN + 1)
             ],
             return_exceptions=True,
@@ -167,14 +177,14 @@ class GoodGame:
     def sync_counter(self) -> str:
         page = 1
-        response = requests.get(f'{self.BASE_URL}?page={page}', timeout=2)
+        response = requests.get(f'{self.API_URL}?page={page}', timeout=2)
         streams = response.json()['streams']
         for stream in streams:
             self.all_streams.update({stream['id']: stream})
         max_current_viewers = streams[0]['viewers']
         while streams:
             page += 1
-            response = requests.get(f'{self.BASE_URL}?page={page}')
+            response = requests.get(f'{self.API_URL}?page={page}', timeout=2)
             streams = response.json()['streams']
             for stream in streams:
                 self.all_streams.update({stream['id']: stream})
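A usage sketch for the synchronous path touched above; only sync_counter itself appears in the diff, the instantiation and print are illustrative:

# Sketch: build the counter and print whatever report string sync_counter returns.
gg = GoodGame()
print(gg.sync_counter())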