close dangerous api methods under api auth (#78)

* close dangerous api methods under api auth

* rename access_token method
Dmitry Afanasyev
2024-01-07 20:06:02 +03:00
committed by GitHub
parent 8266342214
commit de55d873f9
12 changed files with 210 additions and 18 deletions

View File

@@ -0,0 +1 @@
BOT_ACCESS_API_HEADER = "BOT-API-KEY"
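Callers must now present this header on every guarded endpoint. A minimal client-side sketch, assuming httpx and a locally running instance; the URL, route prefix and key value are placeholders, not part of this commit:

# Hypothetical client call: the key travels in the BOT-API-KEY request header.
# URL, route prefix and the key value are assumptions for illustration only.
import httpx

BOT_ACCESS_API_HEADER = "BOT-API-KEY"

response = httpx.put(
    "http://localhost:8000/chatgpt/models/priority/reset",
    headers={BOT_ACCESS_API_HEADER: "<superuser access token>"},
)
print(response.status_code)  # 202 with the right key, 403 otherwise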

View File

@@ -3,13 +3,19 @@ from starlette import status
from starlette.responses import JSONResponse, Response
from telegram import Update
from api.bot.deps import get_bot_queue, get_chatgpt_service, get_update_from_request
from api.bot.deps import (
get_access_to_bot_api_or_403,
get_bot_queue,
get_chatgpt_service,
get_update_from_request,
)
from api.bot.serializers import (
ChatGptModelSerializer,
ChatGptModelsPrioritySerializer,
GETChatGptModelsSerializer,
LightChatGptModel,
)
from api.exceptions import PermissionMissingResponse
from core.bot.app import BotQueue
from core.bot.services import ChatGptService
from settings.config import settings
@@ -53,6 +59,10 @@ async def models_list(
@router.put(
"/chatgpt/models/{model_id}/priority",
name="bot:change_model_priority",
dependencies=[Depends(get_access_to_bot_api_or_403)],
responses={
status.HTTP_403_FORBIDDEN: {"model": PermissionMissingResponse},
},
response_class=Response,
status_code=status.HTTP_202_ACCEPTED,
summary="change gpt model priority",
@@ -69,6 +79,10 @@ async def change_model_priority(
@router.put(
"/chatgpt/models/priority/reset",
name="bot:reset_models_priority",
dependencies=[Depends(get_access_to_bot_api_or_403)],
responses={
status.HTTP_403_FORBIDDEN: {"model": PermissionMissingResponse},
},
response_class=Response,
status_code=status.HTTP_202_ACCEPTED,
summary="reset all model priority to default",
@@ -83,6 +97,11 @@ async def reset_models_priority(
@router.post(
"/chatgpt/models",
name="bot:add_new_model",
dependencies=[Depends(get_access_to_bot_api_or_403)],
responses={
status.HTTP_403_FORBIDDEN: {"model": PermissionMissingResponse},
status.HTTP_201_CREATED: {"model": ChatGptModelSerializer},
},
response_model=ChatGptModelSerializer,
status_code=status.HTTP_201_CREATED,
summary="add new model",
@@ -100,6 +119,10 @@ async def add_new_model(
@router.delete(
"/chatgpt/models/{model_id}",
name="bot:delete_gpt_model",
dependencies=[Depends(get_access_to_bot_api_or_403)],
responses={
status.HTTP_403_FORBIDDEN: {"model": PermissionMissingResponse},
},
response_class=Response,
status_code=status.HTTP_204_NO_CONTENT,
summary="delete gpt model",

View File

@@ -1,15 +1,17 @@
from fastapi import Depends
from fastapi import Depends, Header, HTTPException
from starlette import status
from starlette.requests import Request
from telegram import Update
from api.auth.deps import get_user_service
from api.bot.constants import BOT_ACCESS_API_HEADER
from api.deps import get_database
from core.auth.services import UserService
from core.bot.app import BotApplication, BotQueue
from core.bot.repository import ChatGPTRepository
from core.bot.services import ChatGptService
from infra.database.db_adapter import Database
from settings.config import AppSettings, get_settings
from settings.config import AppSettings, get_settings, settings
def get_bot_app(request: Request) -> BotApplication:
@@ -40,3 +42,13 @@ def get_chatgpt_service(
user_service: UserService = Depends(get_user_service),
) -> ChatGptService:
return ChatGptService(repository=chatgpt_repository, user_service=user_service)
async def get_access_to_bot_api_or_403(
bot_api_key: str | None = Header(None, alias=BOT_ACCESS_API_HEADER, description="Bot API access key"),
user_service: UserService = Depends(get_user_service),
) -> None:
access_token = await user_service.get_user_access_token_by_username(settings.SUPERUSER)
if not access_token or access_token != bot_api_key:
raise HTTPException(status_code=status.HTTP_403_FORBIDDEN, detail="Could not validate api header")
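Because the check only needs something that can look up the superuser's access token, it can also be exercised directly with a stand-in service. A hedged sketch; FakeUserService is an assumption, and importing api.bot.deps requires the project's settings to be configured:

# Direct-call sketch of get_access_to_bot_api_or_403 with a stand-in user service.
# FakeUserService is an assumption; only get_user_access_token_by_username is needed.
import asyncio

from fastapi import HTTPException

from api.bot.deps import get_access_to_bot_api_or_403

class FakeUserService:
    async def get_user_access_token_by_username(self, username: str) -> str | None:
        return "expected-key"

async def main() -> None:
    # Matching key: the dependency returns None and the request would proceed.
    await get_access_to_bot_api_or_403(bot_api_key="expected-key", user_service=FakeUserService())
    # Wrong key: it raises HTTPException(403) before any route handler runs.
    try:
        await get_access_to_bot_api_or_403(bot_api_key="wrong", user_service=FakeUserService())
    except HTTPException as exc:
        assert exc.status_code == 403

asyncio.run(main())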

View File

@@ -1,11 +1,39 @@
from typing import Any
from fastapi.responses import ORJSONResponse
from starlette import status
from starlette.requests import Request
from api.base_schemas import BaseError, BaseResponse
class BaseAPIException(Exception):
pass
_content_type: str = "application/json"
model: type[BaseResponse] = BaseResponse
status_code: int = status.HTTP_500_INTERNAL_SERVER_ERROR
title: str | None = None
type: str | None = None
detail: str | None = None
instance: str | None = None
headers: dict[str, str] | None = None
def __init__(self, **ctx: Any) -> None:
self.__dict__ = ctx
@classmethod
def example(cls) -> dict[str, Any] | None:
if isinstance(cls.model.Config.json_schema_extra, dict):  # type: ignore[attr-defined]
return cls.model.Config.json_schema_extra.get("example")  # type: ignore[attr-defined]
return None
@classmethod
def response(cls) -> dict[str, Any]:
return {
"model": cls.model,
"content": {
cls._content_type: cls.model.Config.json_schema_extra,  # type: ignore[attr-defined]
},
}
class InternalServerError(BaseError):
@@ -28,6 +56,31 @@ class InternalServerErrorResponse(BaseResponse):
}
class PermissionMissing(BaseError):
pass
class PermissionMissingResponse(BaseResponse):
error: PermissionMissing
class Config:
json_schema_extra = {
"example": {
"status": 403,
"error": {
"type": "PermissionMissing",
"title": "Permission required for this endpoint is missing",
},
},
}
class PermissionMissingError(BaseAPIException):
model = PermissionMissingResponse
status_code = status.HTTP_403_FORBIDDEN
title: str = "Permission required for this endpoint is missing"
async def internal_server_error_handler(_request: Request, _exception: Exception) -> ORJSONResponse:
error = InternalServerError(title="Something went wrong!", type="InternalServerError")
response = InternalServerErrorResponse(status=500, error=error).model_dump(exclude_unset=True)
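For symmetry with the handler above, a hedged sketch (not part of this diff) of what a handler for PermissionMissingError could look like in the same module, assuming it gets registered on the FastAPI app elsewhere:

# Hypothetical handler for PermissionMissingError, mirroring internal_server_error_handler;
# it uses only the imports already at the top of this file. Registration is assumed.
async def permission_missing_error_handler(_request: Request, exception: PermissionMissingError) -> ORJSONResponse:
    error = PermissionMissing(title=exception.title, type="PermissionMissing")
    response = PermissionMissingResponse(status=exception.status_code, error=error).model_dump(exclude_unset=True)
    return ORJSONResponse(content=response, status_code=exception.status_code)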