From bdbf99a4f7b3c8e1aff7e639780d0142c97eb5e5 Mon Sep 17 00:00:00 2001 From: Alexandre Teles Date: Thu, 8 Sep 2022 00:32:31 -0300 Subject: [PATCH] basic logging --- Dockerfile | 17 +++++-- config.toml | 3 ++ main.py | 6 +++ modules/InternalCache.py | 38 -------------- modules/Releases.py | 48 +++++++++++------- modules/utils/InternalCache.py | 72 ++++++++++++++++++++++++++ modules/utils/Logger.py | 92 ++++++++++++++++++++++++++++++++++ modules/utils/__init__.py | 0 mypy.ini | 4 ++ 9 files changed, 221 insertions(+), 59 deletions(-) delete mode 100644 modules/InternalCache.py create mode 100644 modules/utils/InternalCache.py create mode 100644 modules/utils/Logger.py create mode 100644 modules/utils/__init__.py diff --git a/Dockerfile b/Dockerfile index bbfc37e..14b7373 100644 --- a/Dockerfile +++ b/Dockerfile @@ -1,14 +1,25 @@ -FROM python:3.10-slim +#FROM python:3.10-slim +FROM ubuntu:22.04 ARG GITHUB_TOKEN ENV GITHUB_TOKEN $GITHUB_TOKEN +ARG UVICORN_HOST +ENV UVICORN_HOST $UVICORN_HOST + +ARG UVICORN_PORT +ENV UVICORN_PORT $UVICORN_PORT + +ARG UVICORN_LOG_LEVEL +ENV UVICORN_LOG_LEVEL $UVICORN_LOG_LEVEL + WORKDIR /usr/src/app COPY . . 
RUN apt update && \ - apt-get install build-essential libffi-dev -y && \ + apt-get install build-essential libffi-dev \ + python3 python3-dev python3-pip -y && \ pip install --no-cache-dir -r requirements.txt -CMD [ "python3", "./main.py" ] \ No newline at end of file +CMD [ "/bin/bash", "./run.sh" ] \ No newline at end of file diff --git a/config.toml b/config.toml index 3d5b631..3238314 100644 --- a/config.toml +++ b/config.toml @@ -58,3 +58,6 @@ repositories = ["TeamVanced/VancedMicroG", "revanced/revanced-cli", "revanced/re [logging] +level = "INFO" +json_logs = false +redis_database = 2 \ No newline at end of file diff --git a/main.py b/main.py index 9b83e93..0998e93 100755 --- a/main.py +++ b/main.py @@ -19,6 +19,8 @@ from fastapi_cache.backends.redis import RedisBackend from modules.Releases import Releases import modules.models.ResponseModels as ResponseModels +import modules.utils.Logger as Logger + """Get latest ReVanced releases from GitHub API.""" # Load config @@ -122,6 +124,10 @@ async def startup() -> None: return None +# setup right before running to make sure no other library overwrites it + +Logger.setup_logging(LOG_LEVEL=config["logging"]["level"], JSON_LOGS=config["logging"]["json_logs"]) + # Run app if __name__ == '__main__': uvicorn.run(app, host=config['uvicorn']['host'], port=config['uvicorn']['port']) \ No newline at end of file diff --git a/modules/InternalCache.py b/modules/InternalCache.py deleted file mode 100644 index a1994a9..0000000 --- a/modules/InternalCache.py +++ /dev/null @@ -1,38 +0,0 @@ -import os -import toml -import msgpack -import aioredis - -# Load config - -config: dict = toml.load("config.toml") - -# Redis connection parameters - -redis_config: dict[ str, str | int ] = { - "url": f"redis://{os.environ['REDIS_URL']}", - "port": os.environ['REDIS_PORT'], - "database": config['internal-cache']['database'], -} - -class InternalCache: - """Implements an internal cache for ReVanced Releases API.""" - - redis_url = 
f"{redis_config['url']}:{redis_config['port']}/{redis_config['database']}" - redis = aioredis.from_url(redis_url, encoding="utf-8", decode_responses=True) - - async def store(self, key: str, value: dict) -> None: - await self.redis.set(key, msgpack.packb(value), ex=config['internal-cache']['expire']) - - async def delete(self, key: str) -> None: - await self.redis.delete(key) - - async def update(self, key: str, value: dict) -> None: - await self.redis.set(key, msgpack.packb(value), ex=config['internal-cache']['expire']) - - async def get(self, key: str) -> dict: - return msgpack.unpackb(await self.redis.get(key)) - - - - \ No newline at end of file diff --git a/modules/Releases.py b/modules/Releases.py index 5c71ff4..4a22363 100644 --- a/modules/Releases.py +++ b/modules/Releases.py @@ -2,7 +2,9 @@ import os import orjson import httpx_cache from base64 import b64decode -from modules.InternalCache import InternalCache +from modules.utils.InternalCache import InternalCache +import modules.utils.Logger as Logger + class Releases: @@ -12,7 +14,17 @@ class Releases: 'Authorization': "token " + os.environ['GITHUB_TOKEN'] } - httpx_client = httpx_cache.AsyncClient(headers=headers, http2=True) + httpx_logger = Logger.HTTPXLogger() + + httpx_client = httpx_cache.AsyncClient( + headers=headers, + http2=True, + event_hooks={ + 'request': [httpx_logger.log_request], + 'response': [httpx_logger.log_response] + } + ) + InternalCache = InternalCache() async def _get_release(self, repository: str) -> list: @@ -50,13 +62,14 @@ class Releases: dict: A dictionary containing assets from each repository """ - releases: dict[str, list] = {} - releases['tools'] = [] + releases: dict[str, list] - try: - cached_releases = await self.InternalCache.get("releases") - return cached_releases - except: + if await self.InternalCache.exists('releases'): + releases = await self.InternalCache.get('releases') + else: + releases = {} + releases['tools'] = [] + for repository in repositories: files = 
await self._get_release(repository) if files: @@ -84,10 +97,9 @@ class Releases: Returns: dict: Patches available for a given app """ - try: - cached_patches = await self.InternalCache.get("patches") - return cached_patches - except: + if await self.InternalCache.exists('patches'): + patches = await self.InternalCache.get('patches') + else: patches = await self._get_patches_json() await self.InternalCache.store('patches', patches) @@ -116,13 +128,13 @@ class Releases: dict: A dictionary containing the contributors from each repository """ - contributors: dict[str, list] = {} - contributors['repositories'] = [] + contributors: dict[str, list] - try: - cached_contributors = await self.InternalCache.get("contributors") - return cached_contributors - except: + if await self.InternalCache.exists('contributors'): + contributors = await self.InternalCache.get('contributors') + else: + contributors = {} + contributors['repositories'] = [] for repository in repositories: if 'revanced' in repository: repo_contributors = await self._get_contributors(repository) diff --git a/modules/utils/InternalCache.py b/modules/utils/InternalCache.py new file mode 100644 index 0000000..942a8d9 --- /dev/null +++ b/modules/utils/InternalCache.py @@ -0,0 +1,72 @@ +import os +import toml +import orjson +import msgpack +import aioredis + +import modules.utils.Logger as Logger + +# Load config + +config: dict = toml.load("config.toml") + +# Redis connection parameters + +redis_config: dict[ str, str | int ] = { + "url": f"redis://{os.environ['REDIS_URL']}", + "port": os.environ['REDIS_PORT'], + "database": config['internal-cache']['database'], +} + +class InternalCache: + """Implements an internal cache for ReVanced Releases API.""" + + redis_url = f"{redis_config['url']}:{redis_config['port']}/{redis_config['database']}" + redis = aioredis.from_url(redis_url, encoding="utf-8", decode_responses=True) + + InternalCacheLogger = Logger.InternalCacheLogger() + + async def store(self, key: str, 
value: dict) -> None: + try: + await self.redis.set(key, orjson.dumps(value), ex=config['internal-cache']['expire']) + await self.InternalCacheLogger.log("SET", None, key) + except aioredis.RedisError as e: + await self.InternalCacheLogger.log("SET", e) + + async def delete(self, key: str) -> None: + try: + await self.redis.delete(key) + await self.InternalCacheLogger.log("DEL", None, key) + except aioredis.RedisError as e: + await self.InternalCacheLogger.log("DEL", e) + + async def update(self, key: str, value: dict) -> None: + try: + await self.redis.set(key, orjson.dumps(value), ex=config['internal-cache']['expire']) + await self.InternalCacheLogger.log("SET", None, key) + except aioredis.RedisError as e: + await self.InternalCacheLogger.log("SET", e) + + async def get(self, key: str) -> dict: + try: + payload = orjson.loads(await self.redis.get(key)) + await self.InternalCacheLogger.log("GET", None, key) + return payload + except aioredis.RedisError as e: + await self.InternalCacheLogger.log("GET", e) + return {} + + async def exists(self, key: str) -> bool: + try: + if await self.redis.exists(key): + await self.InternalCacheLogger.log("EXISTS", None, key) + return True + else: + await self.InternalCacheLogger.log("EXISTS", None, key) + return False + except aioredis.RedisError as e: + await self.InternalCacheLogger.log("EXISTS", e) + return False + + + \ No newline at end of file diff --git a/modules/utils/Logger.py b/modules/utils/Logger.py new file mode 100644 index 0000000..8d7ea3b --- /dev/null +++ b/modules/utils/Logger.py @@ -0,0 +1,92 @@ +import sys +import logging +from loguru import logger +from typing import Optional +from types import FrameType +from redis import RedisError + +class InterceptHandler(logging.Handler): + """Sets up a logging handler for uvicorn and FastAPI. + + Args: + logging (logging.Handler) + """ + + def emit(self, record: logging.LogRecord) -> None: + """Emit a log record. 
+ + Args: + record (LogRecord): Logging record + """ + + level: str | int + frame: Optional[FrameType] + depth: int + + # Get corresponding Loguru level if it exists + try: + level = logger.level(record.levelname).name + except ValueError: + level = record.levelno + + frame = logging.currentframe() + depth = 2 + + # Find the caller from which the logged message originated + while frame.f_code.co_filename == logging.__file__: + frame = frame.f_back + depth += 1 + + logger.opt(depth=depth, exception=record.exc_info).log(level, record.getMessage()) + +class HTTPXLogger(): + """Logger adapter for HTTPX.""" + + async def log_request(self, request) -> None: + """Logs HTTPX requests + + Returns: + None + """ + + logger.info(f"[HTTPX] Request: {request.method} {request.url} - Waiting for response") + + async def log_response(self, response) -> None: + """Logs HTTPX responses + + Returns: + None + """ + request = response.request + + logger.info(f"[HTTPX] Response: {request.method} {request.url} - Status: {response.status_code} {response.reason_phrase}") + +class InternalCacheLogger: + async def log(self, operation: str, result: RedisError | None = None, key: str = "",) -> None: + """Logs internal cache operations + + Args: + operation (str): Operation name + result (RedisError | None): Error raised by the operation, if any + key (str): Key used in the operation + """ + if type(result) is RedisError: + logger.error(f"[InternalCache] REDIS {operation} - Failed with error: {result}") + else: + logger.info(f"[InternalCache] REDIS {operation} {key} - OK") + +def setup_logging(LOG_LEVEL: str, JSON_LOGS: bool) -> None: + + """Set up logging for uvicorn and FastAPI.""" + + # intercept everything at the root logger + logging.root.handlers = [InterceptHandler()] + logging.root.setLevel(LOG_LEVEL) + + # remove every other logger's handlers + # and propagate to root logger + for name in logging.root.manager.loggerDict.keys(): + logging.getLogger(name).handlers = [] + logging.getLogger(name).propagate = True + + # configure loguru + 
logger.configure(handlers=[{"sink": sys.stdout, "serialize": JSON_LOGS}]) \ No newline at end of file diff --git a/modules/utils/__init__.py b/modules/utils/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/mypy.ini b/mypy.ini index 0e98c2a..7529c41 100644 --- a/mypy.ini +++ b/mypy.ini @@ -52,3 +52,7 @@ ignore_missing_imports = True [mypy-httpx_cache.*] # No stubs available ignore_missing_imports = True + +[mypy-redis.*] +# No stubs available +ignore_missing_imports = True \ No newline at end of file