basic logging

This commit is contained in:
Alexandre Teles 2022-09-08 00:32:31 -03:00
parent dda0f2d9d6
commit bdbf99a4f7
9 changed files with 221 additions and 59 deletions

View File

@@ -1,14 +1,25 @@
FROM python:3.10-slim
#FROM python:3.10-slim
FROM ubuntu:22.04
ARG GITHUB_TOKEN
ENV GITHUB_TOKEN $GITHUB_TOKEN
ARG UVICORN_HOST
ENV UVICORN_HOST $UVICORN_HOST
ARG UVICORN_PORT
ENV UVICORN_PORT $UVICORN_PORT
ARG UVICORN_LOG_LEVEL
ENV UVICORN_LOG_LEVEL $UVICORN_LOG_LEVEL
WORKDIR /usr/src/app
COPY . .
RUN apt update && \
apt-get install build-essential libffi-dev -y && \
apt-get install build-essential libffi-dev \
python3 python3-dev python3-pip -y && \
pip install --no-cache-dir -r requirements.txt
CMD [ "python3", "./main.py" ]
CMD [ "/bin/bash", "./run.sh" ]

View File

@@ -58,3 +58,6 @@ repositories = ["TeamVanced/VancedMicroG", "revanced/revanced-cli", "revanced/re
[logging]
level = "INFO"
json_logs = false
redis_database = 2
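
The new [logging] table is what main.py passes to Logger.setup_logging later in this commit. A minimal sketch of reading it back with toml:

    import toml

    config: dict = toml.load("config.toml")
    log_level: str = config["logging"]["level"]       # "INFO"
    json_logs: bool = config["logging"]["json_logs"]  # False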

View File

@@ -19,6 +19,8 @@ from fastapi_cache.backends.redis import RedisBackend
from modules.Releases import Releases
import modules.models.ResponseModels as ResponseModels
import modules.utils.Logger as Logger
"""Get latest ReVanced releases from GitHub API."""
# Load config
@@ -122,6 +124,10 @@ async def startup() -> None:
return None
# set up logging right before running to make sure no other library overwrites it
Logger.setup_logging(LOG_LEVEL=config["logging"]["level"], JSON_LOGS=config["logging"]["json_logs"])
# Run app
if __name__ == '__main__':
uvicorn.run(app, host=config['uvicorn']['host'], port=config['uvicorn']['port'])
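
setup_logging is intentionally the last call before uvicorn.run, so that no library imported earlier can re-install its own handlers. A self-contained sketch of the same ordering (host and port are placeholder values):

    import uvicorn
    from fastapi import FastAPI
    import modules.utils.Logger as Logger

    app = FastAPI()

    if __name__ == "__main__":
        # Configure the Loguru intercept only after every other
        # library has finished touching the logging module.
        Logger.setup_logging(LOG_LEVEL="INFO", JSON_LOGS=False)
        uvicorn.run(app, host="0.0.0.0", port=8000)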

View File

@@ -1,38 +0,0 @@
import os
import toml
import msgpack
import aioredis
# Load config
config: dict = toml.load("config.toml")
# Redis connection parameters
redis_config: dict[ str, str | int ] = {
"url": f"redis://{os.environ['REDIS_URL']}",
"port": os.environ['REDIS_PORT'],
"database": config['internal-cache']['database'],
}
class InternalCache:
"""Implements an internal cache for ReVanced Releases API."""
redis_url = f"{redis_config['url']}:{redis_config['port']}/{redis_config['database']}"
redis = aioredis.from_url(redis_url, encoding="utf-8", decode_responses=True)
async def store(self, key: str, value: dict) -> None:
await self.redis.set(key, msgpack.packb(value), ex=config['internal-cache']['expire'])
async def delete(self, key: str) -> None:
await self.redis.delete(key)
async def update(self, key: str, value: dict) -> None:
await self.redis.set(key, msgpack.packb(value), ex=config['internal-cache']['expire'])
async def get(self, key: str) -> dict:
return msgpack.unpackb(await self.redis.get(key))

View File

@@ -2,7 +2,9 @@ import os
import orjson
import httpx_cache
from base64 import b64decode
from modules.InternalCache import InternalCache
from modules.utils.InternalCache import InternalCache
import modules.utils.Logger as Logger
class Releases:
@@ -12,7 +14,17 @@ class Releases:
'Authorization': "token " + os.environ['GITHUB_TOKEN']
}
httpx_client = httpx_cache.AsyncClient(headers=headers, http2=True)
httpx_logger = Logger.HTTPXLogger()
httpx_client = httpx_cache.AsyncClient(
headers=headers,
http2=True,
event_hooks={
'request': [httpx_logger.log_request],
'response': [httpx_logger.log_response]
}
)
InternalCache = InternalCache()
async def _get_release(self, repository: str) -> list:
@@ -50,13 +62,14 @@
dict: A dictionary containing assets from each repository
"""
releases: dict[str, list] = {}
releases['tools'] = []
releases: dict[str, list]
if await self.InternalCache.exists('releases'):
releases = await self.InternalCache.get('releases')
else:
releases = {}
releases['tools'] = []
try:
cached_releases = await self.InternalCache.get("releases")
return cached_releases
except:
for repository in repositories:
files = await self._get_release(repository)
if files:
@@ -84,10 +97,9 @@
Returns:
dict: Patches available for a given app
"""
try:
cached_patches = await self.InternalCache.get("patches")
return cached_patches
except:
if await self.InternalCache.exists('patches'):
patches = await self.InternalCache.get('patches')
else:
patches = await self._get_patches_json()
await self.InternalCache.store('patches', patches)
@@ -116,13 +128,13 @@
dict: A dictionary containing the contributors from each repository
"""
contributors: dict[str, list] = {}
contributors['repositories'] = []
contributors: dict[str, list]
try:
cached_contributors = await self.InternalCache.get("contributors")
return cached_contributors
except:
if await self.InternalCache.exists('contributors'):
contributors = await self.InternalCache.get('contributors')
else:
contributors = {}
contributors['repositories'] = []
for repository in repositories:
if 'revanced' in repository:
repo_contributors = await self._get_contributors(repository)
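
The methods above now follow the same exists/get-or-rebuild flow instead of catching a failed cache read. A minimal sketch of that pattern, factored into a hypothetical helper (get_cached_or_build and its parameters are illustrative, not part of the commit):

    from typing import Awaitable, Callable

    async def get_cached_or_build(cache, key: str,
                                  build: Callable[[], Awaitable[dict]]) -> dict:
        # Serve from Redis when the key is present, otherwise
        # rebuild the payload and store it for the configured TTL.
        if await cache.exists(key):
            return await cache.get(key)
        data = await build()  # e.g. self._get_patches_json()
        await cache.store(key, data)
        return data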

View File

@@ -0,0 +1,72 @@
import os
import toml
import orjson
import msgpack
import aioredis
import modules.utils.Logger as Logger
# Load config
config: dict = toml.load("config.toml")
# Redis connection parameters
redis_config: dict[str, str | int] = {
"url": f"redis://{os.environ['REDIS_URL']}",
"port": os.environ['REDIS_PORT'],
"database": config['internal-cache']['database'],
}
class InternalCache:
"""Implements an internal cache for ReVanced Releases API."""
redis_url = f"{redis_config['url']}:{redis_config['port']}/{redis_config['database']}"
redis = aioredis.from_url(redis_url, encoding="utf-8", decode_responses=True)
InternalCacheLogger = Logger.InternalCacheLogger()
async def store(self, key: str, value: dict) -> None:
try:
await self.redis.set(key, orjson.dumps(value), ex=config['internal-cache']['expire'])
await self.InternalCacheLogger.log("SET", None, key)
except aioredis.RedisError as e:
await self.InternalCacheLogger.log("SET", e)
async def delete(self, key: str) -> None:
try:
await self.redis.delete(key)
await self.InternalCacheLogger.log("DEL", None, key)
except aioredis.RedisError as e:
await self.InternalCacheLogger.log("DEL", e)
async def update(self, key: str, value: dict) -> None:
try:
await self.redis.set(key, orjson.dumps(value), ex=config['internal-cache']['expire'])
await self.InternalCacheLogger.log("SET", None, key)
except aioredis.RedisError as e:
await self.InternalCacheLogger.log("SET", e)
async def get(self, key: str) -> dict:
try:
payload = orjson.loads(await self.redis.get(key))
await self.InternalCacheLogger.log("GET", None, key)
return payload
except aioredis.RedisError as e:
await self.InternalCacheLogger.log("GET", e)
return {}
async def exists(self, key: str) -> bool:
try:
if await self.redis.exists(key):
await self.InternalCacheLogger.log("EXISTS", None, key)
return True
else:
await self.InternalCacheLogger.log("EXISTS", None, key)
return False
except aioredis.RedisError as e:
await self.InternalCacheLogger.log("EXISTS", e)
return False
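
A hypothetical usage sketch for the new module; it assumes a reachable Redis instance, the REDIS_URL and REDIS_PORT environment variables, and a config.toml next to the process:

    import asyncio
    from modules.utils.InternalCache import InternalCache

    async def main() -> None:
        cache = InternalCache()
        await cache.store("releases", {"tools": []})  # logs "REDIS SET releases - OK"
        if await cache.exists("releases"):
            print(await cache.get("releases"))        # {'tools': []}
        await cache.delete("releases")

    asyncio.run(main())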

modules/utils/Logger.py Normal file
View File

@@ -0,0 +1,92 @@
import sys
import logging
from loguru import logger
from typing import Optional
from types import FrameType
from redis import RedisError
class InterceptHandler(logging.Handler):
"""Setups a loging handler for uvicorn and FastAPI.
Args:
logging (logging.Handler)
"""
def emit(self, record: logging.LogRecord) -> None:
"""Emit a log record.
Args:
record (LogRecord): Logging record
"""
level: str | int
frame: Optional[FrameType]
depth: int
# Get corresponding Loguru level if it exists
try:
level = logger.level(record.levelname).name
except ValueError:
level = record.levelno
frame = logging.currentframe()
depth = 2
# Find the caller from which the logged message originated
while frame is not None and frame.f_code.co_filename == logging.__file__:
frame = frame.f_back
depth += 1
logger.opt(depth=depth, exception=record.exc_info).log(level, record.getMessage())
class HTTPXLogger:
"""Logger adapter for HTTPX."""
async def log_request(self, request) -> None:
"""Logs HTTPX requests
Returns:
None
"""
logger.info(f"[HTTPX] Request: {request.method} {request.url} - Waiting for response")
async def log_response(self, response) -> None:
"""Logs HTTPX responses
Returns:
None
"""
request = response.request
logger.info(f"[HTTPX] Response: {request.method} {request.url} - Status: {response.status_code} {response.reason_phrase}")
class InternalCacheLogger:
async def log(self, operation: str, result: RedisError | None = None, key: str = "") -> None:
"""Logs internal cache operations.
Args:
operation (str): Operation name
result (RedisError | None): Error raised by the operation, if any
key (str): Key used in the operation
"""
if isinstance(result, RedisError):
logger.error(f"[InternalCache] REDIS {operation} - Failed with error: {result}")
else:
logger.info(f"[InternalCache] REDIS {operation} {key} - OK")
def setup_logging(LOG_LEVEL: str, JSON_LOGS: bool) -> None:
"""Setup logging for uvicorn and FastAPI."""
# intercept everything at the root logger
logging.root.handlers = [InterceptHandler()]
logging.root.setLevel(LOG_LEVEL)
# remove every other logger's handlers
# and propagate to root logger
for name in logging.root.manager.loggerDict.keys():
logging.getLogger(name).handlers = []
logging.getLogger(name).propagate = True
# configure loguru
logger.configure(handlers=[{"sink": sys.stdout, "serialize": JSON_LOGS}])
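
Once setup_logging has run, anything emitted through the standard logging module is intercepted and printed by the Loguru sink. A minimal sketch:

    import logging
    import modules.utils.Logger as Logger

    Logger.setup_logging(LOG_LEVEL="INFO", JSON_LOGS=False)
    # Routed through InterceptHandler and rendered by Loguru.
    logging.getLogger("uvicorn").info("handled by Loguru now")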

View File

@@ -52,3 +52,7 @@ ignore_missing_imports = True
[mypy-httpx_cache.*]
# No stubs available
ignore_missing_imports = True
[mypy-redis.*]
# No stubs available
ignore_missing_imports = True