Merge branch 'logging'

Alexandre Teles 2022-09-09 17:40:44 -03:00
commit ecd4432400
15 changed files with 288 additions and 67 deletions


@@ -1,14 +1,25 @@
-FROM python:3.10-slim
+#FROM python:3.10-slim
+FROM ubuntu:22.04
 
 ARG GITHUB_TOKEN
 ENV GITHUB_TOKEN $GITHUB_TOKEN
 
+ARG UVICORN_HOST
+ENV UVICORN_HOST $UVICORN_HOST
+
+ARG UVICORN_PORT
+ENV UVICORN_PORT $UVICORN_PORT
+
+ARG UVICORN_LOG_LEVEL
+ENV UVICORN_LOG_LEVEL $UVICORN_LOG_LEVEL
+
 WORKDIR /usr/src/app
 
 COPY . .
 
 RUN apt update && \
-    apt-get install build-essential libffi-dev -y && \
+    apt-get install build-essential libffi-dev \
+    python3 python3-dev python3-pip -y && \
     pip install --no-cache-dir -r requirements.txt
 
-CMD [ "python3", "./main.py" ]
+CMD [ "/bin/bash", "./run.sh" ]


@@ -54,4 +54,10 @@ database = 1
 
 [app]
-repositories = ["TeamVanced/VancedMicroG", "revanced/revanced-cli", "revanced/revanced-patches", "revanced/revanced-integrations", "revanced/revanced-manager", "revanced/revanced-patcher"]
+repositories = ["TeamVanced/VancedMicroG", "revanced/revanced-cli", "revanced/revanced-patches", "revanced/revanced-integrations", "revanced/revanced-manager"]
+
+[logging]
+level = "INFO"
+json_logs = false
+redis_database = 2
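
A rough sketch of how the new [logging] table is consumed, mirroring the main.py change later in this commit (the config path and keys come from this diff):

import toml
import modules.utils.Logger as Logger

# Read the new [logging] table and hand its values to the logging setup.
config: dict = toml.load("config.toml")
Logger.setup_logging(LOG_LEVEL=config["logging"]["level"], JSON_LOGS=config["logging"]["json_logs"])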


@@ -16,9 +16,12 @@ services:
     container_name: revanced-releases-api
     image: alexandreteles/revanced-releases-api:latest
     environment:
+      - GITHUB_TOKEN=YOUR_GITHUB_TOKEN
       - REDIS_URL=revanced-releases-api-redis
       - REDIS_PORT=6379
-      - GITHUB_TOKEN=YOUR_GITHUB_TOKEN
+      - UVICORN_HOST=0.0.0.0
+      - UVICORN_PORT=8000
+      - UVICORN_LOG_LEVEL=debug
     ports:
       - 127.0.0.1:7934:8000
     networks:


@@ -19,6 +19,8 @@ from fastapi_cache.backends.redis import RedisBackend
 from modules.Releases import Releases
 import modules.models.ResponseModels as ResponseModels
 
+import modules.utils.Logger as Logger
+
 """Get latest ReVanced releases from GitHub API."""
 
 # Load config
@@ -122,6 +124,10 @@ async def startup() -> None:
     return None
 
+# setup right before running to make sure no other library overwrites it
+Logger.setup_logging(LOG_LEVEL=config["logging"]["level"], JSON_LOGS=config["logging"]["json_logs"])
+
 # Run app
 
 if __name__ == '__main__':
     uvicorn.run(app, host=config['uvicorn']['host'], port=config['uvicorn']['port'])


@@ -1,38 +0,0 @@
-import os
-import toml
-import msgpack
-import aioredis
-
-# Load config
-config: dict = toml.load("config.toml")
-
-# Redis connection parameters
-redis_config: dict[ str, str | int ] = {
-    "url": f"redis://{os.environ['REDIS_URL']}",
-    "port": os.environ['REDIS_PORT'],
-    "database": config['internal-cache']['database'],
-}
-
-class InternalCache:
-    """Implements an internal cache for ReVanced Releases API."""
-
-    redis_url = f"{redis_config['url']}:{redis_config['port']}/{redis_config['database']}"
-    redis = aioredis.from_url(redis_url, encoding="utf-8", decode_responses=True)
-
-    async def store(self, key: str, value: dict) -> None:
-        await self.redis.set(key, msgpack.packb(value), ex=config['internal-cache']['expire'])
-
-    async def delete(self, key: str) -> None:
-        await self.redis.delete(key)
-
-    async def update(self, key: str, value: dict) -> None:
-        await self.redis.set(key, msgpack.packb(value), ex=config['internal-cache']['expire'])
-
-    async def get(self, key: str) -> dict:
-        return msgpack.unpackb(await self.redis.get(key))


@@ -2,7 +2,9 @@ import os
 import orjson
 import httpx_cache
 from base64 import b64decode
-from modules.InternalCache import InternalCache
+from modules.utils.InternalCache import InternalCache
+import modules.utils.Logger as Logger
 
 class Releases:
@@ -12,7 +14,17 @@ class Releases:
         'Authorization': "token " + os.environ['GITHUB_TOKEN']
     }
 
-    httpx_client = httpx_cache.AsyncClient(headers=headers, http2=True)
+    httpx_logger = Logger.HTTPXLogger()
+
+    httpx_client = httpx_cache.AsyncClient(
+        headers=headers,
+        http2=True,
+        event_hooks={
+            'request': [httpx_logger.log_request],
+            'response': [httpx_logger.log_response]
+        }
+    )
 
     InternalCache = InternalCache()
 
     async def _get_release(self, repository: str) -> list:
@@ -50,13 +62,14 @@ class Releases:
             dict: A dictionary containing assets from each repository
         """
 
-        releases: dict[str, list] = {}
-        releases['tools'] = []
+        releases: dict[str, list]
 
-        try:
-            cached_releases = await self.InternalCache.get("releases")
-            return cached_releases
-        except:
+        if await self.InternalCache.exists('releases'):
+            releases = await self.InternalCache.get('releases')
+        else:
+            releases = {}
+            releases['tools'] = []
             for repository in repositories:
                 files = await self._get_release(repository)
                 if files:
@@ -84,10 +97,9 @@ class Releases:
         Returns:
             dict: Patches available for a given app
         """
 
-        try:
-            cached_patches = await self.InternalCache.get("patches")
-            return cached_patches
-        except:
+        if await self.InternalCache.exists('patches'):
+            patches = await self.InternalCache.get('patches')
+        else:
             patches = await self._get_patches_json()
             await self.InternalCache.store('patches', patches)
@@ -116,13 +128,13 @@ class Releases:
             dict: A dictionary containing the contributors from each repository
         """
 
-        contributors: dict[str, list] = {}
-        contributors['repositories'] = []
+        contributors: dict[str, list]
 
-        try:
-            cached_contributors = await self.InternalCache.get("contributors")
-            return cached_contributors
-        except:
+        if await self.InternalCache.exists('contributors'):
+            contributors = await self.InternalCache.get('contributors')
+        else:
+            contributors = {}
+            contributors['repositories'] = []
             for repository in repositories:
                 if 'revanced' in repository:
                     repo_contributors = await self._get_contributors(repository)


@@ -0,0 +1,72 @@
+import os
+import toml
+import orjson
+import msgpack
+import aioredis
+import modules.utils.Logger as Logger
+
+# Load config
+config: dict = toml.load("config.toml")
+
+# Redis connection parameters
+redis_config: dict[ str, str | int ] = {
+    "url": f"redis://{os.environ['REDIS_URL']}",
+    "port": os.environ['REDIS_PORT'],
+    "database": config['internal-cache']['database'],
+}
+
+class InternalCache:
+    """Implements an internal cache for ReVanced Releases API."""
+
+    redis_url = f"{redis_config['url']}:{redis_config['port']}/{redis_config['database']}"
+    redis = aioredis.from_url(redis_url, encoding="utf-8", decode_responses=True)
+
+    InternalCacheLogger = Logger.InternalCacheLogger()
+
+    async def store(self, key: str, value: dict) -> None:
+        try:
+            await self.redis.set(key, orjson.dumps(value), ex=config['internal-cache']['expire'])
+            await self.InternalCacheLogger.log("SET", None, key)
+        except aioredis.RedisError as e:
+            await self.InternalCacheLogger.log("SET", e)
+
+    async def delete(self, key: str) -> None:
+        try:
+            await self.redis.delete(key)
+            await self.InternalCacheLogger.log("DEL", None, key)
+        except aioredis.RedisError as e:
+            await self.InternalCacheLogger.log("DEL", e)
+
+    async def update(self, key: str, value: dict) -> None:
+        try:
+            await self.redis.set(key, orjson.dumps(value), ex=config['internal-cache']['expire'])
+            await self.InternalCacheLogger.log("SET", None, key)
+        except aioredis.RedisError as e:
+            await self.InternalCacheLogger.log("SET", e)
+
+    async def get(self, key: str) -> dict:
+        try:
+            payload = orjson.loads(await self.redis.get(key))
+            await self.InternalCacheLogger.log("GET", None, key)
+            return payload
+        except aioredis.RedisError as e:
+            await self.InternalCacheLogger.log("GET", e)
+            return {}
+
+    async def exists(self, key: str) -> bool:
+        try:
+            if await self.redis.exists(key):
+                await self.InternalCacheLogger.log("EXISTS", None, key)
+                return True
+            else:
+                await self.InternalCacheLogger.log("EXISTS", None, key)
+                return False
+        except aioredis.RedisError as e:
+            await self.InternalCacheLogger.log("EXISTS", e)
+            return False
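
For orientation, a minimal usage sketch of the new cache wrapper, following the exists/get/store pattern that Releases.py adopts in this commit (the "releases" key and placeholder payload are illustrative only):

from modules.utils.InternalCache import InternalCache

cache = InternalCache()

async def cached_releases() -> dict:
    # Serve from Redis when the key is present, otherwise rebuild and store it.
    if await cache.exists("releases"):
        return await cache.get("releases")
    releases = {"tools": []}  # placeholder payload for illustration
    await cache.store("releases", releases)
    return releases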

modules/utils/Logger.py Normal file

@@ -0,0 +1,92 @@
+import sys
+import logging
+from loguru import logger
+from typing import Optional
+from types import FrameType
+from redis import RedisError
+
+class InterceptHandler(logging.Handler):
+    """Sets up a logging handler for uvicorn and FastAPI.
+
+    Args:
+        logging (logging.Handler)
+    """
+
+    def emit(self, record: logging.LogRecord) -> None:
+        """Emit a log record.
+
+        Args:
+            record (LogRecord): Logging record
+        """
+
+        level: str | int
+        frame: Optional[FrameType]
+        depth: int
+
+        # Get corresponding Loguru level if it exists
+        try:
+            level = logger.level(record.levelname).name
+        except ValueError:
+            level = record.levelno
+
+        frame = logging.currentframe()
+        depth = 2
+
+        # Find the caller from which the logged message originated
+        while frame.f_code.co_filename == logging.__file__:
+            frame = frame.f_back
+            depth += 1
+
+        logger.opt(depth=depth, exception=record.exc_info).log(level, record.getMessage())
+
+class HTTPXLogger():
+    """Logger adapter for HTTPX."""
+
+    async def log_request(self, request) -> None:
+        """Logs HTTPX requests
+
+        Returns:
+            None
+        """
+        logger.info(f"[HTTPX] Request: {request.method} {request.url} - Waiting for response")
+
+    async def log_response(self, response) -> None:
+        """Logs HTTPX responses
+
+        Returns:
+            None
+        """
+        request = response.request
+
+        logger.info(f"[HTTPX] Response: {request.method} {request.url} - Status: {response.status_code} {response.reason_phrase}")
+
+class InternalCacheLogger:
+
+    async def log(self, operation: str, result: RedisError | None = None, key: str = "") -> None:
+        """Logs internal cache operations
+
+        Args:
+            operation (str): Operation name
+            key (str): Key used in the operation
+        """
+        if type(result) is RedisError:
+            logger.error(f"[InternalCache] REDIS {operation} - Failed with error: {result}")
+        else:
+            logger.info(f"[InternalCache] REDIS {operation} {key} - OK")
+
+def setup_logging(LOG_LEVEL: str, JSON_LOGS: bool) -> None:
+    """Set up logging for uvicorn and FastAPI."""
+
+    # intercept everything at the root logger
+    logging.root.handlers = [InterceptHandler()]
+    logging.root.setLevel(LOG_LEVEL)
+
+    # remove every other logger's handlers
+    # and propagate to root logger
+    for name in logging.root.manager.loggerDict.keys():
+        logging.getLogger(name).handlers = []
+        logging.getLogger(name).propagate = True
+
+    # configure loguru
+    logger.configure(handlers=[{"sink": sys.stdout, "serialize": JSON_LOGS}])
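
For orientation, a minimal sketch of how this module is wired up, following the call main.py makes in this commit; the literal level and json_logs values would normally come from the new [logging] table in config.toml:

from loguru import logger
import modules.utils.Logger as Logger

# Route stdlib/uvicorn records through loguru and pick the sink format.
Logger.setup_logging(LOG_LEVEL="INFO", JSON_LOGS=False)

# After setup, both loguru calls and logging.getLogger(...) records end up on stdout.
logger.info("logging configured")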



@@ -52,3 +52,7 @@ ignore_missing_imports = True
 [mypy-httpx_cache.*]
 # No stubs available
 ignore_missing_imports = True
+
+[mypy-redis.*]
+# No stubs available
+ignore_missing_imports = True

poetry.lock generated

@@ -63,7 +63,7 @@ tests_no_zope = ["cloudpickle", "coverage[toml] (>=5.0.2)", "hypothesis", "mypy"
 [[package]]
 name = "certifi"
-version = "2022.6.15"
+version = "2022.6.15.1"
 description = "Python package for providing Mozilla's CA Bundle."
 category = "main"
 optional = false
@@ -276,6 +276,21 @@ python-versions = "*"
 [package.dependencies]
 six = ">=1.4.1"
 
+[[package]]
+name = "loguru"
+version = "0.6.0"
+description = "Python logging made (stupidly) simple"
+category = "main"
+optional = false
+python-versions = ">=3.5"
+
+[package.dependencies]
+colorama = {version = ">=0.3.4", markers = "sys_platform == \"win32\""}
+win32-setctime = {version = ">=1.0.0", markers = "sys_platform == \"win32\""}
+
+[package.extras]
+dev = ["Sphinx (>=4.1.1)", "black (>=19.10b0)", "colorama (>=0.3.4)", "docutils (==0.16)", "flake8 (>=3.7.7)", "isort (>=5.1.1)", "pytest (>=4.6.2)", "pytest-cov (>=2.7.1)", "sphinx-autobuild (>=0.7.1)", "sphinx-rtd-theme (>=0.4.3)", "tox (>=3.9.0)"]
+
 [[package]]
 name = "msgpack"
 version = "1.0.4"
@@ -563,6 +578,17 @@ category = "main"
 optional = false
 python-versions = ">=3.7"
 
+[[package]]
+name = "win32-setctime"
+version = "1.1.0"
+description = "A small Python utility to set file creation time on Windows"
+category = "main"
+optional = false
+python-versions = ">=3.5"
+
+[package.extras]
+dev = ["black (>=19.3b0)", "pytest (>=4.6.2)"]
+
 [[package]]
 name = "wrapt"
 version = "1.14.1"
@@ -574,7 +600,7 @@ python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,>=2.7"
 [metadata]
 lock-version = "1.1"
 python-versions = "^3.10"
-content-hash = "b6bc93886458708baeea4c07ad92f0613d6aeac113875d9ff2e9a3afb71d000d"
+content-hash = "a2d02dd2c5903e3654434982ce86d728c2b9c63d6c6520a4bd295c362ac88bb0"
 
 [metadata.files]
 aioredis = [
@@ -598,8 +624,8 @@ attrs = [
     {file = "attrs-21.4.0.tar.gz", hash = "sha256:626ba8234211db98e869df76230a137c4c40a12d72445c45d5f5b716f076e2fd"},
 ]
 certifi = [
-    {file = "certifi-2022.6.15-py3-none-any.whl", hash = "sha256:fe86415d55e84719d75f8b69414f6438ac3547d2078ab91b67e779ef69378412"},
-    {file = "certifi-2022.6.15.tar.gz", hash = "sha256:84c85a9078b11105f04f3036a9482ae10e4621616db313fe045dd24743a0820d"},
+    {file = "certifi-2022.6.15.1-py3-none-any.whl", hash = "sha256:43dadad18a7f168740e66944e4fa82c6611848ff9056ad910f8f7a3e46ab89e0"},
+    {file = "certifi-2022.6.15.1.tar.gz", hash = "sha256:cffdcd380919da6137f76633531a5817e3a9f268575c128249fb637e4f9e73fb"},
 ]
 click = [
     {file = "click-8.1.3-py3-none-any.whl", hash = "sha256:bb4d8133cb15a609f44e8213d9b391b0809795062913b383c62be0ee95b1db48"},
@@ -740,6 +766,10 @@ limits = [
     {file = "limits-1.6-py3-none-any.whl", hash = "sha256:12ae4449cf7daadee43edf4096acd9cb9f4bfdec3a995aa9fbd0f72b0b9af762"},
     {file = "limits-1.6.tar.gz", hash = "sha256:6c0a57b42647f1141f5a7a0a8479b49e4367c24937a01bd9d4063a595c2dd48a"},
 ]
+loguru = [
+    {file = "loguru-0.6.0-py3-none-any.whl", hash = "sha256:4e2414d534a2ab57573365b3e6d0234dfb1d84b68b7f3b948e6fb743860a77c3"},
+    {file = "loguru-0.6.0.tar.gz", hash = "sha256:066bd06758d0a513e9836fd9c6b5a75bfb3fd36841f4b996bc60b547a309d41c"},
+]
 msgpack = [
     {file = "msgpack-1.0.4-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:4ab251d229d10498e9a2f3b1e68ef64cb393394ec477e3370c457f9430ce9250"},
     {file = "msgpack-1.0.4-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:112b0f93202d7c0fef0b7810d465fde23c746a2d482e1e2de2aafd2ce1492c88"},
@@ -1115,6 +1145,10 @@ websockets = [
     {file = "websockets-10.3-pp37-pypy37_pp73-win_amd64.whl", hash = "sha256:3eda1cb7e9da1b22588cefff09f0951771d6ee9fa8dbe66f5ae04cc5f26b2b55"},
     {file = "websockets-10.3.tar.gz", hash = "sha256:fc06cc8073c8e87072138ba1e431300e2d408f054b27047d047b549455066ff4"},
 ]
+win32-setctime = [
+    {file = "win32_setctime-1.1.0-py3-none-any.whl", hash = "sha256:231db239e959c2fe7eb1d7dc129f11172354f98361c4fa2d6d2d7e278baa8aad"},
+    {file = "win32_setctime-1.1.0.tar.gz", hash = "sha256:15cf5750465118d6929ae4de4eb46e8edae9a5634350c01ba582df868e932cb2"},
+]
 wrapt = [
     {file = "wrapt-1.14.1-cp27-cp27m-macosx_10_9_x86_64.whl", hash = "sha256:1b376b3f4896e7930f1f772ac4b064ac12598d1c38d04907e696cc4d794b43d3"},
     {file = "wrapt-1.14.1-cp27-cp27m-manylinux1_i686.whl", hash = "sha256:903500616422a40a98a5a3c4ff4ed9d0066f3b4c951fa286018ecdf0750194ef"},


@@ -18,9 +18,12 @@ services:
     container_name: revanced-releases-api
     image: alexandreteles/revanced-releases-api:latest
    environment:
+      - GITHUB_TOKEN=YOUR_GITHUB_TOKEN
       - REDIS_URL=revanced-releases-api-redis
       - REDIS_PORT=6379
-      - GITHUB_TOKEN=YOUR_GITHUB_TOKEN
+      - UVICORN_HOST=0.0.0.0
+      - UVICORN_PORT=8000
+      - UVICORN_LOG_LEVEL=debug
     ports:
       - 127.0.0.1:7934:8000
     networks:


@@ -18,6 +18,7 @@ fastapi-cache2 = ">=0.1.9"
 aioredis = {version = ">=2.0.1", extras = ["hiredis"]}
 redis = ">=4.3.4"
 msgpack = ">=1.0.4"
+loguru = ">=0.6.0"
 
 [tool.poetry.dev-dependencies]
 fastapi = ">=0.82.0"
@@ -33,6 +34,7 @@ redis = ">=4.3.4"
 msgpack = ">=1.0.4"
 mypy = ">=0.971"
 types-toml = ">=0.10.8"
+loguru = ">=0.6.0"
 
 [build-system]
 requires = ["poetry-core>=1.0.0"]


@@ -3,7 +3,7 @@ aiorwlock==1.3.0; python_version >= "3.7" and python_version < "4.0" and python_
 anyio==3.6.1; python_version >= "3.7" and python_version < "4.0" and python_full_version >= "3.6.2"
 async-timeout==4.0.2; python_version >= "3.6"
 attrs==21.4.0; python_version >= "3.7" and python_full_version < "3.0.0" and python_version < "4.0" or python_version >= "3.7" and python_version < "4.0" and python_full_version >= "3.5.0"
-certifi==2022.6.15; python_version >= "3.7" and python_version < "4.0"
+certifi==2022.6.15.1; python_version >= "3.7" and python_version < "4.0"
 click==8.1.3; python_version >= "3.7" and python_version < "4.0"
 colorama==0.4.5; python_version >= "3.7" and python_full_version < "3.0.0" and sys_platform == "win32" and python_version < "4.0" and platform_system == "Windows" or sys_platform == "win32" and python_version >= "3.7" and python_full_version >= "3.5.0" and python_version < "4.0" and platform_system == "Windows"
 deprecated==1.2.13; python_version >= "3.6" and python_full_version < "3.0.0" or python_full_version >= "3.4.0" and python_version >= "3.6"
@@ -21,6 +21,7 @@ httpx==0.23.0; python_version >= "3.7"
 hyperframe==6.0.1; python_version >= "3.7" and python_full_version >= "3.6.1" and python_version < "4.0"
 idna==3.3
 limits==1.6; python_version >= "3.7" and python_version < "4.0"
+loguru==0.6.0; python_version >= "3.5"
 msgpack==1.0.4
 orjson==3.8.0; python_version >= "3.7"
 packaging==21.3; python_version >= "3.6"
@@ -43,4 +44,5 @@ uvicorn==0.18.3; python_version >= "3.7"
 uvloop==0.16.0; sys_platform != "win32" and sys_platform != "cygwin" and platform_python_implementation != "PyPy" and python_version >= "3.7" and python_version < "4.0"
 watchfiles==0.16.1; python_version >= "3.7" and python_version < "4.0"
 websockets==10.3; python_version >= "3.7" and python_version < "4.0"
+win32-setctime==1.1.0; sys_platform == "win32" and python_version >= "3.5"
 wrapt==1.14.1; python_version >= "3.6" and python_full_version < "3.0.0" or python_full_version >= "3.5.0" and python_version >= "3.6"

run.sh Executable file

@@ -0,0 +1,12 @@
+#!/bin/bash
+
+# This script is used to run the application
+# It is used by the Dockerfile
+
+# get number of cores
+CORES=$(grep -c ^processor /proc/cpuinfo)
+
+# Start the application
+uvicorn main:app --host="$UVICORN_HOST" --port="$UVICORN_PORT" \
+    --workers="$CORES" --log-level="$UVICORN_LOG_LEVEL" --server-header \
+    --proxy-headers --forwarded-allow-ips="*"
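
A rough Python equivalent of this launch command, assuming the same environment variables are set (uvicorn.run accepts these options programmatically); the shell script above remains what the Dockerfile actually invokes:

import os
import uvicorn

# Mirror run.sh: one worker per CPU core, settings taken from the environment.
uvicorn.run(
    "main:app",
    host=os.environ["UVICORN_HOST"],
    port=int(os.environ["UVICORN_PORT"]),
    workers=os.cpu_count(),
    log_level=os.environ["UVICORN_LOG_LEVEL"],
    server_header=True,
    proxy_headers=True,
    forwarded_allow_ips="*",
)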