Mirror of https://github.com/revanced/revanced-api.git
Synced 2025-04-29 22:24:31 +02:00
feat: API rewrite (#2)
* feat: sanic framework settings
* feat: initial implementation
* [pre-commit.ci] auto fixes from pre-commit.com hooks (for more information, see https://pre-commit.ci)
* refactor: backend changes
* [pre-commit.ci] auto fixes from pre-commit.com hooks (for more information, see https://pre-commit.ci)
* fix: docstrings out of place
* feat: more gh endpoints
* ci: fix pre-commit issues
* [pre-commit.ci] auto fixes from pre-commit.com hooks (for more information, see https://pre-commit.ci)
* feat: app info
* ci: merge CI and fix triggers
* chore: bump deps
* [pre-commit.ci] auto fixes from pre-commit.com hooks (for more information, see https://pre-commit.ci)
* fix: typing issues
* chore: deps
* [pre-commit.ci] auto fixes from pre-commit.com hooks (for more information, see https://pre-commit.ci)
* refactor: clean up returns
* ci: spread jobs correctly
* ci: move to quodana
* ci: fix issues with python modules
* [pre-commit.ci] auto fixes from pre-commit.com hooks (for more information, see https://pre-commit.ci)
* chore: pycharm config
* [pre-commit.ci] auto fixes from pre-commit.com hooks (for more information, see https://pre-commit.ci)
* refactor: improve code quality
* feat: better README
* ci: add quodana baseline config
* [pre-commit.ci] auto fixes from pre-commit.com hooks (for more information, see https://pre-commit.ci)
* ci: fix quodana config
* ci: more qodana stuff
* ci: revert qodana changes
* ci: python interpreter detection is broken
* feat: tests
* ci: testing
* [pre-commit.ci] auto fixes from pre-commit.com hooks (for more information, see https://pre-commit.ci)
* ci: fix workflow names
* [pre-commit.ci] auto fixes from pre-commit.com hooks (for more information, see https://pre-commit.ci)
* chore: add deps
* [pre-commit.ci] auto fixes from pre-commit.com hooks (for more information, see https://pre-commit.ci)
* test: more tests
* [pre-commit.ci] auto fixes from pre-commit.com hooks (for more information, see https://pre-commit.ci)
* feat: /tools compat
* feat: donations endpoint
* feat: teams endpoint
* fix: lock pydantic version
* chore: deps
* ci: docker builds
* [pre-commit.ci] auto fixes from pre-commit.com hooks (for more information, see https://pre-commit.ci)
* ci: remove coverage action and others
* ci: pre-commit fixes

---------

Co-authored-by: pre-commit-ci[bot] <66853113+pre-commit-ci[bot]@users.noreply.github.com>
parent cb52684edb, commit 45ef33741c
55 .github/workflows/codeql.yml (vendored, deleted)
@@ -1,55 +0,0 @@
name: "CodeQL"

on:
  pull_request:
    branches: [ "dev" ]
  workflow_dispatch:

env:
  default_branch: dev

jobs:
  analyze:
    name: Analyze
    runs-on: ubuntu-latest
    permissions:
      actions: read
      contents: read
      security-events: write

    strategy:
      fail-fast: false
      matrix:
        language: [ 'python' ]

    steps:
      - name: Checkout repository
        uses: actions/checkout@v3
        with:
          ref: ${{ env.default_branch }}

      - name: Set up Python
        uses: actions/setup-python@v4
        with:
          python-version: '3.11.3'
      - name: Install dependencies
        run: |
          python -m pip install --upgrade pip
          if [ -f requirements.txt ];
          then pip install -r requirements.txt;
          fi
          echo "CODEQL_PYTHON=$(which python)" >> $GITHUB_ENV

      - name: Initialize CodeQL
        uses: github/codeql-action/init@v2
        with:
          languages: ${{ matrix.language }}
          setup-python-dependencies: false

      - name: Autobuild
        uses: github/codeql-action/autobuild@v2

      - name: Perform CodeQL Analysis
        uses: github/codeql-action/analyze@v2
        with:
          category: "/language:${{matrix.language}}"

69 .github/workflows/dev.yml (vendored, new file)
@@ -0,0 +1,69 @@
name: Build dev branch

on:
  push:
    branches: [ "dev" ]
  schedule:
    - cron: '24 9 * * 6'
  workflow_dispatch:

env:
  REGISTRY: ghcr.io
  IMAGE_NAME: ${{ github.repository }}
  IMAGE_TAG: ${{ github.sha }}

jobs:
  security_checks:
    runs-on: ubuntu-latest
    name: Security check
    steps:
      - uses: actions/checkout@v3
      - name: Security Checks (PyCharm Security)
        uses: tonybaloney/pycharm-security@master
        with:
          path: .

  build:
    needs: security_checks
    runs-on: ubuntu-latest
    permissions:
      contents: read
      packages: write

    steps:
      - name: Checkout Dockerfile
        id: checkout
        uses: actions/checkout@v3
        with:
          ref: ${{ inputs.branch }}

      - name: Setup QEMU
        id: qemu
        uses: docker/setup-qemu-action@v2
        with:
          image: tonistiigi/binfmt:latest
          platforms: all

      - name: Setup Docker Buildx
        id: buildx
        uses: docker/setup-buildx-action@v2

      - name: Extract metadata (tags, labels) for Docker
        id: meta
        uses: docker/metadata-action@v4
        with:
          images: ${{ env.REGISTRY }}/${{ env.IMAGE_NAME }}
          flavor: |
            latest=${{ startsWith(github.ref, 'refs/heads/main') }}
            suffix=-${{ github.sha }}

      - name: Build Docker image
        id: build
        uses: docker/build-push-action@v3
        with:
          context: .
          file: ./Dockerfile
          platforms: linux/amd64,linux/arm64/v8
          push: false
          tags: ${{ steps.meta.outputs.tags }}
          labels: ${{ steps.meta.outputs.labels }}

67 .github/workflows/main.yml (vendored, new file)
@@ -0,0 +1,67 @@
name: Build and Publish Docker Image

on:
  push:
    branches: [main]
  schedule:
    - cron: "24 9 * * 6"
  workflow_dispatch:

env:
  REGISTRY: ghcr.io
  IMAGE_NAME: ${{ github.repository }}
  IMAGE_TAG: ${{ github.sha }}

jobs:
  build:
    runs-on: ubuntu-latest
    permissions:
      contents: read
      packages: write

    steps:
      - name: Checkout Dockerfile
        id: checkout
        uses: actions/checkout@v3

      - name: Setup QEMU
        id: qemu
        uses: docker/setup-qemu-action@v2
        with:
          image: tonistiigi/binfmt:latest
          platforms: all

      - name: Setup Docker Buildx
        id: buildx
        uses: docker/setup-buildx-action@v2

      - name: Login to GitHub Container Registry
        id: ghcr
        uses: docker/login-action@v2
        with:
          registry: ${{ env.REGISTRY }}
          username: ${{ github.repository_owner }}
          password: ${{ secrets.GH_TOKEN }}

      - name: Extract metadata (tags, labels) for Docker
        id: meta
        uses: docker/metadata-action@v4
        with:
          images: ${{ env.REGISTRY }}/${{ env.IMAGE_NAME }}
          flavor: |
            latest=${{ startsWith(github.ref, 'refs/heads/main') }}
            suffix=-${{ github.sha }}

      - name: Build and push main Docker image
        id: build
        uses: docker/build-push-action@v3
        with:
          build-args: GH_TOKEN=${{ secrets.GH_TOKEN }}
          context: .
          file: ./Dockerfile
          platforms: linux/amd64,linux/arm64/v8
          cache-to: type=gha,mode=max,ignore-error=true
          cache-from: type=gha
          push: true
          tags: ${{ steps.meta.outputs.tags }}
          labels: ${{ steps.meta.outputs.labels }}

37 .github/workflows/mypy.yml (vendored)
@@ -1,26 +1,43 @@
-name: MyPy
+name: "MyPy | Static Type Checking"

 on:
+  push:
+    branches: [dev, main]
   pull_request:
-    branches: [ "dev" ]
+    types: [opened, reopened, edited, synchronize]
   workflow_dispatch:

 env:
   default_branch: dev

 jobs:
   mypy:
     name: mypy
     runs-on: ubuntu-latest
     permissions:
       actions: read
       contents: read
       security-events: write

     steps:
-      - name: Setup Python
+      - name: Checkout repository
+        uses: actions/checkout@v3
+
+      - name: Set up Python
         uses: actions/setup-python@v4
         with:
-          python-version: 3.11.3
-          architecture: x64
-      - name: Checkout
-        uses: actions/checkout@v3
-      - name: Install mypy
-        run: pip install mypy
+          python-version: "3.11.4"
+
+      - name: Install project dependencies
+        run: |
+          python -m pip install --upgrade pip
+          if [ -f requirements.txt ];
+          then pip install -r requirements.txt;
+          fi

       - name: Run mypy
         uses: sasanquaneuf/mypy-github-action@main
         with:
-          checkName: 'mypy'
+          checkName: "mypy"
         env:
           GITHUB_TOKEN: ${{ secrets.GH_TOKEN }}

43 .github/workflows/pytest.yml (vendored, new file)
@@ -0,0 +1,43 @@
name: "PyTest | Testing and Code Coverage"

on:
  push:
    branches: [dev, main]
  pull_request:
    types: [opened, reopened, edited, synchronize]
  workflow_dispatch:

env:
  default_branch: dev

jobs:
  pytest:
    name: pytest
    runs-on: ubuntu-latest
    permissions:
      actions: read
      contents: read
      security-events: write

    steps:
      - name: Checkout repository
        uses: actions/checkout@v3

      - name: Set up Python
        uses: actions/setup-python@v4
        with:
          python-version: "3.11.4"

      - name: Install project dependencies
        run: |
          python -m pip install --upgrade pip
          if [ -f requirements.txt ];
          then pip install -r requirements.txt;
          fi

      - name: Run pytest
        uses: pavelzw/pytest-action@v2
        with:
          custom-arguments: "--cov --cov-report=xml"
        env:
          GITHUB_TOKEN: ${{ secrets.GH_TOKEN }}

48 .github/workflows/quodana.yml (vendored, new file)
@@ -0,0 +1,48 @@
name: "Qodana | Code Quality Scan"

on:
  push:
    branches: [dev, main]
  pull_request:
    types: [opened, reopened, edited, synchronize]
  workflow_dispatch:

env:
  default_branch: dev

jobs:
  qodana:
    timeout-minutes: 15
    runs-on: ubuntu-latest
    steps:
      - name: "Checkout"
        uses: actions/checkout@v3
        with:
          fetch-depth: 0

      - name: Set up Python
        uses: actions/setup-python@v4
        with:
          python-version: "3.11.4"

      - name: Install project dependencies
        run: |
          python -m pip install --upgrade pip
          if [ -f requirements.txt ];
          then pip install -r requirements.txt;
          fi

      - name: Install testing tools
        run: pip install mypy pydantic

      - name: "Qodana Scan"
        uses: JetBrains/qodana-action@v2023.1.0
        env:
          QODANA_TOKEN: ${{ secrets.QODANA_TOKEN }}
        with:
          args: --baseline,qodana.sarif.json

      - name: "Upload Qodana Report"
        uses: github/codeql-action/upload-sarif@v2
        with:
          sarif_file: ${{ runner.temp }}/qodana/results/qodana.sarif.json

.pre-commit-config.yaml
@@ -1,28 +1,30 @@
 repos:
   - repo: https://github.com/pre-commit/pre-commit-hooks
     rev: v4.4.0
     hooks:
       - id: trailing-whitespace
       - id: end-of-file-fixer
       - id: check-yaml
       - id: check-added-large-files
       - id: check-docstring-first
       - id: debug-statements
-      - id: name-tests-test
       - id: requirements-txt-fixer
       - id: check-toml
       - id: check-merge-conflict
   - repo: https://github.com/psf/black
     rev: 23.3.0
     hooks:
       - id: black
         language_version: python3.11
   - repo: https://github.com/iamthefij/docker-pre-commit
     rev: v3.0.1
     hooks:
       - id: docker-compose-check
   - repo: https://github.com/pryorda/dockerfilelint-precommit-hooks
     rev: v0.1.0
     hooks:
       - id: dockerfilelint
         stages: [commit]
+
+ci:
+  autoupdate_branch: "dev"

3 .vscode/settings.json (vendored, new file)
@@ -0,0 +1,3 @@
{
  "python.analysis.typeCheckingMode": "off"
}

14 Dockerfile (new file)
@@ -0,0 +1,14 @@
FROM python:3.11-slim

ARG GITHUB_TOKEN
ENV GITHUB_TOKEN $GITHUB_TOKEN

WORKDIR /usr/src/app

COPY . .

RUN apt update && \
    apt-get install build-essential libffi-dev --no-install-recommends -y \
    && pip install --no-cache-dir -r requirements.txt

CMD [ "python3", "-m" , "sanic", "app:app", "--fast", "--access-logs", "--motd", "--noisy-exceptions", "-H", "0.0.0.0"]

43 README.md
@@ -1,2 +1,41 @@
-# revanced-api
-The rewrite, started
+# ReVanced Releases API
+
+---
+
+[](https://github.com/revanced/revanced-api/actions/workflows/quodana.yml)
+[](https://github.com/revanced/revanced-api/actions/workflows/mypy.yml)
+
+---
+
+This is a simple API that proxies requests needed to feed the ReVanced Manager and website with data.
+
+## Usage
+
+To run this API, you need Python 3.11.x. You can install the dependencies with poetry:
+
+```shell
+poetry install
+```
+
+Create an environment variable called `GITHUB_TOKEN` with a valid GitHub token with read access to public repositories.
+
+Then, you can run the API in development mode with:
+
+```shell
+poetry run sanic app:app --dev
+```
+
+or in production mode with:
+
+```shell
+poetry run sanic app:app --fast
+```
+
+## Contributing
+
+If you want to contribute to this project, feel free to open a pull request or an issue. We don't do much here, so it's pretty easy to contribute.
+
+## License
+
+This project is licensed under the AGPLv3 License - see the [LICENSE](LICENSE) file for details.

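For orientation only (this sketch is not part of the commit): once the server is running as the README describes, the proxied endpoints can be queried from Python. The example assumes a local instance on Sanic's default port 8000 and the `v2` version prefix set in `config.py`; host, port, and the chosen endpoint are illustrative.

```python
import asyncio

import aiohttp  # already a project dependency; used here only for the demo


async def main() -> None:
    # Hypothetical local instance; adjust host/port to your deployment.
    async with aiohttp.ClientSession() as session:
        async with session.get("http://127.0.0.1:8000/v2/socials") as resp:
            print(resp.status, await resp.json())


if __name__ == "__main__":
    asyncio.run(main())
```
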
11 api/__init__.py (new file)
@@ -0,0 +1,11 @@
# api/__init__.py
from sanic import Blueprint

from api.github import github
from api.ping import ping
from api.socials import socials
from api.apkdl import apkdl
from api.compat import github as old
from api.donations import donations

api = Blueprint.group(ping, github, socials, donations, apkdl, old, url_prefix="/")

32 api/apkdl.py (new file)
@@ -0,0 +1,32 @@
"""
This module provides a blueprint for the app endpoint.

Routes:
    - GET /app/info: Get app info.
"""

from sanic import Blueprint, Request
from sanic.response import JSONResponse, json
from sanic_ext import openapi

from api.backends.apkdl import ApkDl
from api.backends.entities import AppInfo
from api.models.appinfo import AppInfoModel

from config import api_version

apkdl: Blueprint = Blueprint("app", version=api_version)

apkdl_backend: ApkDl = ApkDl()


@apkdl.get("/app/info/<app_id:str>")
@openapi.definition(
    summary="Get information about an app",
    response=[AppInfoModel],
)
async def root(request: Request, app_id: str) -> JSONResponse:
    data: dict[str, AppInfo] = {
        "app_info": await apkdl_backend.get_app_info(package_name=app_id)
    }
    return json(data, status=200)

0 api/backends/__init__.py (new, empty file)

64 api/backends/apkdl.py (new file)
@@ -0,0 +1,64 @@
from base64 import b64encode

from aiohttp import ClientResponse
from bs4 import BeautifulSoup
from sanic import SanicException
from toolz.functoolz import compose

from api.backends.backend import AppInfoProvider
from api.backends.entities import AppInfo
from api.utils.http_utils import http_get

name: str = "apkdl"
base_url: str = "https://apk-dl.com"


class ApkDl(AppInfoProvider):
    def __init__(self):
        super().__init__(name, base_url)

    async def get_app_info(self, package_name: str) -> AppInfo:
        """Fetches information about an Android app from the ApkDl website.

        Args:
            package_name (str): The package name of the app to fetch.

        Returns:
            AppInfo: An AppInfo object containing the name, category, and logo of the app.

        Raises:
            SanicException: If the HTTP request fails or the app data is incomplete or not found.
        """
        app_url: str = f"{base_url}/{package_name}"
        response: ClientResponse = await http_get(headers={}, url=app_url)
        if response.status != 200:
            raise SanicException(
                f"ApkDl: {response.status}", status_code=response.status
            )
        page = BeautifulSoup(await response.read(), "lxml")
        find_div_text = compose(
            lambda d: d.find_next_sibling("div"),
            lambda d: page.find("div", text=d),
        )
        fetch_logo_url = compose(
            lambda div: div.img["src"],
            lambda _: page.find("div", {"class": "logo"}),
        )
        logo_response: ClientResponse = await http_get(
            headers={}, url=fetch_logo_url(None)
        )
        logo: str = (
            f"data:image/png;base64,{b64encode(await logo_response.content.read()).decode('utf-8')}"
            if logo_response.status == 200
            else ""
        )
        app_data = dict(
            name=find_div_text("App Name").text,
            category=find_div_text("Category").text,
            logo=logo,
        )
        if not all(app_data.values()):
            raise SanicException(
                "ApkDl: App data incomplete or not found", status_code=500
            )
        return AppInfo(**app_data)

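A minimal usage sketch for the backend above (not part of the commit). It assumes network access to apk-dl.com; the package name is only an example.

```python
import asyncio

from api.backends.apkdl import ApkDl


async def demo() -> None:
    backend = ApkDl()
    # AppInfo subclasses dict, so the result can be indexed directly.
    info = await backend.get_app_info(package_name="com.google.android.youtube")
    print(info["name"], info["category"])


if __name__ == "__main__":
    asyncio.run(demo())
```
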
91 api/backends/backend.py (new file)
@@ -0,0 +1,91 @@
from abc import abstractmethod
from typing import Any, Protocol

from api.backends.entities import *


class Backend(Protocol):
    """Interface for a generic backend.

    Attributes:
        name (str): Name of the backend.
        base_url (str): Base URL of the backend.

    Methods:
        list_releases: Retrieve a list of releases.
        get_release_by_tag_name: Retrieve a release by its tag name.
        get_latest_release: Retrieve the latest release.
        get_latest_pre_release: Retrieve the latest pre-release.
        get_release_notes: Retrieve the release notes of a specific release.
        get_contributors: Retrieve the list of contributors.
        get_patches: Retrieve the patches of a specific release.
    """

    name: str
    base_url: str

    def __init__(self, name: str, base_url: str):
        self.name = name
        self.base_url = base_url

    @abstractmethod
    async def list_releases(self, *args: Any, **kwargs: Any) -> list[Release]:
        raise NotImplementedError

    @abstractmethod
    async def get_release_by_tag_name(self, *args: Any, **kwargs: Any) -> Release:
        raise NotImplementedError

    @abstractmethod
    async def get_latest_release(self, *args: Any, **kwargs: Any) -> Release:
        raise NotImplementedError

    @abstractmethod
    async def get_latest_pre_release(self, *args: Any, **kwargs: Any) -> Release:
        raise NotImplementedError

    @abstractmethod
    async def get_contributors(self, *args: Any, **kwargs: Any) -> list[Contributor]:
        raise NotImplementedError

    @abstractmethod
    async def get_patches(self, *args: Any, **kwargs: Any) -> list[dict]:
        raise NotImplementedError

    @abstractmethod
    async def get_team_members(self, *args: Any, **kwargs: Any) -> list[Contributor]:
        raise NotImplementedError


class Repository:
    """A repository that communicates with a specific backend.

    Attributes:
        backend (Backend): The backend instance used to communicate with the repository.
    """

    def __init__(self, backend: Backend):
        self.backend = backend


class AppInfoProvider(Protocol):
    """Interface for a generic app info provider.

    Attributes:
        name (str): Name of the app info provider.
        base_url (str): Base URL of the app info provider.

    Methods:
        get_app_info: Retrieve information about an app.
    """

    name: str
    base_url: str

    def __init__(self, name: str, base_url: str):
        self.name = name
        self.base_url = base_url

    @abstractmethod
    async def get_app_info(self, *args: Any, **kwargs: Any) -> AppInfo:
        raise NotImplementedError

126 api/backends/entities.py (new file)
@@ -0,0 +1,126 @@
from typing import Optional
from dataclasses import dataclass


@dataclass
class Metadata(dict):
    """
    Represents the metadata of a release.

    Attributes:
        - tag_name (str): The name of the release tag.
        - name (str): The name of the release.
        - body (str): The body of the release.
        - draft (bool): Whether the release is a draft.
        - prerelease (bool): Whether the release is a prerelease.
        - created_at (str): The creation date of the release.
        - published_at (str): The publication date of the release.
    """

    def __init__(
        self,
        tag_name: str,
        name: str,
        draft: bool,
        prerelease: bool,
        created_at: str,
        published_at: str,
        body: str,
        repository: Optional[str] = None,
    ):
        dict.__init__(
            self,
            tag_name=tag_name,
            name=name,
            draft=draft,
            prerelease=prerelease,
            created_at=created_at,
            published_at=published_at,
            body=body,
            repository=repository,
        )


@dataclass
class Asset(dict):
    """
    Represents an asset in a release.

    Attributes:
        - name (str): The name of the asset.
        - content_type (str): The MIME type of the asset content.
        - download_url (str): The URL to download the asset.
    """

    def __init__(self, name: str, content_type: str, browser_download_url: str):
        dict.__init__(
            self,
            name=name,
            content_type=content_type,
            browser_download_url=browser_download_url,
        )


@dataclass
class Release(dict):
    """
    Represents a release.

    Attributes:
        - metadata (Metadata): The metadata of the release.
        - assets (list[Asset]): The assets of the release.
    """

    def __init__(self, metadata: Metadata, assets: list[Asset]):
        dict.__init__(self, metadata=metadata, assets=assets)


@dataclass
class Contributor(dict):
    """
    Represents a contributor to a repository.

    Attributes:
        - login (str): The GitHub username of the contributor.
        - avatar_url (str): The URL to the contributor's avatar image.
        - html_url (str): The URL to the contributor's GitHub profile.
        - contributions (Optional[int]): The number of contributions the contributor has made to the repository.
    """

    def __init__(
        self,
        login: str,
        avatar_url: str,
        html_url: str,
        contributions: Optional[int] = None,
    ):
        if contributions:
            dict.__init__(
                self,
                login=login,
                avatar_url=avatar_url,
                html_url=html_url,
                contributions=contributions,
            )
        else:
            dict.__init__(self, login=login, avatar_url=avatar_url, html_url=html_url)


@dataclass
class AppInfo(dict):
    """
    Represents the information of an app.

    Attributes:
        - name (str): The name of the app.
        - category (str): The app category.
        - logo (str): The base64 encoded app logo.
    """

    def __init__(self, name: str, category: str, logo: str):
        dict.__init__(
            self,
            name=name,
            category=category,
            logo=logo,
        )

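A short sketch (not part of the commit) of how the `Backend` protocol from api/backends/backend.py and the entity classes above fit together: because the entities subclass `dict`, a hypothetical in-memory backend can return canned data that serializes straight to JSON, which is convenient in tests. Every name prefixed "Static" or using example.invalid below is an assumption for illustration, not project code.

```python
import asyncio
from typing import Any

import ujson

from api.backends.backend import Backend
from api.backends.entities import Asset, Metadata, Release


class StaticBackend(Backend):
    """Hypothetical backend that returns hard-coded data; illustration only."""

    def __init__(self) -> None:
        super().__init__("static", "https://example.invalid")

    async def get_latest_release(self, *args: Any, **kwargs: Any) -> Release:
        metadata = Metadata(
            tag_name="v0.0.1",
            name="v0.0.1",
            draft=False,
            prerelease=False,
            created_at="2023-01-01T00:00:00Z",
            published_at="2023-01-01T00:00:00Z",
            body="Example release notes",
        )
        assets = [
            Asset(
                name="patches.json",
                content_type="application/json",
                browser_download_url="https://example.invalid/patches.json",
            )
        ]
        return Release(metadata=metadata, assets=assets)


async def demo() -> None:
    release = await StaticBackend().get_latest_release()
    # Entities are plain dicts underneath, so no custom encoder is needed.
    print(ujson.dumps(release))


if __name__ == "__main__":
    asyncio.run(demo())
```
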
355 api/backends/github.py (new file)
@@ -0,0 +1,355 @@
import asyncio
import os
from operator import eq
from typing import Any, Optional

import ujson
from aiohttp import ClientResponse
from sanic import SanicException
from toolz import filter, map
from toolz.dicttoolz import get_in, keyfilter
from toolz.itertoolz import mapcat

from api.backends.backend import Backend, Repository
from api.backends.entities import *
from api.backends.entities import Contributor
from api.utils.http_utils import http_get

repo_name: str = "github"
base_url: str = "https://api.github.com"


class GithubRepository(Repository):
    """
    A repository class that represents a Github repository.

    Args:
        owner (str): The username of the owner of the Github repository.
        name (str): The name of the Github repository.
    """

    def __init__(self, owner: str, name: str):
        """
        Initializes a new instance of the GithubRepository class.

        Args:
            owner (str): The username of the owner of the Github repository.
            name (str): The name of the Github repository.
        """
        super().__init__(Github())
        self.owner = owner
        self.name = name


class Github(Backend):
    """
    A backend class that interacts with the Github API.

    Attributes:
        name (str): The name of the Github backend.
        base_url (str): The base URL of the Github API.
        token (str): The Github access token used for authentication.
        headers (dict[str, str]): The HTTP headers to be sent with each request to the Github API.
    """

    def __init__(self):
        """
        Initializes a new instance of the Github class.
        """
        super().__init__(repo_name, base_url)
        self.token: Optional[str] = os.getenv("GITHUB_TOKEN")
        self.headers: dict[str, str] = {
            "Authorization": f"Bearer {self.token}",
            "Accept": "application/vnd.github+json",
            "X-GitHub-Api-Version": "2022-11-28",
        }
        self.repositories_rest_endpoint: str = f"{base_url}/repos"

    @staticmethod
    async def __assemble_release(release: dict) -> Release:
        async def __assemble_asset(asset: dict) -> Asset:
            asset_data: dict = keyfilter(
                lambda key: key in {"name", "content_type", "browser_download_url"},
                asset,
            )
            return Asset(**asset_data)

        filter_metadata = keyfilter(
            lambda key: key
            in {
                "tag_name",
                "name",
                "draft",
                "prerelease",
                "created_at",
                "published_at",
                "body",
            },
            release,
        )
        metadata = Metadata(**filter_metadata)
        assets = await asyncio.gather(*map(__assemble_asset, release["assets"]))
        return Release(metadata=metadata, assets=assets)

    @staticmethod
    async def __assemble_contributor(
        contributor: dict, team_view: bool = False
    ) -> Contributor:
        if team_view:
            filter_contributor = keyfilter(
                lambda key: key in {"login", "avatar_url", "html_url"},
                contributor,
            )
            return Contributor(**filter_contributor)

        filter_contributor = keyfilter(
            lambda key: key in {"login", "avatar_url", "html_url", "contributions"},
            contributor,
        )
        return Contributor(**filter_contributor)

    async def list_releases(
        self, repository: GithubRepository, per_page: int = 30, page: int = 1
    ) -> list[Release]:
        """
        Returns a list of Release objects for a given Github repository.

        Args:
            repository (GithubRepository): The Github repository for which to retrieve the releases.
            per_page (int): The number of releases to return per page.
            page (int): The page number of the releases to return.

        Returns:
            list[Release]: A list of Release objects.
        """
        list_releases_endpoint: str = f"{self.repositories_rest_endpoint}/{repository.owner}/{repository.name}/releases?per_page={per_page}&page={page}"
        response: ClientResponse = await http_get(
            headers=self.headers, url=list_releases_endpoint
        )
        if response.status != 200:
            raise SanicException(
                context=await response.json(loads=ujson.loads),
                status_code=response.status,
            )
        releases: list[Release] = await asyncio.gather(
            *map(
                lambda release: self.__assemble_release(release),
                await response.json(loads=ujson.loads),
            )
        )
        return releases

    async def get_release_by_tag_name(
        self, repository: GithubRepository, tag_name: str
    ) -> Release:
        """
        Retrieves a specific release for a given Github repository by its tag name.

        Args:
            repository (GithubRepository): The Github repository for which to retrieve the release.
            tag_name (str): The tag name of the release to retrieve.

        Returns:
            Release: The Release object representing the retrieved release.
        """
        release_by_tag_endpoint: str = f"{self.repositories_rest_endpoint}/{repository.owner}/{repository.name}/releases/tags/{tag_name}"
        response: ClientResponse = await http_get(
            headers=self.headers, url=release_by_tag_endpoint
        )
        if response.status != 200:
            raise SanicException(
                context=await response.json(loads=ujson.loads),
                status_code=response.status,
            )
        return await self.__assemble_release(await response.json(loads=ujson.loads))

    async def get_latest_release(
        self,
        repository: GithubRepository,
    ) -> Release:
        """Get the latest release for a given repository.

        Args:
            repository (GithubRepository): The Github repository for which to retrieve the release.

        Returns:
            Release: The latest release for the given repository.
        """
        latest_release_endpoint: str = f"{self.repositories_rest_endpoint}/{repository.owner}/{repository.name}/releases/latest"
        response: ClientResponse = await http_get(
            headers=self.headers, url=latest_release_endpoint
        )
        if response.status != 200:
            raise SanicException(
                context=await response.json(loads=ujson.loads),
                status_code=response.status,
            )
        return await self.__assemble_release(await response.json(loads=ujson.loads))

    async def get_latest_pre_release(
        self,
        repository: GithubRepository,
    ) -> Release:
        """Get the latest pre-release for a given repository.

        Args:
            repository (GithubRepository): The Github repository for which to retrieve the release.

        Returns:
            Release: The latest pre-release for the given repository.
        """
        list_releases_endpoint: str = f"{self.repositories_rest_endpoint}/{repository.owner}/{repository.name}/releases?per_page=10&page=1"
        response: ClientResponse = await http_get(
            headers=self.headers, url=list_releases_endpoint
        )
        if response.status != 200:
            raise SanicException(
                context=await response.json(loads=ujson.loads),
                status_code=response.status,
            )
        latest_pre_release = next(
            filter(
                lambda release: release["prerelease"],
                await response.json(loads=ujson.loads),
            )
        )
        return await self.__assemble_release(latest_pre_release)

    async def get_contributors(self, repository: GithubRepository) -> list[Contributor]:
        """Get a list of contributors for a given repository.

        Args:
            repository (GithubRepository): The repository for which to retrieve contributors.

        Returns:
            list[Contributor]: A list of contributors for the given repository.
        """

        contributors_endpoint: str = f"{self.repositories_rest_endpoint}/{repository.owner}/{repository.name}/contributors"
        response: ClientResponse = await http_get(
            headers=self.headers, url=contributors_endpoint
        )
        if response.status != 200:
            raise SanicException(
                context=await response.json(loads=ujson.loads),
                status_code=response.status,
            )
        contributors: list[Contributor] = await asyncio.gather(
            *map(self.__assemble_contributor, await response.json(loads=ujson.loads))
        )

        return contributors

    async def get_patches(
        self, repository: GithubRepository, tag_name: str
    ) -> list[dict]:
        """Get a dictionary of patch URLs for a given repository.

        Args:
            repository (GithubRepository): The repository for which to retrieve patches.
            tag_name: The name of the release tag.

        Returns:
            list[dict]: A JSON object containing the patches.
        """

        async def __fetch_download_url(release: Release) -> str:
            asset = get_in(["assets"], release)
            patch_asset = next(
                filter(lambda x: eq(get_in(["name"], x), "patches.json"), asset), None
            )
            return get_in(["browser_download_url"], patch_asset)

        response: ClientResponse = await http_get(
            headers=self.headers,
            url=await __fetch_download_url(
                await self.get_release_by_tag_name(
                    repository=repository, tag_name=tag_name
                )
            ),
        )
        if response.status != 200:
            raise SanicException(
                context=await response.json(loads=ujson.loads),
                status_code=response.status,
            )
        return ujson.loads(await response.read())

    async def get_team_members(self, repository: GithubRepository) -> list[Contributor]:
        """Get the list of team members from the owner organization of a given repository.

        Args:
            repository (GithubRepository): The repository for which to retrieve team members in the owner organization.

        Returns:
            list[Contributor]: A list of members in the owner organization.
        """
        team_members_endpoint: str = f"{self.base_url}/orgs/{repository.owner}/members"
        response: ClientResponse = await http_get(
            headers=self.headers, url=team_members_endpoint
        )
        if response.status != 200:
            raise SanicException(
                context=await response.json(loads=ujson.loads),
                status_code=response.status,
            )
        team_members: list[Contributor] = await asyncio.gather(
            *map(
                lambda member: self.__assemble_contributor(member, team_view=True),
                await response.json(loads=ujson.loads),
            )
        )

        return team_members

    async def compat_get_tools(
        self, repositories: list[GithubRepository], dev: bool
    ) -> list:
        """Get the latest releases for a set of repositories (v1 compat).

        Args:
            repositories (set[GithubRepository]): The repositories for which to retrieve releases.
            dev: If we should get the latest pre-release instead.

        Returns:
            list[dict[str, str]]: A JSON object containing the releases.
        """

        def transform(data, repository):
            """Transforms a dictionary from the input list into a list of dictionaries with the desired structure.

            Args:
                data(dict): A dictionary from the input list.

            Returns:
                _[list]: A list of dictionaries with the desired structure.
            """

            def process_asset(asset):
                """Transforms an asset dictionary into a new dictionary with the desired structure.

                Args:
                    asset(dict): An asset dictionary.

                Returns:
                    _[dict]: A new dictionary with the desired structure.
                """
                return {
                    "repository": f"{repository.owner}/{repository.name}",
                    "version": data["metadata"]["tag_name"],
                    "timestamp": data["metadata"]["published_at"],
                    "name": asset["name"],
                    "browser_download_url": asset["browser_download_url"],
                    "content_type": asset["content_type"],
                }

            return map(process_asset, data["assets"])

        results = await asyncio.gather(
            *map(
                lambda release: self.get_latest_release(release),
                repositories,
            )
        )

        return list(mapcat(lambda pair: transform(*pair), zip(results, repositories)))

61 api/compat.py (new file)
@@ -0,0 +1,61 @@
"""
This module provides endpoints for compatibility with the old API.

Routes:
    - GET /tools: Retrieve the latest releases of the patching tools.
"""

from sanic import Blueprint, Request
from sanic.response import JSONResponse, json
from sanic_ext import openapi

from api.backends.github import Github, GithubRepository
from api.models.github import *
from api.models.compat import ToolsResponseModel
from config import compat_repositories, owner

github: Blueprint = Blueprint("old")

github_backend: Github = Github()


@github.get("/tools")
@openapi.definition(
    summary="Get patching tools' latest version.", response=[ToolsResponseModel]
)
async def tools(request: Request) -> JSONResponse:
    """
    Retrieve the latest releases of the patching tools (v1 compatibility endpoint).

    **Query Parameters:**
    - dev (bool): Whether to return pre-releases instead.

    **Returns:**
    - JSONResponse: A Sanic JSONResponse object containing the list of tools.

    **Raises:**
    - HTTPException: If there is an error retrieving the releases.
    """

    data: dict[str, list] = {
        "tools": await github_backend.compat_get_tools(
            repositories=[
                GithubRepository(owner=owner, name=repo)
                for repo in compat_repositories
                if repo not in ["revanced-releases-api", "revanced-website"]
            ],
            dev=True if request.args.get("dev") else False,
        )
    }

    return json(data, status=200)

31 api/donations.py (new file)
@@ -0,0 +1,31 @@
"""
This module provides a blueprint for the donations endpoint.

Routes:
    - GET /donations: Get ReVanced donation links and wallets.
"""

from sanic import Blueprint, Request
from sanic.response import JSONResponse, json
from sanic_ext import openapi

from api.models.donations import DonationsResponseModel
from config import donation_info, api_version

donations: Blueprint = Blueprint("donations", version=api_version)


@donations.get("/donations")
@openapi.definition(
    summary="Get ReVanced donation links and wallets",
    response=[DonationsResponseModel],
)
async def root(request: Request) -> JSONResponse:
    """
    Returns a JSONResponse with a dictionary containing ReVanced donation links and wallets.

    **Returns:**
    - JSONResponse: A Sanic JSONResponse instance containing a dictionary with the donation links and wallets.
    """
    data: dict[str, dict] = {"donations": donation_info}
    return json(data, status=200)

208 api/github.py (new file)
@@ -0,0 +1,208 @@
"""
This module provides endpoints for interacting with the Github API.

Routes:
    - GET /<repo:str>/releases: Retrieve a list of releases for a Github repository.
    - GET /<repo:str>/releases/latest: Retrieve the latest release for a Github repository.
    - GET /<repo:str>/releases/tag/<tag:str>: Retrieve a specific release for a Github repository by its tag name.
    - GET /<repo:str>/contributors: Retrieve a list of contributors for a Github repository.
    - GET /patches/<tag:str>: Retrieve a list of patches for a given release tag.
"""

from sanic import Blueprint, Request
from sanic.response import JSONResponse, json
from sanic_ext import openapi

from api.backends.entities import Release, Contributor
from api.backends.github import Github, GithubRepository
from api.models.github import *
from config import owner, default_repository, api_version

github: Blueprint = Blueprint("github", version=api_version)

github_backend: Github = Github()


@github.get("/<repo:str>/releases")
@openapi.definition(
    summary="Get releases for a repository", response=[ReleaseListResponseModel]
)
async def list_releases(request: Request, repo: str) -> JSONResponse:
    """
    Retrieve a list of releases for a Github repository.

    **Args:**
    - repo (str): The name of the Github repository to retrieve releases for.

    **Query Parameters:**
    - per_page (int): The number of releases to retrieve per page.
    - page (int): The page number of the releases to retrieve.

    **Returns:**
    - JSONResponse: A Sanic JSONResponse object containing the list of releases.

    **Raises:**
    - HTTPException: If there is an error retrieving the releases.
    """

    per_page = int(request.args.get("per_page")) if request.args.get("per_page") else 30
    page = int(request.args.get("page")) if request.args.get("page") else 1

    data: dict[str, list[Release]] = {
        "releases": await github_backend.list_releases(
            repository=GithubRepository(owner=owner, name=repo),
            per_page=per_page,
            page=page,
        )
    }

    return json(data, status=200)


@github.get("/<repo:str>/releases/latest")
@openapi.definition(
    summary="Get the latest release for a repository",
    response=SingleReleaseResponseModel,
)
async def latest_release(request: Request, repo: str) -> JSONResponse:
    """
    Retrieve the latest release for a Github repository.

    **Args:**
    - repo (str): The name of the Github repository to retrieve the release for.

    **Query Parameters:**
    - dev (bool): Whether or not to retrieve the latest development release.

    **Returns:**
    - JSONResponse: A Sanic JSONResponse object containing the release.

    **Raises:**
    - HTTPException: If there is an error retrieving the releases.
    """

    data: dict[str, Release] = {
        "release": await github_backend.get_latest_pre_release(
            repository=GithubRepository(owner=owner, name=repo)
        )
        if request.args.get("dev") == "true"
        else await github_backend.get_latest_release(
            repository=GithubRepository(owner=owner, name=repo)
        )
    }

    return json(data, status=200)


@github.get("/<repo:str>/releases/tag/<tag:str>")
@openapi.definition(
    summary="Retrieve a release for a Github repository by its tag name.",
    response=SingleReleaseResponseModel,
)
async def get_release_by_tag_name(
    request: Request, repo: str, tag: str
) -> JSONResponse:
    """
    Retrieve a release for a Github repository by its tag name.

    **Args:**
    - repo (str): The name of the Github repository to retrieve the release for.
    - tag (str): The tag for the release to be retrieved.

    **Returns:**
    - JSONResponse: A Sanic JSONResponse object containing the release.

    **Raises:**
    - HTTPException: If there is an error retrieving the releases.
    """

    data: dict[str, Release] = {
        "release": await github_backend.get_release_by_tag_name(
            repository=GithubRepository(owner=owner, name=repo), tag_name=tag
        )
    }

    return json(data, status=200)


@github.get("/<repo:str>/contributors")
@openapi.definition(
    summary="Retrieve a list of contributors for a repository.",
    response=ContributorsModel,
)
async def get_contributors(request: Request, repo: str) -> JSONResponse:
    """
    Retrieve a list of contributors for a repository.

    **Args:**
    - repo (str): The name of the Github repository to retrieve the contributors for.

    **Returns:**
    - JSONResponse: A Sanic JSONResponse object containing the list of contributors.

    **Raises:**
    - HTTPException: If there is an error retrieving the contributors.
    """

    data: dict[str, list[Contributor]] = {
        "contributors": await github_backend.get_contributors(
            repository=GithubRepository(owner=owner, name=repo)
        )
    }

    return json(data, status=200)


@github.get("/patches/<tag:str>")
@openapi.definition(
    summary="Retrieve a list of patches for a release.", response=PatchesModel
)
async def get_patches(request: Request, tag: str) -> JSONResponse:
    """
    Retrieve a list of patches for a release.

    **Args:**
    - tag (str): The tag for the patches to be retrieved.

    **Returns:**
    - JSONResponse: A Sanic JSONResponse object containing the list of patches.

    **Raises:**
    - HTTPException: If there is an error retrieving the patches.
    """

    repo: str = "revanced-patches"

    data: dict[str, list[dict]] = {
        "patches": await github_backend.get_patches(
            repository=GithubRepository(owner=owner, name=repo), tag_name=tag
        )
    }

    return json(data, status=200)


@github.get("/team/members")
@openapi.definition(
    summary="Retrieve a list of team members for the Revanced organization.",
    response=TeamMembersModel,
)
async def get_team_members(request: Request) -> JSONResponse:
    """
    Retrieve a list of team members for the Revanced organization.

    **Returns:**
    - JSONResponse: A Sanic JSONResponse object containing the list of team members.

    **Raises:**
    - HTTPException: If there is an error retrieving the team members.
    """

    data: dict[str, list[Contributor]] = {
        "members": await github_backend.get_team_members(
            repository=GithubRepository(owner=owner, name=default_repository)
        )
    }

    return json(data, status=200)

0 api/models/__init__.py (new, empty file)

19 api/models/appinfo.py (new file)
@@ -0,0 +1,19 @@
from pydantic import BaseModel


class AppInfoFields(BaseModel):
    """
    Fields for the AppInfo endpoint.
    """

    name: str
    category: str
    logo: str


class AppInfoModel(BaseModel):
    """
    Response model for app info.
    """

    app_info: AppInfoFields

27 api/models/compat.py (new file)
@@ -0,0 +1,27 @@
from pydantic import BaseModel


class ToolsResponseFields(BaseModel):
    """Implements the fields for the /tools endpoint.

    Args:
        BaseModel (pydantic.BaseModel): BaseModel from pydantic
    """

    repository: str
    version: str
    timestamp: str
    name: str
    size: str | None = None
    browser_download_url: str
    content_type: str


class ToolsResponseModel(BaseModel):
    """Implements the JSON response model for the /tools endpoint.

    Args:
        BaseModel (pydantic.BaseModel): BaseModel from pydantic
    """

    tools: list[ToolsResponseFields]

13 api/models/donations.py (new file)
@@ -0,0 +1,13 @@
from pydantic import BaseModel


class DonationsResponseModel(BaseModel):
    """
    A Pydantic BaseModel that represents a dictionary of donation links.
    """

    donations: dict[str, str]
    """
    A dictionary where the keys are the names of the donation destinations, and
    the values are the links to services or wallet addresses.
    """

127 api/models/github.py (new file)
@@ -0,0 +1,127 @@
from typing import Any, Optional
from pydantic import BaseModel


class MetadataFields(BaseModel):
    """
    Metadata fields for a GitHub release.
    """

    tag_name: str
    name: str
    draft: bool
    prerelease: bool
    created_at: str
    published_at: str
    body: str


class AssetFields(BaseModel):
    """
    Asset fields for a GitHub release.
    """

    name: str
    content_type: str
    browser_download_url: str


class ReleaseResponseModel(BaseModel):
    """
    Response model for a GitHub release.
    """

    metadata: MetadataFields
    assets: list[AssetFields]


class SingleReleaseResponseModel(BaseModel):
    """
    Response model for a GitHub release.
    """

    release: ReleaseResponseModel


class ReleaseListResponseModel(BaseModel):
    """
    Response model for a list of GitHub releases.
    """

    releases: list[ReleaseResponseModel]


class CompatiblePackagesResponseFields(BaseModel):
    """
    Implements the fields for compatible packages in the PatchesResponseFields class.
    """

    name: str
    versions: list[str] | None


class PatchesOptionsResponseFields(BaseModel):
    key: str
    title: str
    description: str
    required: bool
    choices: list[Any] | None


class PatchesResponseFields(BaseModel):
    """
    Implements the fields for the /patches endpoint.
    """

    name: str
    description: str
    version: str
    excluded: bool
    dependencies: list[str] | None
    options: list[PatchesOptionsResponseFields] | None
    compatiblePackages: list[CompatiblePackagesResponseFields]


class PatchesModel(BaseModel):
    """
    Response model for a list of patches.
    """

    patches: list[PatchesResponseFields]


class ContributorsFields(BaseModel):
    """
    Implements the fields for a contributor.
    """

    login: str
    avatar_url: str
    html_url: str
    contributions: Optional[int]


class ContributorsModel(BaseModel):
    """
    Response model for a list of contributors.
    """

    contributors: list[ContributorsFields]


class TeamMemberFields(BaseModel):
    """
    Implements the fields for a team member.
    """

    login: str
    avatar_url: str
    html_url: str


class TeamMembersModel(BaseModel):
    """
    Response model for a list of team members.
    """

    members: list[TeamMemberFields]

13 api/models/socials.py (new file)
@@ -0,0 +1,13 @@
from pydantic import BaseModel


class SocialsResponseModel(BaseModel):
    """
    A Pydantic BaseModel that represents a dictionary of social links.
    """

    socials: dict[str, str]
    """
    A dictionary where the keys are the names of the social networks, and
    the values are the links to the profiles or pages.
    """

24 api/ping.py (new file)
@@ -0,0 +1,24 @@
"""
This module provides endpoints for pinging the API.

Routes:
    - HEAD /ping: Ping the API.
"""

from sanic import Blueprint, HTTPResponse, Request, response
from sanic_ext import openapi
from config import api_version

ping: Blueprint = Blueprint("ping", version=api_version)


@ping.head("/ping")
@openapi.summary("Ping the API")
async def root(request: Request) -> HTTPResponse:
    """
    Endpoint for pinging the API.

    **Returns:**
    - Empty response with status code 204.
    """
    return response.empty(status=204)

31 api/socials.py (new file)
@@ -0,0 +1,31 @@
"""
This module provides a blueprint for the socials endpoint.

Routes:
    - GET /socials: Get ReVanced socials.
"""

from sanic import Blueprint, Request
from sanic.response import JSONResponse, json
from sanic_ext import openapi

from api.models.socials import SocialsResponseModel
from config import social_links, api_version

socials: Blueprint = Blueprint("socials", version=api_version)


@socials.get("/socials")
@openapi.definition(
    summary="Get ReVanced socials",
    response=[SocialsResponseModel],
)
async def root(request: Request) -> JSONResponse:
    """
    Returns a JSONResponse with a dictionary containing ReVanced social links.

    **Returns:**
    - JSONResponse: A Sanic JSONResponse instance containing a dictionary with the social links.
    """
    data: dict[str, dict] = {"socials": social_links}
    return json(data, status=200)

0 api/utils/__init__.py (new, empty file)

26
api/utils/http_utils.py
Normal file
26
api/utils/http_utils.py
Normal file
@ -0,0 +1,26 @@
from typing import Optional

import ujson
from aiohttp import ClientSession

_client: Optional[ClientSession] = None


async def http_get(headers, url):
    """
    Performs a GET HTTP request to a given URL with the provided headers.

    Args:
        headers (dict): A dictionary containing headers to be included in the HTTP request.
        url (str): The URL to which the HTTP request will be made.

    Returns:
        The HTTP response returned by the server.
    """
    global _client
    # Lazily create a single shared session (with ujson serialization) and
    # reuse it for every subsequent request.
    if _client is None:
        _client = ClientSession(json_serialize=ujson.dumps)
    return await _client.get(url, headers=headers)
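A minimal usage sketch for the helper above, assuming an async caller; the GitHub URL and headers are purely illustrative:

from api.utils.http_utils import http_get


async def fetch_latest_release() -> dict:
    # Any headers/URL work the same way; this request is hypothetical.
    response = await http_get(
        headers={"Accept": "application/vnd.github+json"},
        url="https://api.github.com/repos/revanced/revanced-patches/releases/latest",
    )
    return await response.json()

Because the module keeps a single lazily created ClientSession, repeated calls reuse the same connection pool instead of opening a new session per request.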
39 app.py Normal file
@ -0,0 +1,39 @@
# app.py
from sanic import Sanic
import sanic.response
from sanic_ext import Config

from api import api
from config import *

REDIRECTS = {
    "/": "/docs/swagger",
}

app = Sanic("ReVanced-API")
app.extend(config=Config(oas_ignore_head=False))
app.ext.openapi.describe(
    title=openapi_title,
    version=openapi_version,
    description=openapi_description,
)
app.config.CORS_ALWAYS_SEND = True
app.config.CORS_AUTOMATIC_OPTIONS = True
app.config.CORS_VARY_HEADER = True
app.config.CORS_METHODS = ["GET", "HEAD", "OPTIONS"]
app.config.CORS_SUPPORTS_CREDENTIALS = True
app.config.CORS_SEND_WILDCARD = True
app.config.CORS_ORIGINS = "*"

app.blueprint(api)


# https://sanic.dev/en/guide/how-to/static-redirects.html


def get_static_function(value):
    return lambda *_, **__: value


for src, dest in REDIRECTS.items():
    app.route(src)(get_static_function(sanic.response.redirect(dest)))
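The redirect setup above registers one handler per entry in REDIRECTS; each handler ignores its arguments and always returns the same prebuilt redirect response. A small self-contained sketch of the idea (the helper is redefined here so the snippet does not need to import app.py):

import sanic.response


def get_static_function(value):
    # A handler that ignores all positional/keyword arguments and returns `value`.
    return lambda *_, **__: value


handler = get_static_function(sanic.response.redirect("/docs/swagger"))
assert handler(None).status == 302  # Sanic redirects default to 302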
80 config.py Normal file
@ -0,0 +1,80 @@
# Social Links

social_links: dict[str, str] = {
    "website": "https://revanced.app",
    "github": "https://github.com/revanced",
    "twitter": "https://twitter.com/revancedapp",
    "discord": "https://revanced.app/discord",
    "reddit": "https://www.reddit.com/r/revancedapp",
    "telegram": "https://t.me/app_revanced",
    "youtube": "https://www.youtube.com/@ReVanced",
}

# Donation info

donation_info: dict[str, str] = {
    "opencollective": "https://opencollective.com/revanced",
    "github": "https://github.com/sponsors/ReVanced",
    "btc": "bc1q4x8j6mt27y5gv0q625t8wkr87ruy8fprpy4v3f",
    "doge": "D8GH73rNjudgi6bS2krrXWEsU9KShedLXp",
    "eth": "0x7ab4091e00363654bf84B34151225742cd92FCE5",
    "ltc": "LbJi8EuoDcwaZvykcKmcrM74jpjde23qJ2",
    "xmr": "46YwWDbZD6jVptuk5mLHsuAmh1BnUMSjSNYacozQQEraWSQ93nb2yYVRHoMR6PmFYWEHsLHg9tr1cH5M8Rtn7YaaGQPCjSh",
}

# API Configuration

backend: str = "github"
redis: dict[str, str | int] = {"host": "localhost", "port": 6379}

# GitHub Backend Configuration

owner: str = "revanced"
default_repository: str = ".github"

# API Versioning

api_version: str = "v2"
openapi_version: str = "2.0.0"
openapi_title: str = "ReVanced API"
openapi_description: str = """
## The official JSON API for ReVanced Releases 🚀

### Links

- [Changelogs](https://github.com/revanced/)
- [Official links to ReVanced](https://revanced.app)

### Important Information

* Rate Limiting - 60 requests per minute
* Cache - 5 minutes

### Additional Notes

1. Breaking changes are to be expected
2. Client-side caching is advised to avoid unnecessary requests
3. Abuse of the API will result in IP blocks
"""

# Testing Configuration

github_testing_repository: str = "revanced-patches"
github_testing_tag: str = "v2.173.0"
apkdl_testing_package: str = "com.google.android.youtube"

# Old API Configuration

compat_api_version: str = "v1"
compat_repositories: list = [
    "revanced-patcher",
    "revanced-patches",
    "revanced-integrations",
    "revanced-manager",
    "revanced-cli",
    "revanced-website",
    "revanced-releases-api",
]
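Since the socials endpoint serves social_links verbatim, the mapping above can be sanity-checked against the response model; an illustrative check, assuming the api package is importable:

from api.models.socials import SocialsResponseModel

from config import social_links

# Raises pydantic.ValidationError if the mapping stops being str -> str.
SocialsResponseModel(socials=social_links)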
24 conftest.py Normal file
@ -0,0 +1,24 @@
import asyncio

import pytest
from sanic import Sanic

from api import api


@pytest.fixture
def app() -> Sanic:
    app: Sanic = Sanic("ReVanced-API")
    app.blueprint(api)
    app.config.TOUCHUP = False
    return app


@pytest.fixture(scope="session")
def event_loop():
    try:
        loop = asyncio.get_running_loop()
    except RuntimeError:
        loop = asyncio.new_event_loop()
    yield loop
    loop.close()
9 mypy.ini
@ -1,5 +1,5 @@
[mypy]
python_version = 3.10
python_version = 3.11
pretty = true
follow_imports = normal
namespace_packages = true
@ -15,3 +15,10 @@ warn_redundant_casts = true
warn_unused_configs = true
warn_unused_ignores = true
warn_unreachable = true
plugins = pydantic.mypy

[mypy-toolz.*]
ignore_missing_imports = True

[mypy-sanic_testing.*]
ignore_missing_imports = True
2161 poetry.lock generated Normal file
File diff suppressed because it is too large

pyproject.toml
@ -8,9 +8,34 @@ readme = "README.md"

[tool.poetry.dependencies]
python = "^3.11"

[tool.poetry.dev-dependencies]
aiohttp = {version = ">=3.8.4", extras = ["speedups"]}
sanic = {version = ">=23.3.0", extras = ["ext", "http3"]}
ujson = ">=5.7.0"
asyncstdlib = "3.10.6"
pydantic = "^1.10.11"
aioquic = ">=0.9.20"
cytoolz = ">=0.12.1"
beautifulsoup4 = ">=4.12.2"
setuptools = ">=67.7.2"
lxml = ">=4.9.2"
mypy = ">=1.2.0"
types-ujson = ">=5.7.0.5"
types-aiofiles = ">=23.1.0.1"
sanic-testing = ">=23.3.0"
pytest-asyncio = ">=0.21.0"
types-beautifulsoup4 = ">=4.12.0.5"
pytest-md = ">=0.2.0"
pytest-emoji = ">=0.2.0"
coverage = ">=7.2.5"
pytest-cov = "^4.0.0"
pytest = ">=7.4.0"

[tool.pytest.ini_options]
asyncio_mode = "auto"
filterwarnings = [
    "ignore::DeprecationWarning",
    "ignore::pytest.PytestCollectionWarning"
]

[build-system]
requires = ["poetry-core"]
12194 qodana.sarif.json Normal file
File diff suppressed because it is too large
7 qodana.yaml Normal file
@ -0,0 +1,7 @@
version: "1.0"
linter: jetbrains/qodana-python:2023.1-eap
include:
  - name: CheckDependencyLicenses
exclude:
  - name: PyInterpreterInspection
  - name: PyUnresolvedReferencesInspection
63 requirements.txt Normal file
@ -0,0 +1,63 @@
aiodns==3.0.0 ; python_version >= "3.11" and python_version < "4.0"
aiofiles==23.1.0 ; python_version >= "3.11" and python_version < "4.0"
aiohttp[speedups]==3.8.4 ; python_version >= "3.11" and python_version < "4.0"
aioquic==0.9.21 ; python_version >= "3.11" and python_version < "4.0"
aiosignal==1.3.1 ; python_version >= "3.11" and python_version < "4.0"
anyio==3.7.1 ; python_version >= "3.11" and python_version < "4.0"
async-timeout==4.0.2 ; python_version >= "3.11" and python_version < "4.0"
asyncstdlib==3.10.6 ; python_version >= "3.11" and python_version < "4.0"
attrs==23.1.0 ; python_version >= "3.11" and python_version < "4.0"
beautifulsoup4==4.12.2 ; python_version >= "3.11" and python_version < "4.0"
brotli==1.0.9 ; python_version >= "3.11" and python_version < "4.0"
certifi==2023.5.7 ; python_version >= "3.11" and python_version < "4.0"
cffi==1.15.1 ; python_version >= "3.11" and python_version < "4.0"
charset-normalizer==3.2.0 ; python_version >= "3.11" and python_version < "4.0"
colorama==0.4.6 ; python_version >= "3.11" and python_version < "4.0" and sys_platform == "win32"
coverage==7.2.7 ; python_version >= "3.11" and python_version < "4.0"
coverage[toml]==7.2.7 ; python_version >= "3.11" and python_version < "4.0"
cryptography==41.0.2 ; python_version >= "3.11" and python_version < "4.0"
cytoolz==0.12.1 ; python_version >= "3.11" and python_version < "4.0"
frozenlist==1.3.3 ; python_version >= "3.11" and python_version < "4.0"
h11==0.14.0 ; python_version >= "3.11" and python_version < "4.0"
html5tagger==1.3.0 ; python_version >= "3.11" and python_version < "4.0"
httpcore==0.16.3 ; python_version >= "3.11" and python_version < "4.0"
httptools==0.6.0 ; python_version >= "3.11" and python_version < "4.0"
httpx==0.23.3 ; python_version >= "3.11" and python_version < "4.0"
idna==3.4 ; python_version >= "3.11" and python_version < "4.0"
iniconfig==2.0.0 ; python_version >= "3.11" and python_version < "4.0"
lxml==4.9.3 ; python_version >= "3.11" and python_version < "4.0"
multidict==6.0.4 ; python_version >= "3.11" and python_version < "4.0"
mypy==1.4.1 ; python_version >= "3.11" and python_version < "4.0"
mypy-extensions==1.0.0 ; python_version >= "3.11" and python_version < "4.0"
packaging==23.1 ; python_version >= "3.11" and python_version < "4.0"
pluggy==1.2.0 ; python_version >= "3.11" and python_version < "4.0"
pycares==4.3.0 ; python_version >= "3.11" and python_version < "4.0"
pycparser==2.21 ; python_version >= "3.11" and python_version < "4.0"
pydantic==1.10.11 ; python_version >= "3.11" and python_version < "4.0"
pylsqpack==0.3.17 ; python_version >= "3.11" and python_version < "4.0"
pyopenssl==23.2.0 ; python_version >= "3.11" and python_version < "4.0"
pytest==7.4.0 ; python_version >= "3.11" and python_version < "4.0"
pytest-asyncio==0.21.0 ; python_version >= "3.11" and python_version < "4.0"
pytest-cov==4.1.0 ; python_version >= "3.11" and python_version < "4.0"
pytest-emoji==0.2.0 ; python_version >= "3.11" and python_version < "4.0"
pytest-md==0.2.0 ; python_version >= "3.11" and python_version < "4.0"
pyyaml==6.0 ; python_version >= "3.11" and python_version < "4.0"
rfc3986[idna2008]==1.5.0 ; python_version >= "3.11" and python_version < "4.0"
sanic-ext==23.3.0 ; python_version >= "3.11" and python_version < "4.0"
sanic-routing==22.8.0 ; python_version >= "3.11" and python_version < "4.0"
sanic-testing==23.3.0 ; python_version >= "3.11" and python_version < "4.0"
sanic[ext,http3]==23.3.0 ; python_version >= "3.11" and python_version < "4.0"
setuptools==68.0.0 ; python_version >= "3.11" and python_version < "4.0"
sniffio==1.3.0 ; python_version >= "3.11" and python_version < "4.0"
soupsieve==2.4.1 ; python_version >= "3.11" and python_version < "4.0"
toolz==0.12.0 ; python_version >= "3.11" and python_version < "4.0"
tracerite==1.1.0 ; python_version >= "3.11" and python_version < "4.0"
types-aiofiles==23.1.0.4 ; python_version >= "3.11" and python_version < "4.0"
types-beautifulsoup4==4.12.0.5 ; python_version >= "3.11" and python_version < "4.0"
types-html5lib==1.1.11.14 ; python_version >= "3.11" and python_version < "4.0"
types-ujson==5.8.0.0 ; python_version >= "3.11" and python_version < "4.0"
typing-extensions==4.7.1 ; python_version >= "3.11" and python_version < "4.0"
ujson==5.8.0 ; python_version >= "3.11" and python_version < "4.0"
uvloop==0.17.0 ; sys_platform != "win32" and implementation_name == "cpython" and python_version >= "3.11" and python_version < "4.0"
websockets==11.0.3 ; python_version >= "3.11" and python_version < "4.0"
yarl==1.9.2 ; python_version >= "3.11" and python_version < "4.0"
0 tests/__init__.py Normal file
17 tests/test_apkdl.py Normal file
@ -0,0 +1,17 @@
import pytest
from sanic import Sanic

from api.models.appinfo import AppInfoModel

from config import api_version, apkdl_testing_package

# app info


@pytest.mark.asyncio
async def test_app_info(app: Sanic):
    _, response = await app.asgi_client.get(
        f"/{api_version}/app/info/{apkdl_testing_package}"
    )
    assert response.status == 200
    assert AppInfoModel(app_info=response.json["app_info"])
13 tests/test_compat.py Normal file
@ -0,0 +1,13 @@
import pytest
from sanic import Sanic

from api.models.compat import ToolsResponseModel

# compatibility layer


@pytest.mark.asyncio
async def test_compat_tools(app: Sanic):
    _, response = await app.asgi_client.get("/tools")
    assert response.status == 200
    assert ToolsResponseModel(tools=[tool for tool in response.json["tools"]])
110 tests/test_github.py Normal file
@ -0,0 +1,110 @@
import pytest
from sanic import Sanic
from sanic_testing.testing import TestingResponse

from api.models.github import (
    AssetFields,
    MetadataFields,
    PatchesResponseFields,
    ReleaseListResponseModel,
    ReleaseResponseModel,
    SingleReleaseResponseModel,
    ContributorsFields,
    ContributorsModel,
    PatchesModel,
    TeamMemberFields,
    TeamMembersModel,
)

from config import github_testing_repository, github_testing_tag, api_version


# utils


async def __test_single_release(response: TestingResponse) -> bool:
    try:
        assert response.status == 200
        assert SingleReleaseResponseModel(
            release=ReleaseResponseModel(
                metadata=MetadataFields(**response.json["release"]["metadata"]),
                assets=[
                    AssetFields(**asset) for asset in response.json["release"]["assets"]
                ],
            )
        )
        return True
    except AssertionError:
        return False


# github


@pytest.mark.asyncio
async def test_releases(app: Sanic):
    _, response = await app.asgi_client.get(
        f"/{api_version}/{github_testing_repository}/releases"
    )
    assert response.status == 200
    assert ReleaseListResponseModel(
        releases=[
            ReleaseResponseModel(
                metadata=MetadataFields(**release["metadata"]),
                assets=[AssetFields(**asset) for asset in release["assets"]],
            )
            for release in response.json["releases"]
        ]
    )


@pytest.mark.asyncio
async def test_latest_release(app: Sanic):
    _, response = await app.asgi_client.get(
        f"/{api_version}/{github_testing_repository}/releases/latest"
    )
    _, response_dev = await app.asgi_client.get(
        f"/{api_version}/{github_testing_repository}/releases/latest?dev=true"
    )
    assert await __test_single_release(response)
    assert await __test_single_release(response_dev)


@pytest.mark.asyncio
async def test_release_by_tag(app: Sanic):
    _, response = await app.asgi_client.get(
        f"/{api_version}/{github_testing_repository}/releases/tag/{github_testing_tag}"
    )
    assert await __test_single_release(response)


@pytest.mark.asyncio
async def test_contributors(app: Sanic):
    _, response = await app.asgi_client.get(
        f"/{api_version}/{github_testing_repository}/contributors"
    )
    assert ContributorsModel(
        contributors=[
            ContributorsFields(**contributor)
            for contributor in response.json["contributors"]
        ]
    )


@pytest.mark.asyncio
async def test_patches(app: Sanic):
    _, response = await app.asgi_client.get(
        f"/{api_version}/patches/{github_testing_tag}"
    )

    assert PatchesModel(
        patches=[PatchesResponseFields(**patch) for patch in response.json["patches"]]
    )


@pytest.mark.asyncio
async def test_team_members(app: Sanic):
    _, response = await app.asgi_client.get(f"/{api_version}/team/members")
    assert TeamMembersModel(
        members=[TeamMemberFields(**member) for member in response.json["members"]]
    )
12 tests/test_ping.py Normal file
@ -0,0 +1,12 @@
import pytest
from sanic import Sanic

from config import api_version

# ping


@pytest.mark.asyncio
async def test_ping(app: Sanic):
    _, response = await app.asgi_client.head(f"/{api_version}/ping")
    assert response.status == 204
15 tests/test_socials.py Normal file
@ -0,0 +1,15 @@
import pytest
from sanic import Sanic

from api.models.socials import SocialsResponseModel

from config import api_version

# socials


@pytest.mark.asyncio
async def test_socials(app: Sanic):
    _, response = await app.asgi_client.get(f"/{api_version}/socials")
    assert response.status == 200
    assert SocialsResponseModel(**response.json)