mirror of
https://github.com/revanced/revanced-static-api.git
synced 2025-04-29 22:24:37 +02:00
chore: Merge branch dev
to main
This commit is contained in:
commit
005bbbd2a9
28
.github/workflows/update.yml
vendored
28
.github/workflows/update.yml
vendored
@ -1,12 +1,16 @@
|
||||
name: Update static API
|
||||
name: Update static files
|
||||
|
||||
on:
|
||||
repository_dispatch:
|
||||
workflow_dispatch:
|
||||
inputs:
|
||||
output-branch:
|
||||
description: "Branch to push static files to"
|
||||
required: true
|
||||
|
||||
jobs:
|
||||
update:
|
||||
name: Update static API
|
||||
name: Update static files
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- name: Clone repository
|
||||
@ -17,14 +21,14 @@ jobs:
|
||||
- name: Set up Python
|
||||
uses: actions/setup-python@v2
|
||||
with:
|
||||
python-version: 3.10.10
|
||||
python-version: 3.11.3
|
||||
|
||||
- name: Create configuration file
|
||||
- name: Write config file
|
||||
run: echo "${{ vars.CONFIG }}" > config.json
|
||||
|
||||
- name: Check for existing deployment exists
|
||||
- name: Check if a deployment exists
|
||||
run: |
|
||||
if git ls-remote --exit-code --heads origin gh-pages; then
|
||||
if git ls-remote --exit-code --heads origin ${{ inputs.output-branch }}; then
|
||||
echo "::set-output name=deployment-exists::true"
|
||||
else
|
||||
echo "::set-output name=deployment-exists::false"
|
||||
@ -36,17 +40,17 @@ jobs:
|
||||
run: |
|
||||
git config user.name revanced-bot
|
||||
git config user.email github@revanced.app
|
||||
git subtree add --prefix=static/ origin/gh-pages --squash
|
||||
git subtree add --prefix=static/ origin/${{ inputs.output-branch }} --squash
|
||||
|
||||
- name: Update static API
|
||||
- name: Update static files
|
||||
run: |
|
||||
pip install requests
|
||||
python src/main.py
|
||||
poetry install
|
||||
poetry run python main.py
|
||||
|
||||
- name: Commit changes
|
||||
uses: EndBug/add-and-commit@v9
|
||||
with:
|
||||
message: "chore: updated static files"
|
||||
message: "chore: Update static files"
|
||||
fetch: false
|
||||
push: false
|
||||
|
||||
@ -54,6 +58,6 @@ jobs:
|
||||
uses: s0/git-publish-subdir-action@develop
|
||||
env:
|
||||
REPO: self
|
||||
BRANCH: gh-pages
|
||||
BRANCH: ${{ inputs.output-branch }}
|
||||
FOLDER: static
|
||||
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||
|
3
.gitignore
vendored
3
.gitignore
vendored
@ -1,2 +1,3 @@
|
||||
__pycache__
|
||||
config.json
|
||||
config.json
|
||||
static
|
19
README.md
19
README.md
@ -1,29 +1,30 @@
|
||||
# Static API
|
||||
|
||||
Proof of concept repository to host a static API using GitHub workflows.
|
||||
Repository to host a static API using GitHub workflows.
|
||||
|
||||
## How it works
|
||||
|
||||
On CI trigger static files are generated and commited to the `gh-pages` branch.
|
||||
When CI is run, static files are generated and committed to the specified branch.
|
||||
The file `generator.py` provides a list of static file generator classes. Each class has a name.
|
||||
The configuration file `config.json` is read which contains the configuration for the API.
|
||||
By specifying the name of the generator in the `generators` array of the configuration, the corresponding generator will be used. The current object of the configuration is passed to the generator.
|
||||
|
||||
The static files are generated by the configuration provided in a `config.json` file in the root.
|
||||
The API configuration consists out of an array of objects. Each object is responsible for an API. Based on the provided pair with the key `type` in these objects a method in `generator.py` is selected to generate the API. The objects are passed to the corresponding method to consume additional key-value pairs.
|
||||
|
||||
The following API configuration generates the `contributor` and `release` API for selected repositories:
|
||||
The following configuration generates static files using the `contributors` and `releases` generator for selected repositories:
|
||||
|
||||
```json
|
||||
{
|
||||
"api": [
|
||||
{
|
||||
"type": "release+contributor",
|
||||
"generators": ["releases", "contributors"],
|
||||
"repositories": ["user/repo"]
|
||||
}
|
||||
]
|
||||
}
|
||||
```
|
||||
|
||||
All static files are generated in the output path specified in the configuration.
|
||||
All static files are generated in the output path specified in the configuration.
|
||||
The `purge` array in the configuration specifies which files should be deleted before generating the static files.
|
||||
|
||||
## Setup
|
||||
|
||||
A repository variable `CONFIG` is expected by CD with the configuration (string escaped) which will be used by CD to generate the static files.
|
||||
A repository variable `CONFIG` is expected by CD with the configuration (string escaped) which will be used by CD to generate the static files.
|
||||
|
146
app/api.py
Normal file
146
app/api.py
Normal file
@ -0,0 +1,146 @@
|
||||
from abc import abstractmethod
|
||||
|
||||
import requests
|
||||
|
||||
|
||||
class Api:
|
||||
_api_key: str | None
|
||||
|
||||
@abstractmethod
|
||||
def __init__(self, api_key: str | None = None):
|
||||
self._api_key = api_key
|
||||
|
||||
@abstractmethod
|
||||
def get_release(
|
||||
self, repository: str, all: bool = False, prerelease: bool = False
|
||||
) -> dict | list:
|
||||
"""Gets the release(s) for a repository.
|
||||
|
||||
Args:
|
||||
repository (str): The repository to get releases for.
|
||||
all (bool, optional): Whether to get all releases or not. Defaults to False.
|
||||
prerelease (bool, optional): Whether to get prereleases or not. Defaults to False.
|
||||
Returns:
|
||||
dict | list: The release(s) for the repository.
|
||||
"""
|
||||
raise NotImplementedError
|
||||
|
||||
@abstractmethod
|
||||
def get_contributor(self, repository):
|
||||
"""Gets the contributors for a repository.
|
||||
|
||||
Args:
|
||||
repository (str): The repository to get contributors for.
|
||||
"""
|
||||
raise NotImplementedError
|
||||
|
||||
@abstractmethod
|
||||
def get_members(self, organization):
|
||||
'''Gets the team for an organization.
|
||||
|
||||
Args:
|
||||
organization (str): The organization to get the team for.
|
||||
'''
|
||||
raise NotImplementedError
|
||||
|
||||
|
||||
class GitHubApi(Api):
    """``Api`` implementation backed by the public GitHub REST API."""

    def __init__(self):
        pass

    def get_contributor(self, repository):
        """Get the contributors of a repository, sorted by contribution count.

        Args:
            repository (str): The repository to get contributors for.
        """

        def to_contributor(raw: dict) -> dict:
            # Keep only the fields the static API exposes.
            return {
                "username": raw["login"],
                "avatar": raw["avatar_url"],  # TODO: Proxy via a CDN.
                "link": raw["html_url"],
                "contributions": raw["contributions"],
            }

        def contributions_key(contributor: dict) -> int:
            # Sort key that also strips the helper field from the output.
            count = contributor["contributions"]
            del contributor["contributions"]
            return count

        raw_contributors = requests.get(
            f"https://api.github.com/repos/{repository}/contributors"
        ).json()
        contributors = [to_contributor(c) for c in raw_contributors]
        contributors.sort(key=contributions_key, reverse=True)

        return contributors

    def get_release(
        self, repository: str, all: bool = False, prerelease: bool = False
    ) -> dict | list:
        """Get the release(s) of a repository.

        Args:
            repository (str): The repository to get releases for.
            all (bool, optional): Whether to return every release. Defaults to False.
            prerelease (bool, optional): Whether to request a prerelease. Defaults to False.

        Returns:
            dict | list: A single release dict, or a list of them when ``all`` is set.
        """

        def to_release(raw: dict) -> dict:
            # TODO: Check if theres any need for this: 'id': release['id'].
            return {
                "tag": raw["tag_name"],
                "prerelease": raw["prerelease"],
                "published_at": raw["published_at"],
                "assets": [
                    {
                        "name": asset["name"],
                        # TODO: Proxy via a CDN.
                        "download_url": asset["browser_download_url"],
                    }
                    for asset in raw["assets"]
                ],
            }

        if all:
            releases: list = requests.get(
                f"https://api.github.com/repos/{repository}/releases"
            ).json()
            return [to_release(r) for r in releases]

        # NOTE(review): the `prerelease` query parameter is not documented for
        # the /releases/latest endpoint — confirm it has any effect upstream.
        latest_release: dict = requests.get(
            f"https://api.github.com/repos/{repository}/releases/latest?prerelease={prerelease}"
        ).json()
        return to_release(latest_release)

    def get_members(self, organization):
        """Get the members of an organization.

        Args:
            organization (str): The organization to get the members for.
        """

        def to_member(raw: dict) -> dict:
            return {
                "username": raw["login"],
                "avatar": raw["avatar_url"],  # TODO: Proxy via a CDN.
                "link": raw["html_url"],
            }

        members = requests.get(
            f"https://api.github.com/orgs/{organization}/members"
        ).json()
        return [to_member(m) for m in members]
|
6
app/config.py
Normal file
6
app/config.py
Normal file
@ -0,0 +1,6 @@
|
||||
import json
|
||||
|
||||
|
||||
def load_config() -> dict:
    """Load the static-API configuration from ``config.json`` in the working directory."""
    with open("config.json") as config_file:
        raw = config_file.read()
    return json.loads(raw)
|
177
app/generator.py
Normal file
177
app/generator.py
Normal file
@ -0,0 +1,177 @@
|
||||
from os.path import join
|
||||
from app import api
|
||||
from app.utils import get_repository_name, to_json, write_json, read_json, create_if_not_exists
|
||||
from abc import abstractmethod
|
||||
|
||||
class Generator:
    """Base class for static file generators."""

    # Optional API client used to fetch data; None for generators that only
    # transform configuration. (Was annotated `api.Api`, contradicting the
    # `None` default accepted by __init__.)
    _api: api.Api | None

    def __init__(self, name: str, api: api.Api | None = None):
        """
        Args:
            name (str): The name of the generator.
            api (Api | None): An optional api to use for the generator.
        """
        self.name = name
        self._api = api

    @abstractmethod
    def generate(self, config, path):
        """
        Generates static files based on the supplied config to the specified path.

        Args:
            config (dict): The configuration for the generator.
            path (str): The path to generate the static files to.
        """
        raise NotImplementedError
|
||||
|
||||
|
||||
class ReleasesGenerator(Generator):
    """
    Generates a release file for each repository in the config.
    The release file is named after the tag of the release and contains the latest release information of the repository.
    A `latest.json` file is also generated containing the latest release of the repository.
    """

    def __init__(self, api):
        super().__init__("releases", api)

    def generate(self, config, path):
        releases_path = join(path, "releases")

        for repository in config["repositories"]:
            release = self._api.get_release(repository)
            repository_name = get_repository_name(repository)
            tag = release["tag"]

            repository_path = join(releases_path, repository_name)
            create_if_not_exists(repository_path)

            serialized = to_json(release)
            # Keep a per-tag snapshot, but never clobber an existing one.
            write_json(serialized, join(repository_path, f"{tag}.json"), overwrite=False)
            # `latest.json` always reflects the newest release.
            write_json(serialized, join(repository_path, "latest.json"))

            # Maintain an index of all known tags for the repository.
            index_path = join(releases_path, f"{repository_name}.json")
            index = read_json(index_path, [])
            if tag not in index:
                index.append(tag)
            write_json(index, index_path)
|
||||
|
||||
|
||||
class ContributorsGenerator(Generator):
    """
    Generates a contributor file for each repository in the config.
    The contributor file is named after the repository and contains the contributors of the repository.
    """

    def __init__(self, api):
        super().__init__("contributors", api)

    def generate(self, config, path):
        contributors_path = join(path, "contributors")
        create_if_not_exists(contributors_path)

        for repository in config["repositories"]:
            repository_name = get_repository_name(repository)
            contributors = self._api.get_contributor(repository)
            # One JSON file per repository, named after the repository.
            write_json(contributors, join(contributors_path, f"{repository_name}.json"))
|
||||
|
||||
|
||||
class ConnectionsGenerator(Generator):
    """
    Generates a file containing the connections of the organization.
    """

    def __init__(self, api):
        super().__init__("connections", api)

    def generate(self, config, path):
        """Write the configured connections to ``connections.json`` under *path*.

        Args:
            config (dict): Generator configuration with a ``connections`` key.
            path (str): The output directory.
        """
        new_connections = config["connections"]

        # Plain string literal: the filename is constant (the original used an
        # f-string with no placeholders).
        connections_path = join(path, "connections.json")

        write_json(new_connections, connections_path)
|
||||
|
||||
|
||||
class TeamGenerator(Generator):
    """
    Generates a team file containing the members of the organization.
    """

    def __init__(self, api):
        super().__init__("team", api)

    def generate(self, config, path):
        """Fetch the organization's members and write them to ``team.json``.

        Args:
            config (dict): Generator configuration with an ``organization`` key.
            path (str): The output directory.
        """
        organization = config["organization"]

        team = self._api.get_members(organization)

        # Constant filename — no f-string placeholders needed.
        team_path = join(path, "team.json")

        write_json(team, team_path)
|
||||
|
||||
|
||||
class DonationsGenerator(Generator):
    """
    Generates a donation file containing ways to donate to the organization.
    """

    def __init__(self, api):
        super().__init__("donations", api)

    def generate(self, config, path):
        """Write donation links and wallets to ``donations.json`` under *path*.

        Args:
            config (dict): Generator configuration; ``links`` and ``wallets``
                are both optional and default to empty lists.
            path (str): The output directory.
        """
        links = config.get("links", [])
        wallets = config.get("wallets", [])

        # Constant filename — no f-string placeholders needed.
        donation_path = join(path, "donations.json")

        write_json(
            {
                "links": links,
                "wallets": wallets
            },
            donation_path
        )
|
||||
|
||||
|
||||
class GeneratorProvider:
    """Simple name-based registry of ``Generator`` instances."""

    generators: list[Generator]

    def __init__(self, generators: list[Generator]):
        self.generators = generators

    def get(self, name: str) -> Generator | None:
        """Return the registered generator called *name*, or None if unknown."""
        return next(
            (candidate for candidate in self.generators if candidate.name == name),
            None,
        )
|
||||
|
||||
|
||||
class DefaultGeneratorProvider(GeneratorProvider):
    """Provider preloaded with every built-in generator, all sharing one GitHub API client."""

    def __init__(self):
        self._api = api.GitHubApi()

        builtin_generators = [
            ReleasesGenerator(self._api),
            ContributorsGenerator(self._api),
            ConnectionsGenerator(self._api),
            TeamGenerator(self._api),
            DonationsGenerator(self._api),
        ]
        super().__init__(builtin_generators)
|
28
app/utils.py
Normal file
28
app/utils.py
Normal file
@ -0,0 +1,28 @@
|
||||
import json
|
||||
import os
|
||||
|
||||
|
||||
def write_json(text: str | dict | list, to, overwrite=True):
    """Serialize *text* (if needed) and write it to the path *to*.

    An existing file is left untouched unless *overwrite* is true.
    """
    if overwrite or not os.path.exists(to):
        with open(to, "w") as f:
            f.write(to_json(text))
|
||||
|
||||
def to_json(text: str | dict | list):
|
||||
if not isinstance(text, str):
|
||||
text = json.dumps(text, indent=2)
|
||||
return text
|
||||
|
||||
def read_json(path, default):
    """Parse the JSON file at *path*, or return *default* if it does not exist."""
    if not os.path.exists(path):
        return default
    with open(path, "r") as f:
        return json.load(f)
|
||||
|
||||
|
||||
def create_if_not_exists(path):
    """Create directory *path* (including parents); no-op when it already exists."""
    os.makedirs(path, exist_ok=True)
|
||||
|
||||
|
||||
def get_repository_name(repository: str):
    """Return the bare repository name from an ``owner/name`` identifier."""
    return repository.rpartition("/")[2]
|
@ -1,17 +1,114 @@
|
||||
{
|
||||
"api": [
|
||||
"configs": [
|
||||
{
|
||||
"type": "release+contributor",
|
||||
"generators": [
|
||||
"releases",
|
||||
"contributors"
|
||||
],
|
||||
"repositories": [
|
||||
"user/repo"
|
||||
"revanced/revanced-patches",
|
||||
"revanced/revanced-integrations",
|
||||
"revanced/revanced-manager"
|
||||
]
|
||||
},
|
||||
{
|
||||
"type": "social",
|
||||
"socials": {
|
||||
"website": "https://yourwebsite.com"
|
||||
}
|
||||
"generators": [
|
||||
"connections"
|
||||
],
|
||||
"connections": [
|
||||
{
|
||||
"name": "Website",
|
||||
"url": "https://revanced.app",
|
||||
"preferred": true
|
||||
},
|
||||
{
|
||||
"name": "GitHub",
|
||||
"url": "https://github.com/revanced",
|
||||
"preferred": false
|
||||
},
|
||||
{
|
||||
"name": "Twitter",
|
||||
"url": "https://twitter.com/revancedapp",
|
||||
"preferred": false
|
||||
},
|
||||
{
|
||||
"name": "Discord",
|
||||
"url": "https://revanced.app/discord",
|
||||
"preferred": true
|
||||
},
|
||||
{
|
||||
"name": "Reddit",
|
||||
"url": "https://www.reddit.com/r/revancedapp",
|
||||
"preferred": false
|
||||
},
|
||||
{
|
||||
"name": "Telegram",
|
||||
"url": "https://t.me/app_revanced",
|
||||
"preferred": false
|
||||
},
|
||||
{
|
||||
"name": "YouTube",
|
||||
"url": "https://www.youtube.com/@ReVanced",
|
||||
"preferred": false
|
||||
}
|
||||
]
|
||||
},
|
||||
{
|
||||
"generators": [
|
||||
"team"
|
||||
],
|
||||
"organization": "revanced"
|
||||
},
|
||||
{
|
||||
"generators": [
|
||||
"donations"
|
||||
],
|
||||
"links": [
|
||||
{
|
||||
"name": "Open Collective",
|
||||
"url": "https://opencollective.com/revanced",
|
||||
"preferred": true
|
||||
},
|
||||
{
|
||||
"name": "GitHub Sponsors",
|
||||
"url": "https://github.com/sponsors/ReVanced",
|
||||
"preferred": false
|
||||
}
|
||||
],
|
||||
"wallets": [
|
||||
{
|
||||
"network": "Bitcoin",
|
||||
"currency_code": "BTC",
|
||||
"address": "bc1q4x8j6mt27y5gv0q625t8wkr87ruy8fprpy4v3f",
|
||||
"preferred": false
|
||||
},
|
||||
{
|
||||
"network": "Dogecoin",
|
||||
"currency_code": "DOGE",
|
||||
"address": "D8GH73rNjudgi6bS2krrXWEsU9KShedLXp",
|
||||
"preferred": true
|
||||
},
|
||||
{
|
||||
"network": "Ethereum",
|
||||
"currency_code": "ETH",
|
||||
"address": "0x7ab4091e00363654bf84B34151225742cd92FCE5",
|
||||
"preferred": false
|
||||
},
|
||||
{
|
||||
"network": "Litecoin",
|
||||
"currency_code": "LTC",
|
||||
"address": "LbJi8EuoDcwaZvykcKmcrM74jpjde23qJ2",
|
||||
"preferred": false
|
||||
},
|
||||
{
|
||||
"network": "Monero",
|
||||
"currency_code": "XMR",
|
||||
"address": "46YwWDbZD6jVptuk5mLHsuAmh1BnUMSjSNYacozQQEraWSQ93nb2yYVRHoMR6PmFYWEHsLHg9tr1cH5M8Rtn7YaaGQPCjSh",
|
||||
"preferred": false
|
||||
}
|
||||
]
|
||||
}
|
||||
],
|
||||
"output": "static"
|
||||
"output": "static",
|
||||
"purge": []
|
||||
}
|
26
main.py
Normal file
26
main.py
Normal file
@ -0,0 +1,26 @@
|
||||
"""Entry point: purge stale output, then run every configured generator."""
import os
import shutil
# `genericpath` is a private stdlib implementation module; `os.path` is the
# public home of isdir/isfile.
from os.path import isdir, isfile

from app.config import load_config
from app.generator import DefaultGeneratorProvider

config = load_config()

# Optional keys fall back to sensible defaults.
output = config.get("output", "static")
purge = config.get("purge", [])
generator_configs = config["configs"]

generator_provider = DefaultGeneratorProvider()

# Delete stale files/directories before regenerating.
for path in purge:
    if isdir(path):
        shutil.rmtree(path)
    elif isfile(path):
        os.remove(path)

# Run every requested generator; unknown generator names are skipped silently.
# (Loop variable renamed so it no longer shadows the top-level `config`.)
for generator_config in generator_configs:
    for generator_name in generator_config["generators"]:
        generator = generator_provider.get(generator_name)
        if generator is None:
            continue
        generator.generate(generator_config, output)
|
179
poetry.lock
generated
Normal file
179
poetry.lock
generated
Normal file
@ -0,0 +1,179 @@
|
||||
# This file is automatically @generated by Poetry 1.6.1 and should not be changed by hand.
|
||||
|
||||
[[package]]
|
||||
name = "certifi"
|
||||
version = "2023.7.22"
|
||||
description = "Python package for providing Mozilla's CA Bundle."
|
||||
optional = false
|
||||
python-versions = ">=3.6"
|
||||
files = [
|
||||
{file = "certifi-2023.7.22-py3-none-any.whl", hash = "sha256:92d6037539857d8206b8f6ae472e8b77db8058fec5937a1ef3f54304089edbb9"},
|
||||
{file = "certifi-2023.7.22.tar.gz", hash = "sha256:539cc1d13202e33ca466e88b2807e29f4c13049d6d87031a3c110744495cb082"},
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "charset-normalizer"
|
||||
version = "3.3.1"
|
||||
description = "The Real First Universal Charset Detector. Open, modern and actively maintained alternative to Chardet."
|
||||
optional = false
|
||||
python-versions = ">=3.7.0"
|
||||
files = [
|
||||
{file = "charset-normalizer-3.3.1.tar.gz", hash = "sha256:d9137a876020661972ca6eec0766d81aef8a5627df628b664b234b73396e727e"},
|
||||
{file = "charset_normalizer-3.3.1-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:8aee051c89e13565c6bd366813c386939f8e928af93c29fda4af86d25b73d8f8"},
|
||||
{file = "charset_normalizer-3.3.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:352a88c3df0d1fa886562384b86f9a9e27563d4704ee0e9d56ec6fcd270ea690"},
|
||||
{file = "charset_normalizer-3.3.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:223b4d54561c01048f657fa6ce41461d5ad8ff128b9678cfe8b2ecd951e3f8a2"},
|
||||
{file = "charset_normalizer-3.3.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4f861d94c2a450b974b86093c6c027888627b8082f1299dfd5a4bae8e2292821"},
|
||||
{file = "charset_normalizer-3.3.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1171ef1fc5ab4693c5d151ae0fdad7f7349920eabbaca6271f95969fa0756c2d"},
|
||||
{file = "charset_normalizer-3.3.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:28f512b9a33235545fbbdac6a330a510b63be278a50071a336afc1b78781b147"},
|
||||
{file = "charset_normalizer-3.3.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c0e842112fe3f1a4ffcf64b06dc4c61a88441c2f02f373367f7b4c1aa9be2ad5"},
|
||||
{file = "charset_normalizer-3.3.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3f9bc2ce123637a60ebe819f9fccc614da1bcc05798bbbaf2dd4ec91f3e08846"},
|
||||
{file = "charset_normalizer-3.3.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:f194cce575e59ffe442c10a360182a986535fd90b57f7debfaa5c845c409ecc3"},
|
||||
{file = "charset_normalizer-3.3.1-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:9a74041ba0bfa9bc9b9bb2cd3238a6ab3b7618e759b41bd15b5f6ad958d17605"},
|
||||
{file = "charset_normalizer-3.3.1-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:b578cbe580e3b41ad17b1c428f382c814b32a6ce90f2d8e39e2e635d49e498d1"},
|
||||
{file = "charset_normalizer-3.3.1-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:6db3cfb9b4fcecb4390db154e75b49578c87a3b9979b40cdf90d7e4b945656e1"},
|
||||
{file = "charset_normalizer-3.3.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:debb633f3f7856f95ad957d9b9c781f8e2c6303ef21724ec94bea2ce2fcbd056"},
|
||||
{file = "charset_normalizer-3.3.1-cp310-cp310-win32.whl", hash = "sha256:87071618d3d8ec8b186d53cb6e66955ef2a0e4fa63ccd3709c0c90ac5a43520f"},
|
||||
{file = "charset_normalizer-3.3.1-cp310-cp310-win_amd64.whl", hash = "sha256:e372d7dfd154009142631de2d316adad3cc1c36c32a38b16a4751ba78da2a397"},
|
||||
{file = "charset_normalizer-3.3.1-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:ae4070f741f8d809075ef697877fd350ecf0b7c5837ed68738607ee0a2c572cf"},
|
||||
{file = "charset_normalizer-3.3.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:58e875eb7016fd014c0eea46c6fa92b87b62c0cb31b9feae25cbbe62c919f54d"},
|
||||
{file = "charset_normalizer-3.3.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:dbd95e300367aa0827496fe75a1766d198d34385a58f97683fe6e07f89ca3e3c"},
|
||||
{file = "charset_normalizer-3.3.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:de0b4caa1c8a21394e8ce971997614a17648f94e1cd0640fbd6b4d14cab13a72"},
|
||||
{file = "charset_normalizer-3.3.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:985c7965f62f6f32bf432e2681173db41336a9c2611693247069288bcb0c7f8b"},
|
||||
{file = "charset_normalizer-3.3.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a15c1fe6d26e83fd2e5972425a772cca158eae58b05d4a25a4e474c221053e2d"},
|
||||
{file = "charset_normalizer-3.3.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ae55d592b02c4349525b6ed8f74c692509e5adffa842e582c0f861751701a673"},
|
||||
{file = "charset_normalizer-3.3.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:be4d9c2770044a59715eb57c1144dedea7c5d5ae80c68fb9959515037cde2008"},
|
||||
{file = "charset_normalizer-3.3.1-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:851cf693fb3aaef71031237cd68699dded198657ec1e76a76eb8be58c03a5d1f"},
|
||||
{file = "charset_normalizer-3.3.1-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:31bbaba7218904d2eabecf4feec0d07469284e952a27400f23b6628439439fa7"},
|
||||
{file = "charset_normalizer-3.3.1-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:871d045d6ccc181fd863a3cd66ee8e395523ebfbc57f85f91f035f50cee8e3d4"},
|
||||
{file = "charset_normalizer-3.3.1-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:501adc5eb6cd5f40a6f77fbd90e5ab915c8fd6e8c614af2db5561e16c600d6f3"},
|
||||
{file = "charset_normalizer-3.3.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:f5fb672c396d826ca16a022ac04c9dce74e00a1c344f6ad1a0fdc1ba1f332213"},
|
||||
{file = "charset_normalizer-3.3.1-cp311-cp311-win32.whl", hash = "sha256:bb06098d019766ca16fc915ecaa455c1f1cd594204e7f840cd6258237b5079a8"},
|
||||
{file = "charset_normalizer-3.3.1-cp311-cp311-win_amd64.whl", hash = "sha256:8af5a8917b8af42295e86b64903156b4f110a30dca5f3b5aedea123fbd638bff"},
|
||||
{file = "charset_normalizer-3.3.1-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:7ae8e5142dcc7a49168f4055255dbcced01dc1714a90a21f87448dc8d90617d1"},
|
||||
{file = "charset_normalizer-3.3.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:5b70bab78accbc672f50e878a5b73ca692f45f5b5e25c8066d748c09405e6a55"},
|
||||
{file = "charset_normalizer-3.3.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:5ceca5876032362ae73b83347be8b5dbd2d1faf3358deb38c9c88776779b2e2f"},
|
||||
{file = "charset_normalizer-3.3.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:34d95638ff3613849f473afc33f65c401a89f3b9528d0d213c7037c398a51296"},
|
||||
{file = "charset_normalizer-3.3.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9edbe6a5bf8b56a4a84533ba2b2f489d0046e755c29616ef8830f9e7d9cf5728"},
|
||||
{file = "charset_normalizer-3.3.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f6a02a3c7950cafaadcd46a226ad9e12fc9744652cc69f9e5534f98b47f3bbcf"},
|
||||
{file = "charset_normalizer-3.3.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:10b8dd31e10f32410751b3430996f9807fc4d1587ca69772e2aa940a82ab571a"},
|
||||
{file = "charset_normalizer-3.3.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:edc0202099ea1d82844316604e17d2b175044f9bcb6b398aab781eba957224bd"},
|
||||
{file = "charset_normalizer-3.3.1-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:b891a2f68e09c5ef989007fac11476ed33c5c9994449a4e2c3386529d703dc8b"},
|
||||
{file = "charset_normalizer-3.3.1-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:71ef3b9be10070360f289aea4838c784f8b851be3ba58cf796262b57775c2f14"},
|
||||
{file = "charset_normalizer-3.3.1-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = "sha256:55602981b2dbf8184c098bc10287e8c245e351cd4fdcad050bd7199d5a8bf514"},
|
||||
{file = "charset_normalizer-3.3.1-cp312-cp312-musllinux_1_1_s390x.whl", hash = "sha256:46fb9970aa5eeca547d7aa0de5d4b124a288b42eaefac677bde805013c95725c"},
|
||||
{file = "charset_normalizer-3.3.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:520b7a142d2524f999447b3a0cf95115df81c4f33003c51a6ab637cbda9d0bf4"},
|
||||
{file = "charset_normalizer-3.3.1-cp312-cp312-win32.whl", hash = "sha256:8ec8ef42c6cd5856a7613dcd1eaf21e5573b2185263d87d27c8edcae33b62a61"},
|
||||
{file = "charset_normalizer-3.3.1-cp312-cp312-win_amd64.whl", hash = "sha256:baec8148d6b8bd5cee1ae138ba658c71f5b03e0d69d5907703e3e1df96db5e41"},
|
||||
{file = "charset_normalizer-3.3.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:63a6f59e2d01310f754c270e4a257426fe5a591dc487f1983b3bbe793cf6bac6"},
|
||||
{file = "charset_normalizer-3.3.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1d6bfc32a68bc0933819cfdfe45f9abc3cae3877e1d90aac7259d57e6e0f85b1"},
|
||||
{file = "charset_normalizer-3.3.1-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:4f3100d86dcd03c03f7e9c3fdb23d92e32abbca07e7c13ebd7ddfbcb06f5991f"},
|
||||
{file = "charset_normalizer-3.3.1-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:39b70a6f88eebe239fa775190796d55a33cfb6d36b9ffdd37843f7c4c1b5dc67"},
|
||||
{file = "charset_normalizer-3.3.1-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4e12f8ee80aa35e746230a2af83e81bd6b52daa92a8afaef4fea4a2ce9b9f4fa"},
|
||||
{file = "charset_normalizer-3.3.1-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7b6cefa579e1237ce198619b76eaa148b71894fb0d6bcf9024460f9bf30fd228"},
|
||||
{file = "charset_normalizer-3.3.1-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:61f1e3fb621f5420523abb71f5771a204b33c21d31e7d9d86881b2cffe92c47c"},
|
||||
{file = "charset_normalizer-3.3.1-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:4f6e2a839f83a6a76854d12dbebde50e4b1afa63e27761549d006fa53e9aa80e"},
|
||||
{file = "charset_normalizer-3.3.1-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:1ec937546cad86d0dce5396748bf392bb7b62a9eeb8c66efac60e947697f0e58"},
|
||||
{file = "charset_normalizer-3.3.1-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:82ca51ff0fc5b641a2d4e1cc8c5ff108699b7a56d7f3ad6f6da9dbb6f0145b48"},
|
||||
{file = "charset_normalizer-3.3.1-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:633968254f8d421e70f91c6ebe71ed0ab140220469cf87a9857e21c16687c034"},
|
||||
{file = "charset_normalizer-3.3.1-cp37-cp37m-win32.whl", hash = "sha256:c0c72d34e7de5604df0fde3644cc079feee5e55464967d10b24b1de268deceb9"},
|
||||
{file = "charset_normalizer-3.3.1-cp37-cp37m-win_amd64.whl", hash = "sha256:63accd11149c0f9a99e3bc095bbdb5a464862d77a7e309ad5938fbc8721235ae"},
|
||||
{file = "charset_normalizer-3.3.1-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:5a3580a4fdc4ac05f9e53c57f965e3594b2f99796231380adb2baaab96e22761"},
|
||||
{file = "charset_normalizer-3.3.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:2465aa50c9299d615d757c1c888bc6fef384b7c4aec81c05a0172b4400f98557"},
|
||||
{file = "charset_normalizer-3.3.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:cb7cd68814308aade9d0c93c5bd2ade9f9441666f8ba5aa9c2d4b389cb5e2a45"},
|
||||
{file = "charset_normalizer-3.3.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:91e43805ccafa0a91831f9cd5443aa34528c0c3f2cc48c4cb3d9a7721053874b"},
|
||||
{file = "charset_normalizer-3.3.1-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:854cc74367180beb327ab9d00f964f6d91da06450b0855cbbb09187bcdb02de5"},
|
||||
{file = "charset_normalizer-3.3.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c15070ebf11b8b7fd1bfff7217e9324963c82dbdf6182ff7050519e350e7ad9f"},
|
||||
{file = "charset_normalizer-3.3.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2c4c99f98fc3a1835af8179dcc9013f93594d0670e2fa80c83aa36346ee763d2"},
|
||||
{file = "charset_normalizer-3.3.1-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3fb765362688821404ad6cf86772fc54993ec11577cd5a92ac44b4c2ba52155b"},
|
||||
{file = "charset_normalizer-3.3.1-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:dced27917823df984fe0c80a5c4ad75cf58df0fbfae890bc08004cd3888922a2"},
|
||||
{file = "charset_normalizer-3.3.1-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:a66bcdf19c1a523e41b8e9d53d0cedbfbac2e93c649a2e9502cb26c014d0980c"},
|
||||
{file = "charset_normalizer-3.3.1-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:ecd26be9f112c4f96718290c10f4caea6cc798459a3a76636b817a0ed7874e42"},
|
||||
{file = "charset_normalizer-3.3.1-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:3f70fd716855cd3b855316b226a1ac8bdb3caf4f7ea96edcccc6f484217c9597"},
|
||||
{file = "charset_normalizer-3.3.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:17a866d61259c7de1bdadef418a37755050ddb4b922df8b356503234fff7932c"},
|
||||
{file = "charset_normalizer-3.3.1-cp38-cp38-win32.whl", hash = "sha256:548eefad783ed787b38cb6f9a574bd8664468cc76d1538215d510a3cd41406cb"},
|
||||
{file = "charset_normalizer-3.3.1-cp38-cp38-win_amd64.whl", hash = "sha256:45f053a0ece92c734d874861ffe6e3cc92150e32136dd59ab1fb070575189c97"},
|
||||
{file = "charset_normalizer-3.3.1-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:bc791ec3fd0c4309a753f95bb6c749ef0d8ea3aea91f07ee1cf06b7b02118f2f"},
|
||||
{file = "charset_normalizer-3.3.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:0c8c61fb505c7dad1d251c284e712d4e0372cef3b067f7ddf82a7fa82e1e9a93"},
|
||||
{file = "charset_normalizer-3.3.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:2c092be3885a1b7899cd85ce24acedc1034199d6fca1483fa2c3a35c86e43041"},
|
||||
{file = "charset_normalizer-3.3.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c2000c54c395d9e5e44c99dc7c20a64dc371f777faf8bae4919ad3e99ce5253e"},
|
||||
{file = "charset_normalizer-3.3.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:4cb50a0335382aac15c31b61d8531bc9bb657cfd848b1d7158009472189f3d62"},
|
||||
{file = "charset_normalizer-3.3.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c30187840d36d0ba2893bc3271a36a517a717f9fd383a98e2697ee890a37c273"},
|
||||
{file = "charset_normalizer-3.3.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fe81b35c33772e56f4b6cf62cf4aedc1762ef7162a31e6ac7fe5e40d0149eb67"},
|
||||
{file = "charset_normalizer-3.3.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d0bf89afcbcf4d1bb2652f6580e5e55a840fdf87384f6063c4a4f0c95e378656"},
|
||||
{file = "charset_normalizer-3.3.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:06cf46bdff72f58645434d467bf5228080801298fbba19fe268a01b4534467f5"},
|
||||
{file = "charset_normalizer-3.3.1-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:3c66df3f41abee950d6638adc7eac4730a306b022570f71dd0bd6ba53503ab57"},
|
||||
{file = "charset_normalizer-3.3.1-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:cd805513198304026bd379d1d516afbf6c3c13f4382134a2c526b8b854da1c2e"},
|
||||
{file = "charset_normalizer-3.3.1-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:9505dc359edb6a330efcd2be825fdb73ee3e628d9010597aa1aee5aa63442e97"},
|
||||
{file = "charset_normalizer-3.3.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:31445f38053476a0c4e6d12b047b08ced81e2c7c712e5a1ad97bc913256f91b2"},
|
||||
{file = "charset_normalizer-3.3.1-cp39-cp39-win32.whl", hash = "sha256:bd28b31730f0e982ace8663d108e01199098432a30a4c410d06fe08fdb9e93f4"},
|
||||
{file = "charset_normalizer-3.3.1-cp39-cp39-win_amd64.whl", hash = "sha256:555fe186da0068d3354cdf4bbcbc609b0ecae4d04c921cc13e209eece7720727"},
|
||||
{file = "charset_normalizer-3.3.1-py3-none-any.whl", hash = "sha256:800561453acdecedaac137bf09cd719c7a440b6800ec182f077bb8e7025fb708"},
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "idna"
|
||||
version = "3.4"
|
||||
description = "Internationalized Domain Names in Applications (IDNA)"
|
||||
optional = false
|
||||
python-versions = ">=3.5"
|
||||
files = [
|
||||
{file = "idna-3.4-py3-none-any.whl", hash = "sha256:90b77e79eaa3eba6de819a0c442c0b4ceefc341a7a2ab77d7562bf49f425c5c2"},
|
||||
{file = "idna-3.4.tar.gz", hash = "sha256:814f528e8dead7d329833b91c5faa87d60bf71824cd12a7530b5526063d02cb4"},
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "requests"
|
||||
version = "2.31.0"
|
||||
description = "Python HTTP for Humans."
|
||||
optional = false
|
||||
python-versions = ">=3.7"
|
||||
files = [
|
||||
{file = "requests-2.31.0-py3-none-any.whl", hash = "sha256:58cd2187c01e70e6e26505bca751777aa9f2ee0b7f4300988b709f44e013003f"},
|
||||
{file = "requests-2.31.0.tar.gz", hash = "sha256:942c5a758f98d790eaed1a29cb6eefc7ffb0d1cf7af05c3d2791656dbd6ad1e1"},
|
||||
]
|
||||
|
||||
[package.dependencies]
|
||||
certifi = ">=2017.4.17"
|
||||
charset-normalizer = ">=2,<4"
|
||||
idna = ">=2.5,<4"
|
||||
urllib3 = ">=1.21.1,<3"
|
||||
|
||||
[package.extras]
|
||||
socks = ["PySocks (>=1.5.6,!=1.5.7)"]
|
||||
use-chardet-on-py3 = ["chardet (>=3.0.2,<6)"]
|
||||
|
||||
[[package]]
|
||||
name = "types-requests"
|
||||
version = "2.31.0.10"
|
||||
description = "Typing stubs for requests"
|
||||
optional = false
|
||||
python-versions = ">=3.7"
|
||||
files = [
|
||||
{file = "types-requests-2.31.0.10.tar.gz", hash = "sha256:dc5852a76f1eaf60eafa81a2e50aefa3d1f015c34cf0cba130930866b1b22a92"},
|
||||
{file = "types_requests-2.31.0.10-py3-none-any.whl", hash = "sha256:b32b9a86beffa876c0c3ac99a4cd3b8b51e973fb8e3bd4e0a6bb32c7efad80fc"},
|
||||
]
|
||||
|
||||
[package.dependencies]
|
||||
urllib3 = ">=2"
|
||||
|
||||
[[package]]
|
||||
name = "urllib3"
|
||||
version = "2.0.7"
|
||||
description = "HTTP library with thread-safe connection pooling, file post, and more."
|
||||
optional = false
|
||||
python-versions = ">=3.7"
|
||||
files = [
|
||||
{file = "urllib3-2.0.7-py3-none-any.whl", hash = "sha256:fdb6d215c776278489906c2f8916e6e7d4f5a9b602ccbcfdf7f016fc8da0596e"},
|
||||
{file = "urllib3-2.0.7.tar.gz", hash = "sha256:c97dfde1f7bd43a71c8d2a58e369e9b2bf692d1334ea9f9cae55add7d0dd0f84"},
|
||||
]
|
||||
|
||||
[package.extras]
|
||||
brotli = ["brotli (>=1.0.9)", "brotlicffi (>=0.8.0)"]
|
||||
secure = ["certifi", "cryptography (>=1.9)", "idna (>=2.0.0)", "pyopenssl (>=17.1.0)", "urllib3-secure-extra"]
|
||||
socks = ["pysocks (>=1.5.6,!=1.5.7,<2.0)"]
|
||||
zstd = ["zstandard (>=0.18.0)"]
|
||||
|
||||
[metadata]
|
||||
lock-version = "2.0"
|
||||
python-versions = "^3.11"
|
||||
content-hash = "747d86bc4792bf2d1f4713f4ee23731066fd73ef95c3bb6a15bbdce896232193"
|
18
pyproject.toml
Normal file
18
pyproject.toml
Normal file
@ -0,0 +1,18 @@
|
||||
[tool.poetry]
|
||||
name = "revanced-static-api"
|
||||
version = "0.1.0"
|
||||
description = "Static API for ReVanced"
|
||||
authors = ["Alexandre Teles <alexandre.teles@ufba.br>", "oSumAtrIX <johan.melkonyan1@web.de>"]
|
||||
license = "GPLv3"
|
||||
readme = "README.md"
|
||||
|
||||
[tool.poetry.dependencies]
|
||||
python = "^3.11"
|
||||
requests = "^2.28.2"
|
||||
|
||||
[tool.poetry.group.dev.dependencies]
|
||||
types-requests = "^2.28.11.17"
|
||||
|
||||
[build-system]
|
||||
requires = ["poetry-core"]
|
||||
build-backend = "poetry.core.masonry.api"
|
@ -1,97 +0,0 @@
|
||||
from abc import abstractmethod
|
||||
|
||||
import requests
|
||||
|
||||
class Api():
|
||||
_api_key: str
|
||||
|
||||
@abstractmethod
|
||||
def __init__(self, api_key: str = None) -> None:
|
||||
self._api_key: str = api_key
|
||||
|
||||
@abstractmethod
|
||||
def get_release(self, repository: str, all: bool = False, prerelease: bool = False) -> dict | list:
|
||||
'''Gets the release(s) for a repository.
|
||||
|
||||
Args:
|
||||
repository (str): The repository to get releases for.
|
||||
all (bool, optional): Whether to get all releases or not. Defaults to False.
|
||||
prerelease (bool, optional): Whether to get prereleases or not. Defaults to False.
|
||||
Returns:
|
||||
dict | list: The release(s) for the repository.
|
||||
'''
|
||||
raise NotImplementedError
|
||||
|
||||
@abstractmethod
|
||||
def get_contributor(self, repository):
|
||||
'''Gets the contributors for a repository.
|
||||
|
||||
Args:
|
||||
repository (str): The repository to get contributors for.
|
||||
'''
|
||||
raise NotImplementedError
|
||||
|
||||
class GitHubApi(Api):
    '''Api implementation backed by the public GitHub REST API.'''

    def __init__(self) -> None:
        # Unauthenticated access; no api key is required for public repositories.
        pass

    def get_contributor(self, repository):
        '''Gets the contributors for a repository.

        Args:
            repository (str): The repository to get contributors for.

        Returns:
            list: Contributors sorted by contribution count (descending), each
            with "username", "avatar" and "link" keys.
        '''
        def transform_contributor(contributor: dict) -> dict:
            '''Transforms a GitHub contributor object into the static api shape.'''
            return {
                'username': contributor['login'],
                'avatar': contributor['avatar_url'],  # TODO: Proxy via a CDN.
                'link': contributor['html_url'],
                'contributions': contributor['contributions']
            }

        def sort_and_delete_key(contributor: dict) -> int:
            # Sort key that also strips "contributions" from the output,
            # since it is only needed for ordering.
            contributions = contributor['contributions']
            del contributor['contributions']
            return contributions

        contributors = requests.get(
            f'https://api.github.com/repos/{repository}/contributors').json()
        contributors = list(map(transform_contributor, contributors))
        contributors.sort(key=sort_and_delete_key, reverse=True)

        return contributors

    def get_release(self, repository: str, all: bool = False, prerelease: bool = False) -> dict | list:
        '''Gets the release(s) for a repository.

        Args:
            repository (str): The repository to get releases for.
            all (bool, optional): Whether to get all releases or not. Defaults to False.
            prerelease (bool, optional): Whether prereleases may be returned. Defaults to False.

        Returns:
            dict | list: All releases when ``all`` is True, otherwise the latest release.
        '''
        def transform_release(release: dict) -> dict:
            '''Transforms a GitHub release object into the static api shape.'''
            return {
                # TODO: Check if theres any need for this: 'id': release['id'].
                'tag': release['tag_name'],
                'prerelease': release['prerelease'],
                'published_at': release['published_at'],
                'assets': [
                    {
                        'name': asset['name'],
                        'download_url': asset['browser_download_url']  # TODO: Proxy via a CDN.
                    } for asset in release['assets']
                ]
            }

        if all:
            releases: list = requests.get(
                f'https://api.github.com/repos/{repository}/releases').json()
            return list(map(transform_release, releases))

        if prerelease:
            # Bug fix: GET /releases/latest ignores query parameters and never
            # returns prereleases, so the original `?prerelease={prerelease}`
            # had no effect. List the releases (newest first) and return the
            # most recent one, which may be a prerelease.
            # NOTE(review): raises IndexError for a repository with no releases
            # — same failure mode as the old code returning an error payload.
            releases = requests.get(
                f'https://api.github.com/repos/{repository}/releases').json()
            return transform_release(releases[0])

        latest_release = requests.get(
            f'https://api.github.com/repos/{repository}/releases/latest').json()
        return transform_release(latest_release)
|
@ -1,5 +0,0 @@
|
||||
import json
|
||||
|
||||
def load_config() -> dict:
    '''Reads and parses the JSON configuration from "config.json".

    Returns:
        dict: The parsed configuration.
    '''
    with open('config.json') as file:
        return json.load(file)
|
@ -1,110 +0,0 @@
|
||||
import json
|
||||
from os.path import join
|
||||
from app import api
|
||||
from app.utils import get_repository_name, write_json, read_json, create_if_not_exists
|
||||
from abc import abstractmethod
|
||||
|
||||
class Api():
    '''Base class for generators that write a piece of the static api.

    NOTE: distinct from ``api.Api`` (the data-source abstraction); instances
    hold a ``name`` they are looked up by and the backing data-source api.
    '''

    _api: api.Api

    def __init__(self, name: str, api: api.Api = api.GitHubApi()) -> None:
        self._api = api
        self.name = name

    @abstractmethod
    def generate(self, config, path):
        '''Generates the api based on the config to the path.

        Args:
            config (dict): The config for the api.
            path (str): The path where the api should be generated.
        '''
        raise NotImplementedError
|
||||
|
||||
class ReleaseApi(Api):
    '''Generates the "release" tree: one immutable JSON file per release tag,
    a rolling "latest.json" per repository, and a per-repository tag index.'''

    def __init__(self, api) -> None:
        # Dead `pass` that followed this call has been removed.
        super().__init__("release", api)

    def generate(self, config, path):
        '''Generates release files for every configured repository.

        Args:
            config (dict): Generator config containing a "repositories" list.
            path (str): Base output directory.
        '''
        path = join(path, 'release')

        for repository in config["repositories"]:
            release = self._api.get_release(repository)
            repository_name = get_repository_name(repository)

            tag = release['tag']

            release_path = join(path, repository_name)
            release_json = json.dumps(release)

            create_if_not_exists(release_path)

            # Historical tag files are immutable; only latest.json is overwritten.
            write_json(release_json, join(release_path, f'{tag}.json'), overwrite=False)
            write_json(release_json, join(release_path, 'latest.json'))

            # Record the tag in the repository's index file.
            index_path = join(path, f'{repository_name}.json')
            index = read_json(index_path, [])
            if tag not in index:
                index.append(tag)

            write_json(index, index_path)
|
||||
|
||||
class ContributorApi(Api):
    '''Generates one contributor-list JSON file per configured repository.'''

    def __init__(self, api) -> None:
        # Dead `pass` that followed this call has been removed.
        super().__init__("contributor", api)

    def generate(self, config, path):
        '''Generates contributor files for every configured repository.

        Args:
            config (dict): Generator config containing a "repositories" list.
            path (str): Base output directory.
        '''
        path = join(path, 'contributor')
        create_if_not_exists(path)

        for repository in config["repositories"]:
            repository_name = get_repository_name(repository)
            contributors = self._api.get_contributor(repository)
            write_json(contributors, join(path, f'{repository_name}.json'))
|
||||
|
||||
class SocialApi(Api):
    '''Generates a single "social.json" file from the config.'''

    def __init__(self, api) -> None:
        super().__init__("social", api)

    def generate(self, config, path):
        '''Writes the social config to "social.json" under the output path.

        Args:
            config (dict): The social config to write.
            path (str): Base output directory.
        '''
        new_social = config

        # f-string prefix removed: the literal contains no placeholders (F541).
        social_path = join(path, "social.json")
        # NOTE(review): an existing social.json wins over the config here, so
        # config changes never update a previously generated file — confirm
        # this "preserve existing" behavior is intended.
        social = read_json(social_path, new_social)

        write_json(social, social_path)
|
||||
|
||||
class ApiProvider():
    '''Simple name-based registry of generator apis.'''

    _apis: list[Api]

    def __init__(self, apis: list[Api]) -> None:
        self._apis = apis

    def get(self, name: str) -> Api | None:
        '''Returns the api registered under *name*.

        Args:
            name (str): The lookup name of the api.

        Returns:
            Api | None: The matching api, or None when no api has that name.
        '''
        # Annotation fixed: the original claimed `-> Api` but returns None on a miss.
        return next((api for api in self._apis if api.name == name), None)
|
||||
|
||||
class DefaultApiProvider(ApiProvider):
    '''ApiProvider preconfigured with every built-in generator, backed by GitHub.'''

    def __init__(self):
        self._api = api.GitHubApi()  # GitHub is the default backing api
        super().__init__([
            ReleaseApi(self._api),
            ContributorApi(self._api),
            SocialApi(self._api),
        ])
|
@ -1,21 +0,0 @@
|
||||
import json
|
||||
import os
|
||||
|
||||
def write_json(text: str | dict | list, to, overwrite=True):
|
||||
if not os.path.exists(to) or overwrite:
|
||||
with open(to, 'w') as f:
|
||||
if not isinstance(text, str):
|
||||
text = json.dumps(text)
|
||||
f.write(text)
|
||||
|
||||
def read_json(path, default):
    '''Parses JSON from a file, falling back to a default.

    Args:
        path (str): Path of the JSON file to read.
        default: Value to return when the file does not exist.
    '''
    if not os.path.exists(path):
        return default
    with open(path) as f:
        return json.load(f)
|
||||
|
||||
def create_if_not_exists(path):
    '''Creates the directory at *path* (including parents); no-op if it already exists.'''
    os.makedirs(path, exist_ok=True)
|
||||
|
||||
def get_repository_name(repository: str):
    '''Returns the repository part of an "owner/name" slug (text after the last "/").'''
    return repository.rsplit('/', 1)[-1]
|
17
src/main.py
17
src/main.py
@ -1,17 +0,0 @@
|
||||
from app.config import load_config
from app.generator import DefaultApiProvider

# Load the configuration and generate every requested api type into the
# configured output directory.
config = load_config()

output = config['output']
apis = config['api']

api_provider = DefaultApiProvider()

for api_config in apis:
    # "type" may name several generators joined with '+', e.g. "release+contributor".
    # (Renamed from `type`/`types`: `type` shadowed the builtin.)
    type_names = api_config['type'].split('+')
    del api_config['type']  # Generators only receive their own settings below.
    for type_name in type_names:
        generator = api_provider.get(type_name)
        if generator is None:
            # Unknown types are ignored instead of aborting the whole run.
            continue
        generator.generate(api_config, output)
|
Loading…
x
Reference in New Issue
Block a user