Mirror of https://github.com/revanced/revanced-api.git (synced 2025-04-29 22:24:31 +02:00)
feat: Disallow all web crawlers (#111)
* feat: Disallow all web crawlers

* [pre-commit.ci] auto fixes from pre-commit.com hooks

  for more information, see https://pre-commit.ci

---------

Co-authored-by: pre-commit-ci[bot] <66853113+pre-commit-ci[bot]@users.noreply.github.com>
This commit is contained in:
parent d61ddcc8ac
commit b69acfa8d7
@@ -9,7 +9,17 @@ from api.compat import github as compat
 from api.donations import donations
 from api.announcements import announcements
 from api.login import login
+from api.robots import robots
 
 api = Blueprint.group(
-    login, ping, github, info, socials, donations, announcements, compat, url_prefix="/"
+    login,
+    ping,
+    github,
+    info,
+    socials,
+    donations,
+    announcements,
+    compat,
+    robots,
+    url_prefix="/",
 )
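The grouped blueprints still need to be attached to a Sanic application instance, which this diff does not show. A minimal sketch of that wiring, assuming the group is importable as `api.api` and using an illustrative app name:

```python
# Minimal sketch (not part of this commit): attaching the blueprint group,
# which now includes the `robots` blueprint, to a Sanic application.
from sanic import Sanic

from api import api  # the Blueprint.group(...) assembled above

app = Sanic("revanced-api")  # app name assumed for illustration
app.blueprint(api)  # registers every grouped blueprint under url_prefix="/"
```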
api/robots.py (new file, 10 lines)
@@ -0,0 +1,10 @@
+from sanic import Blueprint
+from sanic.response import text
+
+
+robots: Blueprint = Blueprint("robots")
+
+
+@robots.get("/robots.txt")
+async def robots_txt(request):
+    return text("User-agent: *\nDisallow: /", content_type="text/plain")
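With the `robots` blueprint registered, any crawler requesting /robots.txt receives a blanket disallow rule. A hedged sketch of how the endpoint could be exercised, assuming the optional sanic-testing package is installed (its TestManager attaches a test_client to the app):

```python
# Hypothetical check (assumes the separate sanic-testing package, not part
# of this commit): verifies that /robots.txt disallows all user agents.
from sanic import Sanic
from sanic_testing import TestManager

from api.robots import robots

app = Sanic("robots-demo")  # illustrative app name, not from the commit
TestManager(app)  # attaches app.test_client
app.blueprint(robots)

_, response = app.test_client.get("/robots.txt")
assert response.status == 200
assert response.text == "User-agent: *\nDisallow: /"
```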