feat: Disallow all web crawlers (#111)

* feat: Disallow all web crawlers

* [pre-commit.ci] auto fixes from pre-commit.com hooks

for more information, see https://pre-commit.ci

---------

Co-authored-by: pre-commit-ci[bot] <66853113+pre-commit-ci[bot]@users.noreply.github.com>
oSumAtrIX 2023-10-20 23:19:59 +02:00 committed by GitHub
parent d61ddcc8ac
commit b69acfa8d7
2 changed files with 21 additions and 1 deletion


@@ -9,7 +9,17 @@ from api.compat import github as compat
 from api.donations import donations
 from api.announcements import announcements
 from api.login import login
+from api.robots import robots
 
 api = Blueprint.group(
-    login, ping, github, info, socials, donations, announcements, compat, url_prefix="/"
+    login,
+    ping,
+    github,
+    info,
+    socials,
+    donations,
+    announcements,
+    compat,
+    robots,
+    url_prefix="/",
 )
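
For context on how the group above takes effect: a blueprint group is attached to the Sanic application with a single app.blueprint() call, which registers every member blueprint, including the new robots one. A minimal sketch, not part of this commit; the app name and the import path for the group are assumptions:

    from sanic import Sanic

    from api import api  # hypothetical import path for the group defined above

    app = Sanic("example")  # app name is an assumption
    app.blueprint(api)      # registers login, ping, ..., and robots under "/"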

api/robots.py (new file, 10 additions)

@@ -0,0 +1,10 @@
+from sanic import Blueprint
+from sanic.response import text
+
+
+robots: Blueprint = Blueprint("robots")
+
+
+@robots.get("/robots.txt")
+async def robots_txt(request):
+    return text("User-agent: *\nDisallow: /", content_type="text/plain")
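
To sanity-check that the served rules indeed disallow all crawlers, the response body can be fed to Python's standard-library robots.txt parser. A minimal sketch, not part of this commit; the example user agent and path are only illustrative:

    from urllib.robotparser import RobotFileParser

    body = "User-agent: *\nDisallow: /"  # the exact body served by /robots.txt above

    parser = RobotFileParser()
    parser.parse(body.splitlines())

    print(parser.can_fetch("Googlebot", "/announcements"))  # False
    print(parser.can_fetch("*", "/"))  # False: every path is blocked for every agent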