Add IndieAuth support
commit c10a27cc08
parent 10676b039a
14 changed files with 578 additions and 19 deletions
app/utils/indieauth.py (new file, 59 lines)
@@ -0,0 +1,59 @@
+from dataclasses import dataclass
+from typing import Any
+
+import httpx
+import mf2py  # type: ignore
+from loguru import logger
+
+from app import config
+from app.utils.url import make_abs
+
+
+@dataclass
+class IndieAuthClient:
+    logo: str | None
+    name: str
+    url: str
+
+
+def _get_prop(props: dict[str, Any], name: str, default=None) -> Any:
+    if name in props:
+        items = props.get(name)
+        if isinstance(items, list):
+            return items[0]
+        return items
+    return default
+
+
+async def get_client_id_data(url: str) -> IndieAuthClient | None:
+    async with httpx.AsyncClient() as client:
+        try:
+            resp = await client.get(
+                url,
+                headers={
+                    "User-Agent": config.USER_AGENT,
+                },
+                follow_redirects=True,
+            )
+            resp.raise_for_status()
+        except (httpx.HTTPError, httpx.HTTPStatusError):
+            logger.exception(f"Failed to discover webmention endpoint for {url}")
+            return None
+
+    data = mf2py.parse(doc=resp.text)
+    for item in data["items"]:
+        if "h-x-app" in item["type"] or "h-app" in item["type"]:
+            props = item.get("properties", {})
+            print(props)
+            logo = _get_prop(props, "logo")
+            return IndieAuthClient(
+                logo=make_abs(logo, url) if logo else None,
+                name=_get_prop(props, "name"),
+                url=_get_prop(props, "url", url),
+            )
+
+    return IndieAuthClient(
+        logo=None,
+        name=url,
+        url=url,
+    )
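get_client_id_data above fetches the IndieAuth client_id URL and looks for an h-app (or legacy h-x-app) microformat to pick up the client's name, logo and URL, falling back to the bare URL when no markup is found. A minimal sketch of what mf2py returns for such markup, using an invented HTML snippet that is not part of this commit:

    import mf2py

    # Hypothetical client page advertising itself with h-app markup.
    html = """
    <div class="h-app">
      <img class="u-logo" src="/static/logo.png">
      <a class="u-url p-name" href="https://app.example.com/">Example App</a>
    </div>
    """

    parsed = mf2py.parse(doc=html)
    for item in parsed["items"]:
        if "h-app" in item["type"] or "h-x-app" in item["type"]:
            props = item.get("properties", {})
            # Each property comes back as a list, e.g. {"name": ["Example App"], ...},
            # which is why _get_prop unwraps the first element.
            print(props.get("name"), props.get("url"), props.get("logo"))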

app/utils/url.py

@@ -8,6 +8,18 @@ from loguru import logger
 from app.config import DEBUG
 
 
+def make_abs(url: str | None, parent: str) -> str | None:
+    if url is None:
+        return None
+
+    if url.startswith("http"):
+        return url
+
+    return (
+        urlparse(parent)._replace(path=url, params="", query="", fragment="").geturl()
+    )
+
+
 class InvalidURLError(Exception):
     pass
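make_abs is now shared by the webmention and IndieAuth code paths: it leaves absolute URLs untouched and otherwise grafts the given path onto the parent URL's scheme and host. A quick illustration, with made-up URLs, assuming it is run from the project root so the app package is importable:

    from app.utils.url import make_abs

    # Absolute URLs pass through unchanged.
    assert make_abs("https://cdn.example.com/logo.png", "https://example.com/") == "https://cdn.example.com/logo.png"

    # A path is resolved against the parent's scheme and host.
    assert make_abs("/static/logo.png", "https://example.com/app") == "https://example.com/static/logo.png"

    # None stays None (e.g. a missing u-logo property).
    assert make_abs(None, "https://example.com/") is None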

@@ -1,23 +1,10 @@
-from urllib.parse import urlparse
-
 import httpx
 from bs4 import BeautifulSoup  # type: ignore
 from loguru import logger
 
 from app import config
 from app.utils.url import is_url_valid
-
-
-def _make_abs(url: str | None, parent: str) -> str | None:
-    if url is None:
-        return None
-
-    if url.startswith("http"):
-        return url
-
-    return (
-        urlparse(parent)._replace(path=url, params="", query="", fragment="").geturl()
-    )
+from app.utils.url import make_abs
 
 
 async def _discover_webmention_endoint(url: str) -> str | None:

@@ -37,13 +24,13 @@ async def _discover_webmention_endoint(url: str) -> str | None:
 
     for k, v in resp.links.items():
         if k and "webmention" in k:
-            return _make_abs(resp.links[k].get("url"), url)
+            return make_abs(resp.links[k].get("url"), url)
 
     soup = BeautifulSoup(resp.text, "html5lib")
     wlinks = soup.find_all(["link", "a"], attrs={"rel": "webmention"})
     for wlink in wlinks:
        if "href" in wlink.attrs:
-            return _make_abs(wlink.attrs["href"], url)
+            return make_abs(wlink.attrs["href"], url)
 
     return None
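The endpoint discovery above checks the HTTP Link header first and then falls back to <link> or <a> elements with rel="webmention", resolving whatever it finds with make_abs. A small standalone illustration of the HTML fallback, with invented markup, assuming bs4 and html5lib are installed as in the code above:

    from bs4 import BeautifulSoup

    html = """
    <html><head>
      <link rel="webmention" href="/webmention">
    </head><body>
      <a href="https://example.com/other">not a webmention link</a>
    </body></html>
    """

    soup = BeautifulSoup(html, "html5lib")
    wlinks = soup.find_all(["link", "a"], attrs={"rel": "webmention"})
    for wlink in wlinks:
        if "href" in wlink.attrs:
            # The relative href would then be resolved with make_abs(href, page_url).
            print(wlink.attrs["href"])  # -> /webmention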