diff --git a/.build.yml b/.build.yml new file mode 100644 index 0000000..2efde64 --- /dev/null +++ b/.build.yml @@ -0,0 +1,22 @@ +image: ubuntu/22.10 +sources: +- https://git.sr.ht/~tsileo/microblog.pub +packages: +- python3 +- python3-dev +- libxml2-dev +- libxslt-dev +- gcc +- libjpeg-dev +- zlib1g-dev +- libffi-dev +- python3.10-venv +tasks: + - setup: | + curl -sSL https://install.python-poetry.org | python3 - + - tests: | + export PATH="/home/build/.local/bin:$PATH" + cd microblog.pub + poetry install --no-interaction + poetry run inv lint + poetry run inv tests diff --git a/.dockerignore b/.dockerignore index fdf7aa9..b31c018 100644 --- a/.dockerignore +++ b/.dockerignore @@ -1,4 +1,9 @@ -__pycache__/ -data/ -data2/ -tests/ +**/.git +data/** +**/__pycache__ +*.pyc +*.swp +docs/ +.mypy_cache +.pytest_cache +docker-compose.yml diff --git a/.drone.yml b/.drone.yml deleted file mode 100644 index 367d27d..0000000 --- a/.drone.yml +++ /dev/null @@ -1,95 +0,0 @@ ---- -priviliged: true # Needed for Docker in Docker -kind: pipeline -name: default -steps: - # Run Mypy/Flake8/black linters before any further work is done - - name: lint - image: python:3 - commands: - - pip install -U pip - - pip install mypy flake8 black - - black --check . - - flake8 - - mypy --ignore-missing-imports . - - mypy --ignore-missing-imports setup_wizard - - # Build the container images we need for the test suite - - name: build_containers - image: docker:dind - environment: - DOCKER_HOST: tcp://docker:2375 - commands: - - apk update && apk upgrade && apk add --no-cache bash git openssh curl - - docker network create fede - - docker pull mongo - - docker pull poussetaches/poussetaches - - docker build . -t microblogpub:latest - - # Run poussetaches (will be shared by the two microblog.pub instances) "in the background" - - name: run_poussetaches - image: docker:dind - detach: true - environment: - DOCKER_HOST: tcp://docker:2375 - POUSSETACHES_AUTH_KEY: lol - commands: - - docker run -p 7991:7991 --net fede -e POUSSETACHES_AUTH_KEY --name poussetaches poussetaches/poussetaches - - # Run MongoDB (will be shared by the two microblog.pub instances) "in the background" - - name: run_mongodb - image: docker:dind - detach: true - environment: - DOCKER_HOST: tcp://docker:2375 - commands: - - docker run -p 27017:27017 --net fede --name mongo mongo - - # Run a first microblog.pub instance "in the background" - - name: microblogpub_instance1 - image: docker:dind - detach: true - environment: - DOCKER_HOST: tcp://docker:2375 - MICROBLOGPUB_DEBUG: 1 - MICROBLOGPUB_POUSSETACHES_HOST: http://poussetaches:7991 - MICROBLOGPUB_INTERNAL_HOST: http://instance1_web:5005 - MICROBLOGPUB_MONGODB_HOST: mongo:27017 - POUSSETACHES_AUTH_KEY: lol - commands: - - sleep 5 - - 'docker run -p 5006:5005 --net fede -v "`pwd`/tests/fixtures/instance1/config:/app/config" -e MICROBLOGPUB_DEBUG -e MICROBLOGPUB_INTERNAL_HOST -e MICROBLOGPUB_MONGODB_HOST -e MICROBLOGPUB_POUSSETACHES_HOST -e POUSSETACHES_AUTH_KEY --name instance1_web microblogpub' - - # Run the second microblog.pub instance "in the background" - - name: microblogpub_instance2 - image: docker:dind - detach: true - environment: - DOCKER_HOST: tcp://docker:2375 - MICROBLOGPUB_DEBUG: 1 - MICROBLOGPUB_POUSSETACHES_HOST: http://poussetaches:7991 - MICROBLOGPUB_INTERNAL_HOST: http://instance2_web:5005 - MICROBLOGPUB_MONGODB_HOST: mongo:27017 - POUSSETACHES_AUTH_KEY: lol - commands: - - 'docker run -p 5007:5005 --net fede -v "`pwd`/tests/fixtures/instance2/config:/app/config" -e MICROBLOGPUB_DEBUG 
-e MICROBLOGPUB_INTERNAL_HOST -e MICROBLOGPUB_MONGODB_HOST -e MICROBLOGPUB_POUSSETACHES_HOST -e POUSSETACHES_AUTH_KEY --name instance2_web microblogpub' - - # Run some tests against the two instances to ensure federation is working - - name: federation_test - image: python:3 - commands: - - pip install -U pip - - pip install -r dev-requirements.txt - # Federation tests (with two local instances) - - python -m pytest -v -s --ignore data -k federatio - -# Setup the services needed to do some "Docker in Docker" (or dind) -services: -- name: docker - image: docker:dind - privileged: true ---- -kind: signature -hmac: ae911176117298c18ecfcd95fbdbd62304c5f32462b42f2aefdd5a5b834fed60 - -... diff --git a/.env b/.env deleted file mode 100644 index 70fb2a3..0000000 --- a/.env +++ /dev/null @@ -1,3 +0,0 @@ -WEB_PORT=5005 -CONFIG_DIR=./config -DATA_DIR=./data diff --git a/.flake8 b/.flake8 new file mode 100644 index 0000000..c1b2260 --- /dev/null +++ b/.flake8 @@ -0,0 +1,4 @@ +[flake8] +max-line-length = 88 +extend-ignore = E203 +exclude = alembic/versions diff --git a/.gitignore b/.gitignore index e43f899..f6eb93d 100644 --- a/.gitignore +++ b/.gitignore @@ -1,8 +1,8 @@ -*.sw[op] -key_*.pem -data/* -config/* -static/media/* - -.mypy_cache/ +*.db __pycache__/ +.mypy_cache/ +.pytest_cache/ +docs/dist/ +requirements.txt +app/_version.py +app/static/favicon.ico diff --git a/.isort.cfg b/.isort.cfg deleted file mode 100644 index 0ed7634..0000000 --- a/.isort.cfg +++ /dev/null @@ -1,3 +0,0 @@ -[settings] -line_length=120 -force_single_line=true diff --git a/AUTHORS b/AUTHORS new file mode 100644 index 0000000..655fc7b --- /dev/null +++ b/AUTHORS @@ -0,0 +1,11 @@ +Thomas Sileo +Kevin Wallace +Miguel Jacq +Alexey Shpakovsky +Josh Washburne +João Costa +Sam +Ash McAllan +Cassio Zen +Cocoa +Jane diff --git a/Dockerfile b/Dockerfile index ce2d647..ff21be1 100644 --- a/Dockerfile +++ b/Dockerfile @@ -1,7 +1,36 @@ -FROM python:3.7 -COPY requirements.txt /app/requirements.txt +FROM r.batts.cloud/debian:bookworm as python-base +RUN apt update && \ + apt install -y --no-install-recommends python3 && \ + apt clean all && rm -rf /var/lib/apt/lists/* +ENV PYTHONUNBUFFERED=1 \ + PYTHONDONTWRITEBYTECODE=1 \ + POETRY_HOME="/opt/poetry" \ + POETRY_VIRTUALENVS_IN_PROJECT=true \ + POETRY_NO_INTERACTION=1 \ + PYSETUP_PATH="/opt/venv" \ + VENV_PATH="/opt/venv/.venv" +ENV PATH="$POETRY_HOME/bin:$VENV_PATH/bin:$PATH" + +FROM python-base as builder-base +RUN apt-get update +RUN apt-get install -y --no-install-recommends curl build-essential gcc libffi-dev libssl-dev libxml2-dev libxslt1-dev zlib1g-dev libxslt-dev gcc libjpeg-dev zlib1g-dev libwebp-dev +# rustc is needed to compile Python packages +RUN curl https://sh.rustup.rs -sSf | bash -s -- -y +ENV PATH="/root/.cargo/bin:${PATH}" +RUN curl -sSL https://install.python-poetry.org | python3 - +WORKDIR $PYSETUP_PATH +COPY poetry.lock pyproject.toml ./ +RUN poetry install --only main + +FROM python-base as production +RUN apt-get update +RUN apt-get install -y --no-install-recommends libjpeg-dev libxslt1-dev libxml2-dev libxslt-dev +RUN groupadd --gid 1000 microblogpub \ + && useradd --uid 1000 --gid microblogpub --shell /bin/bash microblogpub +COPY --from=builder-base $PYSETUP_PATH $PYSETUP_PATH +COPY . /app/ +RUN chown -R 1000:1000 /app +USER microblogpub WORKDIR /app -RUN pip install -r requirements.txt -ADD . 
/app -ENV FLASK_APP=app.py -CMD ["./run.sh"] +EXPOSE 8000 +CMD ["./misc/docker_start.sh"] diff --git a/ENVVARS.md b/ENVVARS.md deleted file mode 100644 index 44ec36b..0000000 --- a/ENVVARS.md +++ /dev/null @@ -1,9 +0,0 @@ -| var | default | -|----------------------------------|-------------------------| -| POUSSETACHES_AUTH_KEY | | -| FLASK_DEBUG | 0 | -| MICROBLOGPUB_DEBUG | "false" | -| MICROBLOGPUB_INTERNAL_HOST | "http://localhost:5000" | -| MICROBLOGPUB_MONGODB_HOST | "localhost:27017" | -| MICROBLOGPUB_POUSSETACHES_HOST | "http://localhost:7991" | -| MICROBLOGPUB_WIZARD_PROJECT_NAME | "microblogpub" | diff --git a/LICENSE b/LICENSE index be3f7b2..7d1aa0b 100644 --- a/LICENSE +++ b/LICENSE @@ -629,8 +629,8 @@ to attach them to the start of each source file to most effectively state the exclusion of warranty; and each file should have at least the "copyright" line and a pointer to where the full notice is found. - - Copyright (C) + Microblog.pub, an ActivityPub powered microblog software + Copyright (C) 2022 Thomas Sileo This program is free software: you can redistribute it and/or modify it under the terms of the GNU Affero General Public License as published by diff --git a/Makefile b/Makefile index b42421d..213c271 100644 --- a/Makefile +++ b/Makefile @@ -1,55 +1,47 @@ SHELL := /bin/bash -PYTHON=python -SETUP_WIZARD_IMAGE=microblogpub-setup-wizard:latest PWD=$(shell pwd) -# Build the config (will error if an existing config/me.yml is found) via a Docker container +.PHONY: build +build: + docker build -t microblogpub/microblogpub . + .PHONY: config config: - # Build the container for the setup wizard on-the-fly - cd setup_wizard && docker build . -t $(SETUP_WIZARD_IMAGE) # Run and remove instantly - -docker run -e MICROBLOGPUB_WIZARD_PROJECT_NAME --rm -it --volume $(PWD):/app/out $(SETUP_WIZARD_IMAGE) - # Finally, remove the tagged image - docker rmi $(SETUP_WIZARD_IMAGE) + -docker run --rm -it --volume `pwd`/data:/app/data microblogpub/microblogpub inv configuration-wizard -# Reload the federation test instances (for local dev) -.PHONY: reload-fed -reload-fed: - docker build . -t microblogpub:latest - docker-compose -p instance2 -f docker-compose-tests.yml stop - docker-compose -p instance1 -f docker-compose-tests.yml stop - WEB_PORT=5006 CONFIG_DIR=./tests/fixtures/instance1/config docker-compose -p instance1 -f docker-compose-tests.yml up -d --force-recreate --build - WEB_PORT=5007 CONFIG_DIR=./tests/fixtures/instance2/config docker-compose -p instance2 -f docker-compose-tests.yml up -d --force-recreate --build +.PHONY: update +update: + -docker run --rm --volume `pwd`/data:/app/data --volume `pwd`/app/static:/app/app/static microblogpub/microblogpub inv update --no-update-deps -# Reload the local dev instance -.PHONY: reload-dev -reload-dev: - docker build . -t microblogpub:latest - docker-compose -f docker-compose-dev.yml up -d --force-recreate +.PHONY: prune-old-data +prune-old-data: + -docker run --rm --volume `pwd`/data:/app/data --volume `pwd`/app/static:/app/app/static microblogpub/microblogpub inv prune-old-data -# Build the microblogpub Docker image -.PHONY: microblogpub -microblogpub: - # Update microblog.pub - git pull - # Rebuild the Docker image - docker build . --no-cache -t microblogpub:latest +.PHONY: webfinger +webfinger: + -docker run --rm --volume `pwd`/data:/app/data --volume `pwd`/app/static:/app/app/static microblogpub/microblogpub inv webfinger $(account) -.PHONY: css -css: - # Download pure.css if needed - if [[ ! 
-f static/pure.css ]]; then curl https://unpkg.com/purecss@1.0.1/build/pure-min.css > static/pure.css; fi - # Download the emojis from twemoji if needded - if [[ ! -d static/twemoji ]]; then wget https://github.com/twitter/twemoji/archive/v12.1.2.tar.gz && tar xvzf v12.1.2.tar.gz && mv twemoji-12.1.2/assets/svg static/twemoji && rm -rf twemoji-12.1.2 && rm -f v12.1.2.tar.gz; fi +.PHONY: move-to +move-to: + -docker run --rm --volume `pwd`/data:/app/data --volume `pwd`/app/static:/app/app/static microblogpub/microblogpub inv move-to $(account) -# Run the docker-compose project locally (will perform a update if the project is already running) -.PHONY: run -run: microblogpub css - # (poussetaches and microblogpub Docker image will updated) - # Update MongoDB - docker pull mongo:3 - docker pull poussetaches/poussetaches - # Restart the project - docker-compose stop - docker-compose up -d --force-recreate --build +.PHONY: self-destruct +self-destruct: + -docker run --rm -it --volume `pwd`/data:/app/data --volume `pwd`/app/static:/app/app/static microblogpub/microblogpub inv self-destruct + +.PHONY: reset-password +reset-password: + -docker run --rm -it --volume `pwd`/data:/app/data --volume `pwd`/app/static:/app/app/static microblogpub/microblogpub inv reset-password + +.PHONY: check-config +check-config: + -docker run --rm --volume `pwd`/data:/app/data --volume `pwd`/app/static:/app/app/static microblogpub/microblogpub inv check-config + +.PHONY: compile-scss +compile-scss: + -docker run --rm --volume `pwd`/data:/app/data --volume `pwd`/app/static:/app/app/static microblogpub/microblogpub inv compile-scss + +.PHONY: import-mastodon-following-accounts +import-mastodon-following-accounts: + -docker run --rm --volume `pwd`/data:/app/data --volume `pwd`/app/static:/app/app/static microblogpub/microblogpub inv import-mastodon-following-accounts $(path)
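The new Makefile targets above all share one pattern: each spins up a one-shot container from the `microblogpub/microblogpub` image and invokes the matching `inv` task, with `data/` (and usually `app/static`) bind-mounted so state lives on the host. A usage sketch; the account and path values are made up for illustration:

```shell
# Build the image once, then run the interactive setup wizard
make build
make config

# Targets that take an argument read it from a Make variable
make webfinger account=someone@example.com
make import-mastodon-following-accounts path=data/following_accounts.csv
```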
diff --git a/README.md b/README.md index 6abe066..5f77b63 100644 --- a/README.md +++ b/README.md @@ -1,120 +1,67 @@ # microblog.pub -[HTML header block: centered microblog.pub logo, the tagline "A self-hosted, single-user, ActivityPub powered microblog.", and badges for Build Status, #microblog.pub on Matrix, License and Code style: black]
+A self-hosted, single-user, ActivityPub powered microblog. -**Still in early development/I do not recommend to run an instance yet.** - +Instances in the wild: + + - [microblog.pub](https://microblog.pub/) (follow to get updated about the project) + - [hexa.ninja](https://hexa.ninja) (theme customization example) + - [testing.microblog.pub](https://testing.microblog.pub/) + - [Irish Left Archive](https://posts.leftarchive.ie/) (another theme customization example) + +There are still some rough edges, but the server is mostly functional. ## Features - - Implements a basic [ActivityPub](https://activitypub.rocks/) server (with federation) - - S2S (Server to Server) and C2S (Client to Server) protocols - - Compatible with [Mastodon](https://joinmastodon.org/) and others ([Pleroma](https://pleroma.social/), Misskey, Plume, PixelFed, Hubzilla...) - - Exposes your outbox as a basic microblog - - Support all content types from the Fediverse (`Note`, `Article`, `Page`, `Video`, `Image`, `Question`...) - - Markdown support - - Server-side code syntax highlighting - - Comes with an admin UI with notifications and the stream of people you follow - - Private "bookmark" support - - List support - - Allows you to attach files to your notes - - Custom emojis support - - Cares about your privacy - - The image upload endpoint strips EXIF meta data before storing the file - - Every attachment/media is cached (or proxied) by the server - - No JavaScript, **that's it**. Even the admin UI is pure HTML/CSS - - (well except for the Emoji picker within the admin, but it's only few line of hand-written JavaScript) - - Easy to customize (the theme is written Sass) - - mobile-friendly theme - - with dark and light version + - Implements the [ActivityPub](https://activitypub.rocks/) server to server protocol + - Federates with all the other popular ActivityPub servers like Pleroma, PixelFed, PeerTube, Mastodon... + - Consumes most of the content types available (notes, articles, videos, pictures...) + - Exposes your ActivityPub profile as a minimalist microblog + - Author notes in Markdown, with code highlighting support + - Dedicated section for articles/blog posts (enabled when the first article is posted) + - Lightweight + - Uses SQLite and Python 3.10+ + - Can be deployed on a small VPS + - Privacy-aware + - EXIF metadata (like GPS location) is stripped before storage + - Every media file is proxied through the server + - Strict access control for your outbox enforced via HTTP signature + - **No** JavaScript + - The UI is pure HTML/CSS + - Except tiny bits of hand-written JS in the note composer to insert emoji and add alt text to images - IndieWeb citizen - - Microformats aware (exports `h-feed`, `h-entry`, `h-cards`, ...) 
- - Export a feed in the HTML that is WebSub compatible - - Partial [Micropub](https://www.w3.org/TR/micropub/) support ([implementation report](https://micropub.rocks/implementation-reports/servers/416/s0BDEXZiX805btoa47sz)) - - Implements [IndieAuth](https://indieauth.spec.indieweb.org/) endpoints (authorization and token endpoint) - - You can use your ActivityPub identity to login to other websites/app (with U2F support) - - Send [Webmentions](https://www.w3.org/TR/webmention/) to linked website (only for public notes) - - Exports RSS/Atom/[JSON](https://jsonfeed.org/) feeds - - You stream/timeline is also available in an (authenticated) JSON feed - - Comes with a tiny HTTP API to help posting new content and and read your inbox/notifications - - Deployable with Docker (Docker compose for everything: dev, test and deployment) - - Focused on testing - - Tested against the [official ActivityPub test suite](https://test.activitypub.rocks/), see [the results](https://activitypub.rocks/implementation-report/) - - [CI runs "federation" tests against two instances](https://d.a4.io/tsileo/microblog.pub) - - Project is running 2 up-to-date instances ([here](https://microblog.pub) and [there](https://a4.io)) - - Manually tested against other major platforms + - [IndieAuth](https://www.w3.org/TR/indieauth/) support (OAuth2 extension) + - [Microformats](http://microformats.org/wiki/Main_Page) everywhere + - [Micropub](https://www.w3.org/TR/micropub/) support + - Sends and processes [Webmentions](https://www.w3.org/TR/webmention/) + - RSS/Atom/[JSON](https://www.jsonfeed.org/) feed + - Easy to backup + - Everything is stored in the `data/` directory: config, uploads, secrets and the SQLite database (see the backup sketch below). + +## Getting started + +Check out the [online documentation](https://docs.microblog.pub). + +## Credits + + - Emoji from [Twemoji](https://twemoji.twitter.com/) + - Awesome custom goose emoji from [@pamela@bsd.network](https://bsd.network/@pamela) -## User Guide +## Contributing -Remember that _microblog.pub_ is still in early development. +All the development takes place on [sourcehut](https://sr.ht/~tsileo/microblog.pub/); GitHub is only used as a mirror: -The easiest and recommended way to run _microblog.pub_ in production is to use the provided docker-compose config. + - [Project](https://sr.ht/~tsileo/microblog.pub/) + - [Issue tracker](https://todo.sr.ht/~tsileo/microblog.pub) + - [Mailing list](https://sr.ht/~tsileo/microblog.pub/lists) -First install [Docker](https://docs.docker.com/install/) and [Docker Compose](https://docs.docker.com/compose/install/). -Python is not needed on the host system. - -Note that all the generated data (config included) will be stored on the host (i.e. not only in Docker) in `config/` and `data/`. - -### Installation - -```shell -$ git clone https://github.com/tsileo/microblog.pub -$ cd microblog.pub -$ make config -``` - -Once the initial configuration is done, you can still tweak the config by editing `config/me.yml` directly. +Contributions are welcome; check out the [contributing section of the documentation](https://docs.microblog.pub/developer_guide.html#contributing) for more details. -### Deployment +## License -To spawn the docker-compose project (running this command will also update _microblog.pub_ to latest and restart everything if it's already running): - -```shell -$ make run -``` - -By default, the server will listen on `localhost:5005` (http://localhost:5005 should work if you're running locally). 
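Since v2 keeps everything under `data/` (config, uploads, secrets and the SQLite database, per the feature list above), backup is simpler than in the docker-compose flow being removed here. A minimal sketch, assuming the instance runs via the Docker image built by the new Makefile; the container name is hypothetical, and stopping it first avoids snapshotting the database mid-write:

```shell
# Stop the instance (adjust to however the container is managed),
# archive data/, then start it again
docker stop microblogpub
tar czf "microblogpub-backup-$(date +%F).tar.gz" data/
docker start microblogpub
```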
- -For production, you need to setup a reverse proxy (nginx, caddy) to forward your domain to the local server -(and check [certbot](https://certbot.eff.org/) for getting a free TLS certificate). - -### Backup - -The easiest way to backup all of your data is to backup the `microblog.pub/` directory directly (that's what I do and I have been able to restore super easily). -It should be safe to copy the directory while the Docker compose project is running. - - -## Development - -The project requires Python3.7+. - -The most convenient way to hack on _microblog.pub_ is to run the Python server on the host directly, and evetything else in Docker. - -```shell -# One-time setup (in a new virtual env) -$ pip install -r requirements.txt -# Start MongoDB and poussetaches -$ make poussetaches -$ env POUSSETACHES_AUTH_KEY="" docker-compose -f docker-compose-dev.yml up -d -# Run the server locally -$ FLASK_DEBUG=1 MICROBLOGPUB_DEBUG=1 FLASK_APP=app.py POUSSETACHES_AUTH_KEY="" flask run -p 5005 --with-threads -``` - - -## Contributions - -Contributions/PRs are welcome, please open an issue to start a discussion before your start any work. +The project is licensed under the GNU AGPL v3 LICENSE (see the LICENSE file). diff --git a/alembic.ini b/alembic.ini new file mode 100644 index 0000000..9635f78 --- /dev/null +++ b/alembic.ini @@ -0,0 +1,105 @@ +# A generic, single database configuration. + +[alembic] +# path to migration scripts +script_location = alembic + +# template used to generate migration file names; The default value is %%(rev)s_%%(slug)s +# Uncomment the line below if you want the files to be prepended with date and time +# see https://alembic.sqlalchemy.org/en/latest/tutorial.html#editing-the-ini-file +# for all available tokens +file_template = %%(year)d_%%(month).2d_%%(day).2d_%%(hour).2d%%(minute).2d-%%(rev)s_%%(slug)s + +# sys.path path, will be prepended to sys.path if present. +# defaults to the current working directory. +prepend_sys_path = . + +# timezone to use when rendering the date within the migration file +# as well as the filename. +# If specified, requires the python-dateutil library that can be +# installed by adding `alembic[tz]` to the pip requirements +# string value is passed to dateutil.tz.gettz() +# leave blank for localtime +timezone = UTC + +# max length of characters to apply to the +# "slug" field +# truncate_slug_length = 40 + +# set to 'true' to run the environment during +# the 'revision' command, regardless of autogenerate +# revision_environment = false + +# set to 'true' to allow .pyc and .pyo files without +# a source .py file to be detected as revisions in the +# versions/ directory +# sourceless = false + +# version location specification; This defaults +# to alembic/versions. When using multiple version +# directories, initial revisions must be specified with --version-path. +# The path separator used here should be the separator specified by "version_path_separator" below. +# version_locations = %(here)s/bar:%(here)s/bat:alembic/versions + +# version path separator; As mentioned above, this is the character used to split +# version_locations. The default within new alembic.ini files is "os", which uses os.pathsep. +# If this key is omitted entirely, it falls back to the legacy behavior of splitting on spaces and/or commas. +# Valid values for version_path_separator are: +# +# version_path_separator = : +# version_path_separator = ; +# version_path_separator = space +version_path_separator = os # Use os.pathsep. 
Default configuration used for new projects. + +# the output encoding used when revision files +# are written from script.py.mako +# output_encoding = utf-8 + +sqlalchemy.url = + + +[post_write_hooks] +# post_write_hooks defines scripts or Python functions that are run +# on newly generated revision scripts. See the documentation for further +# detail and examples + +# format using "black" - use the console_scripts runner, against the "black" entrypoint +# hooks = black +# black.type = console_scripts +# black.entrypoint = black +# black.options = -l 79 REVISION_SCRIPT_FILENAME + +# Logging configuration +[loggers] +keys = root,sqlalchemy,alembic + +[handlers] +keys = console + +[formatters] +keys = generic + +[logger_root] +level = WARN +handlers = console +qualname = + +[logger_sqlalchemy] +level = WARN +handlers = +qualname = sqlalchemy.engine + +[logger_alembic] +level = INFO +handlers = +qualname = alembic + +[handler_console] +class = StreamHandler +args = (sys.stderr,) +level = NOTSET +formatter = generic + +[formatter_generic] +format = %(levelname)-5.5s [%(name)s] %(message)s +datefmt = %H:%M:%S diff --git a/alembic/README b/alembic/README new file mode 100644 index 0000000..98e4f9c --- /dev/null +++ b/alembic/README @@ -0,0 +1 @@ +Generic single-database configuration. \ No newline at end of file diff --git a/alembic/env.py b/alembic/env.py new file mode 100644 index 0000000..ca982b3 --- /dev/null +++ b/alembic/env.py @@ -0,0 +1,86 @@ +from logging.config import fileConfig + +from sqlalchemy import engine_from_config +from sqlalchemy import pool + +import app.models # noqa: F401 # Register models +from alembic import context +from app.database import SQLALCHEMY_DATABASE_URL +from app.database import Base + +# this is the Alembic Config object, which provides +# access to the values within the .ini file in use. +config = context.config + +# Interpret the config file for Python logging. +# This line sets up loggers basically. +if config.config_file_name is not None: + fileConfig(config.config_file_name) + +config.set_main_option("sqlalchemy.url", SQLALCHEMY_DATABASE_URL) + +# add your model's MetaData object here +# for 'autogenerate' support +# from myapp import mymodel +# target_metadata = mymodel.Base.metadata +target_metadata = Base.metadata + +# other values from the config, defined by the needs of env.py, +# can be acquired: +# my_important_option = config.get_main_option("my_important_option") +# ... etc. + + +def run_migrations_offline() -> None: + """Run migrations in 'offline' mode. + + This configures the context with just a URL + and not an Engine, though an Engine is acceptable + here as well. By skipping the Engine creation + we don't even need a DBAPI to be available. + + Calls to context.execute() here emit the given string to the + script output. + + """ + url = config.get_main_option("sqlalchemy.url") + context.configure( + url=url, + target_metadata=target_metadata, + literal_binds=True, + dialect_opts={"paramstyle": "named"}, + render_as_batch=True, + ) + + with context.begin_transaction(): + context.run_migrations() + + +def run_migrations_online() -> None: + """Run migrations in 'online' mode. + + In this scenario we need to create an Engine + and associate a connection with the context. 
+ + """ + connectable = engine_from_config( + config.get_section(config.config_ini_section), + prefix="sqlalchemy.", + poolclass=pool.NullPool, + ) + + with connectable.connect() as connection: + context.configure( + connection=connection, + target_metadata=target_metadata, + render_as_batch=True, + ) + + with context.begin_transaction(): + context.run_migrations() + + +if context.is_offline_mode(): + run_migrations_offline() +else: + run_migrations_online() diff --git a/alembic/script.py.mako b/alembic/script.py.mako new file mode 100644 index 0000000..55df286 --- /dev/null +++ b/alembic/script.py.mako @@ -0,0 +1,24 @@ +"""${message} + +Revision ID: ${up_revision} +Revises: ${down_revision | comma,n} +Create Date: ${create_date} + +""" +from alembic import op +import sqlalchemy as sa +${imports if imports else ""} + +# revision identifiers, used by Alembic. +revision = ${repr(up_revision)} +down_revision = ${repr(down_revision)} +branch_labels = ${repr(branch_labels)} +depends_on = ${repr(depends_on)} + + +def upgrade() -> None: + ${upgrades if upgrades else "pass"} + + +def downgrade() -> None: + ${downgrades if downgrades else "pass"} diff --git a/alembic/versions/2022_07_28_1725-f5717d82b3ff_initial_migration.py b/alembic/versions/2022_07_28_1725-f5717d82b3ff_initial_migration.py new file mode 100644 index 0000000..a184ac1 --- /dev/null +++ b/alembic/versions/2022_07_28_1725-f5717d82b3ff_initial_migration.py @@ -0,0 +1,393 @@ +"""Initial migration + +Revision ID: f5717d82b3ff +Revises: +Create Date: 2022-07-28 17:25:31.081326+00:00 + +""" +import sqlalchemy as sa + +from alembic import op + +# revision identifiers, used by Alembic. +revision = 'f5717d82b3ff' +down_revision = None +branch_labels = None +depends_on = None + + +def upgrade() -> None: + # ### commands auto generated by Alembic - please adjust! 
### + op.create_table('actor', + sa.Column('id', sa.Integer(), nullable=False), + sa.Column('created_at', sa.DateTime(timezone=True), nullable=False), + sa.Column('updated_at', sa.DateTime(timezone=True), nullable=False), + sa.Column('ap_id', sa.String(), nullable=False), + sa.Column('ap_actor', sa.JSON(), nullable=False), + sa.Column('ap_type', sa.String(), nullable=False), + sa.Column('handle', sa.String(), nullable=True), + sa.PrimaryKeyConstraint('id') + ) + with op.batch_alter_table('actor', schema=None) as batch_op: + batch_op.create_index(batch_op.f('ix_actor_ap_id'), ['ap_id'], unique=True) + batch_op.create_index(batch_op.f('ix_actor_handle'), ['handle'], unique=False) + batch_op.create_index(batch_op.f('ix_actor_id'), ['id'], unique=False) + + op.create_table('inbox', + sa.Column('id', sa.Integer(), nullable=False), + sa.Column('created_at', sa.DateTime(timezone=True), nullable=False), + sa.Column('updated_at', sa.DateTime(timezone=True), nullable=False), + sa.Column('actor_id', sa.Integer(), nullable=False), + sa.Column('server', sa.String(), nullable=False), + sa.Column('is_hidden_from_stream', sa.Boolean(), nullable=False), + sa.Column('ap_actor_id', sa.String(), nullable=False), + sa.Column('ap_type', sa.String(), nullable=False), + sa.Column('ap_id', sa.String(), nullable=False), + sa.Column('ap_context', sa.String(), nullable=True), + sa.Column('ap_published_at', sa.DateTime(timezone=True), nullable=False), + sa.Column('ap_object', sa.JSON(), nullable=False), + sa.Column('activity_object_ap_id', sa.String(), nullable=True), + sa.Column('visibility', sa.Enum('PUBLIC', 'UNLISTED', 'FOLLOWERS_ONLY', 'DIRECT', name='visibilityenum'), nullable=False), + sa.Column('relates_to_inbox_object_id', sa.Integer(), nullable=True), + sa.Column('relates_to_outbox_object_id', sa.Integer(), nullable=True), + sa.Column('undone_by_inbox_object_id', sa.Integer(), nullable=True), + sa.Column('liked_via_outbox_object_ap_id', sa.String(), nullable=True), + sa.Column('announced_via_outbox_object_ap_id', sa.String(), nullable=True), + sa.Column('voted_for_answers', sa.JSON(), nullable=True), + sa.Column('is_bookmarked', sa.Boolean(), nullable=False), + sa.Column('is_deleted', sa.Boolean(), nullable=False), + sa.Column('is_transient', sa.Boolean(), server_default='0', nullable=False), + sa.Column('replies_count', sa.Integer(), nullable=False), + sa.Column('og_meta', sa.JSON(), nullable=True), + sa.ForeignKeyConstraint(['actor_id'], ['actor.id'], ), + sa.ForeignKeyConstraint(['relates_to_inbox_object_id'], ['inbox.id'], ), + sa.ForeignKeyConstraint(['relates_to_outbox_object_id'], ['outbox.id'], ), + sa.ForeignKeyConstraint(['undone_by_inbox_object_id'], ['inbox.id'], ), + sa.PrimaryKeyConstraint('id') + ) + with op.batch_alter_table('inbox', schema=None) as batch_op: + batch_op.create_index(batch_op.f('ix_inbox_activity_object_ap_id'), ['activity_object_ap_id'], unique=False) + batch_op.create_index(batch_op.f('ix_inbox_ap_id'), ['ap_id'], unique=True) + batch_op.create_index(batch_op.f('ix_inbox_ap_type'), ['ap_type'], unique=False) + batch_op.create_index(batch_op.f('ix_inbox_id'), ['id'], unique=False) + + op.create_table('incoming_activity', + sa.Column('id', sa.Integer(), nullable=False), + sa.Column('created_at', sa.DateTime(timezone=True), nullable=False), + sa.Column('webmention_source', sa.String(), nullable=True), + sa.Column('sent_by_ap_actor_id', sa.String(), nullable=True), + sa.Column('ap_id', sa.String(), nullable=True), + sa.Column('ap_object', sa.JSON(), nullable=True), + 
sa.Column('tries', sa.Integer(), nullable=False), + sa.Column('next_try', sa.DateTime(timezone=True), nullable=True), + sa.Column('last_try', sa.DateTime(timezone=True), nullable=True), + sa.Column('is_processed', sa.Boolean(), nullable=False), + sa.Column('is_errored', sa.Boolean(), nullable=False), + sa.Column('error', sa.String(), nullable=True), + sa.PrimaryKeyConstraint('id') + ) + with op.batch_alter_table('incoming_activity', schema=None) as batch_op: + batch_op.create_index(batch_op.f('ix_incoming_activity_ap_id'), ['ap_id'], unique=False) + batch_op.create_index(batch_op.f('ix_incoming_activity_id'), ['id'], unique=False) + + op.create_table('indieauth_authorization_request', + sa.Column('id', sa.Integer(), nullable=False), + sa.Column('created_at', sa.DateTime(timezone=True), nullable=False), + sa.Column('code', sa.String(), nullable=False), + sa.Column('scope', sa.String(), nullable=False), + sa.Column('redirect_uri', sa.String(), nullable=False), + sa.Column('client_id', sa.String(), nullable=False), + sa.Column('code_challenge', sa.String(), nullable=True), + sa.Column('code_challenge_method', sa.String(), nullable=True), + sa.Column('is_used', sa.Boolean(), nullable=False), + sa.PrimaryKeyConstraint('id') + ) + with op.batch_alter_table('indieauth_authorization_request', schema=None) as batch_op: + batch_op.create_index(batch_op.f('ix_indieauth_authorization_request_code'), ['code'], unique=True) + batch_op.create_index(batch_op.f('ix_indieauth_authorization_request_id'), ['id'], unique=False) + + op.create_table('outbox', + sa.Column('id', sa.Integer(), nullable=False), + sa.Column('created_at', sa.DateTime(timezone=True), nullable=False), + sa.Column('updated_at', sa.DateTime(timezone=True), nullable=False), + sa.Column('is_hidden_from_homepage', sa.Boolean(), nullable=False), + sa.Column('public_id', sa.String(), nullable=False), + sa.Column('ap_type', sa.String(), nullable=False), + sa.Column('ap_id', sa.String(), nullable=False), + sa.Column('ap_context', sa.String(), nullable=True), + sa.Column('ap_object', sa.JSON(), nullable=False), + sa.Column('activity_object_ap_id', sa.String(), nullable=True), + sa.Column('source', sa.String(), nullable=True), + sa.Column('revisions', sa.JSON(), nullable=True), + sa.Column('ap_published_at', sa.DateTime(timezone=True), nullable=False), + sa.Column('visibility', sa.Enum('PUBLIC', 'UNLISTED', 'FOLLOWERS_ONLY', 'DIRECT', name='visibilityenum'), nullable=False), + sa.Column('likes_count', sa.Integer(), nullable=False), + sa.Column('announces_count', sa.Integer(), nullable=False), + sa.Column('replies_count', sa.Integer(), nullable=False), + sa.Column('webmentions_count', sa.Integer(), server_default='0', nullable=False), + sa.Column('og_meta', sa.JSON(), nullable=True), + sa.Column('is_pinned', sa.Boolean(), nullable=False), + sa.Column('is_transient', sa.Boolean(), server_default='0', nullable=False), + sa.Column('is_deleted', sa.Boolean(), nullable=False), + sa.Column('relates_to_inbox_object_id', sa.Integer(), nullable=True), + sa.Column('relates_to_outbox_object_id', sa.Integer(), nullable=True), + sa.Column('relates_to_actor_id', sa.Integer(), nullable=True), + sa.Column('undone_by_outbox_object_id', sa.Integer(), nullable=True), + sa.ForeignKeyConstraint(['relates_to_actor_id'], ['actor.id'], ), + sa.ForeignKeyConstraint(['relates_to_inbox_object_id'], ['inbox.id'], ), + sa.ForeignKeyConstraint(['relates_to_outbox_object_id'], ['outbox.id'], ), + sa.ForeignKeyConstraint(['undone_by_outbox_object_id'], ['outbox.id'], ), + 
sa.PrimaryKeyConstraint('id') + ) + with op.batch_alter_table('outbox', schema=None) as batch_op: + batch_op.create_index(batch_op.f('ix_outbox_activity_object_ap_id'), ['activity_object_ap_id'], unique=False) + batch_op.create_index(batch_op.f('ix_outbox_ap_id'), ['ap_id'], unique=True) + batch_op.create_index(batch_op.f('ix_outbox_ap_type'), ['ap_type'], unique=False) + batch_op.create_index(batch_op.f('ix_outbox_id'), ['id'], unique=False) + batch_op.create_index(batch_op.f('ix_outbox_public_id'), ['public_id'], unique=False) + + op.create_table('upload', + sa.Column('id', sa.Integer(), nullable=False), + sa.Column('created_at', sa.DateTime(timezone=True), nullable=False), + sa.Column('content_type', sa.String(), nullable=False), + sa.Column('content_hash', sa.String(), nullable=False), + sa.Column('has_thumbnail', sa.Boolean(), nullable=False), + sa.Column('blurhash', sa.String(), nullable=True), + sa.Column('width', sa.Integer(), nullable=True), + sa.Column('height', sa.Integer(), nullable=True), + sa.PrimaryKeyConstraint('id'), + sa.UniqueConstraint('content_hash') + ) + with op.batch_alter_table('upload', schema=None) as batch_op: + batch_op.create_index(batch_op.f('ix_upload_id'), ['id'], unique=False) + + op.create_table('follower', + sa.Column('id', sa.Integer(), nullable=False), + sa.Column('created_at', sa.DateTime(timezone=True), nullable=False), + sa.Column('updated_at', sa.DateTime(timezone=True), nullable=False), + sa.Column('actor_id', sa.Integer(), nullable=False), + sa.Column('inbox_object_id', sa.Integer(), nullable=False), + sa.Column('ap_actor_id', sa.String(), nullable=False), + sa.ForeignKeyConstraint(['actor_id'], ['actor.id'], ), + sa.ForeignKeyConstraint(['inbox_object_id'], ['inbox.id'], ), + sa.PrimaryKeyConstraint('id'), + sa.UniqueConstraint('actor_id'), + sa.UniqueConstraint('ap_actor_id') + ) + with op.batch_alter_table('follower', schema=None) as batch_op: + batch_op.create_index(batch_op.f('ix_follower_id'), ['id'], unique=False) + + op.create_table('following', + sa.Column('id', sa.Integer(), nullable=False), + sa.Column('created_at', sa.DateTime(timezone=True), nullable=False), + sa.Column('updated_at', sa.DateTime(timezone=True), nullable=False), + sa.Column('actor_id', sa.Integer(), nullable=False), + sa.Column('outbox_object_id', sa.Integer(), nullable=False), + sa.Column('ap_actor_id', sa.String(), nullable=False), + sa.ForeignKeyConstraint(['actor_id'], ['actor.id'], ), + sa.ForeignKeyConstraint(['outbox_object_id'], ['outbox.id'], ), + sa.PrimaryKeyConstraint('id'), + sa.UniqueConstraint('actor_id'), + sa.UniqueConstraint('ap_actor_id') + ) + with op.batch_alter_table('following', schema=None) as batch_op: + batch_op.create_index(batch_op.f('ix_following_id'), ['id'], unique=False) + + op.create_table('indieauth_access_token', + sa.Column('id', sa.Integer(), nullable=False), + sa.Column('created_at', sa.DateTime(timezone=True), nullable=False), + sa.Column('indieauth_authorization_request_id', sa.Integer(), nullable=True), + sa.Column('access_token', sa.String(), nullable=False), + sa.Column('expires_in', sa.Integer(), nullable=False), + sa.Column('scope', sa.String(), nullable=False), + sa.Column('is_revoked', sa.Boolean(), nullable=False), + sa.ForeignKeyConstraint(['indieauth_authorization_request_id'], ['indieauth_authorization_request.id'], ), + sa.PrimaryKeyConstraint('id') + ) + with op.batch_alter_table('indieauth_access_token', schema=None) as batch_op: + batch_op.create_index(batch_op.f('ix_indieauth_access_token_access_token'), 
['access_token'], unique=True) + batch_op.create_index(batch_op.f('ix_indieauth_access_token_id'), ['id'], unique=False) + + op.create_table('outbox_object_attachment', + sa.Column('id', sa.Integer(), nullable=False), + sa.Column('created_at', sa.DateTime(timezone=True), nullable=False), + sa.Column('filename', sa.String(), nullable=False), + sa.Column('alt', sa.String(), nullable=True), + sa.Column('outbox_object_id', sa.Integer(), nullable=False), + sa.Column('upload_id', sa.Integer(), nullable=False), + sa.ForeignKeyConstraint(['outbox_object_id'], ['outbox.id'], ), + sa.ForeignKeyConstraint(['upload_id'], ['upload.id'], ), + sa.PrimaryKeyConstraint('id') + ) + with op.batch_alter_table('outbox_object_attachment', schema=None) as batch_op: + batch_op.create_index(batch_op.f('ix_outbox_object_attachment_id'), ['id'], unique=False) + + op.create_table('outgoing_activity', + sa.Column('id', sa.Integer(), nullable=False), + sa.Column('created_at', sa.DateTime(timezone=True), nullable=False), + sa.Column('recipient', sa.String(), nullable=False), + sa.Column('outbox_object_id', sa.Integer(), nullable=True), + sa.Column('inbox_object_id', sa.Integer(), nullable=True), + sa.Column('webmention_target', sa.String(), nullable=True), + sa.Column('tries', sa.Integer(), nullable=False), + sa.Column('next_try', sa.DateTime(timezone=True), nullable=True), + sa.Column('last_try', sa.DateTime(timezone=True), nullable=True), + sa.Column('last_status_code', sa.Integer(), nullable=True), + sa.Column('last_response', sa.String(), nullable=True), + sa.Column('is_sent', sa.Boolean(), nullable=False), + sa.Column('is_errored', sa.Boolean(), nullable=False), + sa.Column('error', sa.String(), nullable=True), + sa.ForeignKeyConstraint(['inbox_object_id'], ['inbox.id'], ), + sa.ForeignKeyConstraint(['outbox_object_id'], ['outbox.id'], ), + sa.PrimaryKeyConstraint('id') + ) + with op.batch_alter_table('outgoing_activity', schema=None) as batch_op: + batch_op.create_index(batch_op.f('ix_outgoing_activity_id'), ['id'], unique=False) + + op.create_table('poll_answer', + sa.Column('id', sa.Integer(), nullable=False), + sa.Column('created_at', sa.DateTime(timezone=True), nullable=False), + sa.Column('outbox_object_id', sa.Integer(), nullable=False), + sa.Column('poll_type', sa.String(), nullable=False), + sa.Column('inbox_object_id', sa.Integer(), nullable=False), + sa.Column('actor_id', sa.Integer(), nullable=False), + sa.Column('name', sa.String(), nullable=False), + sa.ForeignKeyConstraint(['actor_id'], ['actor.id'], ), + sa.ForeignKeyConstraint(['inbox_object_id'], ['inbox.id'], ), + sa.ForeignKeyConstraint(['outbox_object_id'], ['outbox.id'], ), + sa.PrimaryKeyConstraint('id'), + sa.UniqueConstraint('outbox_object_id', 'name', 'actor_id', name='uix_outbox_object_id_name_actor_id') + ) + with op.batch_alter_table('poll_answer', schema=None) as batch_op: + batch_op.create_index(batch_op.f('ix_poll_answer_id'), ['id'], unique=False) + batch_op.create_index('uix_one_of_outbox_object_id_actor_id', ['outbox_object_id', 'actor_id'], unique=True, sqlite_where=sa.text('poll_type = "oneOf"')) + + op.create_table('tagged_outbox_object', + sa.Column('id', sa.Integer(), nullable=False), + sa.Column('outbox_object_id', sa.Integer(), nullable=False), + sa.Column('tag', sa.String(), nullable=False), + sa.ForeignKeyConstraint(['outbox_object_id'], ['outbox.id'], ), + sa.PrimaryKeyConstraint('id'), + sa.UniqueConstraint('outbox_object_id', 'tag', name='uix_tagged_object') + ) + with op.batch_alter_table('tagged_outbox_object', 
schema=None) as batch_op: + batch_op.create_index(batch_op.f('ix_tagged_outbox_object_id'), ['id'], unique=False) + batch_op.create_index(batch_op.f('ix_tagged_outbox_object_tag'), ['tag'], unique=False) + + op.create_table('webmention', + sa.Column('id', sa.Integer(), nullable=False), + sa.Column('created_at', sa.DateTime(timezone=True), nullable=False), + sa.Column('is_deleted', sa.Boolean(), nullable=False), + sa.Column('source', sa.String(), nullable=False), + sa.Column('source_microformats', sa.JSON(), nullable=True), + sa.Column('target', sa.String(), nullable=False), + sa.Column('outbox_object_id', sa.Integer(), nullable=False), + sa.ForeignKeyConstraint(['outbox_object_id'], ['outbox.id'], ), + sa.PrimaryKeyConstraint('id'), + sa.UniqueConstraint('source', 'target', name='uix_source_target') + ) + with op.batch_alter_table('webmention', schema=None) as batch_op: + batch_op.create_index(batch_op.f('ix_webmention_id'), ['id'], unique=False) + batch_op.create_index(batch_op.f('ix_webmention_source'), ['source'], unique=True) + batch_op.create_index(batch_op.f('ix_webmention_target'), ['target'], unique=False) + + op.create_table('notifications', + sa.Column('id', sa.Integer(), nullable=False), + sa.Column('created_at', sa.DateTime(timezone=True), nullable=False), + sa.Column('notification_type', sa.Enum('NEW_FOLLOWER', 'UNFOLLOW', 'FOLLOW_REQUEST_ACCEPTED', 'FOLLOW_REQUEST_REJECTED', 'LIKE', 'UNDO_LIKE', 'ANNOUNCE', 'UNDO_ANNOUNCE', 'MENTION', 'NEW_WEBMENTION', 'UPDATED_WEBMENTION', 'DELETED_WEBMENTION', name='notificationtype'), nullable=True), + sa.Column('is_new', sa.Boolean(), nullable=False), + sa.Column('actor_id', sa.Integer(), nullable=True), + sa.Column('outbox_object_id', sa.Integer(), nullable=True), + sa.Column('inbox_object_id', sa.Integer(), nullable=True), + sa.Column('webmention_id', sa.Integer(), nullable=True), + sa.ForeignKeyConstraint(['actor_id'], ['actor.id'], ), + sa.ForeignKeyConstraint(['inbox_object_id'], ['inbox.id'], ), + sa.ForeignKeyConstraint(['outbox_object_id'], ['outbox.id'], ), + sa.ForeignKeyConstraint(['webmention_id'], ['webmention.id'], name='fk_webmention_id'), + sa.PrimaryKeyConstraint('id') + ) + with op.batch_alter_table('notifications', schema=None) as batch_op: + batch_op.create_index(batch_op.f('ix_notifications_id'), ['id'], unique=False) + + # ### end Alembic commands ### + + +def downgrade() -> None: + # ### commands auto generated by Alembic - please adjust! 
### + with op.batch_alter_table('notifications', schema=None) as batch_op: + batch_op.drop_index(batch_op.f('ix_notifications_id')) + + op.drop_table('notifications') + with op.batch_alter_table('webmention', schema=None) as batch_op: + batch_op.drop_index(batch_op.f('ix_webmention_target')) + batch_op.drop_index(batch_op.f('ix_webmention_source')) + batch_op.drop_index(batch_op.f('ix_webmention_id')) + + op.drop_table('webmention') + with op.batch_alter_table('tagged_outbox_object', schema=None) as batch_op: + batch_op.drop_index(batch_op.f('ix_tagged_outbox_object_tag')) + batch_op.drop_index(batch_op.f('ix_tagged_outbox_object_id')) + + op.drop_table('tagged_outbox_object') + with op.batch_alter_table('poll_answer', schema=None) as batch_op: + batch_op.drop_index('uix_one_of_outbox_object_id_actor_id', sqlite_where=sa.text('poll_type = "oneOf"')) + batch_op.drop_index(batch_op.f('ix_poll_answer_id')) + + op.drop_table('poll_answer') + with op.batch_alter_table('outgoing_activity', schema=None) as batch_op: + batch_op.drop_index(batch_op.f('ix_outgoing_activity_id')) + + op.drop_table('outgoing_activity') + with op.batch_alter_table('outbox_object_attachment', schema=None) as batch_op: + batch_op.drop_index(batch_op.f('ix_outbox_object_attachment_id')) + + op.drop_table('outbox_object_attachment') + with op.batch_alter_table('indieauth_access_token', schema=None) as batch_op: + batch_op.drop_index(batch_op.f('ix_indieauth_access_token_id')) + batch_op.drop_index(batch_op.f('ix_indieauth_access_token_access_token')) + + op.drop_table('indieauth_access_token') + with op.batch_alter_table('following', schema=None) as batch_op: + batch_op.drop_index(batch_op.f('ix_following_id')) + + op.drop_table('following') + with op.batch_alter_table('follower', schema=None) as batch_op: + batch_op.drop_index(batch_op.f('ix_follower_id')) + + op.drop_table('follower') + with op.batch_alter_table('upload', schema=None) as batch_op: + batch_op.drop_index(batch_op.f('ix_upload_id')) + + op.drop_table('upload') + with op.batch_alter_table('outbox', schema=None) as batch_op: + batch_op.drop_index(batch_op.f('ix_outbox_public_id')) + batch_op.drop_index(batch_op.f('ix_outbox_id')) + batch_op.drop_index(batch_op.f('ix_outbox_ap_type')) + batch_op.drop_index(batch_op.f('ix_outbox_ap_id')) + batch_op.drop_index(batch_op.f('ix_outbox_activity_object_ap_id')) + + op.drop_table('outbox') + with op.batch_alter_table('indieauth_authorization_request', schema=None) as batch_op: + batch_op.drop_index(batch_op.f('ix_indieauth_authorization_request_id')) + batch_op.drop_index(batch_op.f('ix_indieauth_authorization_request_code')) + + op.drop_table('indieauth_authorization_request') + with op.batch_alter_table('incoming_activity', schema=None) as batch_op: + batch_op.drop_index(batch_op.f('ix_incoming_activity_id')) + batch_op.drop_index(batch_op.f('ix_incoming_activity_ap_id')) + + op.drop_table('incoming_activity') + with op.batch_alter_table('inbox', schema=None) as batch_op: + batch_op.drop_index(batch_op.f('ix_inbox_id')) + batch_op.drop_index(batch_op.f('ix_inbox_ap_type')) + batch_op.drop_index(batch_op.f('ix_inbox_ap_id')) + batch_op.drop_index(batch_op.f('ix_inbox_activity_object_ap_id')) + + op.drop_table('inbox') + with op.batch_alter_table('actor', schema=None) as batch_op: + batch_op.drop_index(batch_op.f('ix_actor_id')) + batch_op.drop_index(batch_op.f('ix_actor_handle')) + batch_op.drop_index(batch_op.f('ix_actor_ap_id')) + + op.drop_table('actor') + # ### end Alembic commands ### diff --git 
a/alembic/versions/2022_07_31_0815-50d26a370a65_add_is_blocked_attribute_on_actors.py b/alembic/versions/2022_07_31_0815-50d26a370a65_add_is_blocked_attribute_on_actors.py new file mode 100644 index 0000000..81ce1f6 --- /dev/null +++ b/alembic/versions/2022_07_31_0815-50d26a370a65_add_is_blocked_attribute_on_actors.py @@ -0,0 +1,32 @@ +"""Add is_blocked attribute on actors + +Revision ID: 50d26a370a65 +Revises: f5717d82b3ff +Create Date: 2022-07-31 08:15:27.226340+00:00 + +""" +import sqlalchemy as sa + +from alembic import op + +# revision identifiers, used by Alembic. +revision = '50d26a370a65' +down_revision = 'f5717d82b3ff' +branch_labels = None +depends_on = None + + +def upgrade() -> None: + # ### commands auto generated by Alembic - please adjust! ### + with op.batch_alter_table('actor', schema=None) as batch_op: + batch_op.add_column(sa.Column('is_blocked', sa.Boolean(), server_default='0', nullable=False)) + + # ### end Alembic commands ### + + +def downgrade() -> None: + # ### commands auto generated by Alembic - please adjust! ### + with op.batch_alter_table('actor', schema=None) as batch_op: + batch_op.drop_column('is_blocked') + + # ### end Alembic commands ### diff --git a/alembic/versions/2022_08_02_1519-1702e88016db_tweak_notification_model.py b/alembic/versions/2022_08_02_1519-1702e88016db_tweak_notification_model.py new file mode 100644 index 0000000..f4d6f48 --- /dev/null +++ b/alembic/versions/2022_08_02_1519-1702e88016db_tweak_notification_model.py @@ -0,0 +1,34 @@ +"""Tweak notification model + +Revision ID: 1702e88016db +Revises: 50d26a370a65 +Create Date: 2022-08-02 15:19:57.221421+00:00 + +""" +import sqlalchemy as sa + +from alembic import op + +# revision identifiers, used by Alembic. +revision = '1702e88016db' +down_revision = '50d26a370a65' +branch_labels = None +depends_on = None + + +def upgrade() -> None: + # ### commands auto generated by Alembic - please adjust! ### + with op.batch_alter_table('notifications', schema=None) as batch_op: + batch_op.add_column(sa.Column('is_accepted', sa.Boolean(), nullable=True)) + batch_op.add_column(sa.Column('is_rejected', sa.Boolean(), nullable=True)) + + # ### end Alembic commands ### + + +def downgrade() -> None: + # ### commands auto generated by Alembic - please adjust! ### + with op.batch_alter_table('notifications', schema=None) as batch_op: + batch_op.drop_column('is_rejected') + batch_op.drop_column('is_accepted') + + # ### end Alembic commands ### diff --git a/alembic/versions/2022_08_14_1638-9bc69ed947e2_new_conversation_field.py b/alembic/versions/2022_08_14_1638-9bc69ed947e2_new_conversation_field.py new file mode 100644 index 0000000..41d70de --- /dev/null +++ b/alembic/versions/2022_08_14_1638-9bc69ed947e2_new_conversation_field.py @@ -0,0 +1,40 @@ +"""New conversation field + +Revision ID: 9bc69ed947e2 +Revises: 1702e88016db +Create Date: 2022-08-14 16:38:37.688377+00:00 + +""" +import sqlalchemy as sa + +from alembic import op + +# revision identifiers, used by Alembic. +revision = '9bc69ed947e2' +down_revision = '1702e88016db' +branch_labels = None +depends_on = None + + +def upgrade() -> None: + # ### commands auto generated by Alembic - please adjust! 
### + with op.batch_alter_table('inbox', schema=None) as batch_op: + batch_op.add_column(sa.Column('conversation', sa.String(), nullable=True)) + + with op.batch_alter_table('outbox', schema=None) as batch_op: + batch_op.add_column(sa.Column('conversation', sa.String(), nullable=True)) + + op.execute("UPDATE inbox SET conversation = ap_context") + op.execute("UPDATE outbox SET conversation = ap_context") + # ### end Alembic commands ### + + +def downgrade() -> None: + # ### commands auto generated by Alembic - please adjust! ### + with op.batch_alter_table('outbox', schema=None) as batch_op: + batch_op.drop_column('conversation') + + with op.batch_alter_table('inbox', schema=None) as batch_op: + batch_op.drop_column('conversation') + + # ### end Alembic commands ### diff --git a/alembic/versions/2022_08_14_2059-6286262bb466_enable_wal_mode.py b/alembic/versions/2022_08_14_2059-6286262bb466_enable_wal_mode.py new file mode 100644 index 0000000..970b11d --- /dev/null +++ b/alembic/versions/2022_08_14_2059-6286262bb466_enable_wal_mode.py @@ -0,0 +1,22 @@ +"""Enable WAL mode + +Revision ID: 6286262bb466 +Revises: 9bc69ed947e2 +Create Date: 2022-08-14 20:59:26.427796+00:00 + +""" +from alembic import op + +# revision identifiers, used by Alembic. +revision = '6286262bb466' +down_revision = '9bc69ed947e2' +branch_labels = None +depends_on = None + + +def upgrade() -> None: + op.execute("PRAGMA journal_mode=WAL") + + +def downgrade() -> None: + op.execute("PRAGMA journal_mode=DELETE") diff --git a/alembic/versions/2022_08_17_1758-5d3e3f2b9b4e_add_actor_is_deleted.py b/alembic/versions/2022_08_17_1758-5d3e3f2b9b4e_add_actor_is_deleted.py new file mode 100644 index 0000000..e8e9e29 --- /dev/null +++ b/alembic/versions/2022_08_17_1758-5d3e3f2b9b4e_add_actor_is_deleted.py @@ -0,0 +1,32 @@ +"""Add Actor.is_deleted + +Revision ID: 5d3e3f2b9b4e +Revises: 6286262bb466 +Create Date: 2022-08-17 17:58:24.813194+00:00 + +""" +import sqlalchemy as sa + +from alembic import op + +# revision identifiers, used by Alembic. +revision = '5d3e3f2b9b4e' +down_revision = '6286262bb466' +branch_labels = None +depends_on = None + + +def upgrade() -> None: + # ### commands auto generated by Alembic - please adjust! ### + with op.batch_alter_table('actor', schema=None) as batch_op: + batch_op.add_column(sa.Column('is_deleted', sa.Boolean(), server_default='0', nullable=False)) + + # ### end Alembic commands ### + + +def downgrade() -> None: + # ### commands auto generated by Alembic - please adjust! ### + with op.batch_alter_table('actor', schema=None) as batch_op: + batch_op.drop_column('is_deleted') + + # ### end Alembic commands ### diff --git a/alembic/versions/2022_08_19_1246-604d125ea2fb_add_inboxobject_has_local_mention.py b/alembic/versions/2022_08_19_1246-604d125ea2fb_add_inboxobject_has_local_mention.py new file mode 100644 index 0000000..8cc9fd2 --- /dev/null +++ b/alembic/versions/2022_08_19_1246-604d125ea2fb_add_inboxobject_has_local_mention.py @@ -0,0 +1,33 @@ +"""Add InboxObject.has_local_mention + +Revision ID: 604d125ea2fb +Revises: 5d3e3f2b9b4e +Create Date: 2022-08-19 12:46:22.239989+00:00 + +""" +import sqlalchemy as sa + +from alembic import op + +# revision identifiers, used by Alembic. +revision = '604d125ea2fb' +down_revision = '5d3e3f2b9b4e' +branch_labels = None +depends_on = None + + +def upgrade() -> None: + # ### commands auto generated by Alembic - please adjust! 
### + with op.batch_alter_table('inbox', schema=None) as batch_op: + batch_op.add_column(sa.Column('has_local_mention', sa.Boolean(), server_default='0', nullable=False)) + + # ### end Alembic commands ### + op.execute("UPDATE inbox SET has_local_mention = 1 WHERE id IN (select inbox_object_id from notifications where notification_type = 'MENTION')") + + +def downgrade() -> None: + # ### commands auto generated by Alembic - please adjust! ### + with op.batch_alter_table('inbox', schema=None) as batch_op: + batch_op.drop_column('has_local_mention') + + # ### end Alembic commands ### diff --git a/alembic/versions/2022_10_30_1409-b28c0551c236_add_a_slug_field_for_outbox_objects.py b/alembic/versions/2022_10_30_1409-b28c0551c236_add_a_slug_field_for_outbox_objects.py new file mode 100644 index 0000000..d48f18c --- /dev/null +++ b/alembic/versions/2022_10_30_1409-b28c0551c236_add_a_slug_field_for_outbox_objects.py @@ -0,0 +1,48 @@ +"""Add a slug field for outbox objects + +Revision ID: b28c0551c236 +Revises: 604d125ea2fb +Create Date: 2022-10-30 14:09:14.540461+00:00 + +""" +import sqlalchemy as sa +from sqlalchemy import select +from sqlalchemy.orm.session import Session + +from alembic import op + +# revision identifiers, used by Alembic. +revision = 'b28c0551c236' +down_revision = '604d125ea2fb' +branch_labels = None +depends_on = None + + +def upgrade() -> None: + # ### commands auto generated by Alembic - please adjust! ### + with op.batch_alter_table('outbox', schema=None) as batch_op: + batch_op.add_column(sa.Column('slug', sa.String(), nullable=True)) + batch_op.create_index(batch_op.f('ix_outbox_slug'), ['slug'], unique=False) + + # ### end Alembic commands ### + + # Backfill the slug for existing articles + from app.models import OutboxObject + from app.utils.text import slugify + sess = Session(op.get_bind()) + articles = sess.execute(select(OutboxObject).where( + OutboxObject.ap_type == "Article") + ).scalars() + for article in articles: + title = article.ap_object["name"] + article.slug = slugify(title) + sess.commit() + + +def downgrade() -> None: + # ### commands auto generated by Alembic - please adjust! ### + with op.batch_alter_table('outbox', schema=None) as batch_op: + batch_op.drop_index(batch_op.f('ix_outbox_slug')) + batch_op.drop_column('slug') + + # ### end Alembic commands ### diff --git a/alembic/versions/2022_11_16_1942-fadfd359ce78_add_webmention_webmention_type.py b/alembic/versions/2022_11_16_1942-fadfd359ce78_add_webmention_webmention_type.py new file mode 100644 index 0000000..a7753be --- /dev/null +++ b/alembic/versions/2022_11_16_1942-fadfd359ce78_add_webmention_webmention_type.py @@ -0,0 +1,32 @@ +"""Add Webmention.webmention_type + +Revision ID: fadfd359ce78 +Revises: b28c0551c236 +Create Date: 2022-11-16 19:42:56.925512+00:00 + +""" +import sqlalchemy as sa + +from alembic import op + +# revision identifiers, used by Alembic. +revision = 'fadfd359ce78' +down_revision = 'b28c0551c236' +branch_labels = None +depends_on = None + + +def upgrade() -> None: + # ### commands auto generated by Alembic - please adjust! ### + with op.batch_alter_table('webmention', schema=None) as batch_op: + batch_op.add_column(sa.Column('webmention_type', sa.Enum('UNKNOWN', 'LIKE', 'REPLY', 'REPOST', name='webmentiontype'), nullable=True)) + + # ### end Alembic commands ### + + +def downgrade() -> None: + # ### commands auto generated by Alembic - please adjust! 
### + with op.batch_alter_table('webmention', schema=None) as batch_op: + batch_op.drop_column('webmention_type') + + # ### end Alembic commands ### diff --git a/alembic/versions/2022_12_12_1926-9b404c47970a_add_option_to_hide_announces_from_actor.py b/alembic/versions/2022_12_12_1926-9b404c47970a_add_option_to_hide_announces_from_actor.py new file mode 100644 index 0000000..059129c --- /dev/null +++ b/alembic/versions/2022_12_12_1926-9b404c47970a_add_option_to_hide_announces_from_actor.py @@ -0,0 +1,32 @@ +"""Add option to hide announces from actor + +Revision ID: 9b404c47970a +Revises: fadfd359ce78 +Create Date: 2022-12-12 19:26:36.912763+00:00 + +""" +import sqlalchemy as sa + +from alembic import op + +# revision identifiers, used by Alembic. +revision = '9b404c47970a' +down_revision = 'fadfd359ce78' +branch_labels = None +depends_on = None + + +def upgrade() -> None: + # ### commands auto generated by Alembic - please adjust! ### + with op.batch_alter_table('actor', schema=None) as batch_op: + batch_op.add_column(sa.Column('are_announces_hidden_from_stream', sa.Boolean(), server_default='0', nullable=False)) + + # ### end Alembic commands ### + + +def downgrade() -> None: + # ### commands auto generated by Alembic - please adjust! ### + with op.batch_alter_table('actor', schema=None) as batch_op: + batch_op.drop_column('are_announces_hidden_from_stream') + + # ### end Alembic commands ### diff --git a/alembic/versions/2022_12_16_1730-4ab54becec04_add_oauth_client.py b/alembic/versions/2022_12_16_1730-4ab54becec04_add_oauth_client.py new file mode 100644 index 0000000..3858bce --- /dev/null +++ b/alembic/versions/2022_12_16_1730-4ab54becec04_add_oauth_client.py @@ -0,0 +1,48 @@ +"""Add OAuth client + +Revision ID: 4ab54becec04 +Revises: 9b404c47970a +Create Date: 2022-12-16 17:30:54.520477+00:00 + +""" +import sqlalchemy as sa + +from alembic import op + +# revision identifiers, used by Alembic. +revision = '4ab54becec04' +down_revision = '9b404c47970a' +branch_labels = None +depends_on = None + + +def upgrade() -> None: + # ### commands auto generated by Alembic - please adjust! ### + op.create_table('oauth_client', + sa.Column('id', sa.Integer(), nullable=False), + sa.Column('created_at', sa.DateTime(timezone=True), nullable=False), + sa.Column('client_name', sa.String(), nullable=False), + sa.Column('redirect_uris', sa.JSON(), nullable=True), + sa.Column('client_uri', sa.String(), nullable=True), + sa.Column('logo_uri', sa.String(), nullable=True), + sa.Column('scope', sa.String(), nullable=True), + sa.Column('client_id', sa.String(), nullable=False), + sa.Column('client_secret', sa.String(), nullable=False), + sa.PrimaryKeyConstraint('id'), + sa.UniqueConstraint('client_secret') + ) + with op.batch_alter_table('oauth_client', schema=None) as batch_op: + batch_op.create_index(batch_op.f('ix_oauth_client_client_id'), ['client_id'], unique=True) + batch_op.create_index(batch_op.f('ix_oauth_client_id'), ['id'], unique=False) + + # ### end Alembic commands ### + + +def downgrade() -> None: + # ### commands auto generated by Alembic - please adjust! 
### + with op.batch_alter_table('oauth_client', schema=None) as batch_op: + batch_op.drop_index(batch_op.f('ix_oauth_client_id')) + batch_op.drop_index(batch_op.f('ix_oauth_client_client_id')) + + op.drop_table('oauth_client') + # ### end Alembic commands ### diff --git a/alembic/versions/2022_12_18_1126-a209f0333f5a_add_oauth_refresh_token_support.py b/alembic/versions/2022_12_18_1126-a209f0333f5a_add_oauth_refresh_token_support.py new file mode 100644 index 0000000..8e486b9 --- /dev/null +++ b/alembic/versions/2022_12_18_1126-a209f0333f5a_add_oauth_refresh_token_support.py @@ -0,0 +1,36 @@ +"""Add OAuth refresh token support + +Revision ID: a209f0333f5a +Revises: 4ab54becec04 +Create Date: 2022-12-18 11:26:31.976348+00:00 + +""" +import sqlalchemy as sa + +from alembic import op + +# revision identifiers, used by Alembic. +revision = 'a209f0333f5a' +down_revision = '4ab54becec04' +branch_labels = None +depends_on = None + + +def upgrade() -> None: + # ### commands auto generated by Alembic - please adjust! ### + with op.batch_alter_table('indieauth_access_token', schema=None) as batch_op: + batch_op.add_column(sa.Column('refresh_token', sa.String(), nullable=True)) + batch_op.add_column(sa.Column('was_refreshed', sa.Boolean(), server_default='0', nullable=False)) + batch_op.create_index(batch_op.f('ix_indieauth_access_token_refresh_token'), ['refresh_token'], unique=True) + + # ### end Alembic commands ### + + +def downgrade() -> None: + # ### commands auto generated by Alembic - please adjust! ### + with op.batch_alter_table('indieauth_access_token', schema=None) as batch_op: + batch_op.drop_index(batch_op.f('ix_indieauth_access_token_refresh_token')) + batch_op.drop_column('was_refreshed') + batch_op.drop_column('refresh_token') + + # ### end Alembic commands ### diff --git a/app.py b/app.py deleted file mode 100644 index 4debba9..0000000 --- a/app.py +++ /dev/null @@ -1,1065 +0,0 @@ -import json -import logging -import os -import traceback -from datetime import datetime -from typing import Any -from uuid import uuid4 - -import requests -from bson.errors import InvalidId -from bson.objectid import ObjectId -from flask import Flask -from flask import Response -from flask import abort -from flask import g -from flask import redirect -from flask import render_template -from flask import request -from flask import session -from flask import url_for -from gridfs.errors import NoFile -from itsdangerous import BadSignature -from little_boxes import activitypub as ap -from little_boxes.activitypub import ActivityType -from little_boxes.activitypub import clean_activity -from little_boxes.activitypub import get_backend -from little_boxes.errors import ActivityGoneError -from little_boxes.errors import Error -from little_boxes.httpsig import verify_request -from little_boxes.webfinger import get_remote_follow_template -from werkzeug.exceptions import InternalServerError - -import blueprints.admin -import blueprints.indieauth -import blueprints.tasks -import blueprints.well_known -import config -from blueprints.api import _api_required -from blueprints.api import api_required -from blueprints.tasks import TaskError -from config import DB -from config import ID -from config import ME -from config import MEDIA_CACHE -from config import VERSION -from core import activitypub -from core import feed -from core import jsonld -from core.activitypub import activity_from_doc -from core.activitypub import activity_url -from core.activitypub import post_to_inbox -from core.activitypub import post_to_outbox 
-from core.activitypub import remove_context -from core.db import find_one_activity -from core.meta import Box -from core.meta import MetaKey -from core.meta import _meta -from core.meta import by_hashtag -from core.meta import by_object_id -from core.meta import by_remote_id -from core.meta import by_type -from core.meta import by_visibility -from core.meta import follow_request_accepted -from core.meta import in_inbox -from core.meta import in_outbox -from core.meta import is_public -from core.meta import not_deleted -from core.meta import not_poll_answer -from core.meta import not_undo -from core.meta import pinned -from core.shared import _build_thread -from core.shared import _get_ip -from core.shared import activitypubify -from core.shared import csrf -from core.shared import htmlify -from core.shared import is_api_request -from core.shared import jsonify -from core.shared import login_required -from core.shared import noindex -from core.shared import paginated_query -from utils.blacklist import is_blacklisted -from utils.emojis import EMOJIS -from utils.highlight import HIGHLIGHT_CSS -from utils.key import get_secret_key -from utils.template_filters import filters - -app = Flask(__name__) -app.secret_key = get_secret_key("flask") -app.register_blueprint(filters) -app.register_blueprint(blueprints.admin.blueprint) -app.register_blueprint(blueprints.api.blueprint, url_prefix="/api") -app.register_blueprint(blueprints.indieauth.blueprint) -app.register_blueprint(blueprints.tasks.blueprint) -app.register_blueprint(blueprints.well_known.blueprint) -app.config.update(WTF_CSRF_CHECK_DEFAULT=False) - -app.config.update(SESSION_COOKIE_SECURE=True if config.SCHEME == "https" else False) - -csrf.init_app(app) - -logger = logging.getLogger(__name__) - -# Hook up Flask logging with gunicorn -root_logger = logging.getLogger() -if os.getenv("FLASK_DEBUG"): - logger.setLevel(logging.DEBUG) - root_logger.setLevel(logging.DEBUG) - root_logger.handlers = app.logger.handlers -else: - gunicorn_logger = logging.getLogger("gunicorn.error") - root_logger.handlers = gunicorn_logger.handlers - root_logger.setLevel(gunicorn_logger.level) - - -@app.context_processor -def inject_config(): - q = { - **in_outbox(), - "$or": [ - { - **by_type(ActivityType.CREATE), - **not_deleted(), - **by_visibility(ap.Visibility.PUBLIC), - }, - {**by_type(ActivityType.ANNOUNCE), **not_undo()}, - ], - } - notes_count = DB.activities.count(q) - # FIXME(tsileo): rename to all_count, and remove poll answers from it - all_q = { - **in_outbox(), - **by_type([ActivityType.CREATE, ActivityType.ANNOUNCE]), - **not_deleted(), - **not_undo(), - **not_poll_answer(), - } - liked_q = { - **in_outbox(), - **by_type(ActivityType.LIKE), - **not_undo(), - **not_deleted(), - } - followers_q = { - **in_inbox(), - **by_type(ActivityType.FOLLOW), - **not_undo(), - **not_deleted(), - } - following_q = { - **in_outbox(), - **by_type(ActivityType.FOLLOW), - **follow_request_accepted(), - **not_undo(), - **not_deleted(), - } - unread_notifications_q = {_meta(MetaKey.NOTIFICATION_UNREAD): True} - - logged_in = session.get("logged_in", False) - - return dict( - microblogpub_version=VERSION, - config=config, - logged_in=logged_in, - followers_count=DB.activities.count(followers_q), - following_count=DB.activities.count(following_q) - if logged_in or not config.HIDE_FOLLOWING - else 0, - notes_count=notes_count, - liked_count=DB.activities.count(liked_q) if logged_in else 0, - with_replies_count=DB.activities.count(all_q) if logged_in else 0, - 
unread_notifications_count=DB.activities.count(unread_notifications_q) - if logged_in - else 0, - me=ME, - base_url=config.BASE_URL, - highlight_css=HIGHLIGHT_CSS, - ) - - -@app.before_request -def generate_request_id(): - g.request_id = uuid4().hex - - -@app.after_request -def set_x_powered_by(response): - response.headers["X-Powered-By"] = "microblog.pub" - response.headers["X-Request-ID"] = g.request_id - return response - - -@app.errorhandler(ValueError) -def handle_value_error(error): - logger.error( - f"caught value error for {g.request_id}: {error!r}, {traceback.format_tb(error.__traceback__)}" - ) - response = jsonify({"message": error.args[0], "request_id": g.request_id}) - response.status_code = 400 - return response - - -@app.errorhandler(Error) -def handle_activitypub_error(error): - logger.error( - f"caught activitypub error for {g.request_id}: {error!r}, {traceback.format_tb(error.__traceback__)}" - ) - response = jsonify({**error.to_dict(), "request_id": g.request_id}) - response.status_code = error.status_code - return response - - -@app.errorhandler(TaskError) -def handle_task_error(error): - logger.error( - f"caught activitypub error for {g.request_id}: {error!r}, {traceback.format_tb(error.__traceback__)}" - ) - response = jsonify({"traceback": error.message, "request_id": g.request_id}) - response.status_code = 500 - return response - - -@app.errorhandler(InternalServerError) -def handle_500(e): - tb = "".join(traceback.format_tb(e.__traceback__)) - logger.error(f"caught error {e!r}, {tb}") - if not session.get("logged_in", False): - tb = None - - return render_template( - "error.html", code=500, status_text="Internal Server Error", tb=tb - ) - - -# @app.errorhandler(Exception) -# def handle_other_error(error): -# logger.error( -# f"caught error {error!r}, {traceback.format_tb(error.__traceback__)}" -# ) -# response = flask_jsonify({}) -# response.status_code = 500 -# return response - - -def _log_sig(): - sig = request.headers.get("Signature") - if sig: - app.logger.info(f"received an authenticated fetch: {sig}") - try: - req_verified, actor_id = verify_request( - request.method, request.path, request.headers, None - ) - app.logger.info( - f"authenticated fetch: {req_verified}: {actor_id} {request.headers}" - ) - except Exception: - app.logger.exception("failed to verify authenticated fetch") - - -# App routes - -ROBOTS_TXT = """User-agent: * -Disallow: /login -Disallow: /admin/ -Disallow: /static/ -Disallow: /media/ -Disallow: /p/ -Disallow: /uploads/""" - - -@app.route("/robots.txt") -def robots_txt(): - return Response(response=ROBOTS_TXT, headers={"Content-Type": "text/plain"}) - - -@app.route("/microblogpub-0.1.jsonld") -def microblogpub_jsonld(): - """Returns our AP context (embedded in activities @context).""" - return Response( - response=json.dumps(jsonld.MICROBLOGPUB), - headers={"Content-Type": "application/ld+json"}, - ) - - -@app.route("/p//") -@noindex -def proxy(scheme: str, url: str) -> Any: - url = f"{scheme}://{url}" - req_headers = { - k: v - for k, v in dict(request.headers).items() - if k.lower() not in ["host", "cookie", "", "x-forwarded-for", "x-real-ip"] - and not k.lower().startswith("broxy-") - } - # req_headers["Host"] = urlparse(url).netloc - resp = requests.get(url, stream=True, headers=req_headers) - app.logger.info(f"proxied req {url} {req_headers}: {resp!r}") - - def data(): - for chunk in resp.raw.stream(decode_content=False): - yield chunk - - resp_headers = { - k: v - for k, v in dict(resp.raw.headers).items() - if k.lower() - in [ - 
"content-length", - "content-type", - "etag", - "cache-control", - "expires", - "date", - "last-modified", - ] - } - return Response(data(), headers=resp_headers, status=resp.status_code) - - -@app.route("/media/") -@noindex -def serve_media(media_id): - try: - f = MEDIA_CACHE.fs.get(ObjectId(media_id)) - except (InvalidId, NoFile): - abort(404) - - resp = app.response_class(f, direct_passthrough=True, mimetype=f.content_type) - resp.headers.set("Content-Length", f.length) - resp.headers.set("ETag", f.md5) - resp.headers.set( - "Last-Modified", f.uploadDate.strftime("%a, %d %b %Y %H:%M:%S GMT") - ) - resp.headers.set("Cache-Control", "public,max-age=31536000,immutable") - resp.headers.set("Content-Encoding", "gzip") - return resp - - -@app.route("/uploads//") -def serve_uploads(oid, fname): - try: - f = MEDIA_CACHE.fs.get(ObjectId(oid)) - except (InvalidId, NoFile): - abort(404) - - resp = app.response_class(f, direct_passthrough=True, mimetype=f.content_type) - resp.headers.set("Content-Length", f.length) - resp.headers.set("ETag", f.md5) - resp.headers.set( - "Last-Modified", f.uploadDate.strftime("%a, %d %b %Y %H:%M:%S GMT") - ) - resp.headers.set("Cache-Control", "public,max-age=31536000,immutable") - resp.headers.set("Content-Encoding", "gzip") - return resp - - -@app.route("/remote_follow", methods=["GET", "POST"]) -def remote_follow(): - """Form to allow visitor to perform the remote follow dance.""" - if request.method == "GET": - return htmlify(render_template("remote_follow.html")) - - csrf.protect() - profile = request.form.get("profile") - if not profile.startswith("@"): - profile = f"@{profile}" - return redirect(get_remote_follow_template(profile).format(uri=ID)) - - -####### -# Activity pub routes - - -@app.route("/") -def index(): - if is_api_request(): - _log_sig() - return activitypubify(**ME) - - q = { - **in_outbox(), - "$or": [ - { - **by_type(ActivityType.CREATE), - **not_deleted(), - **by_visibility(ap.Visibility.PUBLIC), - "$or": [{"meta.pinned": False}, {"meta.pinned": {"$exists": False}}], - }, - {**by_type(ActivityType.ANNOUNCE), **not_undo()}, - ], - } - - apinned = [] - # Only fetch the pinned notes if we're on the first page - if not request.args.get("older_than") and not request.args.get("newer_than"): - q_pinned = { - **in_outbox(), - **by_type(ActivityType.CREATE), - **not_deleted(), - **pinned(), - **by_visibility(ap.Visibility.PUBLIC), - } - apinned = list(DB.activities.find(q_pinned)) - - outbox_data, older_than, newer_than = paginated_query( - DB.activities, q, limit=25 - len(apinned) - ) - - return htmlify( - render_template( - "index.html", - outbox_data=outbox_data, - older_than=older_than, - newer_than=newer_than, - pinned=apinned, - ) - ) - - -@app.route("/all") -@login_required -def all(): - q = { - **in_outbox(), - **by_type([ActivityType.CREATE, ActivityType.ANNOUNCE]), - **not_deleted(), - **not_undo(), - **not_poll_answer(), - } - outbox_data, older_than, newer_than = paginated_query(DB.activities, q) - - return htmlify( - render_template( - "index.html", - outbox_data=outbox_data, - older_than=older_than, - newer_than=newer_than, - ) - ) - - -@app.route("/note/") -def note_by_id(note_id): - if is_api_request(): - return redirect(url_for("outbox_activity", item_id=note_id)) - - query = {} - # Prevent displaying direct messages on the public frontend - if not session.get("logged_in", False): - query = is_public() - - data = DB.activities.find_one( - {**in_outbox(), **by_remote_id(activity_url(note_id)), **query} - ) - if not data: - abort(404) 
- if data["meta"].get("deleted", False): - abort(410) - - thread = _build_thread(data, query=query) - app.logger.info(f"thread={thread!r}") - - raw_likes = list( - DB.activities.find( - { - **not_undo(), - **not_deleted(), - **by_type(ActivityType.LIKE), - **by_object_id(data["activity"]["object"]["id"]), - } - ) - ) - likes = [] - for doc in raw_likes: - try: - likes.append(doc["meta"]["actor"]) - except Exception: - app.logger.exception(f"invalid doc: {doc!r}") - app.logger.info(f"likes={likes!r}") - - raw_shares = list( - DB.activities.find( - { - **not_undo(), - **not_deleted(), - **by_type(ActivityType.ANNOUNCE), - **by_object_id(data["activity"]["object"]["id"]), - } - ) - ) - shares = [] - for doc in raw_shares: - try: - shares.append(doc["meta"]["actor"]) - except Exception: - app.logger.exception(f"invalid doc: {doc!r}") - app.logger.info(f"shares={shares!r}") - - return htmlify( - render_template( - "note.html", likes=likes, shares=shares, thread=thread, note=data - ) - ) - - -@app.route("/outbox", methods=["GET", "POST"]) -def outbox(): - if request.method == "GET": - if not is_api_request(): - abort(404) - _log_sig() - # TODO(tsileo): returns the whole outbox if authenticated and look at OCAP support - q = { - **in_outbox(), - "$or": [ - { - **by_type(ActivityType.CREATE), - **not_deleted(), - **by_visibility(ap.Visibility.PUBLIC), - }, - {**by_type(ActivityType.ANNOUNCE), **not_undo()}, - ], - } - return activitypubify( - **activitypub.build_ordered_collection( - DB.activities, - q=q, - cursor=request.args.get("cursor"), - map_func=lambda doc: activity_from_doc(doc, embed=True), - col_name="outbox", - ) - ) - - # Handle POST request aka C2S API - try: - _api_required() - except BadSignature: - abort(401) - - data = request.get_json(force=True) - activity = ap.parse_activity(data) - activity_id = post_to_outbox(activity) - - return Response(status=201, headers={"Location": activity_id}) - - -@app.route("/emoji/") -def ap_emoji(name): - if name in EMOJIS: - return activitypubify(**{**EMOJIS[name], "@context": config.DEFAULT_CTX}) - abort(404) - - -@app.route("/outbox/") -def outbox_detail(item_id): - if "text/html" in request.headers.get("Accept", ""): - return redirect(url_for("note_by_id", note_id=item_id)) - - doc = DB.activities.find_one( - { - **in_outbox(), - **by_remote_id(activity_url(item_id)), - **not_deleted(), - **is_public(), - } - ) - if not doc: - abort(404) - - _log_sig() - if doc["meta"].get("deleted", False): - abort(404) - - return activitypubify(**activity_from_doc(doc)) - - -@app.route("/outbox//activity") -def outbox_activity(item_id): - if "text/html" in request.headers.get("Accept", ""): - return redirect(url_for("note_by_id", note_id=item_id)) - - data = find_one_activity( - {**in_outbox(), **by_remote_id(activity_url(item_id)), **is_public()} - ) - if not data: - abort(404) - - _log_sig() - obj = activity_from_doc(data) - if data["meta"].get("deleted", False): - abort(404) - - if obj["type"] != ActivityType.CREATE.value: - abort(404) - return activitypubify(**obj["object"]) - - -@app.route("/outbox//replies") -def outbox_activity_replies(item_id): - if not is_api_request(): - abort(404) - _log_sig() - data = DB.activities.find_one( - { - **in_outbox(), - **by_remote_id(activity_url(item_id)), - **not_deleted(), - **is_public(), - } - ) - if not data: - abort(404) - obj = ap.parse_activity(data["activity"]) - if obj.ACTIVITY_TYPE != ActivityType.CREATE: - abort(404) - - q = { - **is_public(), - **not_deleted(), - **by_type(ActivityType.CREATE), - 
"activity.object.inReplyTo": obj.get_object().id, - } - - return activitypubify( - **activitypub.build_ordered_collection( - DB.activities, - q=q, - cursor=request.args.get("cursor"), - map_func=lambda doc: doc["activity"]["object"], - col_name=f"outbox/{item_id}/replies", - first_page=request.args.get("page") == "first", - ) - ) - - -@app.route("/outbox//likes") -def outbox_activity_likes(item_id): - if not is_api_request(): - abort(404) - _log_sig() - data = DB.activities.find_one( - { - "box": Box.OUTBOX.value, - "remote_id": activity_url(item_id), - "meta.deleted": False, - "meta.public": True, - } - ) - if not data: - abort(404) - obj = ap.parse_activity(data["activity"]) - if obj.ACTIVITY_TYPE != ActivityType.CREATE: - abort(404) - - q = { - "meta.undo": False, - "type": ActivityType.LIKE.value, - "$or": [ - {"activity.object.id": obj.get_object().id}, - {"activity.object": obj.get_object().id}, - ], - } - - return activitypubify( - **activitypub.build_ordered_collection( - DB.activities, - q=q, - cursor=request.args.get("cursor"), - map_func=lambda doc: remove_context(doc["activity"]), - col_name=f"outbox/{item_id}/likes", - first_page=request.args.get("page") == "first", - ) - ) - - -@app.route("/outbox//shares") -def outbox_activity_shares(item_id): - if not is_api_request(): - abort(404) - data = DB.activities.find_one( - { - "box": Box.OUTBOX.value, - "remote_id": activity_url(item_id), - "meta.deleted": False, - } - ) - if not data: - abort(404) - _log_sig() - obj = ap.parse_activity(data["activity"]) - if obj.ACTIVITY_TYPE != ActivityType.CREATE: - abort(404) - - q = { - "meta.undo": False, - "type": ActivityType.ANNOUNCE.value, - "$or": [ - {"activity.object.id": obj.get_object().id}, - {"activity.object": obj.get_object().id}, - ], - } - - return activitypubify( - **activitypub.build_ordered_collection( - DB.activities, - q=q, - cursor=request.args.get("cursor"), - map_func=lambda doc: remove_context(doc["activity"]), - col_name=f"outbox/{item_id}/shares", - first_page=request.args.get("page") == "first", - ) - ) - - -@app.route("/inbox", methods=["GET", "POST"]) # noqa: C901 -def inbox(): - # GET /inbox - if request.method == "GET": - if not is_api_request(): - abort(404) - try: - _api_required() - except BadSignature: - abort(404) - - return activitypubify( - **activitypub.build_ordered_collection( - DB.activities, - q={"meta.deleted": False, "box": Box.INBOX.value}, - cursor=request.args.get("cursor"), - map_func=lambda doc: remove_context(doc["activity"]), - col_name="inbox", - ) - ) - - # POST/ inbox - try: - data = request.get_json(force=True) - if not isinstance(data, dict): - raise ValueError("not a dict") - except Exception: - return Response( - status=422, - headers={"Content-Type": "application/json"}, - response=json.dumps( - { - "error": "failed to decode request body as JSON", - "request_id": g.request_id, - } - ), - ) - - # Check the blacklist now to see if we can return super early - if is_blacklisted(data): - logger.info(f"dropping activity from blacklisted host: {data['id']}") - return Response(status=201) - - logger.info(f"request_id={g.request_id} req_headers={request.headers!r}") - logger.info(f"request_id={g.request_id} raw_data={data}") - try: - req_verified, actor_id = verify_request( - request.method, request.path, request.headers, request.data - ) - if not req_verified: - raise Exception("failed to verify request") - logger.info(f"request_id={g.request_id} signed by {actor_id}") - except Exception: - logger.exception( - f"failed to verify request 
{g.request_id}, trying to verify the payload by fetching the remote" - ) - try: - remote_data = get_backend().fetch_iri(data["id"]) - except ActivityGoneError: - # XXX Mastodon sends Delete activities that are not dereferencable, it's the actor url with #delete - # appended, so an `ActivityGoneError` kind of ensure it's "legit" - if data["type"] == ActivityType.DELETE.value and data["id"].startswith( - data["object"] - ): - # If we're here, this means the key is not saved, so we cannot verify the object - logger.info(f"received a Delete for an unknown actor {data!r}, drop it") - - return Response(status=201) - except Exception: - logger.exception(f"failed to fetch remote for payload {data!r}") - - if "type" in data: - # Friendica does not returns a 410, but a 302 that redirect to an HTML page - if ap._has_type(data["type"], ActivityType.DELETE): - logger.info( - f"received a Delete for an unknown actor {data!r}, drop it" - ) - return Response(status=201) - - if "id" in data: - if DB.trash.find_one({"activity.id": data["id"]}): - # It's already stored in trash, returns early - return Response( - status=422, - headers={"Content-Type": "application/json"}, - response=json.dumps( - { - "error": "failed to verify request (using HTTP signatures or fetching the IRI)", - "request_id": g.request_id, - } - ), - ) - - # Now we can store this activity in the trash for later analysis - - # Track/store the payload for analysis - ip, geoip = _get_ip() - - DB.trash.insert( - { - "activity": data, - "meta": { - "ts": datetime.now().timestamp(), - "ip_address": ip, - "geoip": geoip, - "tb": traceback.format_exc(), - "headers": dict(request.headers), - "request_id": g.request_id, - }, - } - ) - - return Response( - status=422, - headers={"Content-Type": "application/json"}, - response=json.dumps( - { - "error": "failed to verify request (using HTTP signatures or fetching the IRI)", - "request_id": g.request_id, - } - ), - ) - - # We fetched the remote data successfully - data = remote_data - try: - activity = ap.parse_activity(data) - except ValueError: - logger.exception("failed to parse activity for req {g.request_id}: {data!r}") - - # Track/store the payload for analysis - ip, geoip = _get_ip() - - DB.trash.insert( - { - "activity": data, - "meta": { - "ts": datetime.now().timestamp(), - "ip_address": ip, - "geoip": geoip, - "tb": traceback.format_exc(), - "headers": dict(request.headers), - "request_id": g.request_id, - }, - } - ) - - return Response(status=201) - - logger.debug(f"inbox activity={g.request_id}/{activity}/{data}") - - post_to_inbox(activity) - - return Response(status=201) - - -@app.route("/followers") -def followers(): - q = {"box": Box.INBOX.value, "type": ActivityType.FOLLOW.value, "meta.undo": False} - - if is_api_request(): - _log_sig() - return activitypubify( - **activitypub.build_ordered_collection( - DB.activities, - q=q, - cursor=request.args.get("cursor"), - map_func=lambda doc: doc["activity"]["actor"], - col_name="followers", - ) - ) - - raw_followers, older_than, newer_than = paginated_query(DB.activities, q) - followers = [doc["meta"] for doc in raw_followers if "actor" in doc.get("meta", {})] - return htmlify( - render_template( - "followers.html", - followers_data=followers, - older_than=older_than, - newer_than=newer_than, - ) - ) - - -@app.route("/following") -def following(): - q = { - **in_outbox(), - **by_type(ActivityType.FOLLOW), - **not_deleted(), - **follow_request_accepted(), - **not_undo(), - } - - if is_api_request(): - _log_sig() - if config.HIDE_FOLLOWING: - 
return activitypubify( - **activitypub.simple_build_ordered_collection("following", []) - ) - - return activitypubify( - **activitypub.build_ordered_collection( - DB.activities, - q=q, - cursor=request.args.get("cursor"), - map_func=lambda doc: doc["activity"]["object"], - col_name="following", - ) - ) - - if config.HIDE_FOLLOWING and not session.get("logged_in", False): - abort(404) - - following, older_than, newer_than = paginated_query(DB.activities, q) - following = [ - (doc["remote_id"], doc["meta"]) - for doc in following - if "remote_id" in doc and "object" in doc.get("meta", {}) - ] - lists = list(DB.lists.find()) - return htmlify( - render_template( - "following.html", - following_data=following, - older_than=older_than, - newer_than=newer_than, - lists=lists, - ) - ) - - -@app.route("/tags/") -def tags(tag): - if not DB.activities.count( - { - **in_outbox(), - **by_hashtag(tag), - **by_visibility(ap.Visibility.PUBLIC), - **not_deleted(), - } - ): - abort(404) - if not is_api_request(): - return htmlify( - render_template( - "tags.html", - tag=tag, - outbox_data=DB.activities.find( - { - **in_outbox(), - **by_hashtag(tag), - **by_visibility(ap.Visibility.PUBLIC), - **not_deleted(), - } - ).sort("meta.published", -1), - ) - ) - _log_sig() - q = { - **in_outbox(), - **by_hashtag(tag), - **by_visibility(ap.Visibility.PUBLIC), - **not_deleted(), - } - return activitypubify( - **activitypub.build_ordered_collection( - DB.activities, - q=q, - cursor=request.args.get("cursor"), - map_func=lambda doc: doc["activity"]["object"]["id"], - col_name=f"tags/{tag}", - ) - ) - - -@app.route("/featured") -def featured(): - if not is_api_request(): - abort(404) - - _log_sig() - q = { - "box": Box.OUTBOX.value, - "type": ActivityType.CREATE.value, - "meta.deleted": False, - "meta.undo": False, - "meta.pinned": True, - } - data = [clean_activity(doc["activity"]["object"]) for doc in DB.activities.find(q)] - return activitypubify( - **activitypub.simple_build_ordered_collection("featured", data) - ) - - -@app.route("/liked") -@api_required -def liked(): - if not is_api_request(): - q = { - "box": Box.OUTBOX.value, - "type": ActivityType.LIKE.value, - "meta.deleted": False, - "meta.undo": False, - } - - liked, older_than, newer_than = paginated_query(DB.activities, q) - - return htmlify( - render_template( - "liked.html", liked=liked, older_than=older_than, newer_than=newer_than - ) - ) - - q = {"meta.deleted": False, "meta.undo": False, "type": ActivityType.LIKE.value} - return activitypubify( - **activitypub.build_ordered_collection( - DB.activities, - q=q, - cursor=request.args.get("cursor"), - map_func=lambda doc: doc["activity"]["object"], - col_name="liked", - ) - ) - - -################# -# Feeds - - -@app.route("/feed.json") -def json_feed(): - return Response( - response=json.dumps(feed.json_feed("/feed.json")), - headers={"Content-Type": "application/json"}, - ) - - -@app.route("/feed.atom") -def atom_feed(): - return Response( - response=feed.gen_feed().atom_str(), - headers={"Content-Type": "application/atom+xml"}, - ) - - -@app.route("/feed.rss") -def rss_feed(): - return Response( - response=feed.gen_feed().rss_str(), - headers={"Content-Type": "application/rss+xml"}, - ) diff --git a/blueprints/__init__.py b/app/__init__.py similarity index 100% rename from blueprints/__init__.py rename to app/__init__.py diff --git a/app/activitypub.py b/app/activitypub.py new file mode 100644 index 0000000..3a96e8b --- /dev/null +++ b/app/activitypub.py @@ -0,0 +1,408 @@ +import enum +import json 
+import mimetypes +from typing import TYPE_CHECKING +from typing import Any + +import httpx +from loguru import logger + +from app import config +from app.config import ALSO_KNOWN_AS +from app.config import AP_CONTENT_TYPE # noqa: F401 +from app.config import MOVED_TO +from app.httpsig import auth +from app.key import get_pubkey_as_pem +from app.source import dedup_tags +from app.source import hashtagify +from app.utils.url import check_url + +if TYPE_CHECKING: + from app.actor import Actor + +RawObject = dict[str, Any] +AS_CTX = "https://www.w3.org/ns/activitystreams" +AS_PUBLIC = "https://www.w3.org/ns/activitystreams#Public" + +ACTOR_TYPES = ["Application", "Group", "Organization", "Person", "Service"] + +AS_EXTENDED_CTX = [ + "https://www.w3.org/ns/activitystreams", + "https://w3id.org/security/v1", + { + # AS ext + "Hashtag": "as:Hashtag", + "sensitive": "as:sensitive", + "manuallyApprovesFollowers": "as:manuallyApprovesFollowers", + "alsoKnownAs": {"@id": "as:alsoKnownAs", "@type": "@id"}, + "movedTo": {"@id": "as:movedTo", "@type": "@id"}, + # toot + "toot": "http://joinmastodon.org/ns#", + "featured": {"@id": "toot:featured", "@type": "@id"}, + "Emoji": "toot:Emoji", + "blurhash": "toot:blurhash", + "votersCount": "toot:votersCount", + # schema + "schema": "http://schema.org#", + "PropertyValue": "schema:PropertyValue", + "value": "schema:value", + # ostatus + "ostatus": "http://ostatus.org#", + "conversation": "ostatus:conversation", + }, +] + + +class FetchError(Exception): + def __init__(self, url: str, resp: httpx.Response | None = None) -> None: + resp_part = "" + if resp: + resp_part = f", got HTTP {resp.status_code}: {resp.text}" + message = f"Failed to fetch {url}{resp_part}" + super().__init__(message) + self.resp = resp + self.url = url + + +class ObjectIsGoneError(FetchError): + pass + + +class ObjectNotFoundError(FetchError): + pass + + +class ObjectUnavailableError(FetchError): + pass + + +class FetchErrorTypeEnum(str, enum.Enum): + TIMEOUT = "TIMEOUT" + NOT_FOUND = "NOT_FOUND" + UNAUHTORIZED = "UNAUTHORIZED" + + INTERNAL_ERROR = "INTERNAL_ERROR" + + +class VisibilityEnum(str, enum.Enum): + PUBLIC = "public" + UNLISTED = "unlisted" + FOLLOWERS_ONLY = "followers-only" + DIRECT = "direct" + + @staticmethod + def get_display_name(key: "VisibilityEnum") -> str: + return { + VisibilityEnum.PUBLIC: "Public - sent to followers and visible on the homepage", # noqa: E501 + VisibilityEnum.UNLISTED: "Unlisted - like public, but hidden from the homepage", # noqa: E501, + VisibilityEnum.FOLLOWERS_ONLY: "Followers only", + VisibilityEnum.DIRECT: "Direct - only visible for mentioned actors", + }[key] + + +_LOCAL_ACTOR_SUMMARY, _LOCAL_ACTOR_TAGS = hashtagify(config.CONFIG.summary) +_LOCAL_ACTOR_METADATA = [] +if config.CONFIG.metadata: + for kv in config.CONFIG.metadata: + kv_value, kv_tags = hashtagify(kv.value) + _LOCAL_ACTOR_METADATA.append( + { + "name": kv.key, + "type": "PropertyValue", + "value": kv_value, + } + ) + _LOCAL_ACTOR_TAGS.extend(kv_tags) + + +ME = { + "@context": AS_EXTENDED_CTX, + "type": "Person", + "id": config.ID, + "following": config.BASE_URL + "/following", + "followers": config.BASE_URL + "/followers", + "featured": config.BASE_URL + "/featured", + "inbox": config.BASE_URL + "/inbox", + "outbox": config.BASE_URL + "/outbox", + "preferredUsername": config.USERNAME, + "name": config.CONFIG.name, + "summary": _LOCAL_ACTOR_SUMMARY, + "endpoints": { + # For compat with servers expecting a sharedInbox... 
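+        # (microblog.pub is single-user, so the shared inbox is just the main inbox)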
+ "sharedInbox": config.BASE_URL + + "/inbox", + }, + "url": config.ID + "/", # XXX: the path is important for Mastodon compat + "manuallyApprovesFollowers": config.CONFIG.manually_approves_followers, + "attachment": _LOCAL_ACTOR_METADATA, + "publicKey": { + "id": f"{config.ID}#main-key", + "owner": config.ID, + "publicKeyPem": get_pubkey_as_pem(config.KEY_PATH), + }, + "tag": dedup_tags(_LOCAL_ACTOR_TAGS), +} + +if config.CONFIG.icon_url: + ME["icon"] = { + "mediaType": mimetypes.guess_type(config.CONFIG.icon_url)[0], + "type": "Image", + "url": config.CONFIG.icon_url, + } + +if ALSO_KNOWN_AS: + ME["alsoKnownAs"] = [ALSO_KNOWN_AS] + +if MOVED_TO: + ME["movedTo"] = MOVED_TO + +if config.CONFIG.image_url: + ME["image"] = { + "mediaType": mimetypes.guess_type(config.CONFIG.image_url)[0], + "type": "Image", + "url": config.CONFIG.image_url, + } + + +class NotAnObjectError(Exception): + def __init__(self, url: str, resp: httpx.Response | None = None) -> None: + message = f"{url} is not an AP activity" + super().__init__(message) + self.url = url + self.resp = resp + + +async def fetch( + url: str, + params: dict[str, Any] | None = None, + disable_httpsig: bool = False, +) -> RawObject: + logger.info(f"Fetching {url} ({params=})") + check_url(url) + + async with httpx.AsyncClient() as client: + resp = await client.get( + url, + headers={ + "User-Agent": config.USER_AGENT, + "Accept": config.AP_CONTENT_TYPE, + }, + params=params, + follow_redirects=True, + auth=None if disable_httpsig else auth, + ) + + # Special handling for deleted object + if resp.status_code == 410: + raise ObjectIsGoneError(url, resp) + elif resp.status_code in [401, 403]: + raise ObjectUnavailableError(url, resp) + elif resp.status_code == 404: + raise ObjectNotFoundError(url, resp) + + try: + resp.raise_for_status() + except httpx.HTTPError as http_error: + raise FetchError(url, resp) from http_error + + try: + return resp.json() + except json.JSONDecodeError: + raise NotAnObjectError(url, resp) + + +async def parse_collection( # noqa: C901 + url: str | None = None, + payload: RawObject | None = None, + level: int = 0, + limit: int = 0, +) -> list[RawObject]: + """Resolve/fetch a `Collection`/`OrderedCollection`.""" + if level > 3: + raise ValueError("recursion limit exceeded") + + # Go through all the pages + out: list[RawObject] = [] + if url: + payload = await fetch(url) + if not payload: + raise ValueError("must at least prove a payload or an URL") + + ap_type = payload.get("type") + if not ap_type: + raise ValueError(f"Missing type: {payload=}") + + if level == 0 and ap_type not in ["Collection", "OrderedCollection"]: + raise ValueError(f"Unexpected type {ap_type}") + + if payload["type"] in ["Collection", "OrderedCollection"]: + if "orderedItems" in payload: + return payload["orderedItems"] + if "items" in payload: + return payload["items"] + if "first" in payload: + if isinstance(payload["first"], str): + out.extend( + await parse_collection( + url=payload["first"], level=level + 1, limit=limit + ) + ) + else: + if "orderedItems" in payload["first"]: + out.extend(payload["first"]["orderedItems"]) + if "items" in payload["first"]: + out.extend(payload["first"]["items"]) + n = payload["first"].get("next") + if n: + out.extend( + await parse_collection(url=n, level=level + 1, limit=limit) + ) + return out + + while payload: + if ap_type in ["CollectionPage", "OrderedCollectionPage"]: + if "orderedItems" in payload: + out.extend(payload["orderedItems"]) + if "items" in payload: + out.extend(payload["items"]) + n = 
payload.get("next") + if n is None or (limit > 0 and len(out) >= limit): + break + payload = await fetch(n) + else: + raise ValueError("unexpected activity type {}".format(payload["type"])) + + return out + + +def as_list(val: Any | list[Any]) -> list[Any]: + if isinstance(val, list): + return val + + return [val] + + +def get_id(val: str | dict[str, Any]) -> str: + if isinstance(val, dict): + val = val["id"] + + if not isinstance(val, str): + raise ValueError(f"Invalid ID type: {val}") + + return val + + +def object_visibility(ap_activity: RawObject, actor: "Actor") -> VisibilityEnum: + to = as_list(ap_activity.get("to", [])) + cc = as_list(ap_activity.get("cc", [])) + if AS_PUBLIC in to: + return VisibilityEnum.PUBLIC + elif AS_PUBLIC in cc: + return VisibilityEnum.UNLISTED + elif actor.followers_collection_id and actor.followers_collection_id in to + cc: + return VisibilityEnum.FOLLOWERS_ONLY + else: + return VisibilityEnum.DIRECT + + +def get_actor_id(activity: RawObject) -> str: + if "attributedTo" in activity: + attributed_to = as_list(activity["attributedTo"]) + return get_id(attributed_to[0]) + else: + return get_id(activity["actor"]) + + +async def get_object(activity: RawObject) -> RawObject: + if "object" not in activity: + raise ValueError(f"No object in {activity}") + + raw_activity_object = activity["object"] + if isinstance(raw_activity_object, dict): + return raw_activity_object + elif isinstance(raw_activity_object, str): + return await fetch(raw_activity_object) + else: + raise ValueError(f"Unexpected object {raw_activity_object}") + + +def get_object_id(activity: RawObject) -> str: + if "object" not in activity: + raise ValueError(f"No object in {activity}") + + return get_id(activity["object"]) + + +def wrap_object(activity: RawObject) -> RawObject: + # TODO(tsileo): improve Create VS Update with a `update=True` flag + if "updated" in activity: + return { + "@context": AS_EXTENDED_CTX, + "actor": config.ID, + "to": activity.get("to", []), + "cc": activity.get("cc", []), + "id": activity["id"] + "/update_activity/" + activity["updated"], + "object": remove_context(activity), + "published": activity["published"], + "updated": activity["updated"], + "type": "Update", + } + else: + return { + "@context": AS_EXTENDED_CTX, + "actor": config.ID, + "to": activity.get("to", []), + "cc": activity.get("cc", []), + "id": activity["id"] + "/activity", + "object": remove_context(activity), + "published": activity["published"], + "type": "Create", + } + + +def wrap_object_if_needed(raw_object: RawObject) -> RawObject: + if raw_object["type"] in ["Note", "Article", "Question"]: + return wrap_object(raw_object) + + return raw_object + + +def unwrap_activity(activity: RawObject) -> RawObject: + # FIXME(ts): deprecate this + if activity["type"] in ["Create", "Update"]: + unwrapped_object = activity["object"] + + # Sanity check, ensure the wrapped object actor matches the activity + if get_actor_id(unwrapped_object) != get_actor_id(activity): + raise ValueError( + f"Unwrapped object actor does not match activity: {activity}" + ) + return unwrapped_object + + return activity + + +def remove_context(raw_object: RawObject) -> RawObject: + if "@context" not in raw_object: + return raw_object + a = dict(raw_object) + del a["@context"] + return a + + +async def post(url: str, payload: dict[str, Any]) -> httpx.Response: + logger.info(f"Posting {url} ({payload=})") + check_url(url) + + async with httpx.AsyncClient() as client: + resp = await client.post( + url, + headers={ + "User-Agent": 
+                "Content-Type": config.AP_CONTENT_TYPE,
+            },
+            json=payload,
+            auth=auth,
+        )
+        resp.raise_for_status()
+        return resp
diff --git a/app/actor.py b/app/actor.py
new file mode 100644
index 0000000..b961856
--- /dev/null
+++ b/app/actor.py
@@ -0,0 +1,455 @@
+import hashlib
+import typing
+from dataclasses import dataclass
+from datetime import timedelta
+from functools import cached_property
+from typing import Union
+from urllib.parse import urlparse
+
+import httpx
+from loguru import logger
+from sqlalchemy import select
+from sqlalchemy.orm import joinedload
+
+from app import activitypub as ap
+from app import media
+from app.config import BASE_URL
+from app.config import USER_AGENT
+from app.config import USERNAME
+from app.config import WEBFINGER_DOMAIN
+from app.database import AsyncSession
+from app.utils.datetime import as_utc
+from app.utils.datetime import now
+
+if typing.TYPE_CHECKING:
+    from app.models import Actor as ActorModel
+
+
+def _handle(raw_actor: ap.RawObject) -> str:
+    ap_id = ap.get_id(raw_actor["id"])
+    domain = urlparse(ap_id)
+    if not domain.hostname:
+        raise ValueError(f"Invalid actor ID {ap_id}")
+
+    handle = f'@{raw_actor["preferredUsername"]}@{domain.hostname}'  # type: ignore
+
+    # TODO: cleanup this
+    # Next, check for custom webfinger domains
+    resp: httpx.Response | None = None
+    for url in {
+        f"https://{domain.hostname}/.well-known/webfinger",
+        f"http://{domain.hostname}/.well-known/webfinger",
+    }:
+        try:
+            logger.info(f"Webfinger {handle} at {url}")
+            resp = httpx.get(
+                url,
+                params={"resource": f"acct:{handle[1:]}"},
+                headers={
+                    "User-Agent": USER_AGENT,
+                },
+                follow_redirects=True,
+            )
+            resp.raise_for_status()
+            break
+        except Exception:
+            logger.exception(f"Failed to webfinger {handle}")
+
+    if resp:
+        try:
+            json_resp = resp.json()
+            if json_resp.get("subject", "").startswith("acct:"):
+                return "@" + json_resp["subject"].removeprefix("acct:")
+        except Exception:
+            logger.exception(f"Failed to parse webfinger response for {handle}")
+    return handle
+
+
+class Actor:
+    @property
+    def ap_actor(self) -> ap.RawObject:
+        raise NotImplementedError()
+
+    @property
+    def ap_id(self) -> str:
+        return ap.get_id(self.ap_actor["id"])
+
+    @property
+    def name(self) -> str | None:
+        return self.ap_actor.get("name")
+
+    @property
+    def summary(self) -> str | None:
+        return self.ap_actor.get("summary")
+
+    @property
+    def url(self) -> str | None:
+        return self.ap_actor.get("url") or self.ap_actor["id"]
+
+    @property
+    def preferred_username(self) -> str:
+        return self.ap_actor["preferredUsername"]
+
+    @property
+    def display_name(self) -> str:
+        if self.name:
+            return self.name
+        return self.preferred_username
+
+    @cached_property
+    def handle(self) -> str:
+        return _handle(self.ap_actor)
+
+    @property
+    def ap_type(self) -> str:
+        raise NotImplementedError()
+
+    @property
+    def inbox_url(self) -> str:
+        return self.ap_actor["inbox"]
+
+    @property
+    def outbox_url(self) -> str:
+        return self.ap_actor["outbox"]
+
+    @property
+    def shared_inbox_url(self) -> str:
+        return self.ap_actor.get("endpoints", {}).get("sharedInbox") or self.inbox_url
+
+    @property
+    def icon_url(self) -> str | None:
+        if icon := self.ap_actor.get("icon"):
+            return icon.get("url")
+        return None
+
+    @property
+    def icon_media_type(self) -> str | None:
+        if icon := self.ap_actor.get("icon"):
+            return icon.get("mediaType")
+        return None
+
+    @property
+    def image_url(self) -> str | None:
+        if image := self.ap_actor.get("image"):
+            return image.get("url")
+        return None
+
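+    # Used for HTTP signature verification: peers resolve `publicKey.id`
+    # (e.g. https://server.example/users/alice#main-key, an illustrative value)
+    # and check signatures against `publicKeyPem`.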
+    @property
+    def public_key_as_pem(self) -> str:
+        return self.ap_actor["publicKey"]["publicKeyPem"]
+
+    @property
+    def public_key_id(self) -> str:
+        return self.ap_actor["publicKey"]["id"]
+
+    @property
+    def proxied_icon_url(self) -> str:
+        if self.icon_url:
+            return media.proxied_media_url(self.icon_url)
+        else:
+            return BASE_URL + "/static/nopic.png"
+
+    @property
+    def resized_icon_url(self) -> str:
+        if self.icon_url:
+            return media.resized_media_url(self.icon_url, 50)
+        else:
+            return BASE_URL + "/static/nopic.png"
+
+    @property
+    def tags(self) -> list[ap.RawObject]:
+        return ap.as_list(self.ap_actor.get("tag", []))
+
+    @property
+    def followers_collection_id(self) -> str | None:
+        return self.ap_actor.get("followers")
+
+    @cached_property
+    def attachments(self) -> list[ap.RawObject]:
+        return ap.as_list(self.ap_actor.get("attachment", []))
+
+    @cached_property
+    def moved_to(self) -> str | None:
+        return self.ap_actor.get("movedTo")
+
+    @cached_property
+    def server(self) -> str:
+        return urlparse(self.ap_id).hostname  # type: ignore
+
+
+class RemoteActor(Actor):
+    def __init__(self, ap_actor: ap.RawObject, handle: str | None = None) -> None:
+        if (ap_type := ap_actor.get("type")) not in ap.ACTOR_TYPES:
+            raise ValueError(f"Unexpected actor type: {ap_type}")
+
+        self._ap_actor = ap_actor
+        self._ap_type = ap_type
+
+        if handle is None:
+            handle = _handle(ap_actor)
+
+        self._handle = handle
+
+    @property
+    def ap_actor(self) -> ap.RawObject:
+        return self._ap_actor
+
+    @property
+    def ap_type(self) -> str:
+        return self._ap_type
+
+    @property
+    def is_from_db(self) -> bool:
+        return False
+
+    @property
+    def handle(self) -> str:
+        return self._handle
+
+
+LOCAL_ACTOR = RemoteActor(ap_actor=ap.ME, handle=f"@{USERNAME}@{WEBFINGER_DOMAIN}")
+
+
+async def save_actor(db_session: AsyncSession, ap_actor: ap.RawObject) -> "ActorModel":
+    from app import models
+
+    # NB: the parentheses around the assignment expression matter, otherwise
+    # `ap_type` would be bound to the boolean result of the `not in` test
+    if (ap_type := ap_actor.get("type")) not in ap.ACTOR_TYPES:
+        raise ValueError(f"Invalid type {ap_type} for actor {ap_actor}")
+
+    actor = models.Actor(
+        ap_id=ap.get_id(ap_actor["id"]),
+        ap_actor=ap_actor,
+        ap_type=ap.as_list(ap_actor["type"])[0],
+        handle=_handle(ap_actor),
+    )
+    db_session.add(actor)
+    await db_session.flush()
+    await db_session.refresh(actor)
+    return actor
+
+
+async def fetch_actor(
+    db_session: AsyncSession,
+    actor_id: str,
+    save_if_not_found: bool = True,
+) -> "ActorModel":
+    if actor_id == LOCAL_ACTOR.ap_id:
+        raise ValueError("local actor should not be fetched")
+    from app import models
+
+    existing_actor = (
+        await db_session.scalars(
+            select(models.Actor).where(
+                models.Actor.ap_id == actor_id,
+            )
+        )
+    ).one_or_none()
+    if existing_actor:
+        if existing_actor.is_deleted:
+            raise ap.ObjectNotFoundError(f"{actor_id} was deleted")
+
+        if now() - as_utc(existing_actor.updated_at) > timedelta(hours=24):
+            logger.info(
+                f"Refreshing {actor_id=} last updated {existing_actor.updated_at}"
+            )
+            try:
+                ap_actor = await ap.fetch(actor_id)
+                await update_actor_if_needed(
+                    db_session,
+                    existing_actor,
+                    RemoteActor(ap_actor),
+                )
+                return existing_actor
+            except Exception:
+                logger.exception(f"Failed to refresh {actor_id}")
+                # If we fail to refresh the actor, return the cached one
+                return existing_actor
+        else:
+            return existing_actor
+
+    if save_if_not_found:
+        ap_actor = await ap.fetch(actor_id)
+        # Some servers use a URL where we expect an ID, or use a different casing
+        # (like Birdsite LIVE), which means the actor may already be in the DB
+        existing_actor_by_url = (
+            await db_session.scalars(
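+                # Look it up again using the ID advertised in the fetched
+                # payload, which may differ from the IRI we originally requested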
+                select(models.Actor).where(
+                    models.Actor.ap_id == ap.get_id(ap_actor),
+                )
+            )
+        ).one_or_none()
+        if existing_actor_by_url:
+            # Update the actor as we had to fetch it anyway
+            await update_actor_if_needed(
+                db_session,
+                existing_actor_by_url,
+                RemoteActor(ap_actor),
+            )
+            return existing_actor_by_url
+
+        return await save_actor(db_session, ap_actor)
+    else:
+        raise ap.ObjectNotFoundError(actor_id)
+
+
+async def update_actor_if_needed(
+    db_session: AsyncSession,
+    actor_in_db: "ActorModel",
+    ra: RemoteActor,
+) -> None:
+    # Check if we actually need to update the actor in DB
+    if _actor_hash(ra) != _actor_hash(actor_in_db):
+        actor_in_db.ap_actor = ra.ap_actor
+        actor_in_db.handle = ra.handle
+        actor_in_db.ap_type = ra.ap_type
+
+        actor_in_db.updated_at = now()
+        await db_session.flush()
+
+
+@dataclass
+class ActorMetadata:
+    ap_actor_id: str
+    is_following: bool
+    is_follower: bool
+    is_follow_request_sent: bool
+    is_follow_request_rejected: bool
+    outbox_follow_ap_id: str | None
+    inbox_follow_ap_id: str | None
+    moved_to: typing.Optional["ActorModel"]
+    has_blocked_local_actor: bool
+
+
+ActorsMetadata = dict[str, ActorMetadata]
+
+
+async def get_actors_metadata(
+    db_session: AsyncSession,
+    actors: list[Union["ActorModel", "RemoteActor"]],
+) -> ActorsMetadata:
+    from app import models
+
+    ap_actor_ids = [actor.ap_id for actor in actors]
+    followers = {
+        follower.ap_actor_id: follower.inbox_object.ap_id
+        for follower in (
+            await db_session.scalars(
+                select(models.Follower)
+                .where(models.Follower.ap_actor_id.in_(ap_actor_ids))
+                .options(joinedload(models.Follower.inbox_object))
+            )
+        )
+        .unique()
+        .all()
+    }
+    following = {
+        following.ap_actor_id
+        for following in await db_session.execute(
+            select(models.Following.ap_actor_id).where(
+                models.Following.ap_actor_id.in_(ap_actor_ids)
+            )
+        )
+    }
+    sent_follow_requests = {
+        follow_req.ap_object["object"]: follow_req.ap_id
+        for follow_req in await db_session.execute(
+            select(models.OutboxObject.ap_object, models.OutboxObject.ap_id).where(
+                models.OutboxObject.ap_type == "Follow",
+                models.OutboxObject.undone_by_outbox_object_id.is_(None),
+                models.OutboxObject.activity_object_ap_id.in_(ap_actor_ids),
+            )
+        )
+    }
+    rejected_follow_requests = {
+        reject.activity_object_ap_id
+        for reject in await db_session.execute(
+            select(models.InboxObject.activity_object_ap_id).where(
+                models.InboxObject.ap_type == "Reject",
+                models.InboxObject.ap_actor_id.in_(ap_actor_ids),
+            )
+        )
+    }
+    blocks = {
+        block.ap_actor_id
+        for block in await db_session.execute(
+            select(models.InboxObject.ap_actor_id).where(
+                models.InboxObject.ap_type == "Block",
+                models.InboxObject.undone_by_inbox_object_id.is_(None),
+                models.InboxObject.ap_actor_id.in_(ap_actor_ids),
+            )
+        )
+    }
+
+    idx: ActorsMetadata = {}
+    for actor in actors:
+        if not actor.ap_id:
+            raise ValueError("Should never happen")
+        moved_to = None
+        if actor.moved_to:
+            try:
+                moved_to = await fetch_actor(
+                    db_session,
+                    actor.moved_to,
+                    save_if_not_found=False,
+                )
+            except ap.ObjectNotFoundError:
+                pass
+            except Exception:
+                logger.exception(f"Failed to fetch {actor.moved_to=}")
+
+        idx[actor.ap_id] = ActorMetadata(
+            ap_actor_id=actor.ap_id,
+            is_following=actor.ap_id in following,
+            is_follower=actor.ap_id in followers,
+            is_follow_request_sent=actor.ap_id in sent_follow_requests,
+            is_follow_request_rejected=bool(
+                sent_follow_requests[actor.ap_id] in rejected_follow_requests
+            )
+            if actor.ap_id in sent_follow_requests
+            else False,
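+            # AP IDs of the original outgoing/incoming Follow activities, if any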
+            outbox_follow_ap_id=sent_follow_requests.get(actor.ap_id),
+            inbox_follow_ap_id=followers.get(actor.ap_id),
+            moved_to=moved_to,
+            has_blocked_local_actor=actor.ap_id in blocks,
+        )
+    return idx
+
+
+def _actor_hash(actor: Actor) -> bytes:
+    """Used to detect when an actor is updated"""
+    h = hashlib.blake2b(digest_size=32)
+    h.update(actor.ap_id.encode())
+    h.update(actor.handle.encode())
+
+    if actor.name:
+        h.update(actor.name.encode())
+
+    if actor.summary:
+        h.update(actor.summary.encode())
+
+    if actor.url:
+        h.update(actor.url.encode())
+
+    h.update(actor.display_name.encode())
+
+    if actor.icon_url:
+        h.update(actor.icon_url.encode())
+
+    if actor.image_url:
+        h.update(actor.image_url.encode())
+
+    if actor.attachments:
+        for a in actor.attachments:
+            if a.get("type") != "PropertyValue":
+                continue
+
+            h.update(a["name"].encode())
+            h.update(a["value"].encode())
+
+    h.update(actor.public_key_id.encode())
+    h.update(actor.public_key_as_pem.encode())
+
+    if actor.moved_to:
+        h.update(actor.moved_to.encode())
+
+    return h.digest()
diff --git a/app/admin.py b/app/admin.py
new file mode 100644
index 0000000..226b193
--- /dev/null
+++ b/app/admin.py
@@ -0,0 +1,1282 @@
+from datetime import datetime
+from urllib.parse import quote
+
+import httpx
+from fastapi import APIRouter
+from fastapi import Cookie
+from fastapi import Depends
+from fastapi import Form
+from fastapi import Request
+from fastapi import UploadFile
+from fastapi.exceptions import HTTPException
+from fastapi.responses import RedirectResponse
+from loguru import logger
+from sqlalchemy import and_
+from sqlalchemy import delete
+from sqlalchemy import func
+from sqlalchemy import or_
+from sqlalchemy import select
+from sqlalchemy.orm import joinedload
+
+from app import activitypub as ap
+from app import boxes
+from app import models
+from app import templates
+from app.actor import LOCAL_ACTOR
+from app.actor import fetch_actor
+from app.actor import get_actors_metadata
+from app.boxes import get_inbox_object_by_ap_id
+from app.boxes import get_outbox_object_by_ap_id
+from app.boxes import send_block
+from app.boxes import send_follow
+from app.boxes import send_unblock
+from app.config import EMOJIS
+from app.config import SESSION_TIMEOUT
+from app.config import generate_csrf_token
+from app.config import session_serializer
+from app.config import verify_csrf_token
+from app.config import verify_password
+from app.database import AsyncSession
+from app.database import get_db_session
+from app.lookup import lookup
+from app.templates import is_current_user_admin
+from app.uploads import save_upload
+from app.utils import pagination
+from app.utils.emoji import EMOJIS_BY_NAME
+
+
+async def user_session_or_redirect(
+    request: Request,
+    session: str | None = Cookie(default=None),
+) -> None:
+    if request.method == "POST":
+        form_data = await request.form()
+        if "redirect_url" in form_data:
+            redirect_url = form_data["redirect_url"]
+        else:
+            redirect_url = request.url_for("admin_stream")
+    else:
+        redirect_url = str(request.url)
+
+    _RedirectToLoginPage = HTTPException(
+        status_code=302,
+        headers={
+            "Location": request.url_for("login") + f"?redirect={quote(redirect_url)}"
+        },
+    )
+
+    if not session:
+        logger.info("No existing admin session")
+        raise _RedirectToLoginPage
+
+    try:
+        loaded_session = session_serializer.loads(session, max_age=SESSION_TIMEOUT)
+    except Exception:
+        logger.exception("Failed to validate admin session")
+        raise _RedirectToLoginPage
+
+    if not loaded_session.get("is_logged_in"):
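+        # The cookie deserialized fine and is not expired, but the session
+        # was never flagged as logged-in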
logger.info(f"Admin session invalidated: {loaded_session}") + raise _RedirectToLoginPage + + return None + + +router = APIRouter( + dependencies=[Depends(user_session_or_redirect)], +) +unauthenticated_router = APIRouter() + + +@router.get("/lookup") +async def get_lookup( + request: Request, + query: str | None = None, + db_session: AsyncSession = Depends(get_db_session), +) -> templates.TemplateResponse | RedirectResponse: + error = None + ap_object = None + actors_metadata = {} + if query: + try: + ap_object = await lookup(db_session, query) + except httpx.TimeoutException: + error = ap.FetchErrorTypeEnum.TIMEOUT + except (ap.ObjectNotFoundError, ap.ObjectIsGoneError): + error = ap.FetchErrorTypeEnum.NOT_FOUND + except (ap.ObjectUnavailableError): + error = ap.FetchErrorTypeEnum.UNAUHTORIZED + except Exception: + logger.exception(f"Failed to lookup {query}") + error = ap.FetchErrorTypeEnum.INTERNAL_ERROR + else: + if ap_object.ap_type in ap.ACTOR_TYPES: + try: + await fetch_actor( + db_session, ap_object.ap_id, save_if_not_found=False + ) + except ap.ObjectNotFoundError: + pass + else: + return RedirectResponse( + request.url_for("admin_profile") + + f"?actor_id={ap_object.ap_id}", + status_code=302, + ) + + actors_metadata = await get_actors_metadata( + db_session, [ap_object] # type: ignore + ) + else: + # Check if the object is in the inbox + requested_object = await boxes.get_anybox_object_by_ap_id( + db_session, ap_object.ap_id + ) + if requested_object: + return RedirectResponse( + request.url_for("admin_object") + + f"?ap_id={ap_object.ap_id}#" + + requested_object.permalink_id, + status_code=302, + ) + + actors_metadata = await get_actors_metadata( + db_session, [ap_object.actor] # type: ignore + ) + + return await templates.render_template( + db_session, + request, + "lookup.html", + { + "query": query, + "ap_object": ap_object, + "actors_metadata": actors_metadata, + "error": error, + }, + ) + + +@router.get("/new") +async def admin_new( + request: Request, + query: str | None = None, + in_reply_to: str | None = None, + with_content: str | None = None, + with_visibility: str | None = None, + db_session: AsyncSession = Depends(get_db_session), +) -> templates.TemplateResponse: + content = "" + content_warning = None + in_reply_to_object = None + if in_reply_to: + in_reply_to_object = await boxes.get_anybox_object_by_ap_id( + db_session, in_reply_to + ) + if not in_reply_to_object: + logger.info(f"Saving unknwown object {in_reply_to}") + raw_object = await ap.fetch(in_reply_to) + await boxes.save_object_to_inbox(db_session, raw_object) + await db_session.commit() + in_reply_to_object = await boxes.get_anybox_object_by_ap_id( + db_session, in_reply_to + ) + + # Add mentions to the initial note content + if not in_reply_to_object: + raise ValueError(f"Unknown object {in_reply_to=}") + if in_reply_to_object.actor.ap_id != LOCAL_ACTOR.ap_id: + content += f"{in_reply_to_object.actor.handle} " + for tag in in_reply_to_object.tags: + if tag.get("type") == "Mention" and tag["name"] != LOCAL_ACTOR.handle: + try: + mentioned_actor = await fetch_actor(db_session, tag["href"]) + content += f"{mentioned_actor.handle} " + except Exception: + logger.exception(f"Failed to lookup {mentioned_actor}") + + # Copy the content warning if any + if in_reply_to_object.summary: + content_warning = in_reply_to_object.summary + elif with_content: + content += f"{with_content} " + + return await templates.render_template( + db_session, + request, + "admin_new.html", + { + "in_reply_to_object": 
in_reply_to_object, + "content": content, + "content_warning": content_warning, + "visibility_choices": [ + (v.name, ap.VisibilityEnum.get_display_name(v)) + for v in ap.VisibilityEnum + ], + "visibility": with_visibility, + "emojis": EMOJIS.split(" "), + "custom_emojis": sorted( + [dat for name, dat in EMOJIS_BY_NAME.items()], + key=lambda obj: obj["name"], + ), + }, + ) + + +@router.get("/bookmarks") +async def admin_bookmarks( + request: Request, + db_session: AsyncSession = Depends(get_db_session), +) -> templates.TemplateResponse: + # TODO: support pagination + stream = ( + ( + await db_session.scalars( + select(models.InboxObject) + .where( + models.InboxObject.ap_type.in_( + ["Note", "Article", "Video", "Announce"] + ), + models.InboxObject.is_bookmarked.is_(True), + models.InboxObject.is_deleted.is_(False), + ) + .options( + joinedload(models.InboxObject.relates_to_inbox_object), + joinedload(models.InboxObject.relates_to_outbox_object).options( + joinedload( + models.OutboxObject.outbox_object_attachments + ).options(joinedload(models.OutboxObjectAttachment.upload)), + ), + joinedload(models.InboxObject.actor), + ) + .order_by(models.InboxObject.ap_published_at.desc()) + .limit(20) + ) + ) + .unique() + .all() + ) + return await templates.render_template( + db_session, + request, + "admin_stream.html", + { + "stream": stream, + }, + ) + + +@router.get("/stream") +async def admin_stream( + request: Request, + db_session: AsyncSession = Depends(get_db_session), + cursor: str | None = None, +) -> templates.TemplateResponse: + where = [ + models.InboxObject.is_hidden_from_stream.is_(False), + models.InboxObject.is_deleted.is_(False), + ] + if cursor: + where.append( + models.InboxObject.ap_published_at < pagination.decode_cursor(cursor) + ) + + page_size = 20 + remaining_count = await db_session.scalar( + select(func.count(models.InboxObject.id)).where(*where) + ) + q = select(models.InboxObject).where(*where) + + inbox = ( + ( + await db_session.scalars( + q.options( + joinedload(models.InboxObject.relates_to_inbox_object).options( + joinedload(models.InboxObject.actor) + ), + joinedload(models.InboxObject.relates_to_outbox_object).options( + joinedload( + models.OutboxObject.outbox_object_attachments + ).options(joinedload(models.OutboxObjectAttachment.upload)), + ), + joinedload(models.InboxObject.actor), + ) + .order_by(models.InboxObject.ap_published_at.desc()) + .limit(20) + ) + ) + .unique() + .all() + ) + + next_cursor = ( + pagination.encode_cursor(inbox[-1].ap_published_at) + if inbox and remaining_count > page_size + else None + ) + + actors_metadata = await get_actors_metadata( + db_session, + [ + inbox_object.actor + for inbox_object in inbox + if inbox_object.ap_type == "Follow" + ], + ) + + return await templates.render_template( + db_session, + request, + "admin_inbox.html", + { + "inbox": inbox, + "actors_metadata": actors_metadata, + "next_cursor": next_cursor, + "show_filters": False, + }, + ) + + +@router.get("/inbox") +async def admin_inbox( + request: Request, + db_session: AsyncSession = Depends(get_db_session), + filter_by: str | None = None, + cursor: str | None = None, +) -> templates.TemplateResponse: + where = [ + models.InboxObject.ap_type.not_in( + [ + "Accept", + "Delete", + "Create", + "Update", + "Undo", + "Read", + "Reject", + "Add", + "Remove", + "EmojiReact", + ] + ), + models.InboxObject.is_deleted.is_(False), + models.InboxObject.is_transient.is_(False), + ] + if filter_by: + where.append(models.InboxObject.ap_type == filter_by) + if cursor: + 
where.append(
+            models.InboxObject.ap_published_at < pagination.decode_cursor(cursor)
+        )
+
+    page_size = 20
+    remaining_count = await db_session.scalar(
+        select(func.count(models.InboxObject.id)).where(*where)
+    )
+    q = select(models.InboxObject).where(*where)
+
+    inbox = (
+        (
+            await db_session.scalars(
+                q.options(
+                    joinedload(models.InboxObject.relates_to_inbox_object).options(
+                        joinedload(models.InboxObject.actor)
+                    ),
+                    joinedload(models.InboxObject.relates_to_outbox_object).options(
+                        joinedload(
+                            models.OutboxObject.outbox_object_attachments
+                        ).options(joinedload(models.OutboxObjectAttachment.upload)),
+                    ),
+                    joinedload(models.InboxObject.actor),
+                )
+                .order_by(models.InboxObject.ap_published_at.desc())
+                .limit(20)
+            )
+        )
+        .unique()
+        .all()
+    )
+
+    next_cursor = (
+        pagination.encode_cursor(inbox[-1].ap_published_at)
+        if inbox and remaining_count > page_size
+        else None
+    )
+
+    actors_metadata = await get_actors_metadata(
+        db_session,
+        [
+            inbox_object.actor
+            for inbox_object in inbox
+            if inbox_object.ap_type == "Follow"
+        ],
+    )
+
+    return await templates.render_template(
+        db_session,
+        request,
+        "admin_inbox.html",
+        {
+            "inbox": inbox,
+            "actors_metadata": actors_metadata,
+            "next_cursor": next_cursor,
+            "show_filters": True,
+        },
+    )
+
+
+@router.get("/direct_messages")
+async def admin_direct_messages(
+    request: Request,
+    db_session: AsyncSession = Depends(get_db_session),
+    cursor: str | None = None,
+) -> templates.TemplateResponse:
+    # Building the DM threads is a bit complex in terms of queries,
+    # but it does not require extra tables to index/manage threads
+
+    inbox_convos = (
+        (
+            await db_session.execute(
+                select(
+                    models.InboxObject.ap_context,
+                    models.InboxObject.actor_id,
+                    func.count(1).label("count"),
+                    func.max(models.InboxObject.ap_published_at).label(
+                        "most_recent_date"
+                    ),
+                )
+                .where(
+                    models.InboxObject.visibility == ap.VisibilityEnum.DIRECT,
+                    models.InboxObject.ap_context.is_not(None),
+                    # Skip transient objects like poll replies
+                    models.InboxObject.is_transient.is_(False),
+                    models.InboxObject.is_deleted.is_(False),
+                )
+                .group_by(models.InboxObject.ap_context, models.InboxObject.actor_id)
+            )
+        )
+        .unique()
+        .all()
+    )
+    outbox_convos = (
+        (
+            await db_session.execute(
+                select(
+                    models.OutboxObject.ap_context,
+                    func.count(1).label("count"),
+                    func.max(models.OutboxObject.ap_published_at).label(
+                        "most_recent_date"
+                    ),
+                )
+                .where(
+                    models.OutboxObject.visibility == ap.VisibilityEnum.DIRECT,
+                    models.OutboxObject.ap_context.is_not(None),
+                    # Skip transient objects like poll replies
+                    models.OutboxObject.is_transient.is_(False),
+                    models.OutboxObject.is_deleted.is_(False),
+                )
+                .group_by(models.OutboxObject.ap_context)
+            )
+        )
+        .unique()
+        .all()
+    )
+
+    # Build a "threads index" by combining objects from the inbox and outbox
+    convos = {}
+    for inbox_convo in inbox_convos:
+        if inbox_convo.ap_context not in convos:
+            convos[inbox_convo.ap_context] = {
+                "actor_ids": {inbox_convo.actor_id},
+                "count": inbox_convo.count,
+                "most_recent_from_inbox": inbox_convo.most_recent_date,
+                "most_recent_from_outbox": datetime.min,
+            }
+        else:
+            convos[inbox_convo.ap_context]["actor_ids"].add(inbox_convo.actor_id)
+            convos[inbox_convo.ap_context]["count"] += inbox_convo.count
+            convos[inbox_convo.ap_context]["most_recent_from_inbox"] = max(
+                inbox_convo.most_recent_date,
+                convos[inbox_convo.ap_context]["most_recent_from_inbox"],
+            )
+
+    for outbox_convo in outbox_convos:
+        if outbox_convo.ap_context not in convos:
+            convos[outbox_convo.ap_context] = {
+                "actor_ids": set(),
+                "count": outbox_convo.count,
+                "most_recent_from_inbox": datetime.min,
+                "most_recent_from_outbox": outbox_convo.most_recent_date,
+            }
+        else:
+            convos[outbox_convo.ap_context]["count"] += outbox_convo.count
+            convos[outbox_convo.ap_context]["most_recent_from_outbox"] = max(
+                outbox_convo.most_recent_date,
+                convos[outbox_convo.ap_context]["most_recent_from_outbox"],
+            )
+
+    # Fetch the latest object for each thread
+    convos_with_last_from_inbox = []
+    convos_with_last_from_outbox = []
+    for context, convo in convos.items():
+        if convo["most_recent_from_inbox"] > convo["most_recent_from_outbox"]:
+            convos_with_last_from_inbox.append(
+                and_(
+                    models.InboxObject.ap_context == context,
+                    models.InboxObject.ap_published_at
+                    == convo["most_recent_from_inbox"],
+                )
+            )
+        else:
+            convos_with_last_from_outbox.append(
+                and_(
+                    models.OutboxObject.ap_context == context,
+                    models.OutboxObject.ap_published_at
+                    == convo["most_recent_from_outbox"],
+                )
+            )
+    last_from_inbox = (
+        (
+            (
+                await db_session.scalars(
+                    select(models.InboxObject)
+                    .where(or_(*convos_with_last_from_inbox))
+                    .options(
+                        joinedload(models.InboxObject.actor),
+                    )
+                )
+            )
+            .unique()
+            .all()
+        )
+        if convos_with_last_from_inbox
+        else []
+    )
+    last_from_outbox = (
+        (
+            (
+                await db_session.scalars(
+                    select(models.OutboxObject)
+                    .where(or_(*convos_with_last_from_outbox))
+                    .options(
+                        joinedload(
+                            models.OutboxObject.outbox_object_attachments
+                        ).options(joinedload(models.OutboxObjectAttachment.upload)),
+                    )
+                )
+            )
+            .unique()
+            .all()
+        )
+        if convos_with_last_from_outbox
+        else []
+    )
+
+    # Build the template response
+    threads = []
+    for anybox_object in sorted(
+        last_from_inbox + last_from_outbox,
+        key=lambda x: x.ap_published_at,
+        reverse=True,
+    ):
+        convo = convos[anybox_object.ap_context]
+        actors = list(
+            (
+                await db_session.execute(
+                    select(models.Actor).where(models.Actor.id.in_(convo["actor_ids"]))
+                )
+            ).scalars()
+        )
+        # If this message from the outbox starts a thread with no replies,
+        # look at the mentions
+        if not actors and anybox_object.is_from_outbox:
+            actors = (  # type: ignore
+                await db_session.execute(
+                    select(models.Actor).where(
+                        models.Actor.ap_id.in_(
+                            mention["href"]
+                            for mention in anybox_object.tags
+                            if mention["type"] == "Mention"
+                        )
+                    )
+                )
+            ).scalars()
+        threads.append((anybox_object, convo, actors))
+
+    return await templates.render_template(
+        db_session,
+        request,
+        "admin_direct_messages.html",
+        {
+            "threads": threads,
+        },
+    )
+
+
+@router.get("/outbox")
+async def admin_outbox(
+    request: Request,
+    db_session: AsyncSession = Depends(get_db_session),
+    filter_by: str | None = None,
+    cursor: str | None = None,
+) -> templates.TemplateResponse:
+    where = [
+        models.OutboxObject.ap_type.not_in(["Accept", "Delete", "Update"]),
+        models.OutboxObject.is_deleted.is_(False),
+        models.OutboxObject.is_transient.is_(False),
+    ]
+    if filter_by:
+        where.append(models.OutboxObject.ap_type == filter_by)
+    if cursor:
+        where.append(
+            models.OutboxObject.ap_published_at < pagination.decode_cursor(cursor)
+        )
+
+    page_size = 20
+    remaining_count = await db_session.scalar(
+        select(func.count(models.OutboxObject.id)).where(*where)
+    )
+    q = select(models.OutboxObject).where(*where)
+
+    outbox = (
+        (
+            await db_session.scalars(
+                q.options(
+                    joinedload(models.OutboxObject.relates_to_inbox_object).options(
+                        joinedload(models.InboxObject.actor),
+                    ),
+                    joinedload(models.OutboxObject.relates_to_outbox_object),
+                    joinedload(models.OutboxObject.relates_to_actor),
+                    joinedload(models.OutboxObject.outbox_object_attachments).options(
+                        joinedload(models.OutboxObjectAttachment.upload)
+                    ),
+                )
+                .order_by(models.OutboxObject.ap_published_at.desc())
+                .limit(page_size)
+            )
+        )
+        .unique()
+        .all()
+    )
+
+    next_cursor = (
+        pagination.encode_cursor(outbox[-1].ap_published_at)
+        if outbox and remaining_count > page_size
+        else None
+    )
+
+    actors_metadata = await get_actors_metadata(
+        db_session,
+        [
+            outbox_object.relates_to_actor
+            for outbox_object in outbox
+            if outbox_object.relates_to_actor
+        ],
+    )
+
+    return await templates.render_template(
+        db_session,
+        request,
+        "admin_outbox.html",
+        {
+            "actors_metadata": actors_metadata,
+            "outbox": outbox,
+            "next_cursor": next_cursor,
+        },
+    )
+
+
+@router.get("/notifications")
+async def get_notifications(
+    request: Request,
+    db_session: AsyncSession = Depends(get_db_session),
+    cursor: str | None = None,
+) -> templates.TemplateResponse:
+    where = []
+    if cursor:
+        decoded_cursor = pagination.decode_cursor(cursor)
+        where.append(models.Notification.created_at < decoded_cursor)
+
+    page_size = 20
+    remaining_count = await db_session.scalar(
+        select(func.count(models.Notification.id)).where(*where)
+    )
+
+    notifications = (
+        (
+            await db_session.scalars(
+                select(models.Notification)
+                .where(*where)
+                .options(
+                    joinedload(models.Notification.actor),
+                    joinedload(models.Notification.inbox_object).options(
+                        joinedload(models.InboxObject.actor)
+                    ),
+                    joinedload(models.Notification.outbox_object).options(
+                        joinedload(
+                            models.OutboxObject.outbox_object_attachments
+                        ).options(joinedload(models.OutboxObjectAttachment.upload)),
+                    ),
+                    joinedload(models.Notification.webmention),
+                )
+                .order_by(models.Notification.created_at.desc())
+                .limit(page_size)
+            )
+        )
+        .unique()
+        .all()
+    )
+    actors_metadata = await get_actors_metadata(
+        db_session, [notif.actor for notif in notifications if notif.actor]
+    )
+    more_unread_count = 0
+    next_cursor = None
+
+    if notifications and remaining_count > page_size:
+        decoded_next_cursor = notifications[-1].created_at
+        next_cursor = pagination.encode_cursor(decoded_next_cursor)
+
+        # If there are more unread notifications on the "see more" page, we
+        # want to display the count next to the link
+        more_unread_count = await db_session.scalar(
+            select(func.count(models.Notification.id)).where(
+                models.Notification.is_new.is_(True),
+                models.Notification.created_at < decoded_next_cursor,
+            )
+        )
+
+    # Render the template before we change the new flag on notifications
+    tpl_resp = await templates.render_template(
+        db_session,
+        request,
+        "notifications.html",
+        {
+            "notifications": notifications,
+            "actors_metadata": actors_metadata,
+            "next_cursor": next_cursor,
+            "more_unread_count": more_unread_count,
+        },
+    )
+
+    if len({notif.id for notif in notifications if notif.is_new}):
+        for notif in notifications:
+            notif.is_new = False
+        await db_session.commit()
+
+    return tpl_resp
+
+
+@router.get("/object")
+async def admin_object(
+    request: Request,
+    ap_id: str,
+    db_session: AsyncSession = Depends(get_db_session),
+) -> templates.TemplateResponse:
+    requested_object = await boxes.get_anybox_object_by_ap_id(db_session, ap_id)
+    if not requested_object or requested_object.is_deleted:
+        raise HTTPException(status_code=404)
+
+    replies_tree = await boxes.get_replies_tree(
+        db_session,
+        requested_object,
+        is_current_user_admin=True,
+    )
+
+    return await templates.render_template(
+        db_session,
+        request,
+        "object.html",
+
{"replies_tree": replies_tree}, + ) + + +@router.get("/profile") +async def admin_profile( + request: Request, + actor_id: str, + cursor: str | None = None, + db_session: AsyncSession = Depends(get_db_session), +) -> templates.TemplateResponse: + # TODO: show featured/pinned + actor = ( + await db_session.execute( + select(models.Actor).where(models.Actor.ap_id == actor_id) + ) + ).scalar_one_or_none() + if not actor: + raise HTTPException(status_code=404) + + actors_metadata = await get_actors_metadata(db_session, [actor]) + + where = [ + models.InboxObject.is_deleted.is_(False), + models.InboxObject.actor_id == actor.id, + models.InboxObject.ap_type.in_( + ["Note", "Article", "Video", "Page", "Announce"] + ), + ] + if cursor: + decoded_cursor = pagination.decode_cursor(cursor) + where.append(models.InboxObject.ap_published_at < decoded_cursor) + + page_size = 20 + remaining_count = await db_session.scalar( + select(func.count(models.InboxObject.id)).where(*where) + ) + + inbox_objects = ( + ( + await db_session.scalars( + select(models.InboxObject) + .where(*where) + .options( + joinedload(models.InboxObject.relates_to_inbox_object).options( + joinedload(models.InboxObject.actor) + ), + joinedload(models.InboxObject.relates_to_outbox_object).options( + joinedload( + models.OutboxObject.outbox_object_attachments + ).options(joinedload(models.OutboxObjectAttachment.upload)), + ), + joinedload(models.InboxObject.actor), + ) + .order_by(models.InboxObject.ap_published_at.desc()) + .limit(page_size) + ) + ) + .unique() + .all() + ) + + next_cursor = ( + pagination.encode_cursor(inbox_objects[-1].created_at) + if inbox_objects and remaining_count > page_size + else None + ) + + return await templates.render_template( + db_session, + request, + "admin_profile.html", + { + "actors_metadata": actors_metadata, + "actor": actor, + "inbox_objects": inbox_objects, + "next_cursor": next_cursor, + }, + ) + + +@router.post("/actions/force_delete") +async def admin_actions_force_delete( + request: Request, + ap_object_id: str = Form(), + redirect_url: str = Form(), + csrf_check: None = Depends(verify_csrf_token), + db_session: AsyncSession = Depends(get_db_session), +) -> RedirectResponse: + ap_object_to_delete = await get_inbox_object_by_ap_id(db_session, ap_object_id) + if not ap_object_to_delete: + raise ValueError(f"Cannot find {ap_object_id}") + + logger.info(f"Deleting {ap_object_to_delete.ap_type}/{ap_object_to_delete.ap_id}") + await boxes._revert_side_effect_for_deleted_object( + db_session, + None, + ap_object_to_delete, + None, + ) + ap_object_to_delete.is_deleted = True + await db_session.commit() + return RedirectResponse(redirect_url, status_code=302) + + +@router.post("/actions/force_delete_webmention") +async def admin_actions_force_delete_webmention( + request: Request, + webmention_id: int = Form(), + redirect_url: str = Form(), + csrf_check: None = Depends(verify_csrf_token), + db_session: AsyncSession = Depends(get_db_session), +) -> RedirectResponse: + webmention = await boxes.get_webmention_by_id(db_session, webmention_id) + if not webmention: + raise ValueError(f"Cannot find {webmention_id}") + if not webmention.outbox_object: + raise ValueError(f"Missing related outbox object for {webmention_id}") + + # TODO: move this + logger.info(f"Deleting {webmention_id}") + webmention.is_deleted = True + await db_session.flush() + from app.webmentions import _handle_webmention_side_effects + + await _handle_webmention_side_effects( + db_session, webmention, webmention.outbox_object + ) + # 
Delete related notifications + notif_deletion_result = await db_session.execute( + delete(models.Notification) + .where(models.Notification.webmention_id == webmention.id) + .execution_options(synchronize_session=False) + ) + logger.info( + f"Deleted {notif_deletion_result.rowcount} notifications" # type: ignore + ) + await db_session.commit() + return RedirectResponse(redirect_url, status_code=302) + + +@router.post("/actions/follow") +async def admin_actions_follow( + request: Request, + ap_actor_id: str = Form(), + redirect_url: str = Form(), + csrf_check: None = Depends(verify_csrf_token), + db_session: AsyncSession = Depends(get_db_session), +) -> RedirectResponse: + logger.info(f"Following {ap_actor_id}") + await send_follow(db_session, ap_actor_id) + return RedirectResponse(redirect_url, status_code=302) + + +@router.post("/actions/block") +async def admin_actions_block( + request: Request, + ap_actor_id: str = Form(), + redirect_url: str = Form(), + csrf_check: None = Depends(verify_csrf_token), + db_session: AsyncSession = Depends(get_db_session), +) -> RedirectResponse: + await send_block(db_session, ap_actor_id) + return RedirectResponse(redirect_url, status_code=302) + + +@router.post("/actions/unblock") +async def admin_actions_unblock( + request: Request, + ap_actor_id: str = Form(), + redirect_url: str = Form(), + csrf_check: None = Depends(verify_csrf_token), + db_session: AsyncSession = Depends(get_db_session), +) -> RedirectResponse: + logger.info(f"Unblocking {ap_actor_id}") + await send_unblock(db_session, ap_actor_id) + return RedirectResponse(redirect_url, status_code=302) + + +@router.post("/actions/hide_announces") +async def admin_actions_hide_announces( + request: Request, + ap_actor_id: str = Form(), + redirect_url: str = Form(), + csrf_check: None = Depends(verify_csrf_token), + db_session: AsyncSession = Depends(get_db_session), +) -> RedirectResponse: + actor = await fetch_actor(db_session, ap_actor_id) + actor.are_announces_hidden_from_stream = True + await db_session.commit() + return RedirectResponse(redirect_url, status_code=302) + + +@router.post("/actions/show_announces") +async def admin_actions_show_announces( + request: Request, + ap_actor_id: str = Form(), + redirect_url: str = Form(), + csrf_check: None = Depends(verify_csrf_token), + db_session: AsyncSession = Depends(get_db_session), +) -> RedirectResponse: + actor = await fetch_actor(db_session, ap_actor_id) + actor.are_announces_hidden_from_stream = False + await db_session.commit() + return RedirectResponse(redirect_url, status_code=302) + + +@router.post("/actions/delete") +async def admin_actions_delete( + request: Request, + ap_object_id: str = Form(), + redirect_url: str = Form(), + csrf_check: None = Depends(verify_csrf_token), + db_session: AsyncSession = Depends(get_db_session), +) -> RedirectResponse: + await boxes.send_delete(db_session, ap_object_id) + return RedirectResponse(redirect_url, status_code=302) + + +@router.post("/actions/accept_incoming_follow") +async def admin_actions_accept_incoming_follow( + request: Request, + notification_id: int = Form(), + redirect_url: str = Form(), + csrf_check: None = Depends(verify_csrf_token), + db_session: AsyncSession = Depends(get_db_session), +) -> RedirectResponse: + await boxes.send_accept(db_session, notification_id) + return RedirectResponse(redirect_url, status_code=302) + + +@router.post("/actions/reject_incoming_follow") +async def admin_actions_reject_incoming_follow( + request: Request, + notification_id: int = Form(), + 
redirect_url: str = Form(),
+    csrf_check: None = Depends(verify_csrf_token),
+    db_session: AsyncSession = Depends(get_db_session),
+) -> RedirectResponse:
+    await boxes.send_reject(db_session, notification_id)
+    return RedirectResponse(redirect_url, status_code=302)
+
+
+@router.post("/actions/like")
+async def admin_actions_like(
+    request: Request,
+    ap_object_id: str = Form(),
+    redirect_url: str = Form(),
+    csrf_check: None = Depends(verify_csrf_token),
+    db_session: AsyncSession = Depends(get_db_session),
+) -> RedirectResponse:
+    await boxes.send_like(db_session, ap_object_id)
+    return RedirectResponse(redirect_url, status_code=302)
+
+
+@router.post("/actions/undo")
+async def admin_actions_undo(
+    request: Request,
+    ap_object_id: str = Form(),
+    redirect_url: str = Form(),
+    csrf_check: None = Depends(verify_csrf_token),
+    db_session: AsyncSession = Depends(get_db_session),
+) -> RedirectResponse:
+    await boxes.send_undo(db_session, ap_object_id)
+    return RedirectResponse(redirect_url, status_code=302)
+
+
+@router.post("/actions/announce")
+async def admin_actions_announce(
+    request: Request,
+    ap_object_id: str = Form(),
+    redirect_url: str = Form(),
+    csrf_check: None = Depends(verify_csrf_token),
+    db_session: AsyncSession = Depends(get_db_session),
+) -> RedirectResponse:
+    await boxes.send_announce(db_session, ap_object_id)
+    return RedirectResponse(redirect_url, status_code=302)
+
+
+@router.post("/actions/bookmark")
+async def admin_actions_bookmark(
+    request: Request,
+    ap_object_id: str = Form(),
+    redirect_url: str = Form(),
+    csrf_check: None = Depends(verify_csrf_token),
+    db_session: AsyncSession = Depends(get_db_session),
+) -> RedirectResponse:
+    inbox_object = await get_inbox_object_by_ap_id(db_session, ap_object_id)
+    if not inbox_object:
+        logger.info(f"Saving unknown object {ap_object_id}")
+        raw_object = await ap.fetch(ap_object_id)
+        inbox_object = await boxes.save_object_to_inbox(db_session, raw_object)
+    inbox_object.is_bookmarked = True
+    await db_session.commit()
+    return RedirectResponse(redirect_url, status_code=302)
+
+
+@router.post("/actions/unbookmark")
+async def admin_actions_unbookmark(
+    request: Request,
+    ap_object_id: str = Form(),
+    redirect_url: str = Form(),
+    csrf_check: None = Depends(verify_csrf_token),
+    db_session: AsyncSession = Depends(get_db_session),
+) -> RedirectResponse:
+    inbox_object = await get_inbox_object_by_ap_id(db_session, ap_object_id)
+    if not inbox_object:
+        raise ValueError("Should never happen")
+    inbox_object.is_bookmarked = False
+    await db_session.commit()
+    return RedirectResponse(redirect_url, status_code=302)
+
+
+@router.post("/actions/pin")
+async def admin_actions_pin(
+    request: Request,
+    ap_object_id: str = Form(),
+    redirect_url: str = Form(),
+    csrf_check: None = Depends(verify_csrf_token),
+    db_session: AsyncSession = Depends(get_db_session),
+) -> RedirectResponse:
+    outbox_object = await get_outbox_object_by_ap_id(db_session, ap_object_id)
+    if not outbox_object:
+        raise ValueError("Should never happen")
+    outbox_object.is_pinned = True
+    await db_session.commit()
+    return RedirectResponse(redirect_url, status_code=302)
+
+
+@router.post("/actions/unpin")
+async def admin_actions_unpin(
+    request: Request,
+    ap_object_id: str = Form(),
+    redirect_url: str = Form(),
+    csrf_check: None = Depends(verify_csrf_token),
+    db_session: AsyncSession = Depends(get_db_session),
+) -> RedirectResponse:
+    outbox_object = await get_outbox_object_by_ap_id(db_session, ap_object_id)
+    if not outbox_object:
+        raise ValueError("Should never happen")
+    outbox_object.is_pinned = False
+    await db_session.commit()
+    return RedirectResponse(redirect_url, status_code=302)
+
+
+@router.post("/actions/new")
+async def admin_actions_new(
+    request: Request,
+    files: list[UploadFile] = [],
+    content: str | None = Form(None),
+    redirect_url: str = Form(),
+    in_reply_to: str | None = Form(None),
+    content_warning: str | None = Form(None),
+    is_sensitive: bool = Form(False),
+    visibility: str = Form(),
+    poll_type: str | None = Form(None),
+    name: str | None = Form(None),
+    csrf_check: None = Depends(verify_csrf_token),
+    db_session: AsyncSession = Depends(get_db_session),
+) -> RedirectResponse:
+    if not content and not content_warning:
+        raise HTTPException(status_code=422, detail="Error: object must have content")
+
+    # Do like Mastodon: if there's only a CW with no content and some attachments,
+    # swap the CW and the content
+    if not content and content_warning and len(files) >= 1:
+        content = content_warning
+        is_sensitive = True
+        content_warning = None
+
+    if not content:
+        raise HTTPException(status_code=422, detail="Error: object must have content")
+
+    # XXX: for some reason, no files results in an empty single file
+    uploads = []
+    raw_form_data = await request.form()
+    if len(files) >= 1 and files[0].filename:
+        for f in files:
+            upload = await save_upload(db_session, f)
+            uploads.append((upload, f.filename, raw_form_data.get("alt_" + f.filename)))
+
+    ap_type = "Note"
+
+    poll_duration_in_minutes = None
+    poll_answers = None
+    if poll_type:
+        ap_type = "Question"
+        poll_answers = []
+        for i in ["1", "2", "3", "4"]:
+            if answer := raw_form_data.get(f"poll_answer_{i}"):
+                poll_answers.append(answer)
+
+        if not poll_answers or len(poll_answers) < 2:
+            raise ValueError("Question must have at least 2 answers")
+
+        poll_duration_in_minutes = int(raw_form_data["poll_duration"])
+    elif name:
+        ap_type = "Article"
+
+    public_id, _ = await boxes.send_create(
+        db_session,
+        ap_type=ap_type,
+        source=content,
+        uploads=uploads,
+        in_reply_to=in_reply_to or None,
+        visibility=ap.VisibilityEnum[visibility],
+        content_warning=content_warning or None,
+        is_sensitive=True if content_warning else is_sensitive,
+        poll_type=poll_type,
+        poll_answers=poll_answers,
+        poll_duration_in_minutes=poll_duration_in_minutes,
+        name=name,
+    )
+    return RedirectResponse(
+        request.url_for("outbox_by_public_id", public_id=public_id),
+        status_code=302,
+    )
+
+
+@router.post("/actions/vote")
+async def admin_actions_vote(
+    request: Request,
+    redirect_url: str = Form(),
+    in_reply_to: str = Form(),
+    csrf_check: None = Depends(verify_csrf_token),
+    db_session: AsyncSession = Depends(get_db_session),
+) -> RedirectResponse:
+    form_data = await request.form()
+    names = form_data.getlist("name")
+    logger.info(f"{names=}")
+    await boxes.send_vote(
+        db_session,
+        in_reply_to=in_reply_to,
+        names=names,
+    )
+    return RedirectResponse(redirect_url, status_code=302)
+
+
+@unauthenticated_router.get("/login")
+async def login(
+    request: Request,
+    db_session: AsyncSession = Depends(get_db_session),
+) -> templates.TemplateResponse | RedirectResponse:
+    if is_current_user_admin(request):
+        return RedirectResponse(request.url_for("admin_stream"), status_code=302)
+
+    return await templates.render_template(
+        db_session,
+        request,
+        "login.html",
+        {
+            "csrf_token": generate_csrf_token(),
+            "redirect": request.query_params.get("redirect", ""),
+        },
+    )
+
+
+@unauthenticated_router.post("/login")
+async def login_validation(
+
request: Request, + password: str = Form(), + redirect: str | None = Form(None), + csrf_check: None = Depends(verify_csrf_token), + db_session: AsyncSession = Depends(get_db_session), +) -> RedirectResponse | templates.TemplateResponse: + if not verify_password(password): + logger.warning("Invalid password") + return await templates.render_template( + db_session, + request, + "login.html", + { + "error": "Invalid password", + "csrf_token": generate_csrf_token(), + "redirect": request.query_params.get("redirect", ""), + }, + status_code=403, + ) + + resp = RedirectResponse( + redirect or request.url_for("admin_stream"), status_code=302 + ) + resp.set_cookie("session", session_serializer.dumps({"is_logged_in": True})) # type: ignore # noqa: E501 + + return resp + + +@router.get("/logout") +async def logout( + request: Request, +) -> RedirectResponse: + resp = RedirectResponse(request.url_for("index"), status_code=302) + resp.set_cookie("session", session_serializer.dumps({"is_logged_in": False})) # type: ignore # noqa: E501 + return resp diff --git a/app/ap_object.py b/app/ap_object.py new file mode 100644 index 0000000..77586aa --- /dev/null +++ b/app/ap_object.py @@ -0,0 +1,349 @@ +import hashlib +import mimetypes +from datetime import datetime +from functools import cached_property +from typing import Any + +import pydantic +from bs4 import BeautifulSoup # type: ignore +from mistletoe import markdown # type: ignore + +from app import activitypub as ap +from app.actor import LOCAL_ACTOR +from app.actor import Actor +from app.actor import RemoteActor +from app.config import ID +from app.media import proxied_media_url +from app.utils.datetime import now +from app.utils.datetime import parse_isoformat + + +class Object: + @property + def is_from_db(self) -> bool: + return False + + @property + def is_from_outbox(self) -> bool: + return False + + @property + def is_from_inbox(self) -> bool: + return False + + @cached_property + def ap_type(self) -> str: + return ap.as_list(self.ap_object["type"])[0] + + @property + def ap_object(self) -> ap.RawObject: + raise NotImplementedError + + @property + def ap_id(self) -> str: + return ap.get_id(self.ap_object["id"]) + + @property + def ap_actor_id(self) -> str: + return ap.get_actor_id(self.ap_object) + + @cached_property + def ap_published_at(self) -> datetime | None: + # TODO: default to None? or now()? 
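+        # "published" is the usual ActivityStreams property; "created" is
+        # checked below as a fallback, since some implementations appear to
+        # set it instead (assumption based on objects seen in the wild).
+        # E.g. both {"published": "2022-07-20T15:00:00Z"} and
+        # {"created": "2022-07-20T15:00:00Z"} would resolve to the same value.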
+ if "published" in self.ap_object: + return parse_isoformat(self.ap_object["published"]) + elif "created" in self.ap_object: + return parse_isoformat(self.ap_object["created"]) + return None + + @property + def actor(self) -> Actor: + raise NotImplementedError() + + @cached_property + def visibility(self) -> ap.VisibilityEnum: + return ap.object_visibility(self.ap_object, self.actor) + + @property + def ap_context(self) -> str | None: + return self.ap_object.get("context") or self.ap_object.get("conversation") + + @property + def sensitive(self) -> bool: + return self.ap_object.get("sensitive", False) + + @property + def tags(self) -> list[ap.RawObject]: + return ap.as_list(self.ap_object.get("tag", [])) + + @cached_property + def inlined_images(self) -> set[str]: + image_urls: set[str] = set() + if not self.content: + return image_urls + + soup = BeautifulSoup(self.content, "html5lib") + imgs = soup.find_all("img") + + for img in imgs: + if not img.attrs.get("src"): + continue + + image_urls.add(img.attrs["src"]) + + return image_urls + + @cached_property + def attachments(self) -> list["Attachment"]: + attachments = [] + for obj in ap.as_list(self.ap_object.get("attachment", [])): + if obj.get("type") == "PropertyValue": + continue + + if obj.get("type") == "Link": + attachments.append( + Attachment.parse_obj( + { + "proxiedUrl": None, + "resizedUrl": None, + "mediaType": None, + "type": "Link", + "url": obj["href"], + } + ) + ) + continue + + proxied_url = proxied_media_url(obj["url"]) + attachments.append( + Attachment.parse_obj( + { + "proxiedUrl": proxied_url, + "resizedUrl": proxied_url + "/740" + if obj.get("mediaType", "").startswith("image") + else None, + **obj, + } + ) + ) + + # Also add any video Link (for PeerTube compat) + if self.ap_type == "Video": + for link in ap.as_list(self.ap_object.get("url", [])): + if (isinstance(link, dict)) and link.get("type") == "Link": + if link.get("mediaType", "").startswith("video"): + proxied_url = proxied_media_url(link["href"]) + attachments.append( + Attachment( + type="Video", + mediaType=link["mediaType"], + url=link["href"], + proxiedUrl=proxied_url, + ) + ) + break + elif link.get("mediaType", "") == "application/x-mpegURL": + for tag in ap.as_list(link.get("tag", [])): + if tag.get("mediaType", "").startswith("video"): + proxied_url = proxied_media_url(tag["href"]) + attachments.append( + Attachment( + type="Video", + mediaType=tag["mediaType"], + url=tag["href"], + proxiedUrl=proxied_url, + ) + ) + break + return attachments + + @cached_property + def url(self) -> str | None: + obj_url = self.ap_object.get("url") + if isinstance(obj_url, str) and obj_url: + return obj_url + elif obj_url: + for u in ap.as_list(obj_url): + if u.get("type") == "Link": + return u["href"] + + if u["mediaType"] == "text/html": + return u["href"] + + return self.ap_id + + @cached_property + def content(self) -> str | None: + content = self.ap_object.get("content") + if not content: + return None + + # PeerTube returns the content as markdown + if self.ap_object.get("mediaType") == "text/markdown": + content = markdown(content) + + return content + + @property + def summary(self) -> str | None: + return self.ap_object.get("summary") + + @property + def name(self) -> str | None: + return self.ap_object.get("name") + + @cached_property + def permalink_id(self) -> str: + return ( + "permalink-" + + hashlib.md5( + self.ap_id.encode(), + usedforsecurity=False, + ).hexdigest() + ) + + @property + def activity_object_ap_id(self) -> str | None: + if "object" in 
self.ap_object: + return ap.get_id(self.ap_object["object"]) + + return None + + @property + def in_reply_to(self) -> str | None: + return self.ap_object.get("inReplyTo") + + @property + def is_local_reply(self) -> bool: + if not self.in_reply_to: + return False + + return bool( + self.in_reply_to.startswith(ID) and self.content # Hide votes from Question + ) + + @property + def is_in_reply_to_from_inbox(self) -> bool | None: + if not self.in_reply_to: + return None + + return not self.in_reply_to.startswith(LOCAL_ACTOR.ap_id) + + @property + def has_ld_signature(self) -> bool: + return bool(self.ap_object.get("signature")) + + @property + def is_poll_ended(self) -> bool: + if self.poll_end_time: + return now() > self.poll_end_time + return False + + @cached_property + def poll_items(self) -> list[ap.RawObject] | None: + return self.ap_object.get("oneOf") or self.ap_object.get("anyOf") + + @cached_property + def poll_end_time(self) -> datetime | None: + # Some polls may not have an end time + if self.ap_object.get("endTime"): + return parse_isoformat(self.ap_object["endTime"]) + + return None + + @cached_property + def poll_voters_count(self) -> int | None: + if not self.poll_items: + return None + # Only Mastodon set this attribute + if self.ap_object.get("votersCount"): + return self.ap_object["votersCount"] + else: + voters_count = 0 + for item in self.poll_items: + voters_count += item.get("replies", {}).get("totalItems", 0) + + return voters_count + + @cached_property + def is_one_of_poll(self) -> bool: + return bool(self.ap_object.get("oneOf")) + + +def _to_camel(string: str) -> str: + cased = "".join(word.capitalize() for word in string.split("_")) + return cased[0:1].lower() + cased[1:] + + +class BaseModel(pydantic.BaseModel): + class Config: + alias_generator = _to_camel + + +class Attachment(BaseModel): + type: str + media_type: str | None + name: str | None + url: str + + # Extra fields for the templates (and only for media) + proxied_url: str | None = None + resized_url: str | None = None + + width: int | None = None + height: int | None = None + + @property + def mimetype(self) -> str: + mimetype = self.media_type + if not mimetype: + mimetype, _ = mimetypes.guess_type(self.url) + + if not mimetype: + return "unknown" + + return mimetype.split("/")[-1] + + +class RemoteObject(Object): + def __init__(self, raw_object: ap.RawObject, actor: Actor): + self._raw_object = raw_object + self._actor = actor + + if self._actor.ap_id != ap.get_actor_id(self._raw_object): + raise ValueError(f"Invalid actor {self._actor.ap_id}") + + @classmethod + async def from_raw_object( + cls, + raw_object: ap.RawObject, + actor: Actor | None = None, + ): + # Pre-fetch the actor + actor_id = ap.get_actor_id(raw_object) + if actor_id == LOCAL_ACTOR.ap_id: + _actor = LOCAL_ACTOR + elif actor: + if actor.ap_id != actor_id: + raise ValueError( + f"Invalid actor, got {actor.ap_id}, " f"expected {actor_id}" + ) + _actor = actor # type: ignore + else: + _actor = RemoteActor( + ap_actor=await ap.fetch(ap.get_actor_id(raw_object)), + ) + + return cls(raw_object, _actor) + + @property + def og_meta(self) -> list[dict[str, Any]] | None: + return None + + @property + def ap_object(self) -> ap.RawObject: + return self._raw_object + + @property + def actor(self) -> Actor: + return self._actor diff --git a/app/boxes.py b/app/boxes.py new file mode 100644 index 0000000..109c210 --- /dev/null +++ b/app/boxes.py @@ -0,0 +1,2761 @@ +"""Actions related to the AP inbox/outbox.""" +import datetime +import uuid +from 
collections import defaultdict +from dataclasses import dataclass +from datetime import timedelta +from urllib.parse import urlparse + +import fastapi +import httpx +from loguru import logger +from sqlalchemy import delete +from sqlalchemy import func +from sqlalchemy import select +from sqlalchemy import update +from sqlalchemy.exc import IntegrityError +from sqlalchemy.orm import joinedload + +from app import activitypub as ap +from app import config +from app import ldsig +from app import models +from app.actor import LOCAL_ACTOR +from app.actor import Actor +from app.actor import RemoteActor +from app.actor import fetch_actor +from app.actor import save_actor +from app.actor import update_actor_if_needed +from app.ap_object import RemoteObject +from app.config import BASE_URL +from app.config import ID +from app.config import MANUALLY_APPROVES_FOLLOWERS +from app.config import set_moved_to +from app.config import stream_visibility_callback +from app.customization import ObjectInfo +from app.database import AsyncSession +from app.outgoing_activities import new_outgoing_activity +from app.source import dedup_tags +from app.source import markdownify +from app.uploads import upload_to_attachment +from app.utils import opengraph +from app.utils import webmentions +from app.utils.datetime import as_utc +from app.utils.datetime import now +from app.utils.datetime import parse_isoformat +from app.utils.facepile import WebmentionReply +from app.utils.text import slugify +from app.utils.url import is_hostname_blocked + +AnyboxObject = models.InboxObject | models.OutboxObject + + +def is_notification_enabled(notification_type: models.NotificationType) -> bool: + """Checks if a given notification type is enabled.""" + if notification_type.value == "pending_incoming_follower": + # This one cannot be disabled as it would prevent manually reviewing + # follow requests. 
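+        # For example (hypothetical config value): with
+        # `disabled_notifications = ["like", "announce"]`, Like and Announce
+        # notifications would be filtered out by the check below, while
+        # pending incoming follow requests would still generate notifications.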
+ return True + if notification_type.value in config.CONFIG.disabled_notifications: + return False + return True + + +def allocate_outbox_id() -> str: + return uuid.uuid4().hex + + +def outbox_object_id(outbox_id) -> str: + return f"{BASE_URL}/o/{outbox_id}" + + +async def save_outbox_object( + db_session: AsyncSession, + public_id: str, + raw_object: ap.RawObject, + relates_to_inbox_object_id: int | None = None, + relates_to_outbox_object_id: int | None = None, + relates_to_actor_id: int | None = None, + source: str | None = None, + is_transient: bool = False, + conversation: str | None = None, + slug: str | None = None, +) -> models.OutboxObject: + ro = await RemoteObject.from_raw_object(raw_object) + + outbox_object = models.OutboxObject( + public_id=public_id, + ap_type=ro.ap_type, + ap_id=ro.ap_id, + ap_context=ro.ap_context, + ap_object=ro.ap_object, + visibility=ro.visibility, + og_meta=await opengraph.og_meta_from_note(db_session, ro), + relates_to_inbox_object_id=relates_to_inbox_object_id, + relates_to_outbox_object_id=relates_to_outbox_object_id, + relates_to_actor_id=relates_to_actor_id, + activity_object_ap_id=ro.activity_object_ap_id, + is_hidden_from_homepage=True if ro.in_reply_to else False, + source=source, + is_transient=is_transient, + conversation=conversation, + slug=slug, + ) + db_session.add(outbox_object) + await db_session.flush() + await db_session.refresh(outbox_object) + + return outbox_object + + +async def send_unblock(db_session: AsyncSession, ap_actor_id: str) -> None: + actor = await fetch_actor(db_session, ap_actor_id) + + block_activity = ( + await db_session.scalars( + select(models.OutboxObject).where( + models.OutboxObject.activity_object_ap_id == actor.ap_id, + models.OutboxObject.is_deleted.is_(False), + ) + ) + ).one_or_none() + if not block_activity: + raise ValueError(f"No Block activity for {ap_actor_id}") + + await _send_undo(db_session, block_activity.ap_id) + + await db_session.commit() + + +async def send_block(db_session: AsyncSession, ap_actor_id: str) -> None: + logger.info(f"Blocking {ap_actor_id}") + actor = await fetch_actor(db_session, ap_actor_id) + actor.is_blocked = True + + # 1. Unfollow the actor + following = ( + await db_session.scalars( + select(models.Following) + .options(joinedload(models.Following.outbox_object)) + .where( + models.Following.ap_actor_id == actor.ap_id, + ) + ) + ).one_or_none() + if following: + await _send_undo(db_session, following.outbox_object.ap_id) + + # 2. If the blocked actor is a follower, reject the follow request + follower = ( + await db_session.scalars( + select(models.Follower) + .options(joinedload(models.Follower.inbox_object)) + .where( + models.Follower.ap_actor_id == actor.ap_id, + ) + ) + ).one_or_none() + if follower: + await _send_reject(db_session, actor, follower.inbox_object) + await db_session.delete(follower) + + # 3. Send a block + block_id = allocate_outbox_id() + block = { + "@context": ap.AS_EXTENDED_CTX, + "id": outbox_object_id(block_id), + "type": "Block", + "actor": LOCAL_ACTOR.ap_id, + "object": actor.ap_id, + } + outbox_object = await save_outbox_object( + db_session, + block_id, + block, + ) + if not outbox_object.id: + raise ValueError("Should never happen") + + await new_outgoing_activity(db_session, actor.inbox_url, outbox_object.id) + + # 4. 
Create a notification
+    if is_notification_enabled(models.NotificationType.BLOCK):
+        notif = models.Notification(
+            notification_type=models.NotificationType.BLOCK,
+            actor_id=actor.id,
+            outbox_object_id=outbox_object.id,
+        )
+        db_session.add(notif)
+
+    await db_session.commit()
+
+
+async def send_delete(db_session: AsyncSession, ap_object_id: str) -> None:
+    outbox_object_to_delete = await get_outbox_object_by_ap_id(
+        db_session, ap_object_id
+    )
+    if not outbox_object_to_delete:
+        raise ValueError(f"{ap_object_id} not found in the outbox")
+
+    delete_id = allocate_outbox_id()
+    # FIXME addressing
+    delete = {
+        "@context": ap.AS_EXTENDED_CTX,
+        "id": outbox_object_id(delete_id),
+        "type": "Delete",
+        "actor": ID,
+        "object": {
+            "type": "Tombstone",
+            "id": ap_object_id,
+        },
+    }
+    outbox_object = await save_outbox_object(
+        db_session,
+        delete_id,
+        delete,
+        relates_to_outbox_object_id=outbox_object_to_delete.id,
+    )
+    if not outbox_object.id:
+        raise ValueError("Should never happen")
+
+    outbox_object_to_delete.is_deleted = True
+    await db_session.flush()
+
+    # Compute the original recipients
+    recipients = await _compute_recipients(
+        db_session, outbox_object_to_delete.ap_object
+    )
+    for rcp in recipients:
+        await new_outgoing_activity(db_session, rcp, outbox_object.id)
+
+    # Revert side effects
+    if outbox_object_to_delete.in_reply_to:
+        replied_object = await get_anybox_object_by_ap_id(
+            db_session, outbox_object_to_delete.in_reply_to
+        )
+        if replied_object:
+            if replied_object.is_from_outbox:
+                # Different helper here because we also count webmentions
+                new_replies_count = await _get_outbox_replies_count(
+                    db_session, replied_object  # type: ignore
+                )
+            else:
+                new_replies_count = await _get_replies_count(
+                    db_session, replied_object.ap_id
+                )
+
+            replied_object.replies_count = new_replies_count
+        else:
+            logger.info(f"{outbox_object_to_delete.in_reply_to} not found")
+
+    await db_session.commit()
+
+
+async def send_like(db_session: AsyncSession, ap_object_id: str) -> None:
+    inbox_object = await get_inbox_object_by_ap_id(db_session, ap_object_id)
+    if not inbox_object:
+        logger.info(f"Saving unknown object {ap_object_id}")
+        raw_object = await ap.fetch(ap.get_id(ap_object_id))
+        await save_object_to_inbox(db_session, raw_object)
+        await db_session.commit()
+        # XXX: we need to reload it as lazy-loading the actor will fail
+        # (asyncio SQLAlchemy issue)
+        inbox_object = await get_inbox_object_by_ap_id(db_session, ap_object_id)
+        if not inbox_object:
+            raise ValueError("Should never happen")
+
+    like_id = allocate_outbox_id()
+    like = {
+        "@context": ap.AS_CTX,
+        "id": outbox_object_id(like_id),
+        "type": "Like",
+        "actor": ID,
+        "object": ap_object_id,
+    }
+    outbox_object = await save_outbox_object(
+        db_session, like_id, like, relates_to_inbox_object_id=inbox_object.id
+    )
+    if not outbox_object.id:
+        raise ValueError("Should never happen")
+
+    inbox_object.liked_via_outbox_object_ap_id = outbox_object.ap_id
+
+    await new_outgoing_activity(
+        db_session, inbox_object.actor.inbox_url, outbox_object.id
+    )
+    await db_session.commit()
+
+
+async def send_announce(db_session: AsyncSession, ap_object_id: str) -> None:
+    inbox_object = await get_inbox_object_by_ap_id(db_session, ap_object_id)
+    if not inbox_object:
+        logger.info(f"Saving unknown object {ap_object_id}")
+        raw_object = await ap.fetch(ap.get_id(ap_object_id))
+        await save_object_to_inbox(db_session, raw_object)
+        await db_session.commit()
+        # XXX: we need to reload it as lazy-loading the actor will fail
+        # (asyncio 
SQLAlchemy issue) + inbox_object = await get_inbox_object_by_ap_id(db_session, ap_object_id) + if not inbox_object: + raise ValueError("Should never happen") + + if inbox_object.visibility not in [ + ap.VisibilityEnum.PUBLIC, + ap.VisibilityEnum.UNLISTED, + ]: + raise ValueError("Cannot announce non-public object") + + announce_id = allocate_outbox_id() + announce = { + "@context": ap.AS_CTX, + "id": outbox_object_id(announce_id), + "type": "Announce", + "actor": ID, + "object": ap_object_id, + "to": [ap.AS_PUBLIC], + "cc": [ + f"{BASE_URL}/followers", + inbox_object.ap_actor_id, + ], + } + outbox_object = await save_outbox_object( + db_session, announce_id, announce, relates_to_inbox_object_id=inbox_object.id + ) + if not outbox_object.id: + raise ValueError("Should never happen") + + inbox_object.announced_via_outbox_object_ap_id = outbox_object.ap_id + + recipients = await _compute_recipients(db_session, announce) + for rcp in recipients: + await new_outgoing_activity(db_session, rcp, outbox_object.id) + + await db_session.commit() + + +async def send_follow(db_session: AsyncSession, ap_actor_id: str) -> None: + await _send_follow(db_session, ap_actor_id) + await db_session.commit() + + +async def _send_follow(db_session: AsyncSession, ap_actor_id: str) -> None: + actor = await fetch_actor(db_session, ap_actor_id) + + follow_id = allocate_outbox_id() + follow = { + "@context": ap.AS_CTX, + "id": outbox_object_id(follow_id), + "type": "Follow", + "actor": ID, + "object": ap_actor_id, + } + + outbox_object = await save_outbox_object( + db_session, follow_id, follow, relates_to_actor_id=actor.id + ) + if not outbox_object.id: + raise ValueError("Should never happen") + + await new_outgoing_activity(db_session, actor.inbox_url, outbox_object.id) + + # Caller should commit + + +async def send_undo(db_session: AsyncSession, ap_object_id: str) -> None: + await _send_undo(db_session, ap_object_id) + await db_session.commit() + + +async def _send_undo(db_session: AsyncSession, ap_object_id: str) -> None: + outbox_object_to_undo = await get_outbox_object_by_ap_id(db_session, ap_object_id) + if not outbox_object_to_undo: + raise ValueError(f"{ap_object_id} not found in the outbox") + + if outbox_object_to_undo.ap_type not in ["Follow", "Like", "Announce", "Block"]: + raise ValueError( + f"Cannot build Undo for {outbox_object_to_undo.ap_type} activity" + ) + + undo_id = allocate_outbox_id() + undo = { + "@context": ap.AS_CTX, + "id": outbox_object_id(undo_id), + "type": "Undo", + "actor": ID, + "object": ap.remove_context(outbox_object_to_undo.ap_object), + } + + outbox_object = await save_outbox_object( + db_session, + undo_id, + undo, + relates_to_outbox_object_id=outbox_object_to_undo.id, + ) + if not outbox_object.id: + raise ValueError("Should never happen") + + outbox_object_to_undo.undone_by_outbox_object_id = outbox_object.id + outbox_object_to_undo.is_deleted = True + + if outbox_object_to_undo.ap_type == "Follow": + if not outbox_object_to_undo.activity_object_ap_id: + raise ValueError("Should never happen") + followed_actor = await fetch_actor( + db_session, outbox_object_to_undo.activity_object_ap_id + ) + await new_outgoing_activity( + db_session, + followed_actor.inbox_url, + outbox_object.id, + ) + # Also remove the follow from the following collection + await db_session.execute( + delete(models.Following).where( + models.Following.ap_actor_id == followed_actor.ap_id + ) + ) + elif outbox_object_to_undo.ap_type == "Like": + liked_object_ap_id = 
outbox_object_to_undo.activity_object_ap_id + if not liked_object_ap_id: + raise ValueError("Should never happen") + liked_object = await get_inbox_object_by_ap_id(db_session, liked_object_ap_id) + if not liked_object: + raise ValueError(f"Cannot find liked object {liked_object_ap_id}") + liked_object.liked_via_outbox_object_ap_id = None + + # Send the Undo to the liked object's actor + await new_outgoing_activity( + db_session, + liked_object.actor.inbox_url, # type: ignore + outbox_object.id, + ) + elif outbox_object_to_undo.ap_type == "Announce": + announced_object_ap_id = outbox_object_to_undo.activity_object_ap_id + if not announced_object_ap_id: + raise ValueError("Should never happen") + announced_object = await get_inbox_object_by_ap_id( + db_session, announced_object_ap_id + ) + if not announced_object: + raise ValueError(f"Cannot find announced object {announced_object_ap_id}") + announced_object.announced_via_outbox_object_ap_id = None + + # Send the Undo to the original recipients + recipients = await _compute_recipients( + db_session, outbox_object_to_undo.ap_object + ) + for rcp in recipients: + await new_outgoing_activity(db_session, rcp, outbox_object.id) + elif outbox_object_to_undo.ap_type == "Block": + if not outbox_object_to_undo.activity_object_ap_id: + raise ValueError(f"Invalid block activity {outbox_object_to_undo.ap_id}") + + # Send the Undo to the blocked actor + blocked_actor = await fetch_actor( + db_session, outbox_object_to_undo.activity_object_ap_id + ) + + blocked_actor.is_blocked = False + + await new_outgoing_activity( + db_session, + blocked_actor.inbox_url, # type: ignore + outbox_object.id, + ) + + if is_notification_enabled(models.NotificationType.UNBLOCK): + notif = models.Notification( + notification_type=models.NotificationType.UNBLOCK, + actor_id=blocked_actor.id, + outbox_object_id=outbox_object.id, + ) + db_session.add(notif) + + else: + raise ValueError("Should never happen") + + # called should commit + + +async def fetch_conversation_root( + db_session: AsyncSession, + obj: AnyboxObject | RemoteObject, + is_root: bool = False, + depth: int = 0, +) -> str: + """Some softwares do not set the context/conversation field (like Misskey). + This means we have to track conversation ourselves. 
To do so, we fetch + the root of the conversation and either: + - use the context field if set + - or build a custom conversation ID + """ + logger.info(f"Fetching convo root for ap_id={obj.ap_id}/{depth=}") + if obj.ap_context: + return obj.ap_context + + if not obj.in_reply_to or is_root or depth > 10: + # Use the root AP ID if there'no context + return f"microblogpub:root:{obj.ap_id}" + else: + in_reply_to_object: AnyboxObject | RemoteObject | None = ( + await get_anybox_object_by_ap_id(db_session, obj.in_reply_to) + ) + if not in_reply_to_object: + try: + raw_reply = await ap.fetch(ap.get_id(obj.in_reply_to)) + raw_reply_actor = await fetch_actor( + db_session, ap.get_actor_id(raw_reply) + ) + in_reply_to_object = RemoteObject(raw_reply, actor=raw_reply_actor) + except ( + ap.FetchError, + ap.NotAnObjectError, + ): + return await fetch_conversation_root( + db_session, obj, is_root=True, depth=depth + 1 + ) + except httpx.HTTPStatusError as http_status_error: + if 400 <= http_status_error.response.status_code < 500: + # We may not have access, in this case consider if root + return await fetch_conversation_root( + db_session, obj, is_root=True, depth=depth + 1 + ) + else: + raise + + return await fetch_conversation_root( + db_session, in_reply_to_object, depth=depth + 1 + ) + + +async def send_move( + db_session: AsyncSession, + target: str, +) -> None: + move_id = allocate_outbox_id() + obj = { + "@context": ap.AS_CTX, + "type": "Move", + "id": outbox_object_id(move_id), + "actor": LOCAL_ACTOR.ap_id, + "object": LOCAL_ACTOR.ap_id, + "target": target, + } + + outbox_object = await save_outbox_object(db_session, move_id, obj) + if not outbox_object.id: + raise ValueError("Should never happen") + + recipients = await _get_followers_recipients(db_session) + for rcp in recipients: + await new_outgoing_activity(db_session, rcp, outbox_object.id) + + # Store the moved to in order to update the profile + set_moved_to(target) + + await db_session.commit() + + +async def send_self_destruct(db_session: AsyncSession) -> None: + delete_id = allocate_outbox_id() + delete = { + "@context": ap.AS_EXTENDED_CTX, + "id": outbox_object_id(delete_id), + "type": "Delete", + "actor": ID, + "object": ID, + "to": [ap.AS_PUBLIC], + } + outbox_object = await save_outbox_object( + db_session, + delete_id, + delete, + ) + if not outbox_object.id: + raise ValueError("Should never happen") + + recipients = await compute_all_known_recipients(db_session) + for rcp in recipients: + await new_outgoing_activity(db_session, rcp, outbox_object.id) + + await db_session.commit() + + +async def send_create( + db_session: AsyncSession, + ap_type: str, + source: str, + uploads: list[tuple[models.Upload, str, str | None]], + in_reply_to: str | None, + visibility: ap.VisibilityEnum, + content_warning: str | None = None, + is_sensitive: bool = False, + poll_type: str | None = None, + poll_answers: list[str] | None = None, + poll_duration_in_minutes: int | None = None, + name: str | None = None, +) -> tuple[str, models.OutboxObject]: + note_id = allocate_outbox_id() + published = now().replace(microsecond=0).isoformat().replace("+00:00", "Z") + context = f"{ID}/contexts/" + uuid.uuid4().hex + conversation = context + content, tags, mentioned_actors = await markdownify(db_session, source) + attachments = [] + + in_reply_to_object: AnyboxObject | None = None + if in_reply_to: + in_reply_to_object = await get_anybox_object_by_ap_id(db_session, in_reply_to) + if not in_reply_to_object: + raise ValueError(f"Invalid in reply to 
{in_reply_to=}") + if not in_reply_to_object.ap_context: + logger.warning(f"Replied object {in_reply_to} has no context") + try: + conversation = await fetch_conversation_root( + db_session, + in_reply_to_object, + ) + except Exception: + logger.exception(f"Failed to fetch convo root {in_reply_to}") + else: + context = in_reply_to_object.ap_context + conversation = in_reply_to_object.ap_context + + for (upload, filename, alt_text) in uploads: + attachments.append(upload_to_attachment(upload, filename, alt_text)) + + to = [] + cc = [] + mentioned_actor_ap_ids = [actor.ap_id for actor in mentioned_actors] + if visibility == ap.VisibilityEnum.PUBLIC: + to = [ap.AS_PUBLIC] + cc = [f"{BASE_URL}/followers"] + mentioned_actor_ap_ids + elif visibility == ap.VisibilityEnum.UNLISTED: + to = [f"{BASE_URL}/followers"] + cc = [ap.AS_PUBLIC] + mentioned_actor_ap_ids + elif visibility == ap.VisibilityEnum.FOLLOWERS_ONLY: + to = [f"{BASE_URL}/followers"] + cc = mentioned_actor_ap_ids + elif visibility == ap.VisibilityEnum.DIRECT: + to = mentioned_actor_ap_ids + cc = [] + else: + raise ValueError(f"Unhandled visibility {visibility}") + + slug = None + url = outbox_object_id(note_id) + + extra_obj_attrs = {} + if ap_type == "Question": + if not poll_answers or len(poll_answers) < 2: + raise ValueError("Question must have at least 2 possible answers") + + if not poll_type: + raise ValueError("Mising poll_type") + + if not poll_duration_in_minutes: + raise ValueError("Missing poll_duration_in_minutes") + + extra_obj_attrs = { + "votersCount": 0, + "endTime": (now() + timedelta(minutes=poll_duration_in_minutes)) + .isoformat() + .replace("+00:00", "Z"), + poll_type: [ + { + "type": "Note", + "name": answer, + "replies": {"type": "Collection", "totalItems": 0}, + } + for answer in poll_answers + ], + } + elif ap_type == "Article": + if not name: + raise ValueError("Article must have a name") + + slug = slugify(name) + url = f"{BASE_URL}/articles/{note_id[:7]}/{slug}" + extra_obj_attrs = {"name": name} + + obj = { + "@context": ap.AS_EXTENDED_CTX, + "type": ap_type, + "id": outbox_object_id(note_id), + "attributedTo": ID, + "content": content, + "to": to, + "cc": cc, + "published": published, + "context": context, + "conversation": context, + "url": url, + "tag": dedup_tags(tags), + "summary": content_warning, + "inReplyTo": in_reply_to, + "sensitive": is_sensitive, + "attachment": attachments, + **extra_obj_attrs, # type: ignore + } + outbox_object = await save_outbox_object( + db_session, + note_id, + obj, + source=source, + conversation=conversation, + slug=slug, + ) + if not outbox_object.id: + raise ValueError("Should never happen") + + for tag in tags: + if tag["type"] == "Hashtag": + tagged_object = models.TaggedOutboxObject( + tag=tag["name"][1:].lower(), + outbox_object_id=outbox_object.id, + ) + db_session.add(tagged_object) + + for (upload, filename, alt) in uploads: + outbox_object_attachment = models.OutboxObjectAttachment( + filename=filename, + alt=alt, + outbox_object_id=outbox_object.id, + upload_id=upload.id, + ) + db_session.add(outbox_object_attachment) + + recipients = await _compute_recipients(db_session, obj) + for rcp in recipients: + await new_outgoing_activity(db_session, rcp, outbox_object.id) + + # If the note is public, check if we need to send any webmentions + if visibility == ap.VisibilityEnum.PUBLIC: + possible_targets = await opengraph.external_urls(db_session, outbox_object) + logger.info(f"webmentions possible targert {possible_targets}") + for target in possible_targets: + 
+
+
+async def send_vote(
+    db_session: AsyncSession,
+    in_reply_to: str,
+    names: list[str],
+) -> str:
+    logger.info(f"Send vote {names}")
+    published = now().replace(microsecond=0).isoformat().replace("+00:00", "Z")
+
+    in_reply_to_object = await get_inbox_object_by_ap_id(db_session, in_reply_to)
+    if not in_reply_to_object:
+        raise ValueError(f"Invalid in reply to {in_reply_to=}")
+    if not in_reply_to_object.ap_context:
+        raise ValueError("Object has no context")
+    context = in_reply_to_object.ap_context
+
+    # TODO: ensure the names are valid?
+
+    # Save the answers
+    in_reply_to_object.voted_for_answers = names
+
+    to = [in_reply_to_object.actor.ap_id]
+
+    for name in names:
+        vote_id = allocate_outbox_id()
+        note = {
+            "@context": ap.AS_EXTENDED_CTX,
+            "type": "Note",
+            "id": outbox_object_id(vote_id),
+            "attributedTo": ID,
+            "name": name,
+            "to": to,
+            "cc": [],
+            "published": published,
+            "context": context,
+            "conversation": context,
+            "url": outbox_object_id(vote_id),
+            "inReplyTo": in_reply_to,
+        }
+        outbox_object = await save_outbox_object(
+            db_session, vote_id, note, is_transient=True
+        )
+        if not outbox_object.id:
+            raise ValueError("Should never happen")
+
+        recipients = await _compute_recipients(db_session, note)
+        for rcp in recipients:
+            await new_outgoing_activity(db_session, rcp, outbox_object.id)
+
+    await db_session.commit()
+    return vote_id
+
+
+async def send_update(
+    db_session: AsyncSession,
+    ap_id: str,
+    source: str,
+) -> str:
+    outbox_object = await get_outbox_object_by_ap_id(db_session, ap_id)
+    if not outbox_object:
+        raise ValueError(f"{ap_id} not found")
+
+    revisions = outbox_object.revisions or []
+    revisions.append(
+        {
+            "ap_object": outbox_object.ap_object,
+            "source": outbox_object.source,
+            "updated": (
+                outbox_object.ap_object.get("updated")
+                or outbox_object.ap_object.get("published")
+            ),
+        }
+    )
+
+    updated = now().replace(microsecond=0).isoformat().replace("+00:00", "Z")
+    content, tags, mentioned_actors = await markdownify(db_session, source)
+
+    note = {
+        "@context": ap.AS_EXTENDED_CTX,
+        "type": outbox_object.ap_type,
+        "id": outbox_object.ap_id,
+        "attributedTo": ID,
+        "content": content,
+        "to": outbox_object.ap_object["to"],
+        "cc": outbox_object.ap_object["cc"],
+        "published": outbox_object.ap_object["published"],
+        "context": outbox_object.ap_context,
+        "conversation": outbox_object.ap_context,
+        "url": outbox_object.url,
+        "tag": tags,
+        "summary": outbox_object.summary,
+        "inReplyTo": outbox_object.in_reply_to,
+        "sensitive": outbox_object.sensitive,
+        "attachment": outbox_object.ap_object["attachment"],
+        "updated": updated,
+    }
+
+    outbox_object.ap_object = note
+    outbox_object.source = source
+    outbox_object.revisions = revisions
+
+    recipients = await _compute_recipients(db_session, note)
+    for rcp in recipients:
+        await new_outgoing_activity(db_session, rcp, outbox_object.id)
+
+    # If the note is public, check if we need to send any webmentions
+    if outbox_object.visibility == ap.VisibilityEnum.PUBLIC:
+        possible_targets = await opengraph.external_urls(db_session, outbox_object)
+        logger.info(f"webmentions possible targets {possible_targets}")
+        for target in possible_targets:
+            webmention_endpoint = await webmentions.discover_webmention_endpoint(target)
+            logger.info(f"{target=} {webmention_endpoint=}")
+            if webmention_endpoint:
+                await new_outgoing_activity(
+                    db_session,
+                    webmention_endpoint,
+                    outbox_object_id=outbox_object.id,
+                    webmention_target=target,
+                )
+
+    await db_session.commit()
+    return outbox_object.public_id  # type: ignore
+
+
+async def _compute_recipients(
+    db_session: AsyncSession, ap_object: ap.RawObject
+) -> set[str]:
+    _recipients = []
+    for field in ["to", "cc", "bto", "bcc"]:
+        if field in ap_object:
+            _recipients.extend(ap.as_list(ap_object[field]))
+
+    recipients = set()
+    logger.info(f"{_recipients}")
+    for r in _recipients:
+        if r in [ap.AS_PUBLIC, ID]:
+            continue
+
+        # If we got a local collection, assume it's a collection of actors
+        if r.startswith(BASE_URL):
+            for actor in await fetch_actor_collection(db_session, r):
+                recipients.add(actor.shared_inbox_url)
+
+            continue
+
+        # Is it a known actor?
+        known_actor = (
+            await db_session.execute(
+                select(models.Actor).where(models.Actor.ap_id == r)
+            )
+        ).scalar_one_or_none()  # type: ignore
+        if known_actor:
+            recipients.add(known_actor.shared_inbox_url)
+            continue
+
+        # Fetch the object
+        raw_object = await ap.fetch(r)
+        if raw_object.get("type") in ap.ACTOR_TYPES:
+            saved_actor = await save_actor(db_session, raw_object)
+            recipients.add(saved_actor.shared_inbox_url)
+        else:
+            # Assume it's a collection of actors
+            for raw_actor in await ap.parse_collection(payload=raw_object):
+                actor = RemoteActor(raw_actor)
+                recipients.add(actor.shared_inbox_url)
+
+    return recipients
+
+
+async def compute_all_known_recipients(db_session: AsyncSession) -> set[str]:
+    return {
+        actor.shared_inbox_url or actor.inbox_url
+        for actor in (
+            await db_session.scalars(
+                select(models.Actor).where(models.Actor.is_deleted.is_(False))
+            )
+        ).all()
+    }
+
+
+async def _get_following(db_session: AsyncSession) -> list[models.Following]:
+    return (
+        (
+            await db_session.scalars(
+                select(models.Following).options(joinedload(models.Following.actor))
+            )
+        )
+        .unique()
+        .all()
+    )
+
+
+async def _get_followers(db_session: AsyncSession) -> list[models.Follower]:
+    return (
+        (
+            await db_session.scalars(
+                select(models.Follower).options(joinedload(models.Follower.actor))
+            )
+        )
+        .unique()
+        .all()
+    )
+
+
+async def _get_followers_recipients(
+    db_session: AsyncSession,
+    skip_actors: list[models.Actor] | None = None,
+) -> set[str]:
+    """Returns all the recipients from the local follower collection."""
+    actor_ap_ids_to_skip = []
+    if skip_actors:
+        actor_ap_ids_to_skip = [actor.ap_id for actor in skip_actors]
+
+    followers = await _get_followers(db_session)
+    return {
+        follower.actor.shared_inbox_url  # type: ignore
+        for follower in followers
+        if follower.actor.ap_id not in actor_ap_ids_to_skip
+    }
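# A small sketch (not part of the patch) of why the recipient helpers return a
# *set* of shared inbox URLs: followers on the same server collapse into a
# single delivery target. The URLs below are hypothetical.
followers_shared_inboxes = [
    "https://mastodon.example/inbox",  # alice@mastodon.example
    "https://mastodon.example/inbox",  # bob@mastodon.example (same server)
    "https://pleroma.example/inbox",   # carol@pleroma.example
]

# The set comprehension in _get_followers_recipients performs this dedup, so a
# Move/Delete/Create is delivered once per server rather than once per follower.
assert set(followers_shared_inboxes) == {
    "https://mastodon.example/inbox",
    "https://pleroma.example/inbox",
}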
+
+
+async def get_notification_by_id(
+    db_session: AsyncSession, notification_id: int
+) -> models.Notification | None:
+    return (
+        await db_session.execute(
+            select(models.Notification)
+            .where(models.Notification.id == notification_id)
+            .options(
+                joinedload(models.Notification.inbox_object).options(
+                    joinedload(models.InboxObject.actor)
+                ),
+            )
+        )
+    ).scalar_one_or_none()  # type: ignore
+
+
+async def get_inbox_object_by_ap_id(
+    db_session: AsyncSession, ap_id: str
+) -> models.InboxObject | None:
+    return (
+        await db_session.execute(
+            select(models.InboxObject)
+            .where(models.InboxObject.ap_id == ap_id)
+            .options(
+                joinedload(models.InboxObject.actor),
+                joinedload(models.InboxObject.relates_to_inbox_object),
+                joinedload(models.InboxObject.relates_to_outbox_object),
+            )
+        )
+    ).scalar_one_or_none()  # type: ignore
+
+
+async def get_inbox_delete_for_activity_object_ap_id(
+    db_session: AsyncSession, activity_object_ap_id: str
+) -> models.InboxObject | None:
+    return (
+        await db_session.execute(
+            select(models.InboxObject)
+            .where(
+                models.InboxObject.ap_type == "Delete",
+                models.InboxObject.activity_object_ap_id == activity_object_ap_id,
+            )
+            .options(
+                joinedload(models.InboxObject.actor),
+                joinedload(models.InboxObject.relates_to_inbox_object),
+                joinedload(models.InboxObject.relates_to_outbox_object),
+            )
+        )
+    ).scalar_one_or_none()  # type: ignore
+
+
+async def get_outbox_object_by_ap_id(
+    db_session: AsyncSession, ap_id: str
+) -> models.OutboxObject | None:
+    return (
+        (
+            await db_session.execute(
+                select(models.OutboxObject)
+                .where(models.OutboxObject.ap_id == ap_id)
+                .options(
+                    joinedload(models.OutboxObject.outbox_object_attachments).options(
+                        joinedload(models.OutboxObjectAttachment.upload)
+                    ),
+                    joinedload(models.OutboxObject.relates_to_inbox_object).options(
+                        joinedload(models.InboxObject.actor),
+                    ),
+                    joinedload(models.OutboxObject.relates_to_outbox_object).options(
+                        joinedload(
+                            models.OutboxObject.outbox_object_attachments
+                        ).options(joinedload(models.OutboxObjectAttachment.upload)),
+                    ),
+                )
+            )
+        )
+        .unique()
+        .scalar_one_or_none()
+    )  # type: ignore
+
+
+async def get_outbox_object_by_slug_and_short_id(
+    db_session: AsyncSession,
+    slug: str,
+    short_id: str,
+) -> models.OutboxObject | None:
+    return (
+        (
+            await db_session.execute(
+                select(models.OutboxObject)
+                .options(
+                    joinedload(models.OutboxObject.outbox_object_attachments).options(
+                        joinedload(models.OutboxObjectAttachment.upload)
+                    )
+                )
+                .where(
+                    models.OutboxObject.public_id.like(f"{short_id}%"),
+                    models.OutboxObject.slug == slug,
+                    models.OutboxObject.is_deleted.is_(False),
+                )
+            )
+        )
+        .unique()
+        .scalar_one_or_none()
+    )
+
+
+async def get_anybox_object_by_ap_id(
+    db_session: AsyncSession, ap_id: str
+) -> AnyboxObject | None:
+    if ap_id.startswith(BASE_URL):
+        return await get_outbox_object_by_ap_id(db_session, ap_id)
+    else:
+        return await get_inbox_object_by_ap_id(db_session, ap_id)
+
+
+async def get_webmention_by_id(
+    db_session: AsyncSession, webmention_id: int
+) -> models.Webmention | None:
+    return (
+        await db_session.execute(
+            select(models.Webmention)
+            .where(models.Webmention.id == webmention_id)
+            .options(
+                joinedload(models.Webmention.outbox_object),
+            )
+        )
+    ).scalar_one_or_none()  # type: ignore
+
+
+async def _handle_delete_activity(
+    db_session: AsyncSession,
+    from_actor: models.Actor,
+    delete_activity: models.InboxObject,
+    relates_to_inbox_object: models.InboxObject | None,
+    forwarded_by_actor: models.Actor | None,
+) -> None:
+    ap_object_to_delete: models.InboxObject | models.Actor |
None = None + if relates_to_inbox_object: + ap_object_to_delete = relates_to_inbox_object + elif delete_activity.activity_object_ap_id: + # If it's not a Delete for an inbox object, it may be related to + # an actor + try: + ap_object_to_delete = await fetch_actor( + db_session, + delete_activity.activity_object_ap_id, + save_if_not_found=False, + ) + except ap.ObjectNotFoundError: + pass + + if ap_object_to_delete is None or not ap_object_to_delete.is_from_db: + logger.info( + "Received Delete for an unknown object " + f"{delete_activity.activity_object_ap_id}" + ) + return + + if isinstance(ap_object_to_delete, models.InboxObject): + if from_actor.ap_id != ap_object_to_delete.actor.ap_id: + logger.warning( + "Actor mismatch between the activity and the object: " + f"{from_actor.ap_id}/{ap_object_to_delete.actor.ap_id}" + ) + return + + logger.info( + f"Deleting {ap_object_to_delete.ap_type}/{ap_object_to_delete.ap_id}" + ) + await _revert_side_effect_for_deleted_object( + db_session, + delete_activity, + ap_object_to_delete, + forwarded_by_actor, + ) + ap_object_to_delete.is_deleted = True + elif isinstance(ap_object_to_delete, models.Actor): + if from_actor.ap_id != ap_object_to_delete.ap_id: + logger.warning( + "Actor mismatch between the activity and the object: " + f"{from_actor.ap_id}/{ap_object_to_delete.ap_id}" + ) + return + + logger.info(f"Deleting actor {ap_object_to_delete.ap_id}") + follower = ( + await db_session.scalars( + select(models.Follower).where( + models.Follower.ap_actor_id == ap_object_to_delete.ap_id, + ) + ) + ).one_or_none() + if follower: + logger.info("Removing actor from follower") + await db_session.delete(follower) + + # Also mark Follow activities for this actor as deleted + follow_activities = ( + await db_session.scalars( + select(models.OutboxObject).where( + models.OutboxObject.ap_type == "Follow", + models.OutboxObject.relates_to_actor_id + == ap_object_to_delete.id, + models.OutboxObject.is_deleted.is_(False), + ) + ) + ).all() + for follow_activity in follow_activities: + logger.info( + f"Marking Follow activity {follow_activity.ap_id} as deleted" + ) + follow_activity.is_deleted = True + + following = ( + await db_session.scalars( + select(models.Following).where( + models.Following.ap_actor_id == ap_object_to_delete.ap_id, + ) + ) + ).one_or_none() + if following: + logger.info("Removing actor from following") + await db_session.delete(following) + + # Mark the actor as deleted + ap_object_to_delete.is_deleted = True + + inbox_objects = ( + await db_session.scalars( + select(models.InboxObject).where( + models.InboxObject.actor_id == ap_object_to_delete.id, + models.InboxObject.is_deleted.is_(False), + ) + ) + ).all() + logger.info(f"Deleting {len(inbox_objects)} objects") + for inbox_object in inbox_objects: + await _revert_side_effect_for_deleted_object( + db_session, + delete_activity, + inbox_object, + forwarded_by_actor=None, + ) + inbox_object.is_deleted = True + else: + raise ValueError("Should never happen") + + await db_session.flush() + + +async def _get_replies_count( + db_session: AsyncSession, + replied_object_ap_id: str, +) -> int: + return ( + await db_session.scalar( + select(func.count(models.InboxObject.id)).where( + func.json_extract(models.InboxObject.ap_object, "$.inReplyTo") + == replied_object_ap_id, + models.InboxObject.is_deleted.is_(False), + ) + ) + ) + ( + await db_session.scalar( + select(func.count(models.OutboxObject.id)).where( + func.json_extract(models.OutboxObject.ap_object, "$.inReplyTo") + == 
replied_object_ap_id, + models.OutboxObject.is_deleted.is_(False), + ) + ) + ) + + +async def _get_outbox_replies_count( + db_session: AsyncSession, + outbox_object: models.OutboxObject, +) -> int: + return (await _get_replies_count(db_session, outbox_object.ap_id)) + ( + await db_session.scalar( + select(func.count(models.Webmention.id)).where( + models.Webmention.is_deleted.is_(False), + models.Webmention.outbox_object_id == outbox_object.id, + models.Webmention.webmention_type == models.WebmentionType.REPLY, + ) + ) + ) + + +async def _get_outbox_likes_count( + db_session: AsyncSession, + outbox_object: models.OutboxObject, +) -> int: + return ( + await db_session.scalar( + select(func.count(models.InboxObject.id)).where( + models.InboxObject.ap_type == "Like", + models.InboxObject.relates_to_outbox_object_id == outbox_object.id, + models.InboxObject.is_deleted.is_(False), + ) + ) + ) + ( + await db_session.scalar( + select(func.count(models.Webmention.id)).where( + models.Webmention.is_deleted.is_(False), + models.Webmention.outbox_object_id == outbox_object.id, + models.Webmention.webmention_type == models.WebmentionType.LIKE, + ) + ) + ) + + +async def _get_outbox_announces_count( + db_session: AsyncSession, + outbox_object: models.OutboxObject, +) -> int: + return ( + await db_session.scalar( + select(func.count(models.InboxObject.id)).where( + models.InboxObject.ap_type == "Announce", + models.InboxObject.relates_to_outbox_object_id == outbox_object.id, + models.InboxObject.is_deleted.is_(False), + ) + ) + ) + ( + await db_session.scalar( + select(func.count(models.Webmention.id)).where( + models.Webmention.is_deleted.is_(False), + models.Webmention.outbox_object_id == outbox_object.id, + models.Webmention.webmention_type == models.WebmentionType.REPOST, + ) + ) + ) + + +async def _revert_side_effect_for_deleted_object( + db_session: AsyncSession, + delete_activity: models.InboxObject | None, + deleted_ap_object: models.InboxObject, + forwarded_by_actor: models.Actor | None, +) -> None: + is_delete_needs_to_be_forwarded = False + + # Delete related notifications + notif_deletion_result = await db_session.execute( + delete(models.Notification) + .where(models.Notification.inbox_object_id == deleted_ap_object.id) + .execution_options(synchronize_session=False) + ) + logger.info( + f"Deleted {notif_deletion_result.rowcount} notifications" # type: ignore + ) + + # Decrement/refresh the replies counter if needed + if deleted_ap_object.in_reply_to: + replied_object = await get_anybox_object_by_ap_id( + db_session, + deleted_ap_object.in_reply_to, + ) + if replied_object: + if replied_object.is_from_outbox: + # It's a local reply that was likely forwarded, the Delete + # also needs to be forwarded + is_delete_needs_to_be_forwarded = True + + new_replies_count = await _get_outbox_replies_count( + db_session, replied_object # type: ignore + ) + + await db_session.execute( + update(models.OutboxObject) + .where( + models.OutboxObject.id == replied_object.id, + ) + .values(replies_count=new_replies_count - 1) + ) + else: + new_replies_count = await _get_replies_count( + db_session, replied_object.ap_id + ) + + await db_session.execute( + update(models.InboxObject) + .where( + models.InboxObject.id == replied_object.id, + ) + .values(replies_count=new_replies_count - 1) + ) + + if deleted_ap_object.ap_type == "Like" and deleted_ap_object.activity_object_ap_id: + related_object = await get_outbox_object_by_ap_id( + db_session, + deleted_ap_object.activity_object_ap_id, + ) + if related_object: 
+            if related_object.is_from_outbox:
+                likes_count = await _get_outbox_likes_count(db_session, related_object)
+                await db_session.execute(
+                    update(models.OutboxObject)
+                    .where(
+                        models.OutboxObject.id == related_object.id,
+                    )
+                    .values(likes_count=likes_count - 1)
+                )
+    elif (
+        deleted_ap_object.ap_type == "Announce"
+        and deleted_ap_object.activity_object_ap_id
+    ):
+        related_object = await get_outbox_object_by_ap_id(
+            db_session,
+            deleted_ap_object.activity_object_ap_id,
+        )
+        if related_object:
+            if related_object.is_from_outbox:
+                announces_count = await _get_outbox_announces_count(
+                    db_session, related_object
+                )
+                await db_session.execute(
+                    update(models.OutboxObject)
+                    .where(
+                        models.OutboxObject.id == related_object.id,
+                    )
+                    .values(announces_count=announces_count - 1)
+                )
+
+    # Delete any Like/Announce
+    await db_session.execute(
+        update(models.OutboxObject)
+        .where(
+            models.OutboxObject.activity_object_ap_id == deleted_ap_object.ap_id,
+        )
+        .values(is_deleted=True)
+    )
+
+    # If it's a local reply, it was forwarded, so we also need to forward
+    # the Delete activity if possible
+    if (
+        delete_activity
+        and delete_activity.activity_object_ap_id == deleted_ap_object.ap_id
+        and delete_activity.has_ld_signature
+        and is_delete_needs_to_be_forwarded
+    ):
+        logger.info("Forwarding Delete activity as it's a local reply")
+
+        # Don't forward to the forwarding actor and the original Delete actor
+        skip_actors = [delete_activity.actor]
+        if forwarded_by_actor:
+            skip_actors.append(forwarded_by_actor)
+        recipients = await _get_followers_recipients(
+            db_session,
+            skip_actors=skip_actors,
+        )
+        for rcp in recipients:
+            await new_outgoing_activity(
+                db_session,
+                rcp,
+                outbox_object_id=None,
+                inbox_object_id=delete_activity.id,
+            )
+
+
+async def _handle_follow_follow_activity(
+    db_session: AsyncSession,
+    from_actor: models.Actor,
+    follow_activity: models.InboxObject,
+) -> None:
+    if follow_activity.activity_object_ap_id != LOCAL_ACTOR.ap_id:
+        logger.warning(
+            f"Dropping Follow activity for {follow_activity.activity_object_ap_id}"
+        )
+        await db_session.delete(follow_activity)
+        return
+
+    if MANUALLY_APPROVES_FOLLOWERS:
+        notif = models.Notification(
+            notification_type=models.NotificationType.PENDING_INCOMING_FOLLOWER,
+            actor_id=from_actor.id,
+            inbox_object_id=follow_activity.id,
+        )
+        db_session.add(notif)
+        return None
+
+    await _send_accept(db_session, from_actor, follow_activity)
+
+
+async def _get_incoming_follow_from_notification_id(
+    db_session: AsyncSession,
+    notification_id: int,
+) -> tuple[models.Notification, models.InboxObject]:
+    notif = await get_notification_by_id(db_session, notification_id)
+    if notif is None:
+        raise ValueError(f"Notification {notification_id=} not found")
+
+    if notif.inbox_object is None:
+        raise ValueError("Should never happen")
+
+    if (ap_type := notif.inbox_object.ap_type) != "Follow":
+        raise ValueError(f"Unexpected {ap_type=}")
+
+    return notif, notif.inbox_object
+
+
+async def send_accept(
+    db_session: AsyncSession,
+    notification_id: int,
+) -> None:
+    notif, incoming_follow_request = await _get_incoming_follow_from_notification_id(
+        db_session, notification_id
+    )
+
+    await _send_accept(
+        db_session, incoming_follow_request.actor, incoming_follow_request
+    )
+    notif.is_accepted = True
+
+    await db_session.commit()
+
+
+async def _send_accept(
+    db_session: AsyncSession,
+    from_actor: models.Actor,
+    inbox_object: models.InboxObject,
+) -> None:
+
+    follower = models.Follower(
+        actor_id=from_actor.id,
+        inbox_object_id=inbox_object.id,
+        ap_actor_id=from_actor.ap_id,
+    )
+    try:
+        db_session.add(follower)
+        await db_session.flush()
+    except IntegrityError:
+        pass  # TODO: update the existing follower
+
+    # Reply with an Accept
+    reply_id = allocate_outbox_id()
+    reply = {
+        "@context": ap.AS_CTX,
+        "id": outbox_object_id(reply_id),
+        "type": "Accept",
+        "actor": ID,
+        "object": inbox_object.ap_id,
+    }
+    outbox_activity = await save_outbox_object(
+        db_session, reply_id, reply, relates_to_inbox_object_id=inbox_object.id
+    )
+    if not outbox_activity.id:
+        raise ValueError("Should never happen")
+    await new_outgoing_activity(db_session, from_actor.inbox_url, outbox_activity.id)
+
+    if is_notification_enabled(models.NotificationType.NEW_FOLLOWER):
+        notif = models.Notification(
+            notification_type=models.NotificationType.NEW_FOLLOWER,
+            actor_id=from_actor.id,
+        )
+        db_session.add(notif)
+
+
+async def send_reject(
+    db_session: AsyncSession,
+    notification_id: int,
+) -> None:
+    notif, incoming_follow_request = await _get_incoming_follow_from_notification_id(
+        db_session, notification_id
+    )
+
+    await _send_reject(
+        db_session, incoming_follow_request.actor, incoming_follow_request
+    )
+    notif.is_rejected = True
+    await db_session.commit()
+
+
+async def _send_reject(
+    db_session: AsyncSession,
+    from_actor: models.Actor,
+    inbox_object: models.InboxObject,
+) -> None:
+    # Reply with a Reject
+    reply_id = allocate_outbox_id()
+    reply = {
+        "@context": ap.AS_CTX,
+        "id": outbox_object_id(reply_id),
+        "type": "Reject",
+        "actor": ID,
+        "object": inbox_object.ap_id,
+    }
+    outbox_activity = await save_outbox_object(
+        db_session, reply_id, reply, relates_to_inbox_object_id=inbox_object.id
+    )
+    if not outbox_activity.id:
+        raise ValueError("Should never happen")
+    await new_outgoing_activity(db_session, from_actor.inbox_url, outbox_activity.id)
+
+    if is_notification_enabled(models.NotificationType.REJECTED_FOLLOWER):
+        notif = models.Notification(
+            notification_type=models.NotificationType.REJECTED_FOLLOWER,
+            actor_id=from_actor.id,
+        )
+        db_session.add(notif)
+
+
+async def _handle_undo_activity(
+    db_session: AsyncSession,
+    from_actor: models.Actor,
+    undo_activity: models.InboxObject,
+    ap_activity_to_undo: models.InboxObject,
+) -> None:
+    if from_actor.ap_id != ap_activity_to_undo.actor.ap_id:
+        logger.warning(
+            "Actor mismatch between the activity and the object: "
+            f"{from_actor.ap_id}/{ap_activity_to_undo.actor.ap_id}"
+        )
+        return
+
+    ap_activity_to_undo.undone_by_inbox_object_id = undo_activity.id
+    ap_activity_to_undo.is_deleted = True
+
+    if ap_activity_to_undo.ap_type == "Follow":
+        logger.info(f"Undo follow from {from_actor.ap_id}")
+        await db_session.execute(
+            delete(models.Follower).where(
+                models.Follower.inbox_object_id == ap_activity_to_undo.id
+            )
+        )
+        if is_notification_enabled(models.NotificationType.UNFOLLOW):
+            notif = models.Notification(
+                notification_type=models.NotificationType.UNFOLLOW,
+                actor_id=from_actor.id,
+            )
+            db_session.add(notif)
+
+    elif ap_activity_to_undo.ap_type == "Like":
+        if not ap_activity_to_undo.activity_object_ap_id:
+            raise ValueError("Like without object")
+        liked_obj = await get_outbox_object_by_ap_id(
+            db_session,
+            ap_activity_to_undo.activity_object_ap_id,
+        )
+        if not liked_obj:
+            logger.warning(
+                "Cannot find liked object: "
+                f"{ap_activity_to_undo.activity_object_ap_id}"
+            )
+            return
+
+        liked_obj.likes_count = (
+            await _get_outbox_likes_count(
+                db_session,
+                liked_obj,
+            )
+            - 1
+        )
+        if is_notification_enabled(models.NotificationType.UNDO_LIKE):
+            notif = models.Notification(
+                notification_type=models.NotificationType.UNDO_LIKE,
+                actor_id=from_actor.id,
+                outbox_object_id=liked_obj.id,
+                inbox_object_id=ap_activity_to_undo.id,
+            )
+            db_session.add(notif)
+
+    elif ap_activity_to_undo.ap_type == "Announce":
+        if not ap_activity_to_undo.activity_object_ap_id:
+            raise ValueError("Announce without object")
+        announced_obj_ap_id = ap_activity_to_undo.activity_object_ap_id
+        logger.info(
+            f"Undo for announce {ap_activity_to_undo.ap_id}/{announced_obj_ap_id}"
+        )
+        if announced_obj_ap_id.startswith(BASE_URL):
+            announced_obj_from_outbox = await get_outbox_object_by_ap_id(
+                db_session, announced_obj_ap_id
+            )
+            if announced_obj_from_outbox:
+                logger.info("Found in the outbox")
+                announced_obj_from_outbox.announces_count = (
+                    models.OutboxObject.announces_count - 1
+                )
+                if is_notification_enabled(models.NotificationType.UNDO_ANNOUNCE):
+                    notif = models.Notification(
+                        notification_type=models.NotificationType.UNDO_ANNOUNCE,
+                        actor_id=from_actor.id,
+                        outbox_object_id=announced_obj_from_outbox.id,
+                        inbox_object_id=ap_activity_to_undo.id,
+                    )
+                    db_session.add(notif)
+    elif ap_activity_to_undo.ap_type == "Block":
+        if is_notification_enabled(models.NotificationType.UNBLOCKED):
+            notif = models.Notification(
+                notification_type=models.NotificationType.UNBLOCKED,
+                actor_id=from_actor.id,
+                inbox_object_id=ap_activity_to_undo.id,
+            )
+            db_session.add(notif)
+    else:
+        logger.warning(f"Don't know how to undo {ap_activity_to_undo.ap_type} activity")
+
+    # commit will be performed in save_to_inbox
+
+
+async def _handle_move_activity(
+    db_session: AsyncSession,
+    from_actor: models.Actor,
+    move_activity: models.InboxObject,
+) -> None:
+    logger.info("Processing Move activity")
+
+    # Ensure the object matches the actor
+    old_actor_id = ap.get_object_id(move_activity.ap_object)
+    if old_actor_id != from_actor.ap_id:
+        logger.warning(
+            f"Object does not match the actor: {old_actor_id}/{from_actor.ap_id}"
+        )
+        return None
+
+    # Fetch the target account
+    target = move_activity.ap_object.get("target")
+    if not target:
+        logger.warning("Missing target")
+        return None
+
+    new_actor_id = ap.get_id(target)
+    new_actor = await fetch_actor(db_session, new_actor_id)
+
+    logger.info(f"Moving {old_actor_id} to {new_actor_id}")
+
+    # Ensure the target account references the old account
+    if old_actor_id not in (aks := new_actor.ap_actor.get("alsoKnownAs", [])):
+        logger.warning(
+            f"New account does not have an alias for the old account: {aks}"
+        )
+        return None
+
+    # Unfollow the old account
+    following = (
+        await db_session.execute(
+            select(models.Following)
+            .where(models.Following.ap_actor_id == old_actor_id)
+            .options(joinedload(models.Following.outbox_object))
+        )
+    ).scalar_one_or_none()
+    if not following:
+        logger.warning("Not following the Move actor")
+        return
+
+    await _send_undo(db_session, following.outbox_object.ap_id)
+
+    # Follow the new one
+    if not (
+        await db_session.execute(
+            select(models.Following).where(models.Following.ap_actor_id == new_actor_id)
+        )
+    ).scalar():
+        await _send_follow(db_session, new_actor_id)
+    else:
+        logger.info(f"Already following target {new_actor_id}")
+
+    if is_notification_enabled(models.NotificationType.MOVE):
+        notif = models.Notification(
+            notification_type=models.NotificationType.MOVE,
+            actor_id=new_actor.id,
+            inbox_object_id=move_activity.id,
+        )
+        db_session.add(notif)
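# A sketch (not part of the patch) of the Move activity shape that
# _handle_move_activity expects, with hypothetical actor IDs. The handler only
# honors a Move when the object matches the actor and the new account lists
# the old one in alsoKnownAs.
move_activity = {
    "@context": "https://www.w3.org/ns/activitystreams",
    "type": "Move",
    "actor": "https://old.example/users/alice",
    "object": "https://old.example/users/alice",  # must match the actor
    "target": "https://new.example/users/alice",
}

new_actor_profile = {
    "id": "https://new.example/users/alice",
    "alsoKnownAs": ["https://old.example/users/alice"],
}

# The same two checks the handler performs above
assert move_activity["object"] == move_activity["actor"]
assert move_activity["actor"] in new_actor_profile["alsoKnownAs"]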
+
+
+async def _handle_update_activity(
+    db_session: AsyncSession,
+    from_actor: models.Actor,
+    update_activity: models.InboxObject,
+) -> None:
+    logger.info("Processing Update activity")
+    wrapped_object = await ap.get_object(update_activity.ap_object)
+    if wrapped_object["type"] in ap.ACTOR_TYPES:
+        logger.info("Updating actor")
+
+        updated_actor = RemoteActor(wrapped_object)
+        if (
+            from_actor.ap_id != updated_actor.ap_id
+            or ap.as_list(from_actor.ap_type)[0] not in ap.ACTOR_TYPES
+            or ap.as_list(updated_actor.ap_type)[0] not in ap.ACTOR_TYPES
+            or from_actor.handle != updated_actor.handle
+        ):
+            raise ValueError(
+                f"Invalid Update activity {from_actor.ap_actor}/"
+                f"{updated_actor.ap_actor}"
+            )
+
+        # Update the actor
+        await update_actor_if_needed(db_session, from_actor, updated_actor)
+    elif (ap_type := wrapped_object["type"]) in [
+        "Question",
+        "Note",
+        "Article",
+        "Page",
+        "Video",
+    ]:
+        logger.info(f"Updating {ap_type}")
+        existing_object = await get_inbox_object_by_ap_id(
+            db_session, wrapped_object["id"]
+        )
+        if not existing_object:
+            logger.info(f"{ap_type} not found in the inbox")
+        elif existing_object.actor.ap_id != from_actor.ap_id:
+            logger.warning(
+                f"Update actor does not match the {ap_type} actor {from_actor.ap_id}"
+                f"/{existing_object.actor.ap_id}"
+            )
+        else:
+            # Everything looks correct, update the object in the inbox
+            logger.info(f"Updating {existing_object.ap_id}")
+            existing_object.ap_object = wrapped_object
+            existing_object.updated_at = now()
+    else:
+        # TODO(ts): support updating objects
+        logger.info(f'Cannot update {wrapped_object["type"]}')
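# A condensed sketch (not part of the patch) of the actor-Update guard above:
# the wrapped actor must keep the same AP ID, stay an actor type, and keep the
# same handle, otherwise the Update is rejected. The field names are simplified
# stand-ins for the real Actor attributes.
ACTOR_TYPES = {"Person", "Service", "Application", "Group", "Organization"}


def is_valid_actor_update(current: dict, updated: dict) -> bool:
    return (
        current["id"] == updated["id"]
        and current["type"] in ACTOR_TYPES
        and updated["type"] in ACTOR_TYPES
        and current["preferredUsername"] == updated["preferredUsername"]
    )


old = {"id": "https://a.example/u/bob", "type": "Person", "preferredUsername": "bob"}
assert is_valid_actor_update(old, {**old})  # display-name changes would be fine
assert not is_valid_actor_update(old, {**old, "preferredUsername": "mallory"})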
+
+
+async def _handle_create_activity(
+    db_session: AsyncSession,
+    from_actor: models.Actor,
+    create_activity: models.InboxObject,
+    forwarded_by_actor: models.Actor | None = None,
+    relates_to_inbox_object: models.InboxObject | None = None,
+) -> None:
+    logger.info("Processing Create activity")
+
+    # Some PeerTube activities make no sense to process
+    if (
+        ap_object_type := ap.as_list(
+            (await ap.get_object(create_activity.ap_object))["type"]
+        )[0]
+    ) in ["CacheFile"]:
+        logger.info(f"Dropping Create activity for {ap_object_type} object")
+        await db_session.delete(create_activity)
+        return None
+
+    if relates_to_inbox_object:
+        logger.warning(f"{relates_to_inbox_object.ap_id} is already in the inbox")
+        return None
+
+    wrapped_object = ap.unwrap_activity(create_activity.ap_object)
+    if create_activity.actor.ap_id != ap.get_actor_id(wrapped_object):
+        raise ValueError("Object actor does not match activity")
+
+    ro = RemoteObject(wrapped_object, actor=from_actor)
+
+    # Check if we already received a delete for this object (happens often
+    # with forwarded replies)
+    delete_object = await get_inbox_delete_for_activity_object_ap_id(
+        db_session,
+        ro.ap_id,
+    )
+    if delete_object:
+        if delete_object.actor.ap_id != from_actor.ap_id:
+            logger.warning(
+                f"Got a Delete for {ro.ap_id} from {delete_object.actor.ap_id}??"
+            )
+            return None
+        else:
+            logger.info("Already received a Delete for this object, deleting activity")
+            create_activity.is_deleted = True
+            await db_session.flush()
+            return None
+
+    await _process_note_object(
+        db_session,
+        create_activity,
+        from_actor,
+        ro,
+        forwarded_by_actor=forwarded_by_actor,
+    )
+
+
+async def _handle_read_activity(
+    db_session: AsyncSession,
+    from_actor: models.Actor,
+    read_activity: models.InboxObject,
+) -> None:
+    logger.info("Processing Read activity")
+
+    # Honk uses Read activity to propagate replies, fetch the read object
+    # from the remote server
+    wrapped_object = await ap.fetch(ap.get_id(read_activity.ap_object["object"]))
+
+    wrapped_object_actor = await fetch_actor(
+        db_session, ap.get_actor_id(wrapped_object)
+    )
+    if not wrapped_object_actor.is_blocked:
+        ro = RemoteObject(wrapped_object, actor=wrapped_object_actor)
+
+        # Check if we already know about this object
+        if await get_inbox_object_by_ap_id(
+            db_session,
+            ro.ap_id,
+        ):
+            logger.info(f"{ro.ap_id} is already in the inbox, skipping processing")
+            return None
+
+        # Then process it like it's coming from a forwarded activity
+        await _process_note_object(db_session, read_activity, wrapped_object_actor, ro)
+
+
+async def _process_note_object(
+    db_session: AsyncSession,
+    parent_activity: models.InboxObject,
+    from_actor: models.Actor,
+    ro: RemoteObject,
+    forwarded_by_actor: models.Actor | None = None,
+) -> None:
+    if parent_activity.ap_type not in ["Create", "Read"]:
+        raise ValueError(f"Unexpected parent activity {parent_activity.ap_id}")
+
+    ap_published_at = now()
+    if "published" in ro.ap_object:
+        ap_published_at = parse_isoformat(ro.ap_object["published"])
+
+    following = await _get_following(db_session)
+
+    is_from_following = ro.actor.ap_id in {f.ap_actor_id for f in following}
+    is_reply = bool(ro.in_reply_to)
+    is_local_reply = ro.is_local_reply
+    is_mention = False
+    hashtags = []
+    tags = ro.ap_object.get("tag", [])
+    for tag in ap.as_list(tags):
+        if tag.get("name") == LOCAL_ACTOR.handle or tag.get("href") == LOCAL_ACTOR.url:
+            is_mention = True
+        if tag.get("type") == "Hashtag":
+            if tag_name := tag.get("name"):
+                hashtags.append(tag_name)
+
+    object_info = ObjectInfo(
+        is_reply=is_reply,
+        is_local_reply=is_local_reply,
+        is_mention=is_mention,
+        is_from_following=is_from_following,
+        hashtags=hashtags,
+        actor_handle=ro.actor.handle,
+        remote_object=ro,
+    )
+
+    inbox_object = models.InboxObject(
+        server=urlparse(ro.ap_id).hostname,
+        actor_id=from_actor.id,
+        ap_actor_id=from_actor.ap_id,
+        ap_type=ro.ap_type,
+        ap_id=ro.ap_id,
+        ap_context=ro.ap_context,
+        conversation=await fetch_conversation_root(db_session, ro),
+        ap_published_at=ap_published_at,
+        ap_object=ro.ap_object,
+        visibility=ro.visibility,
+        relates_to_inbox_object_id=parent_activity.id,
+        relates_to_outbox_object_id=None,
+        activity_object_ap_id=ro.activity_object_ap_id,
+        og_meta=await opengraph.og_meta_from_note(db_session, ro),
+        # Hide replies from the stream
+        is_hidden_from_stream=not stream_visibility_callback(object_info),
+        # We may already have some replies in DB
+        replies_count=await _get_replies_count(db_session, ro.ap_id),
+    )
+
+    db_session.add(inbox_object)
+    await db_session.flush()
+    await db_session.refresh(inbox_object)
+
+    parent_activity.relates_to_inbox_object_id = inbox_object.id
+
+    if inbox_object.in_reply_to:
+        replied_object = await get_anybox_object_by_ap_id(
+            db_session, inbox_object.in_reply_to
+        )
+        if replied_object:
+            if replied_object.is_from_outbox:
+                if
replied_object.ap_type == "Question" and inbox_object.ap_object.get( + "name" + ): + await _handle_vote_answer( + db_session, + inbox_object, + replied_object, # type: ignore # outbox check below + ) + else: + new_replies_count = await _get_outbox_replies_count( + db_session, replied_object # type: ignore + ) + + await db_session.execute( + update(models.OutboxObject) + .where( + models.OutboxObject.id == replied_object.id, + ) + .values(replies_count=new_replies_count) + ) + else: + new_replies_count = await _get_replies_count( + db_session, replied_object.ap_id + ) + + await db_session.execute( + update(models.InboxObject) + .where( + models.InboxObject.id == replied_object.id, + ) + .values(replies_count=new_replies_count) + ) + + # This object is a reply of a local object, we may need to forward it + # to our followers (we can only forward JSON-LD signed activities) + if ( + parent_activity.ap_type == "Create" + and replied_object + and replied_object.is_from_outbox + and replied_object.ap_type != "Question" + and parent_activity.has_ld_signature + ): + logger.info("Forwarding Create activity as it's a local reply") + skip_actors = [parent_activity.actor] + if forwarded_by_actor: + skip_actors.append(forwarded_by_actor) + recipients = await _get_followers_recipients( + db_session, + skip_actors=skip_actors, + ) + for rcp in recipients: + await new_outgoing_activity( + db_session, + rcp, + outbox_object_id=None, + inbox_object_id=parent_activity.id, + ) + + if is_mention and is_notification_enabled(models.NotificationType.MENTION): + notif = models.Notification( + notification_type=models.NotificationType.MENTION, + actor_id=from_actor.id, + inbox_object_id=inbox_object.id, + ) + db_session.add(notif) + + +async def _handle_vote_answer( + db_session: AsyncSession, + answer: models.InboxObject, + question: models.OutboxObject, +) -> None: + logger.info(f"Processing poll answer for {question.ap_id}: {answer.ap_id}") + + if question.is_poll_ended: + logger.warning("Poll is ended, discarding answer") + return + + if not question.poll_items: + raise ValueError("Should never happen") + + answer_name = answer.ap_object["name"] + if answer_name not in {pi["name"] for pi in question.poll_items}: + logger.warning(f"Invalid answer {answer_name=}") + return + + answer.is_transient = True + poll_answer = models.PollAnswer( + outbox_object_id=question.id, + poll_type="oneOf" if question.is_one_of_poll else "anyOf", + inbox_object_id=answer.id, + actor_id=answer.actor.id, + name=answer_name, + ) + db_session.add(poll_answer) + await db_session.flush() + + voters_count = await db_session.scalar( + select(func.count(func.distinct(models.PollAnswer.actor_id))).where( + models.PollAnswer.outbox_object_id == question.id + ) + ) + + all_answers = await db_session.execute( + select( + func.count(models.PollAnswer.name).label("answer_count"), + models.PollAnswer.name, + ) + .where(models.PollAnswer.outbox_object_id == question.id) + .group_by(models.PollAnswer.name) + ) + all_answers_count = {a["name"]: a["answer_count"] for a in all_answers} + + logger.info(f"{voters_count=}") + logger.info(f"{all_answers_count=}") + + question_ap_object = dict(question.ap_object) + question_ap_object["votersCount"] = voters_count + items_key = "oneOf" if question.is_one_of_poll else "anyOf" + question_ap_object[items_key] = [ + { + "type": "Note", + "name": item["name"], + "replies": { + "type": "Collection", + "totalItems": all_answers_count.get(item["name"], 0), + }, + } + for item in question.poll_items + ] + updated = 
now().replace(microsecond=0).isoformat().replace("+00:00", "Z")
+    question_ap_object["updated"] = updated
+    question.ap_object = question_ap_object
+
+    logger.info(f"Updated question: {question.ap_object}")
+
+    await db_session.flush()
+
+    # Finally send an update
+    recipients = await _compute_recipients(db_session, question.ap_object)
+    for rcp in recipients:
+        await new_outgoing_activity(db_session, rcp, question.id)
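# A sketch (not part of the patch) of the tally rebuilt by _handle_vote_answer:
# votersCount counts distinct voters, while each option's replies.totalItems
# counts the answers cast for that option. The data below is hypothetical.
from collections import Counter

answers = [("alice", "Yes"), ("bob", "No"), ("carol", "Yes")]  # (actor, option)

voters_count = len({actor for actor, _ in answers})
per_option = Counter(option for _, option in answers)

one_of = [
    {
        "type": "Note",
        "name": name,
        "replies": {"type": "Collection", "totalItems": per_option.get(name, 0)},
    }
    for name in ["Yes", "No"]
]
assert voters_count == 3 and one_of[0]["replies"]["totalItems"] == 2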
+
+
+async def _handle_announce_activity(
+    db_session: AsyncSession,
+    actor: models.Actor,
+    announce_activity: models.InboxObject,
+    relates_to_outbox_object: models.OutboxObject | None,
+    relates_to_inbox_object: models.InboxObject | None,
+):
+    if relates_to_outbox_object:
+        # This is an announce for a local object
+        relates_to_outbox_object.announces_count = (
+            models.OutboxObject.announces_count + 1
+        )
+
+        if is_notification_enabled(models.NotificationType.ANNOUNCE):
+            notif = models.Notification(
+                notification_type=models.NotificationType.ANNOUNCE,
+                actor_id=actor.id,
+                outbox_object_id=relates_to_outbox_object.id,
+                inbox_object_id=announce_activity.id,
+            )
+            db_session.add(notif)
+    else:
+        # Only show the announce in the stream if it comes from an actor
+        # in the following collection
+        followings = await _get_following(db_session)
+        is_from_following = announce_activity.actor.ap_id in {
+            f.ap_actor_id for f in followings
+        }
+
+        # This is an announce for a possibly unknown object
+        if relates_to_inbox_object:
+            # We already know about this object; show the announce in the
+            # stream if it's not already there, comes from a followed actor
+            # and we haven't seen it recently
+            skip_delta = timedelta(hours=1)
+            delta_from_original = now() - as_utc(
+                relates_to_inbox_object.ap_published_at  # type: ignore
+            )
+            dup_count = 0
+            if (
+                not relates_to_inbox_object.is_hidden_from_stream
+                and delta_from_original < skip_delta
+            ) or (
+                dup_count := (
+                    await db_session.scalar(
+                        select(func.count(models.InboxObject.id)).where(
+                            models.InboxObject.ap_type == "Announce",
+                            models.InboxObject.ap_published_at > now() - skip_delta,
+                            models.InboxObject.relates_to_inbox_object_id
+                            == relates_to_inbox_object.id,
+                            models.InboxObject.is_hidden_from_stream.is_(False),
+                        )
+                    )
+                )
+            ) > 0:
+                logger.info(f"Deduping Announce {delta_from_original=}/{dup_count=}")
+                announce_activity.is_hidden_from_stream = True
+            else:
+                announce_activity.is_hidden_from_stream = not is_from_following
+
+        else:
+            # Save it as an inbox object
+            if not announce_activity.activity_object_ap_id:
+                raise ValueError("Should never happen")
+            announced_raw_object = await ap.fetch(
+                announce_activity.activity_object_ap_id
+            )
+
+            # Some software returns objects wrapped in a Create activity (like
+            # python-federation)
+            if ap.as_list(announced_raw_object["type"])[0] == "Create":
+                announced_raw_object = await ap.get_object(announced_raw_object)
+
+            announced_actor = await fetch_actor(
+                db_session, ap.get_actor_id(announced_raw_object)
+            )
+            if not announced_actor.is_blocked:
+                announced_object = RemoteObject(announced_raw_object, announced_actor)
+                announced_inbox_object = models.InboxObject(
+                    server=urlparse(announced_object.ap_id).hostname,
+                    actor_id=announced_actor.id,
+                    ap_actor_id=announced_actor.ap_id,
+                    ap_type=announced_object.ap_type,
+                    ap_id=announced_object.ap_id,
+                    ap_context=announced_object.ap_context,
+                    ap_published_at=announced_object.ap_published_at,
+                    ap_object=announced_object.ap_object,
+                    visibility=announced_object.visibility,
+                    og_meta=await opengraph.og_meta_from_note(
+                        db_session, announced_object
+                    ),
+                    is_hidden_from_stream=True,
+                )
+                db_session.add(announced_inbox_object)
+                await db_session.flush()
+                announce_activity.relates_to_inbox_object_id = announced_inbox_object.id
+                announce_activity.is_hidden_from_stream = (
+                    not is_from_following
+                    or announce_activity.actor.are_announces_hidden_from_stream
+                )
+
+
+async def _handle_like_activity(
+    db_session: AsyncSession,
+    actor: models.Actor,
+    like_activity: models.InboxObject,
+    relates_to_outbox_object: models.OutboxObject | None,
+    relates_to_inbox_object: models.InboxObject | None,
+):
+    if not relates_to_outbox_object:
+        logger.info(
+            "Received a like for an unknown activity: "
+            f"{like_activity.activity_object_ap_id}, deleting the activity"
+        )
+        await db_session.delete(like_activity)
+    else:
+        relates_to_outbox_object.likes_count = await _get_outbox_likes_count(
+            db_session,
+            relates_to_outbox_object,
+        )
+
+        if is_notification_enabled(models.NotificationType.LIKE):
+            notif = models.Notification(
+                notification_type=models.NotificationType.LIKE,
+                actor_id=actor.id,
+                outbox_object_id=relates_to_outbox_object.id,
+                inbox_object_id=like_activity.id,
+            )
+            db_session.add(notif)
+
+
+async def _handle_block_activity(
+    db_session: AsyncSession,
+    actor: models.Actor,
+    block_activity: models.InboxObject,
+):
+    if block_activity.activity_object_ap_id != LOCAL_ACTOR.ap_id:
+        logger.warning(
+            "Received invalid Block activity "
+            f"{block_activity.activity_object_ap_id=}"
+        )
+        await db_session.delete(block_activity)
+        return
+
+    # Create a notification
+    if is_notification_enabled(models.NotificationType.BLOCKED):
+        notif = models.Notification(
+            notification_type=models.NotificationType.BLOCKED,
+            actor_id=actor.id,
+            inbox_object_id=block_activity.id,
+        )
+        db_session.add(notif)
+
+
+async def _process_transient_object(
+    db_session: AsyncSession,
+    raw_object: ap.RawObject,
+    from_actor: models.Actor,
+) -> None:
+    # TODO: track featured/pinned objects for actors
+    ap_type = raw_object["type"]
+    if ap_type in ["Add", "Remove"]:
+        logger.info(f"Dropping unsupported {ap_type} object")
+    else:
+        # FIXME(ts): handle transient create
+        logger.warning(f"Received unknown {ap_type} object")
+
+    return None
+
+
+async def save_to_inbox(
+    db_session: AsyncSession,
+    raw_object: ap.RawObject,
+    sent_by_ap_actor_id: str,
+) -> None:
+    # Special case for servers sending the actor as a payload (like
+    # python-federation)
+    if ap.as_list(raw_object["type"])[0] in ap.ACTOR_TYPES:
+        if ap.get_id(raw_object) == sent_by_ap_actor_id:
+            updated_actor = RemoteActor(raw_object)
+
+            try:
+                actor = await fetch_actor(db_session, sent_by_ap_actor_id)
+            except ap.ObjectNotFoundError:
+                logger.warning("Actor not found")
+                return
+
+            # Update the actor
+            actor.ap_actor = updated_actor.ap_actor
+            await db_session.commit()
+            return
+
+        else:
+            logger.warning(
+                f"Received an actor payload {raw_object} from "
+                f"{sent_by_ap_actor_id}"
+            )
+            return
+
+    try:
+        actor = await fetch_actor(db_session, ap.get_id(raw_object["actor"]))
+    except ap.ObjectNotFoundError:
+        logger.warning("Actor not found")
+        return
+    except ap.FetchError:
+        logger.exception("Failed to fetch actor")
+        return
+
+    if is_hostname_blocked(actor.server):
+        logger.warning(f"Server {actor.server} is blocked")
+        return
+
+    if "id" not in raw_object or not raw_object["id"]:
+        await _process_transient_object(db_session, raw_object, actor)
+        return None
+
+    # If we just blocked an actor, we want to process any undo sent as side
+    # effects
+    if actor.is_blocked and
ap.as_list(raw_object["type"])[0] != "Undo": + logger.warning(f"Actor {actor.ap_id} is blocked, ignoring object") + return None + + raw_object_id = ap.get_id(raw_object) + forwarded_by_actor = None + + # Ensure forwarded activities have a valid LD sig + if sent_by_ap_actor_id != actor.ap_id: + logger.info( + f"Processing a forwarded activity {sent_by_ap_actor_id=}/{actor.ap_id}" + ) + forwarded_by_actor = await fetch_actor(db_session, sent_by_ap_actor_id) + + is_sig_verified = False + try: + is_sig_verified = await ldsig.verify_signature(db_session, raw_object) + except Exception: + logger.exception("Failed to verify LD sig") + + if not is_sig_verified: + logger.warning( + f"Failed to verify LD sig, fetching remote object {raw_object_id}" + ) + + # Try to fetch the remote object since we failed to verify the LD sig + try: + raw_object = await ap.fetch(raw_object_id) + except Exception: + raise fastapi.HTTPException(status_code=401, detail="Invalid LD sig") + + # Transient activities from Mastodon like Like are not fetchable and + # will return the actor instead + if raw_object["id"] != raw_object_id: + logger.info(f"Unable to fetch {raw_object_id}") + return None + + if ( + await db_session.scalar( + select(func.count(models.InboxObject.id)).where( + models.InboxObject.ap_id == raw_object_id + ) + ) + > 0 + ): + logger.info( + f'Received duplicate {raw_object["type"]} activity: {raw_object_id}' + ) + return + + ap_published_at = now() + if "published" in raw_object: + ap_published_at = parse_isoformat(raw_object["published"]) + + activity_ro = RemoteObject(raw_object, actor=actor) + + relates_to_inbox_object: models.InboxObject | None = None + relates_to_outbox_object: models.OutboxObject | None = None + if activity_ro.activity_object_ap_id: + if activity_ro.activity_object_ap_id.startswith(BASE_URL): + relates_to_outbox_object = await get_outbox_object_by_ap_id( + db_session, + activity_ro.activity_object_ap_id, + ) + else: + relates_to_inbox_object = await get_inbox_object_by_ap_id( + db_session, + activity_ro.activity_object_ap_id, + ) + + inbox_object = models.InboxObject( + server=urlparse(activity_ro.ap_id).hostname, + actor_id=actor.id, + ap_actor_id=actor.ap_id, + ap_type=activity_ro.ap_type, + ap_id=activity_ro.ap_id, + ap_context=activity_ro.ap_context, + ap_published_at=ap_published_at, + ap_object=activity_ro.ap_object, + visibility=activity_ro.visibility, + relates_to_inbox_object_id=relates_to_inbox_object.id + if relates_to_inbox_object + else None, + relates_to_outbox_object_id=relates_to_outbox_object.id + if relates_to_outbox_object + else None, + activity_object_ap_id=activity_ro.activity_object_ap_id, + is_hidden_from_stream=True, + ) + + db_session.add(inbox_object) + await db_session.flush() + await db_session.refresh(inbox_object) + + if activity_ro.ap_type == "Create": + await _handle_create_activity( + db_session, + actor, + inbox_object, + forwarded_by_actor=forwarded_by_actor, + relates_to_inbox_object=relates_to_inbox_object, + ) + elif activity_ro.ap_type == "Read": + await _handle_read_activity(db_session, actor, inbox_object) + elif activity_ro.ap_type == "Update": + await _handle_update_activity(db_session, actor, inbox_object) + elif activity_ro.ap_type == "Move": + await _handle_move_activity(db_session, actor, inbox_object) + elif activity_ro.ap_type == "Delete": + await _handle_delete_activity( + db_session, + actor, + inbox_object, + relates_to_inbox_object, + forwarded_by_actor=forwarded_by_actor, + ) + elif activity_ro.ap_type == "Follow": + await 
_handle_follow_follow_activity(db_session, actor, inbox_object)
+    elif activity_ro.ap_type == "Undo":
+        if relates_to_inbox_object:
+            await _handle_undo_activity(
+                db_session, actor, inbox_object, relates_to_inbox_object
+            )
+        else:
+            logger.info("Received Undo for an unknown activity")
+    elif activity_ro.ap_type in ["Accept", "Reject"]:
+        if not relates_to_outbox_object:
+            logger.info(
+                f"Received {raw_object['type']} for an unknown activity: "
+                f"{activity_ro.activity_object_ap_id}"
+            )
+        else:
+            if relates_to_outbox_object.ap_type == "Follow":
+                notif_type = (
+                    models.NotificationType.FOLLOW_REQUEST_ACCEPTED
+                    if activity_ro.ap_type == "Accept"
+                    else models.NotificationType.FOLLOW_REQUEST_REJECTED
+                )
+                if is_notification_enabled(notif_type):
+                    notif = models.Notification(
+                        notification_type=notif_type,
+                        actor_id=actor.id,
+                        inbox_object_id=inbox_object.id,
+                    )
+                    db_session.add(notif)
+
+                if activity_ro.ap_type == "Accept":
+                    following = models.Following(
+                        actor_id=actor.id,
+                        outbox_object_id=relates_to_outbox_object.id,
+                        ap_actor_id=actor.ap_id,
+                    )
+                    db_session.add(following)
+
+                    # Pre-fetch the latest activities
+                    try:
+                        await _prefetch_actor_outbox(db_session, actor)
+                    except Exception:
+                        logger.exception(f"Failed to prefetch outbox for {actor.ap_id}")
+                elif activity_ro.ap_type == "Reject":
+                    maybe_following = (
+                        await db_session.scalars(
+                            select(models.Following).where(
+                                models.Following.ap_actor_id == actor.ap_id,
+                            )
+                        )
+                    ).one_or_none()
+                    if maybe_following:
+                        logger.info("Removing actor from following")
+                        await db_session.delete(maybe_following)
+
+            else:
+                logger.info(
+                    "Received an Accept for an unsupported activity: "
+                    f"{relates_to_outbox_object.ap_type}"
+                )
+    elif activity_ro.ap_type == "EmojiReact":
+        if not relates_to_outbox_object:
+            logger.info(
+                "Received a reaction for an unknown activity: "
+                f"{activity_ro.activity_object_ap_id}"
+            )
+            await db_session.delete(inbox_object)
+        else:
+            # TODO(ts): support reactions
+            pass
+    elif activity_ro.ap_type == "Like":
+        await _handle_like_activity(
+            db_session,
+            actor,
+            inbox_object,
+            relates_to_outbox_object,
+            relates_to_inbox_object,
+        )
+    elif activity_ro.ap_type == "Announce":
+        await _handle_announce_activity(
+            db_session,
+            actor,
+            inbox_object,
+            relates_to_outbox_object,
+            relates_to_inbox_object,
+        )
+    elif activity_ro.ap_type == "View":
+        # View is used by PeerTube, there's nothing useful we can do with it
+        await db_session.delete(inbox_object)
+    elif activity_ro.ap_type == "Block":
+        await _handle_block_activity(
+            db_session,
+            actor,
+            inbox_object,
+        )
+    else:
+        logger.warning(f"Received an unknown {inbox_object.ap_type} object")
+
+    await db_session.commit()
+
+
+async def _prefetch_actor_outbox(
+    db_session: AsyncSession,
+    actor: models.Actor,
+) -> None:
+    """Try to fetch some notes to fill the stream"""
+    saved = 0
+    outbox = await ap.parse_collection(actor.outbox_url, limit=20)
+    for activity in outbox[:20]:
+        activity_id = ap.get_id(activity)
+        raw_activity = await ap.fetch(activity_id)
+        if ap.as_list(raw_activity["type"])[0] == "Create":
+            obj = await ap.get_object(raw_activity)
+            saved_inbox_object = await get_inbox_object_by_ap_id(
+                db_session, ap.get_id(obj)
+            )
+            if not saved_inbox_object:
+                saved_inbox_object = await save_object_to_inbox(db_session, obj)
+
+            if not saved_inbox_object.in_reply_to:
+                saved_inbox_object.is_hidden_from_stream = False
+
+            saved += 1
+
+        if saved >= 5:
+            break
+
+    # commit is performed by the caller
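# A hypothetical usage sketch (not part of the patch) for save_object_to_inbox
# below: before interacting with an object that was merely looked up (e.g. to
# like it), it first has to exist as an InboxObject row:
#
#     raw_object = await ap.fetch("https://remote.example/notes/123")
#     inbox_object = await get_inbox_object_by_ap_id(db_session, raw_object["id"])
#     if not inbox_object:
#         inbox_object = await save_object_to_inbox(db_session, raw_object)
#     # ...then build the Like against inbox_object.ap_id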
+
+
+async def save_object_to_inbox(
+    db_session: AsyncSession,
+    raw_object: ap.RawObject,
+) -> models.InboxObject:
+    """Used to save unknown objects before interacting with them, e.g. to like
+    an object that was looked up, or to prefill the inbox when an actor accepted
+    a follow request."""
+    obj_actor = await fetch_actor(db_session, ap.get_actor_id(raw_object))
+
+    ro = RemoteObject(raw_object, actor=obj_actor)
+
+    ap_published_at = now()
+    if "published" in ro.ap_object:
+        ap_published_at = parse_isoformat(ro.ap_object["published"])
+
+    inbox_object = models.InboxObject(
+        server=urlparse(ro.ap_id).hostname,
+        actor_id=obj_actor.id,
+        ap_actor_id=obj_actor.ap_id,
+        ap_type=ro.ap_type,
+        ap_id=ro.ap_id,
+        ap_context=ro.ap_context,
+        conversation=await fetch_conversation_root(db_session, ro),
+        ap_published_at=ap_published_at,
+        ap_object=ro.ap_object,
+        visibility=ro.visibility,
+        relates_to_inbox_object_id=None,
+        relates_to_outbox_object_id=None,
+        activity_object_ap_id=ro.activity_object_ap_id,
+        og_meta=await opengraph.og_meta_from_note(db_session, ro),
+        is_hidden_from_stream=True,
+    )
+
+    db_session.add(inbox_object)
+    await db_session.flush()
+    await db_session.refresh(inbox_object)
+    return inbox_object
+
+
+async def public_outbox_objects_count(db_session: AsyncSession) -> int:
+    return await db_session.scalar(
+        select(func.count(models.OutboxObject.id)).where(
+            models.OutboxObject.visibility == ap.VisibilityEnum.PUBLIC,
+            models.OutboxObject.is_deleted.is_(False),
+        )
+    )
+
+
+async def fetch_actor_collection(db_session: AsyncSession, url: str) -> list[Actor]:
+    if url.startswith(config.BASE_URL):
+        if url == config.BASE_URL + "/followers":
+            followers = (
+                (
+                    await db_session.scalars(
+                        select(models.Follower).options(
+                            joinedload(models.Follower.actor)
+                        )
+                    )
+                )
+                .unique()
+                .all()
+            )
+            return [follower.actor for follower in followers]
+        else:
+            raise ValueError(f"internal collection for {url} not supported")
+
+    return [RemoteActor(actor) for actor in await ap.parse_collection(url)]
+
+
+@dataclass
+class ReplyTreeNode:
+    ap_object: AnyboxObject | None
+    wm_reply: WebmentionReply | None
+    children: list["ReplyTreeNode"]
+    is_requested: bool = False
+    is_root: bool = False
+
+    @property
+    def published_at(self) -> datetime.datetime:
+        if self.ap_object:
+            return self.ap_object.ap_published_at  # type: ignore
+        elif self.wm_reply:
+            return self.wm_reply.published_at
+        else:
+            raise ValueError(f"Should never happen: {self}")
+
+
+async def get_replies_tree(
+    db_session: AsyncSession,
+    requested_object: AnyboxObject,
+    is_current_user_admin: bool,
+) -> ReplyTreeNode:
+    # XXX: PeerTube videos don't use context
+    tree_nodes: list[AnyboxObject] = []
+    if requested_object.conversation is None:
+        tree_nodes = [requested_object]
+    else:
+        allowed_visibility = [ap.VisibilityEnum.PUBLIC, ap.VisibilityEnum.UNLISTED]
+        if is_current_user_admin:
+            allowed_visibility = list(ap.VisibilityEnum)
+
+        tree_nodes.extend(
+            (
+                await db_session.scalars(
+                    select(models.InboxObject)
+                    .where(
+                        models.InboxObject.conversation
+                        == requested_object.conversation,
+                        models.InboxObject.ap_type.in_(
+                            ["Note", "Page", "Article", "Question"]
+                        ),
+                        models.InboxObject.is_deleted.is_(False),
+                        models.InboxObject.visibility.in_(allowed_visibility),
+                    )
+                    .options(joinedload(models.InboxObject.actor))
+                )
+            )
+            .unique()
+            .all()
+        )
+        tree_nodes.extend(
+            (
+                await db_session.scalars(
+                    select(models.OutboxObject)
+                    .where(
+                        models.OutboxObject.conversation
+                        == requested_object.conversation,
models.OutboxObject.is_deleted.is_(False), + models.OutboxObject.ap_type.in_( + ["Note", "Page", "Article", "Question"] + ), + models.OutboxObject.visibility.in_(allowed_visibility), + ) + .options( + joinedload( + models.OutboxObject.outbox_object_attachments + ).options(joinedload(models.OutboxObjectAttachment.upload)) + ) + ) + ) + .unique() + .all() + ) + nodes_by_in_reply_to = defaultdict(list) + for node in tree_nodes: + nodes_by_in_reply_to[node.in_reply_to].append(node) + logger.info(nodes_by_in_reply_to) + + if len(nodes_by_in_reply_to.get(None, [])) > 1: + raise ValueError(f"Invalid replies tree: {[n.ap_object for n in tree_nodes]}") + + def _get_reply_node_children( + node: ReplyTreeNode, + index: defaultdict[str | None, list[AnyboxObject]], + ) -> list[ReplyTreeNode]: + children = [] + for child in index.get(node.ap_object.ap_id, []): # type: ignore + child_node = ReplyTreeNode( + ap_object=child, + wm_reply=None, + is_requested=child.ap_id == requested_object.ap_id, # type: ignore + children=[], + ) + child_node.children = _get_reply_node_children(child_node, index) + children.append(child_node) + + return sorted( + children, + key=lambda node: node.published_at, + ) + + if None in nodes_by_in_reply_to: + root_ap_object = nodes_by_in_reply_to[None][0] + else: + root_ap_object = sorted( + tree_nodes, + key=lambda ap_obj: ap_obj.ap_published_at, # type: ignore + )[0] + + root_node = ReplyTreeNode( + ap_object=root_ap_object, + wm_reply=None, + is_root=True, + is_requested=root_ap_object.ap_id == requested_object.ap_id, + children=[], + ) + root_node.children = _get_reply_node_children(root_node, nodes_by_in_reply_to) + return root_node diff --git a/app/config.py b/app/config.py new file mode 100644 index 0000000..1cd593c --- /dev/null +++ b/app/config.py @@ -0,0 +1,289 @@ +import hashlib +import hmac +import os +import secrets +from pathlib import Path + +import bcrypt +import itsdangerous +import pydantic +import tomli +from fastapi import Form +from fastapi import HTTPException +from fastapi import Request +from itsdangerous import URLSafeTimedSerializer +from loguru import logger +from mistletoe import markdown # type: ignore + +from app.customization import _CUSTOM_ROUTES +from app.customization import _StreamVisibilityCallback +from app.customization import default_stream_visibility_callback +from app.utils.emoji import _load_emojis +from app.utils.version import get_version_commit + +ROOT_DIR = Path().parent.resolve() + +_CONFIG_FILE = os.getenv("MICROBLOGPUB_CONFIG_FILE", "profile.toml") + +VERSION_COMMIT = "dev" + +try: + from app._version import VERSION_COMMIT # type: ignore +except ImportError: + VERSION_COMMIT = get_version_commit() + +# Force reloading cache when the CSS is updated +CSS_HASH = "none" +try: + css_data = (ROOT_DIR / "app" / "static" / "css" / "main.css").read_bytes() + CSS_HASH = hashlib.md5(css_data, usedforsecurity=False).hexdigest() +except FileNotFoundError: + pass + +# Force reloading cache when the JS is changed +JS_HASH = "none" +try: + # To keep things simple, we keep a single hash for the 2 files + dat = b"" + for j in [ + ROOT_DIR / "app" / "static" / "common.js", + ROOT_DIR / "app" / "static" / "common-admin.js", + ROOT_DIR / "app" / "static" / "new.js", + ]: + dat += j.read_bytes() + JS_HASH = hashlib.md5(dat, usedforsecurity=False).hexdigest() +except FileNotFoundError: + pass + +MOVED_TO_FILE = ROOT_DIR / "data" / "moved_to.dat" + + +def _get_moved_to() -> str | None: + if not MOVED_TO_FILE.exists(): + return None + + return 
MOVED_TO_FILE.read_text() + + +def set_moved_to(moved_to: str) -> None: + MOVED_TO_FILE.write_text(moved_to) + + +VERSION = f"2.0.0+{VERSION_COMMIT}" +USER_AGENT = f"microblogpub/{VERSION}" +AP_CONTENT_TYPE = "application/activity+json" + + +class _PrivacyReplace(pydantic.BaseModel): + domain: str + replace_by: str + + +class _ProfileMetadata(pydantic.BaseModel): + key: str + value: str + + +class _BlockedServer(pydantic.BaseModel): + hostname: str + reason: str | None = None + + +class Config(pydantic.BaseModel): + domain: str + username: str + admin_password: bytes + name: str + summary: str + https: bool + icon_url: str | None = None + image_url: str | None = None + secret: str + debug: bool = False + trusted_hosts: list[str] = ["127.0.0.1"] + manually_approves_followers: bool = False + privacy_replace: list[_PrivacyReplace] | None = None + metadata: list[_ProfileMetadata] | None = None + code_highlighting_theme = "friendly_grayscale" + blocked_servers: list[_BlockedServer] = [] + custom_footer: str | None = None + emoji: str | None = None + also_known_as: str | None = None + + hides_followers: bool = False + hides_following: bool = False + + inbox_retention_days: int = 15 + + custom_content_security_policy: str | None = None + + webfinger_domain: str | None = None + + # Config items to make tests easier + sqlalchemy_database: str | None = None + key_path: str | None = None + + session_timeout: int = 3600 * 24 * 3 # in seconds, 3 days by default + csrf_token_exp: int = 3600 + + disabled_notifications: list[str] = [] + + # Only set when the app is served on a non-root path + id: str | None = None + + +def load_config() -> Config: + try: + return Config.parse_obj( + tomli.loads((ROOT_DIR / "data" / _CONFIG_FILE).read_text()) + ) + except FileNotFoundError: + raise ValueError( + f"Please run the configuration wizard, {_CONFIG_FILE} is missing" + ) + + +def is_activitypub_requested(req: Request) -> bool: + accept_value = req.headers.get("accept") + if not accept_value: + return False + for val in { + "application/ld+json", + "application/activity+json", + }: + if accept_value.startswith(val): + return True + + return False + + +def verify_password(pwd: str) -> bool: + return bcrypt.checkpw(pwd.encode(), CONFIG.admin_password) + + +CONFIG = load_config() +DOMAIN = CONFIG.domain +_SCHEME = "https" if CONFIG.https else "http" +ID = f"{_SCHEME}://{DOMAIN}" + +# When running the app on a path, the ID may be set by the config, but in this +# case, a valid webfinger must be served on the root domain +if CONFIG.id: + ID = CONFIG.id +USERNAME = CONFIG.username + +# Allow using @handle@webfinger-domain.tld while hosting the server at domain.tld +WEBFINGER_DOMAIN = CONFIG.webfinger_domain or DOMAIN + +MANUALLY_APPROVES_FOLLOWERS = CONFIG.manually_approves_followers +HIDES_FOLLOWERS = CONFIG.hides_followers +HIDES_FOLLOWING = CONFIG.hides_following +PRIVACY_REPLACE = None +if CONFIG.privacy_replace: + PRIVACY_REPLACE = {pr.domain: pr.replace_by for pr in CONFIG.privacy_replace} + +BLOCKED_SERVERS = {blocked_server.hostname for blocked_server in CONFIG.blocked_servers} +ALSO_KNOWN_AS = CONFIG.also_known_as +CUSTOM_CONTENT_SECURITY_POLICY = CONFIG.custom_content_security_policy + +INBOX_RETENTION_DAYS = CONFIG.inbox_retention_days +SESSION_TIMEOUT = CONFIG.session_timeout +CUSTOM_FOOTER = ( + markdown(CONFIG.custom_footer.replace("{version}", VERSION)) + if CONFIG.custom_footer + else None +) + +BASE_URL = ID +DEBUG = CONFIG.debug +DB_PATH = CONFIG.sqlalchemy_database or ROOT_DIR / "data" / 
"microblogpub.db" +SQLALCHEMY_DATABASE_URL = f"sqlite:///{DB_PATH}" +KEY_PATH = ( + (ROOT_DIR / CONFIG.key_path) if CONFIG.key_path else ROOT_DIR / "data" / "key.pem" +) +EMOJIS = "😺 😸 😹 😻 😼 😽 🙀 😿 😾" +if CONFIG.emoji: + EMOJIS = CONFIG.emoji + +# Emoji template for the FE +EMOJI_TPL = ( + '{raw}' +) + +_load_emojis(ROOT_DIR, BASE_URL) + +CODE_HIGHLIGHTING_THEME = CONFIG.code_highlighting_theme + +MOVED_TO = _get_moved_to() + + +_NavBarItem = tuple[str, str] + + +class NavBarItems: + EXTRA_NAVBAR_ITEMS: list[_NavBarItem] = [] + INDEX_NAVBAR_ITEM: _NavBarItem | None = None + NOTES_PATH = "/" + + +def load_custom_routes() -> None: + try: + from data import custom_routes # type: ignore # noqa: F401 + except ImportError: + pass + + for path, custom_handler in _CUSTOM_ROUTES.items(): + # If a handler wants to replace the root, move the index to /notes + if path == "/": + NavBarItems.NOTES_PATH = "/notes" + NavBarItems.INDEX_NAVBAR_ITEM = (path, custom_handler.title) + else: + if custom_handler.show_in_navbar: + NavBarItems.EXTRA_NAVBAR_ITEMS.append((path, custom_handler.title)) + + +session_serializer = URLSafeTimedSerializer( + CONFIG.secret, + salt=f"{ID}.session", +) +csrf_serializer = URLSafeTimedSerializer( + CONFIG.secret, + salt=f"{ID}.csrf", +) + + +def generate_csrf_token() -> str: + return csrf_serializer.dumps(secrets.token_hex(16)) # type: ignore + + +def verify_csrf_token( + csrf_token: str = Form(), + redirect_url: str | None = Form(None), +) -> None: + please_try_again = "please try again" + if redirect_url: + please_try_again = f'please try again' + try: + csrf_serializer.loads(csrf_token, max_age=CONFIG.csrf_token_exp) + except (itsdangerous.BadData, itsdangerous.SignatureExpired): + logger.exception("Failed to verify CSRF token") + raise HTTPException( + status_code=403, + detail=f"The security token has expired, {please_try_again}", + ) + return None + + +def hmac_sha256() -> hmac.HMAC: + return hmac.new(CONFIG.secret.encode(), digestmod=hashlib.sha256) + + +stream_visibility_callback: _StreamVisibilityCallback +try: + from data.stream import ( # type: ignore # noqa: F401, E501 + custom_stream_visibility_callback, + ) + + stream_visibility_callback = custom_stream_visibility_callback +except ImportError: + stream_visibility_callback = default_stream_visibility_callback diff --git a/app/customization.py b/app/customization.py new file mode 100644 index 0000000..aa107f5 --- /dev/null +++ b/app/customization.py @@ -0,0 +1,155 @@ +from dataclasses import dataclass +from pathlib import Path +from typing import TYPE_CHECKING +from typing import Any +from typing import Callable + +from fastapi import APIRouter +from fastapi import Depends +from fastapi import Request +from loguru import logger +from starlette.responses import JSONResponse + +if TYPE_CHECKING: + from app.ap_object import RemoteObject + + +_DATA_DIR = Path().parent.resolve() / "data" +_Handler = Callable[..., Any] + + +class HTMLPage: + def __init__( + self, + title: str, + html_file: str, + show_in_navbar: bool, + ) -> None: + self.title = title + self.html_file = _DATA_DIR / html_file + self.show_in_navbar = show_in_navbar + + +class RawHandler: + def __init__( + self, + title: str, + handler: Any, + show_in_navbar: bool, + ) -> None: + self.title = title + self.handler = handler + self.show_in_navbar = show_in_navbar + + +_CUSTOM_ROUTES: dict[str, HTMLPage | RawHandler] = {} + + +def register_html_page( + path: str, + *, + title: str, + html_file: str, + show_in_navbar: bool = True, +) -> None: + if path in 
_CUSTOM_ROUTES: + raise ValueError(f"{path} is already registered") + + _CUSTOM_ROUTES[path] = HTMLPage(title, html_file, show_in_navbar) + + +def register_raw_handler( + path: str, + *, + title: str, + handler: _Handler, + show_in_navbar: bool = True, +) -> None: + if path in _CUSTOM_ROUTES: + raise ValueError(f"{path} is already registered") + + _CUSTOM_ROUTES[path] = RawHandler(title, handler, show_in_navbar) + + +class ActivityPubResponse(JSONResponse): + media_type = "application/activity+json" + + +def _custom_page_handler(path: str, html_page: HTMLPage) -> Any: + from app import templates + from app.actor import LOCAL_ACTOR + from app.config import is_activitypub_requested + from app.database import AsyncSession + from app.database import get_db_session + + async def _handler( + request: Request, + db_session: AsyncSession = Depends(get_db_session), + ) -> templates.TemplateResponse | ActivityPubResponse: + if path == "/" and is_activitypub_requested(request): + return ActivityPubResponse(LOCAL_ACTOR.ap_actor) + + return await templates.render_template( + db_session, + request, + "custom_page.html", + { + "page_content": html_page.html_file.read_text(), + "title": html_page.title, + }, + ) + + return _handler + + +def get_custom_router() -> APIRouter | None: + if not _CUSTOM_ROUTES: + return None + + router = APIRouter() + + for path, handler in _CUSTOM_ROUTES.items(): + if isinstance(handler, HTMLPage): + router.add_api_route( + path, _custom_page_handler(path, handler), methods=["GET"] + ) + else: + router.add_api_route(path, handler.handler) + + return router + + +@dataclass +class ObjectInfo: + # Is it a reply? + is_reply: bool + + # Is it a reply to an outbox object + is_local_reply: bool + + # Is the object mentioning the local actor + is_mention: bool + + # Is it from someone the local actor is following + is_from_following: bool + + # List of hashtags, e.g. 
#microblogpub + hashtags: list[str] + + # @dev@microblog.pub + actor_handle: str + + remote_object: "RemoteObject" + + +_StreamVisibilityCallback = Callable[[ObjectInfo], bool] + + +def default_stream_visibility_callback(object_info: ObjectInfo) -> bool: + result = ( + (not object_info.is_reply and object_info.is_from_following) + or object_info.is_mention + or object_info.is_local_reply + ) + logger.info(f"{object_info=}/{result=}") + return result diff --git a/app/database.py b/app/database.py new file mode 100644 index 0000000..e7d07c2 --- /dev/null +++ b/app/database.py @@ -0,0 +1,35 @@ +from typing import Any +from typing import AsyncGenerator + +from sqlalchemy import MetaData +from sqlalchemy import create_engine +from sqlalchemy.ext.asyncio import AsyncSession +from sqlalchemy.ext.asyncio import create_async_engine +from sqlalchemy.ext.declarative import declarative_base +from sqlalchemy.orm import sessionmaker + +from app.config import DB_PATH +from app.config import DEBUG +from app.config import SQLALCHEMY_DATABASE_URL + +engine = create_engine( + SQLALCHEMY_DATABASE_URL, connect_args={"check_same_thread": False, "timeout": 15} +) +SessionLocal = sessionmaker(autocommit=False, autoflush=False, bind=engine) + +DATABASE_URL = f"sqlite+aiosqlite:///{DB_PATH}" +async_engine = create_async_engine( + DATABASE_URL, future=True, echo=DEBUG, connect_args={"timeout": 15} +) +async_session = sessionmaker(async_engine, class_=AsyncSession, expire_on_commit=False) + +Base: Any = declarative_base() +metadata_obj = MetaData() + + +async def get_db_session() -> AsyncGenerator[AsyncSession, None]: + async with async_session() as session: + try: + yield session + finally: + await session.close() diff --git a/app/httpsig.py b/app/httpsig.py new file mode 100644 index 0000000..e9f9e69 --- /dev/null +++ b/app/httpsig.py @@ -0,0 +1,353 @@ +import base64 +import hashlib +import json +import typing +from dataclasses import dataclass +from datetime import datetime +from datetime import timedelta +from datetime import timezone +from typing import Any +from typing import Dict +from typing import MutableMapping +from typing import Optional +from urllib.parse import urlparse + +import fastapi +import httpx +from cachetools import LFUCache +from Crypto.Hash import SHA256 +from Crypto.Signature import PKCS1_v1_5 +from dateutil.parser import parse +from loguru import logger +from sqlalchemy import select + +from app import activitypub as ap +from app import config +from app.config import KEY_PATH +from app.database import AsyncSession +from app.database import get_db_session +from app.key import Key +from app.utils.datetime import now +from app.utils.url import is_hostname_blocked + +_KEY_CACHE: MutableMapping[str, Key] = LFUCache(256) + + +def _build_signed_string( + signed_headers: str, + method: str, + path: str, + headers: Any, + body_digest: str | None, + sig_data: dict[str, Any], +) -> tuple[str, datetime | None]: + signature_date: datetime | None = None + out = [] + for signed_header in signed_headers.split(" "): + if signed_header == "(created)": + signature_date = datetime.fromtimestamp(int(sig_data["created"])).replace( + tzinfo=timezone.utc + ) + elif signed_header == "date": + signature_date = parse(headers["date"]) + + if signed_header == "(request-target)": + out.append("(request-target): " + method.lower() + " " + path) + elif signed_header == "digest" and body_digest: + out.append("digest: " + body_digest) + elif signed_header in ["(created)", "(expires)"]: + out.append( + signed_header + + ": " 
+ + sig_data[signed_header[1 : len(signed_header) - 1]] + ) + else: + out.append(signed_header + ": " + headers[signed_header]) + return "\n".join(out), signature_date + + +def _parse_sig_header(val: Optional[str]) -> Optional[Dict[str, str]]: + if not val: + return None + out = {} + for data in val.split(","): + k, v = data.split("=", 1) + out[k] = v[1 : len(v) - 1] # noqa: black conflict + return out + + +def _verify_h(signed_string, signature, pubkey): + signer = PKCS1_v1_5.new(pubkey) + digest = SHA256.new() + digest.update(signed_string.encode("utf-8")) + return signer.verify(digest, signature) + + +def _body_digest(body: bytes) -> str: + h = hashlib.new("sha256") + h.update(body) # type: ignore + return "SHA-256=" + base64.b64encode(h.digest()).decode("utf-8") + + +async def _get_public_key( + db_session: AsyncSession, + key_id: str, + should_skip_cache: bool = False, +) -> Key: + if not should_skip_cache and (cached_key := _KEY_CACHE.get(key_id)): + logger.info(f"Key {key_id} found in cache") + return cached_key + + # Check if the key belongs to an actor already in DB + from app import models + + existing_actor = ( + await db_session.scalars( + select(models.Actor).where(models.Actor.ap_id == key_id.split("#")[0]) + ) + ).one_or_none() + if not should_skip_cache: + if existing_actor and existing_actor.public_key_id == key_id: + k = Key(existing_actor.ap_id, key_id) + k.load_pub(existing_actor.public_key_as_pem) + logger.info(f"Found {key_id} on an existing actor") + _KEY_CACHE[key_id] = k + return k + + # Fetch it + from app import activitypub as ap + from app.actor import RemoteActor + from app.actor import update_actor_if_needed + + # Fetch the key without signing the request first (as if it were the first + # contact), otherwise the 2 servers might race to fetch each other's keys + try: + actor = await ap.fetch(key_id, disable_httpsig=True) + except ap.ObjectUnavailableError: + actor = await ap.fetch(key_id, disable_httpsig=False) + + if actor["type"] == "Key": + # The Key is not embedded in the Person + k = Key(actor["owner"], actor["id"]) + k.load_pub(actor["publicKeyPem"]) + else: + k = Key(actor["id"], actor["publicKey"]["id"]) + k.load_pub(actor["publicKey"]["publicKeyPem"]) + + # Ensure the right key was fetched + # TODO: some servers have the key ID `http://` but fetching it returns `https` + if key_id not in [k.key_id(), k.owner]: + raise ValueError( + f"failed to fetch requested key {key_id}: got {actor['publicKey']}" + ) + + if should_skip_cache and actor["type"] != "Key" and existing_actor: + # We had to skip the cache, which means the actor key probably changed + # and we want to update our cached version + await update_actor_if_needed(db_session, existing_actor, RemoteActor(actor)) + await db_session.commit() + + _KEY_CACHE[key_id] = k + return k + + +@dataclass(frozen=True) +class HTTPSigInfo: + has_valid_signature: bool + signed_by_ap_actor_id: str | None = None + + is_ap_actor_gone: bool = False + is_unsupported_algorithm: bool = False + is_expired: bool = False + is_from_blocked_server: bool = False + + server: str | None = None + + +async def httpsig_checker( + request: fastapi.Request, + db_session: AsyncSession = fastapi.Depends(get_db_session), +) -> HTTPSigInfo: + body = await request.body() + + hsig = _parse_sig_header(request.headers.get("Signature")) + if not hsig: + logger.info("No HTTP signature found") + return HTTPSigInfo(has_valid_signature=False) + + try: + key_id = hsig["keyId"] + except KeyError: + logger.info("Missing keyId") + return HTTPSigInfo( + has_valid_signature=False, + ) + + 
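+    # For reference, a raw `Signature` header handled by _parse_sig_header
+    # looks roughly like this (illustrative values only, not a real request):
+    #   keyId="https://example.com/users/alice#main-key",algorithm="rsa-sha256",
+    #   headers="(request-target) host date digest",signature="<base64 sig>"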
server = urlparse(key_id).hostname + if is_hostname_blocked(server): + return HTTPSigInfo( + has_valid_signature=False, + server=server, + is_from_blocked_server=True, + ) + + if (alg := hsig.get("algorithm")) not in ["rsa-sha256", "hs2019"]: + logger.info(f"Unsupported HTTP sig algorithm: {alg}") + return HTTPSigInfo( + has_valid_signature=False, + is_unsupported_algorithm=True, + server=server, + ) + + # Try to drop Delete activity spam early on, this prevents making an extra + # HTTP request trying to fetch an unavailable actor to verify the HTTP sig + try: + if request.method == "POST" and request.url.path.endswith("/inbox"): + from app import models # TODO: solve this circular import + + activity = json.loads(body) + actor_id = ap.get_id(activity["actor"]) + if ( + ap.as_list(activity["type"])[0] == "Delete" + and actor_id == ap.get_id(activity["object"]) + and not ( + await db_session.scalars( + select(models.Actor).where( + models.Actor.ap_id == actor_id, + ) + ) + ).one_or_none() + ): + logger.info(f"Dropping Delete activity early for {body=}") + raise fastapi.HTTPException(status_code=202) + except fastapi.HTTPException as http_exc: + raise http_exc + except Exception: + logger.exception("Failed to check for Delete spam") + + # logger.debug(f"hsig={hsig}") + signed_string, signature_date = _build_signed_string( + hsig["headers"], + request.method, + request.url.path, + request.headers, + _body_digest(body) if body else None, + hsig, + ) + + # Sanity checks on the signature date + if signature_date is None or now() - signature_date > timedelta(hours=12): + logger.info(f"Signature expired: {signature_date=}") + return HTTPSigInfo( + has_valid_signature=False, + is_expired=True, + server=server, + ) + + try: + k = await _get_public_key(db_session, hsig["keyId"]) + except (ap.ObjectIsGoneError, ap.ObjectNotFoundError): + logger.info("Actor is gone or not found") + return HTTPSigInfo(has_valid_signature=False, is_ap_actor_gone=True) + except Exception: + logger.exception(f'Failed to fetch HTTP sig key {hsig["keyId"]}') + return HTTPSigInfo(has_valid_signature=False) + + has_valid_signature = _verify_h( + signed_string, base64.b64decode(hsig["signature"]), k.pubkey + ) + + # If the signature is not valid, we may have to update the cached actor + if not has_valid_signature: + logger.info("Invalid signature, trying to refresh actor") + try: + k = await _get_public_key(db_session, hsig["keyId"], should_skip_cache=True) + has_valid_signature = _verify_h( + signed_string, base64.b64decode(hsig["signature"]), k.pubkey + ) + except Exception: + logger.exception("Failed to refresh actor") + + httpsig_info = HTTPSigInfo( + has_valid_signature=has_valid_signature, + signed_by_ap_actor_id=k.owner, + server=server, + ) + logger.info(f"Valid HTTP signature for {httpsig_info.signed_by_ap_actor_id}") + return httpsig_info + + +async def enforce_httpsig( + request: fastapi.Request, + httpsig_info: HTTPSigInfo = fastapi.Depends(httpsig_checker), +) -> HTTPSigInfo: + """FastAPI Depends""" + if httpsig_info.is_from_blocked_server: + logger.warning(f"{httpsig_info.server} is blocked") + raise fastapi.HTTPException(status_code=403, detail="Blocked") + + if not httpsig_info.has_valid_signature: + logger.warning(f"Invalid HTTP sig {httpsig_info=}") + body = await request.body() + logger.info(f"{body=}") + + # Special case for Mastodon instances that keep resending Delete + # activities for actors we don't know about if we raise a 401 + if httpsig_info.is_ap_actor_gone: + logger.info("Let's make Mastodon happy, 
returning a 202") + raise fastapi.HTTPException(status_code=202) + + detail = "Invalid HTTP sig" + if httpsig_info.is_unsupported_algorithm: + detail = "Unsupported signature algorithm, must be rsa-sha256 or hs2019" + elif httpsig_info.is_expired: + detail = "Signature expired" + + raise fastapi.HTTPException(status_code=401, detail=detail) + + return httpsig_info + + +class HTTPXSigAuth(httpx.Auth): + def __init__(self, key: Key) -> None: + self.key = key + + def auth_flow( + self, r: httpx.Request + ) -> typing.Generator[httpx.Request, httpx.Response, None]: + logger.info(f"keyid={self.key.key_id()}") + + bodydigest = None + if r.content: + bh = hashlib.new("sha256") + bh.update(r.content) + bodydigest = "SHA-256=" + base64.b64encode(bh.digest()).decode("utf-8") + + date = datetime.utcnow().strftime("%a, %d %b %Y %H:%M:%S GMT") + r.headers["Date"] = date + if bodydigest: + r.headers["Digest"] = bodydigest + sigheaders = "(request-target) user-agent host date digest content-type" + else: + sigheaders = "(request-target) user-agent host date accept" + + to_be_signed, _ = _build_signed_string( + sigheaders, r.method, r.url.path, r.headers, bodydigest, {} + ) + if not self.key.privkey: + raise ValueError("Should never happen") + signer = PKCS1_v1_5.new(self.key.privkey) + digest = SHA256.new() + digest.update(to_be_signed.encode("utf-8")) + sig = base64.b64encode(signer.sign(digest)).decode() + + key_id = self.key.key_id() + sig_value = f'keyId="{key_id}",algorithm="rsa-sha256",headers="{sigheaders}",signature="{sig}"' # noqa: E501 + logger.debug(f"signed request {sig_value=}") + r.headers["Signature"] = sig_value + yield r + + +k = Key(config.ID, f"{config.ID}#main-key") +k.load(KEY_PATH.read_text()) +auth = HTTPXSigAuth(k) diff --git a/app/incoming_activities.py b/app/incoming_activities.py new file mode 100644 index 0000000..94dd0d6 --- /dev/null +++ b/app/incoming_activities.py @@ -0,0 +1,165 @@ +import asyncio +import traceback +from datetime import datetime +from datetime import timedelta + +from loguru import logger +from sqlalchemy import func +from sqlalchemy import select + +from app import activitypub as ap +from app import httpsig +from app import ldsig +from app import models +from app.boxes import save_to_inbox +from app.database import AsyncSession +from app.utils.datetime import now +from app.utils.workers import Worker + +_MAX_RETRIES = 8 + + +async def new_ap_incoming_activity( + db_session: AsyncSession, + httpsig_info: httpsig.HTTPSigInfo, + raw_object: ap.RawObject, +) -> models.IncomingActivity | None: + ap_id: str + if "id" not in raw_object or ap.as_list(raw_object["type"])[0] in ap.ACTOR_TYPES: + if "@context" not in raw_object: + logger.warning(f"Dropping invalid object: {raw_object}") + return None + else: + # This is a transient object, Build the JSON LD hash as the ID + ap_id = ldsig._doc_hash(raw_object) + else: + ap_id = ap.get_id(raw_object) + + # TODO(ts): dedup first + + incoming_activity = models.IncomingActivity( + sent_by_ap_actor_id=httpsig_info.signed_by_ap_actor_id, + ap_id=ap_id, + ap_object=raw_object, + ) + db_session.add(incoming_activity) + await db_session.commit() + await db_session.refresh(incoming_activity) + return incoming_activity + + +def _exp_backoff(tries: int) -> datetime: + seconds = 2 * (2 ** (tries - 1)) + return now() + timedelta(seconds=seconds) + + +def _set_next_try( + outgoing_activity: models.IncomingActivity, + next_try: datetime | None = None, +) -> None: + if not outgoing_activity.tries: + raise ValueError("Should never 
happen") + + if outgoing_activity.tries >= _MAX_RETRIES: + outgoing_activity.is_errored = True + outgoing_activity.next_try = None + else: + outgoing_activity.next_try = next_try or _exp_backoff(outgoing_activity.tries) + + +async def fetch_next_incoming_activity( + db_session: AsyncSession, +) -> models.IncomingActivity | None: + where = [ + models.IncomingActivity.next_try <= now(), + models.IncomingActivity.is_errored.is_(False), + models.IncomingActivity.is_processed.is_(False), + ] + q_count = await db_session.scalar( + select(func.count(models.IncomingActivity.id)).where(*where) + ) + if q_count > 0: + logger.info(f"{q_count} incoming activities ready to process") + if not q_count: + # logger.debug("No activities to process") + return None + + next_activity = ( + await db_session.execute( + select(models.IncomingActivity) + .where(*where) + .limit(1) + .order_by(models.IncomingActivity.next_try.asc()) + ) + ).scalar_one() + + return next_activity + + +async def process_next_incoming_activity( + db_session: AsyncSession, + next_activity: models.IncomingActivity, +) -> None: + logger.info( + f"incoming_activity={next_activity.ap_object}/" + f"{next_activity.sent_by_ap_actor_id}" + ) + + next_activity.tries = next_activity.tries + 1 + next_activity.last_try = now() + await db_session.commit() + + if next_activity.ap_object and next_activity.sent_by_ap_actor_id: + try: + async with db_session.begin_nested(): + await asyncio.wait_for( + save_to_inbox( + db_session, + next_activity.ap_object, + next_activity.sent_by_ap_actor_id, + ), + timeout=60, + ) + except asyncio.exceptions.TimeoutError: + logger.error("Activity took too long to process") + await db_session.rollback() + await db_session.refresh(next_activity) + next_activity.error = traceback.format_exc() + _set_next_try(next_activity) + except Exception: + logger.exception("Failed") + await db_session.rollback() + await db_session.refresh(next_activity) + next_activity.error = traceback.format_exc() + _set_next_try(next_activity) + else: + logger.info("Success") + next_activity.is_processed = True + + # FIXME: webmention support + + await db_session.commit() + return None + + +class IncomingActivityWorker(Worker[models.IncomingActivity]): + async def process_message( + self, + db_session: AsyncSession, + next_activity: models.IncomingActivity, + ) -> None: + await process_next_incoming_activity(db_session, next_activity) + + async def get_next_message( + self, + db_session: AsyncSession, + ) -> models.IncomingActivity | None: + return await fetch_next_incoming_activity(db_session) + + +async def loop() -> None: + await IncomingActivityWorker().run_forever() + + +if __name__ == "__main__": + asyncio.run(loop()) diff --git a/app/indieauth.py b/app/indieauth.py new file mode 100644 index 0000000..ea4d21d --- /dev/null +++ b/app/indieauth.py @@ -0,0 +1,552 @@ +import secrets +from dataclasses import dataclass +from datetime import timedelta +from datetime import timezone +from typing import Any + +from fastapi import APIRouter +from fastapi import Depends +from fastapi import Form +from fastapi import HTTPException +from fastapi import Request +from fastapi.responses import JSONResponse +from fastapi.security import HTTPBasic +from fastapi.security import HTTPBasicCredentials +from loguru import logger +from pydantic import BaseModel +from sqlalchemy import select +from sqlalchemy.orm import joinedload + +from app import config +from app import models +from app import templates +from app.admin import user_session_or_redirect +from 
app.config import verify_csrf_token +from app.database import AsyncSession +from app.database import get_db_session +from app.redirect import redirect +from app.utils import indieauth +from app.utils.datetime import now + +basic_auth = HTTPBasic() + +router = APIRouter() + + +@router.get("/.well-known/oauth-authorization-server") +async def well_known_authorization_server( + request: Request, +) -> dict[str, Any]: + return { + "issuer": config.ID + "/", + "authorization_endpoint": request.url_for("indieauth_authorization_endpoint"), + "token_endpoint": request.url_for("indieauth_token_endpoint"), + "code_challenge_methods_supported": ["S256"], + "revocation_endpoint": request.url_for("indieauth_revocation_endpoint"), + "revocation_endpoint_auth_methods_supported": ["none"], + "registration_endpoint": request.url_for("oauth_registration_endpoint"), + "introspection_endpoint": request.url_for("oauth_introspection_endpoint"), + } + + +class OAuthRegisterClientRequest(BaseModel): + client_name: str + redirect_uris: list[str] | str + + client_uri: str | None = None + logo_uri: str | None = None + scope: str | None = None + + +@router.post("/oauth/register") +async def oauth_registration_endpoint( + register_client_request: OAuthRegisterClientRequest, + db_session: AsyncSession = Depends(get_db_session), +) -> JSONResponse: + """Implements OAuth 2.0 Dynamic Registration.""" + + client = models.OAuthClient( + client_name=register_client_request.client_name, + redirect_uris=[register_client_request.redirect_uris] + if isinstance(register_client_request.redirect_uris, str) + else register_client_request.redirect_uris, + client_uri=register_client_request.client_uri, + logo_uri=register_client_request.logo_uri, + scope=register_client_request.scope, + client_id=secrets.token_hex(16), + client_secret=secrets.token_hex(32), + ) + + db_session.add(client) + await db_session.commit() + + return JSONResponse( + content={ + **register_client_request.dict(), + "client_id_issued_at": int(client.created_at.timestamp()), # type: ignore + "grant_types": ["authorization_code", "refresh_token"], + "client_secret_expires_at": 0, + "client_id": client.client_id, + "client_secret": client.client_secret, + }, + status_code=201, + ) + + +@router.get("/auth") +async def indieauth_authorization_endpoint( + request: Request, + db_session: AsyncSession = Depends(get_db_session), + _: None = Depends(user_session_or_redirect), +) -> templates.TemplateResponse: + me = request.query_params.get("me") + client_id = request.query_params.get("client_id") + redirect_uri = request.query_params.get("redirect_uri") + state = request.query_params.get("state", "") + response_type = request.query_params.get("response_type", "id") + scope = request.query_params.get("scope", "").split() + code_challenge = request.query_params.get("code_challenge", "") + code_challenge_method = request.query_params.get("code_challenge_method", "") + + # Check if the authorization request is coming from an OAuth client + registered_client = ( + await db_session.scalars( + select(models.OAuthClient).where( + models.OAuthClient.client_id == client_id, + ) + ) + ).one_or_none() + if registered_client: + client = { + "name": registered_client.client_name, + "logo": registered_client.logo_uri, + "url": registered_client.client_uri, + } + else: + client = await indieauth.get_client_id_data(client_id) # type: ignore + + return await templates.render_template( + db_session, + request, + "indieauth_flow.html", + dict( + client=client, + scopes=scope, + 
redirect_uri=redirect_uri, + state=state, + response_type=response_type, + client_id=client_id, + me=me, + code_challenge=code_challenge, + code_challenge_method=code_challenge_method, + ), + ) + + +@router.post("/admin/indieauth") +async def indieauth_flow( + request: Request, + db_session: AsyncSession = Depends(get_db_session), + csrf_check: None = Depends(verify_csrf_token), + _: None = Depends(user_session_or_redirect), +) -> templates.TemplateResponse: + form_data = await request.form() + logger.info(f"{form_data=}") + + # Params needed for the redirect + redirect_uri = form_data["redirect_uri"] + code = secrets.token_urlsafe(32) + iss = config.ID + "/" + state = form_data["state"] + + scope = " ".join(form_data.getlist("scopes")) + client_id = form_data["client_id"] + + # TODO: Ensure that me is correct + # me = form_data.get("me") + + # XXX: should always be code + # response_type = form_data["response_type"] + + code_challenge = form_data["code_challenge"] + code_challenge_method = form_data["code_challenge_method"] + + auth_request = models.IndieAuthAuthorizationRequest( + code=code, + scope=scope, + redirect_uri=redirect_uri, + client_id=client_id, + code_challenge=code_challenge, + code_challenge_method=code_challenge_method, + ) + + db_session.add(auth_request) + await db_session.commit() + + return await redirect( + request, db_session, redirect_uri + f"?code={code}&state={state}&iss={iss}" + ) + + +async def _check_auth_code( + db_session: AsyncSession, + code: str, + client_id: str, + redirect_uri: str, + code_verifier: str | None, +) -> tuple[bool, models.IndieAuthAuthorizationRequest | None]: + auth_code_req = ( + await db_session.scalars( + select(models.IndieAuthAuthorizationRequest).where( + models.IndieAuthAuthorizationRequest.code == code + ) + ) + ).one_or_none() + if not auth_code_req: + return False, None + if auth_code_req.is_used: + logger.info("code was already used") + return False, None + + if now() > auth_code_req.created_at.replace(tzinfo=timezone.utc) + timedelta( + seconds=120 + ): + logger.info("Auth code request expired") + return False, None + + if ( + auth_code_req.redirect_uri != redirect_uri + or auth_code_req.client_id != client_id + ): + logger.info("redirect_uri/client_id does not match request") + return False, None + + auth_code_req.is_used = True + await db_session.commit() + + return True, auth_code_req + + +@router.post("/auth") +async def indieauth_redeem_auth_code( + request: Request, + db_session: AsyncSession = Depends(get_db_session), +) -> JSONResponse: + form_data = await request.form() + logger.info(f"{form_data=}") + grant_type = form_data.get("grant_type", "authorization_code") + if grant_type != "authorization_code": + raise ValueError(f"Invalid grant_type {grant_type}") + + code = form_data["code"] + + # These must match the params from the first request + client_id = form_data["client_id"] + redirect_uri = form_data["redirect_uri"] + # code_verifier is optional for backward compat + code_verifier = form_data.get("code_verifier") + + is_code_valid, _ = await _check_auth_code( + db_session, + code=code, + client_id=client_id, + redirect_uri=redirect_uri, + code_verifier=code_verifier, + ) + if is_code_valid: + return JSONResponse( + content={ + "me": config.ID + "/", + }, + status_code=200, + ) + else: + return JSONResponse( + content={"error": "invalid_grant"}, + status_code=400, + ) + + +@router.post("/token") +async def indieauth_token_endpoint( + request: Request, + db_session: AsyncSession = Depends(get_db_session), +) -> 
JSONResponse: + form_data = await request.form() + logger.info(f"{form_data=}") + grant_type = form_data.get("grant_type", "authorization_code") + if grant_type not in ["authorization_code", "refresh_token"]: + raise ValueError(f"Invalid grant_type {grant_type}") + + # These must match the params from the first request + client_id = form_data["client_id"] + code_verifier = form_data.get("code_verifier") + + if grant_type == "authorization_code": + code = form_data["code"] + redirect_uri = form_data["redirect_uri"] + # code_verifier is optional for backward compat + is_code_valid, auth_code_request = await _check_auth_code( + db_session, + code=code, + client_id=client_id, + redirect_uri=redirect_uri, + code_verifier=code_verifier, + ) + if not is_code_valid or (auth_code_request and not auth_code_request.scope): + return JSONResponse( + content={"error": "invalid_grant"}, + status_code=400, + ) + + elif grant_type == "refresh_token": + refresh_token = form_data["refresh_token"] + access_token = ( + await db_session.scalars( + select(models.IndieAuthAccessToken) + .where( + models.IndieAuthAccessToken.refresh_token == refresh_token, + models.IndieAuthAccessToken.was_refreshed.is_(False), + ) + .options( + joinedload( + models.IndieAuthAccessToken.indieauth_authorization_request + ) + ) + ) + ).one_or_none() + if not access_token: + raise ValueError("invalid refresh token") + + if access_token.indieauth_authorization_request.client_id != client_id: + raise ValueError("invalid client ID") + + auth_code_request = access_token.indieauth_authorization_request + access_token.was_refreshed = True + + if not auth_code_request: + raise ValueError("Should never happen") + + access_token = models.IndieAuthAccessToken( + indieauth_authorization_request_id=auth_code_request.id, + access_token=secrets.token_urlsafe(32), + refresh_token=secrets.token_urlsafe(32), + expires_in=3600, + scope=auth_code_request.scope, + ) + db_session.add(access_token) + await db_session.commit() + + return JSONResponse( + content={ + "access_token": access_token.access_token, + "refresh_token": access_token.refresh_token, + "token_type": "Bearer", + "scope": auth_code_request.scope, + "me": config.ID + "/", + "expires_in": 3600, + }, + status_code=200, + ) + + +async def _check_access_token( + db_session: AsyncSession, + token: str, +) -> tuple[bool, models.IndieAuthAccessToken | None]: + access_token_info = ( + await db_session.scalars( + select(models.IndieAuthAccessToken) + .where(models.IndieAuthAccessToken.access_token == token) + .options( + joinedload(models.IndieAuthAccessToken.indieauth_authorization_request) + ) + ) + ).one_or_none() + if not access_token_info: + return False, None + + if access_token_info.is_revoked: + logger.info("Access token is revoked") + return False, None + + if now() > access_token_info.created_at.replace(tzinfo=timezone.utc) + timedelta( + seconds=access_token_info.expires_in + ): + logger.info("Access token has expired") + return False, None + + return True, access_token_info + + +@dataclass(frozen=True) +class AccessTokenInfo: + scopes: list[str] + client_id: str | None + access_token: str + exp: int + + +async def verify_access_token( + request: Request, + db_session: AsyncSession = Depends(get_db_session), +) -> AccessTokenInfo: + token = request.headers.get("Authorization", "").removeprefix("Bearer ") + + # Check if the token is within the form data + if not token: + form_data = await request.form() + if "access_token" in form_data: + token = form_data.get("access_token") + + 
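+    # At this point `token` came either from an `Authorization: Bearer <token>`
+    # header or, for form-encoded requests, from an `access_token` form field
+    # (illustrative example of the latter: access_token=xyz...&h=entry&content=hi)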
is_token_valid, access_token = await _check_access_token(db_session, token) + if not is_token_valid: + raise HTTPException( + detail="Invalid access token", + status_code=401, + ) + + if not access_token or not access_token.scope: + raise ValueError("Should never happen") + + return AccessTokenInfo( + scopes=access_token.scope.split(), + client_id=( + access_token.indieauth_authorization_request.client_id + if access_token.indieauth_authorization_request + else None + ), + access_token=access_token.access_token, + exp=int( + ( + access_token.created_at.replace(tzinfo=timezone.utc) + + timedelta(seconds=access_token.expires_in) + ).timestamp() + ), + ) + + +async def check_access_token( + request: Request, + db_session: AsyncSession = Depends(get_db_session), +) -> AccessTokenInfo | None: + token = request.headers.get("Authorization", "").removeprefix("Bearer ") + if not token: + return None + + is_token_valid, access_token = await _check_access_token(db_session, token) + if not is_token_valid: + return None + + if not access_token or not access_token.scope: + raise ValueError("Should never happen") + + access_token_info = AccessTokenInfo( + scopes=access_token.scope.split(), + client_id=( + access_token.indieauth_authorization_request.client_id + if access_token.indieauth_authorization_request + else None + ), + access_token=access_token.access_token, + exp=int( + ( + access_token.created_at.replace(tzinfo=timezone.utc) + + timedelta(seconds=access_token.expires_in) + ).timestamp() + ), + ) + + logger.info( + "Authenticated with access token from client_id=" + f"{access_token_info.client_id} scopes={access_token.scope}" + ) + + return access_token_info + + +async def enforce_access_token( + request: Request, + db_session: AsyncSession = Depends(get_db_session), +) -> AccessTokenInfo: + maybe_access_token_info = await check_access_token(request, db_session) + if not maybe_access_token_info: + raise HTTPException(status_code=401, detail="access token required") + + return maybe_access_token_info + + +@router.post("/revoke_token") +async def indieauth_revocation_endpoint( + request: Request, + token: str = Form(), + db_session: AsyncSession = Depends(get_db_session), +) -> JSONResponse: + + is_token_valid, token_info = await _check_access_token(db_session, token) + if is_token_valid: + if not token_info: + raise ValueError("Should never happen") + + token_info.is_revoked = True + await db_session.commit() + + return JSONResponse( + content={}, + status_code=200, + ) + + +@router.post("/token_introspection") +async def oauth_introspection_endpoint( + request: Request, + credentials: HTTPBasicCredentials = Depends(basic_auth), + db_session: AsyncSession = Depends(get_db_session), + token: str = Form(), +) -> JSONResponse: + registered_client = ( + await db_session.scalars( + select(models.OAuthClient).where( + models.OAuthClient.client_id == credentials.username, + models.OAuthClient.client_secret == credentials.password, + ) + ) + ).one_or_none() + if not registered_client: + raise HTTPException(status_code=401, detail="unauthenticated") + + access_token = ( + await db_session.scalars( + select(models.IndieAuthAccessToken) + .where(models.IndieAuthAccessToken.access_token == token) + .join( + models.IndieAuthAuthorizationRequest, + models.IndieAuthAccessToken.indieauth_authorization_request_id + == models.IndieAuthAuthorizationRequest.id, + ) + .where( + models.IndieAuthAuthorizationRequest.client_id == credentials.username + ) + ) + ).one_or_none() + if not access_token: + return 
JSONResponse(content={"active": False}) + + is_token_valid, _ = await _check_access_token(db_session, token) + if not is_token_valid: + return JSONResponse(content={"active": False}) + + return JSONResponse( + content={ + "active": True, + "client_id": credentials.username, + "scope": access_token.scope, + "exp": int( + ( + access_token.created_at.replace(tzinfo=timezone.utc) + + timedelta(seconds=access_token.expires_in) + ).timestamp() + ), + }, + status_code=200, + ) diff --git a/app/key.py b/app/key.py new file mode 100644 index 0000000..429446d --- /dev/null +++ b/app/key.py @@ -0,0 +1,75 @@ +import base64 +from pathlib import Path +from typing import Any + +from Crypto.PublicKey import RSA +from Crypto.Util import number + + +def generate_key(key_path: Path) -> None: + if key_path.exists(): + raise ValueError(f"Key at {key_path} already exists") + k = RSA.generate(2048) + privkey_pem = k.exportKey("PEM").decode("utf-8") + key_path.write_text(privkey_pem) + + +def get_pubkey_as_pem(key_path: Path) -> str: + text = key_path.read_text() + return RSA.import_key(text).public_key().export_key("PEM").decode("utf-8") + + +class Key(object): + DEFAULT_KEY_SIZE = 2048 + + def __init__(self, owner: str, id_: str | None = None) -> None: + self.owner = owner + self.privkey_pem: str | None = None + self.pubkey_pem: str | None = None + self.privkey: RSA.RsaKey | None = None + self.pubkey: RSA.RsaKey | None = None + self.id_ = id_ + + def load_pub(self, pubkey_pem: str) -> None: + self.pubkey_pem = pubkey_pem + self.pubkey = RSA.importKey(pubkey_pem) + + def load(self, privkey_pem: str) -> None: + self.privkey_pem = privkey_pem + self.privkey = RSA.importKey(self.privkey_pem) + self.pubkey_pem = self.privkey.publickey().exportKey("PEM").decode("utf-8") + + def new(self) -> None: + k = RSA.generate(self.DEFAULT_KEY_SIZE) + self.privkey_pem = k.exportKey("PEM").decode("utf-8") + self.pubkey_pem = k.publickey().exportKey("PEM").decode("utf-8") + self.privkey = k + + def key_id(self) -> str: + return self.id_ or f"{self.owner}#main-key" + + def to_dict(self) -> dict[str, Any]: + return { + "id": self.key_id(), + "owner": self.owner, + "publicKeyPem": self.pubkey_pem, + "type": "Key", + } + + @classmethod + def from_dict(cls, data): + try: + k = cls(data["owner"], data["id"]) + k.load_pub(data["publicKeyPem"]) + except KeyError: + raise ValueError(f"bad key data {data!r}") + return k + + def to_magic_key(self) -> str: + mod = base64.urlsafe_b64encode( + number.long_to_bytes(self.privkey.n) # type: ignore + ).decode("utf-8") + pubexp = base64.urlsafe_b64encode( + number.long_to_bytes(self.privkey.e) # type: ignore + ).decode("utf-8") + return f"data:application/magic-public-key,RSA.{mod}.{pubexp}" diff --git a/app/ldsig.py b/app/ldsig.py new file mode 100644 index 0000000..2eea9de --- /dev/null +++ b/app/ldsig.py @@ -0,0 +1,98 @@ +import base64 +import hashlib +import typing +from datetime import datetime + +import pyld # type: ignore +from Crypto.Hash import SHA256 +from Crypto.Signature import PKCS1_v1_5 +from loguru import logger +from pyld import jsonld # type: ignore + +from app import activitypub as ap +from app.database import AsyncSession +from app.httpsig import _get_public_key + +if typing.TYPE_CHECKING: + from app.key import Key + + +requests_loader = pyld.documentloader.requests.requests_document_loader() + + +def _loader(url, options={}): + # See https://github.com/digitalbazaar/pyld/issues/133 + options["headers"]["Accept"] = "application/ld+json" + + # XXX: temp fix/hack is it seems to be 
down for now + if url == "https://w3id.org/identity/v1": + url = ( + "https://raw.githubusercontent.com/web-payments/web-payments.org" + "/master/contexts/identity-v1.jsonld" + ) + return requests_loader(url, options) + + +pyld.jsonld.set_document_loader(_loader) + + +def _options_hash(doc: ap.RawObject) -> str: + doc = dict(doc["signature"]) + for k in ["type", "id", "signatureValue"]: + if k in doc: + del doc[k] + doc["@context"] = "https://w3id.org/security/v1" + normalized = jsonld.normalize( + doc, {"algorithm": "URDNA2015", "format": "application/nquads"} + ) + h = hashlib.new("sha256") + h.update(normalized.encode("utf-8")) + return h.hexdigest() + + +def _doc_hash(doc: ap.RawObject) -> str: + doc = dict(doc) + if "signature" in doc: + del doc["signature"] + normalized = jsonld.normalize( + doc, {"algorithm": "URDNA2015", "format": "application/nquads"} + ) + h = hashlib.new("sha256") + h.update(normalized.encode("utf-8")) + return h.hexdigest() + + +async def verify_signature( + db_session: AsyncSession, + doc: ap.RawObject, +) -> bool: + if "signature" not in doc: + logger.warning("The object does not contain a signature") + return False + + key_id = doc["signature"]["creator"] + key = await _get_public_key(db_session, key_id) + to_be_signed = _options_hash(doc) + _doc_hash(doc) + signature = doc["signature"]["signatureValue"] + signer = PKCS1_v1_5.new(key.pubkey or key.privkey) # type: ignore + digest = SHA256.new() + digest.update(to_be_signed.encode("utf-8")) + return signer.verify(digest, base64.b64decode(signature)) # type: ignore + + +def generate_signature(doc: ap.RawObject, key: "Key") -> None: + options = { + "type": "RsaSignature2017", + "creator": doc["actor"] + "#main-key", + "created": datetime.utcnow().replace(microsecond=0).isoformat() + "Z", + } + doc["signature"] = options + to_be_signed = _options_hash(doc) + _doc_hash(doc) + if not key.privkey: + raise ValueError(f"missing privkey on key {key!r}") + + signer = PKCS1_v1_5.new(key.privkey) + digest = SHA256.new() + digest.update(to_be_signed.encode("utf-8")) + sig = base64.b64encode(signer.sign(digest)) # type: ignore + options["signatureValue"] = sig.decode("utf-8") diff --git a/app/lookup.py b/app/lookup.py new file mode 100644 index 0000000..c1b6ef1 --- /dev/null +++ b/app/lookup.py @@ -0,0 +1,46 @@ +import mf2py # type: ignore + +from app import activitypub as ap +from app import webfinger +from app.actor import Actor +from app.actor import RemoteActor +from app.ap_object import RemoteObject +from app.database import AsyncSession +from app.source import _MENTION_REGEX + + +async def lookup(db_session: AsyncSession, query: str) -> Actor | RemoteObject: + query = query.strip() + if query.startswith("@") or _MENTION_REGEX.match("@" + query): + query = await webfinger.get_actor_url(query) # type: ignore # None check below + + if not query: + raise ap.NotAnObjectError(query) + + try: + ap_obj = await ap.fetch(query) + except ap.NotAnObjectError as not_an_object_error: + resp = not_an_object_error.resp + if not resp: + raise ap.NotAnObjectError(query) + + alternate_obj = None + if resp.headers.get("content-type", "").startswith("text/html"): + for alternate in mf2py.parse(doc=resp.text).get("alternates", []): + if alternate.get("type") == "application/activity+json": + alternate_obj = await ap.fetch(alternate["url"]) + + if alternate_obj: + ap_obj = alternate_obj + else: + raise + + if ap.as_list(ap_obj["type"])[0] in ap.ACTOR_TYPES: + return RemoteActor(ap_obj) + else: + # Some software returns objects wrapped in a 
Create activity (like + # python-federation) + if ap.as_list(ap_obj["type"])[0] == "Create": + ap_obj = await ap.get_object(ap_obj) + + return await RemoteObject.from_raw_object(ap_obj) diff --git a/app/main.py b/app/main.py new file mode 100644 index 0000000..103d15e --- /dev/null +++ b/app/main.py @@ -0,0 +1,1728 @@ +import base64 +import os +import sys +import time +from datetime import timezone +from io import BytesIO +from typing import Any +from typing import MutableMapping +from typing import Type + +import fastapi +import httpx +import starlette +from asgiref.typing import ASGI3Application +from asgiref.typing import ASGIReceiveCallable +from asgiref.typing import ASGISendCallable +from asgiref.typing import Scope +from cachetools import LFUCache +from fastapi import Depends +from fastapi import FastAPI +from fastapi import Form +from fastapi import Request +from fastapi import Response +from fastapi.exception_handlers import http_exception_handler +from fastapi.exceptions import HTTPException +from fastapi.responses import FileResponse +from fastapi.responses import PlainTextResponse +from fastapi.responses import RedirectResponse +from fastapi.responses import StreamingResponse +from fastapi.staticfiles import StaticFiles +from feedgen.feed import FeedGenerator # type: ignore +from loguru import logger +from PIL import Image +from sqlalchemy import func +from sqlalchemy import select +from sqlalchemy.orm import joinedload +from starlette.background import BackgroundTask +from starlette.datastructures import Headers +from starlette.datastructures import MutableHeaders +from starlette.exceptions import HTTPException as StarletteHTTPException +from starlette.responses import JSONResponse +from starlette.types import Message +from uvicorn.middleware.proxy_headers import ProxyHeadersMiddleware # type: ignore + +from app import activitypub as ap +from app import admin +from app import boxes +from app import config +from app import httpsig +from app import indieauth +from app import media +from app import micropub +from app import models +from app import templates +from app import webmentions +from app.actor import LOCAL_ACTOR +from app.actor import get_actors_metadata +from app.boxes import public_outbox_objects_count +from app.config import BASE_URL +from app.config import DEBUG +from app.config import DOMAIN +from app.config import ID +from app.config import USER_AGENT +from app.config import USERNAME +from app.config import WEBFINGER_DOMAIN +from app.config import is_activitypub_requested +from app.config import verify_csrf_token +from app.customization import get_custom_router +from app.database import AsyncSession +from app.database import async_session +from app.database import get_db_session +from app.incoming_activities import new_ap_incoming_activity +from app.templates import is_current_user_admin +from app.uploads import UPLOAD_DIR +from app.utils import pagination +from app.utils.emoji import EMOJIS_BY_NAME +from app.utils.facepile import Face +from app.utils.facepile import WebmentionReply +from app.utils.facepile import merge_faces +from app.utils.highlight import HIGHLIGHT_CSS_HASH +from app.utils.url import check_url +from app.webfinger import get_remote_follow_template + +# Only images <1MB will be cached, so 32MB of data will be cached +_RESIZED_CACHE: MutableMapping[tuple[str, int], tuple[bytes, str, Any]] = LFUCache(32) + + +# TODO(ts): +# Next: +# - self-destruct + move support and actions/tasks for +# - doc for prune/move/delete +# - fix issue with followers from 
a blocked server (skip it?) +# - allow to share old notes +# - only show 10 most recent threads in DMs +# - prevent double accept/double follow +# - UI support for updating posts +# - indieauth tweaks +# - support update post with history? + + +class CustomMiddleware: + """Raw ASGI middleware as using starlette base middleware causes issues + with both: + - Jinja2: https://github.com/encode/starlette/issues/472 + - async SQLAlchemy: https://github.com/tiangolo/fastapi/issues/4719 + """ + + def __init__( + self, + app: ASGI3Application, + ) -> None: + self.app = app + + async def __call__( + self, scope: Scope, receive: ASGIReceiveCallable, send: ASGISendCallable + ) -> None: + # We only care about HTTP requests + if scope["type"] != "http": + await self.app(scope, receive, send) + return + + response_details = {"status_code": None} + start_time = time.perf_counter() + request_id = os.urandom(8).hex() + + async def send_wrapper(message: Message) -> None: + if message["type"] == "http.response.start": + + # Extract the HTTP response status code + response_details["status_code"] = message["status"] + + # And add the security headers + headers = MutableHeaders(scope=message) + headers["X-Request-ID"] = request_id + headers["x-powered-by"] = "microblogpub" + headers[ + "referrer-policy" + ] = "no-referrer, strict-origin-when-cross-origin" + headers["x-content-type-options"] = "nosniff" + headers["x-xss-protection"] = "1; mode=block" + headers["x-frame-options"] = "DENY" + headers["permissions-policy"] = "interest-cohort=()" + headers["content-security-policy"] = ( + ( + f"default-src 'self'; " + f"style-src 'self' 'sha256-{HIGHLIGHT_CSS_HASH}'; " + f"frame-ancestors 'none'; base-uri 'self'; form-action 'self';" + ) + if not config.CUSTOM_CONTENT_SECURITY_POLICY + else config.CUSTOM_CONTENT_SECURITY_POLICY.format( + HIGHLIGHT_CSS_HASH=HIGHLIGHT_CSS_HASH + ) + ) + if not DEBUG: + headers["strict-transport-security"] = "max-age=63072000;" + + await send(message) # type: ignore + + # Make loguru output the request ID on every log statement within + # the request + with logger.contextualize(request_id=request_id): + client_host, client_port = scope["client"] # type: ignore + scheme = scope["scheme"] + server_host, server_port = scope["server"] # type: ignore + request_method = scope["method"] + request_path = scope["path"] + headers = Headers(raw=scope["headers"]) # type: ignore + user_agent = headers.get("user-agent") + logger.info( + f"{client_host}:{client_port} - " + f"{request_method} " + f"{scheme}://{server_host}:{server_port}{request_path} - " + f'"{user_agent}"' + ) + try: + await self.app(scope, receive, send_wrapper) # type: ignore + finally: + elapsed_time = time.perf_counter() - start_time + logger.info( + f"status_code={response_details['status_code']} " + f"{elapsed_time=:.2f}s" + ) + + return None + + +def _check_0rtt_early_data(request: Request) -> None: + """Disable TLS1.3 0-RTT requests for non-GET.""" + if request.headers.get("Early-Data", None) == "1" and request.method != "GET": + raise fastapi.HTTPException(status_code=425, detail="Too early") + + +app = FastAPI( + docs_url=None, redoc_url=None, dependencies=[Depends(_check_0rtt_early_data)] +) +app.mount( + "/custom_emoji", + StaticFiles(directory="data/custom_emoji"), + name="custom_emoji", +) +app.mount("/static", StaticFiles(directory="app/static"), name="static") +app.include_router(admin.router, prefix="/admin") +app.include_router(admin.unauthenticated_router, prefix="/admin") +app.include_router(indieauth.router)
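+# The IndieAuth, Micropub and Webmention endpoints live in their own routers
+# (see app/indieauth.py, app/micropub.py and app/webmentions.py)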
+app.include_router(micropub.router) +app.include_router(webmentions.router) +config.load_custom_routes() +if custom_router := get_custom_router(): + app.include_router(custom_router) + +# XXX: order matters, the proxy middleware needs to be last +app.add_middleware(CustomMiddleware) +app.add_middleware(ProxyHeadersMiddleware, trusted_hosts=config.CONFIG.trusted_hosts) + +logger.configure(extra={"request_id": "no_req_id"}) +logger.remove() +logger_format = ( + "{time:YYYY-MM-DD HH:mm:ss.SSS} | " + "{level: <8} | " + "{name}:{function}:{line} | " + "{extra[request_id]} - {message}" +) +logger.add(sys.stdout, format=logger_format, level="DEBUG" if DEBUG else "INFO") + + +@app.exception_handler(StarletteHTTPException) +async def custom_http_exception_handler( + request: Request, + exc: StarletteHTTPException, +) -> templates.TemplateResponse | JSONResponse: + accept_value = request.headers.get("accept") + if ( + accept_value + and accept_value.startswith("text/html") + and 400 <= exc.status_code < 600 + ): + async with async_session() as db_session: + title = ( + { + 404: "Oops, nothing to see here", + 500: "Oops, something went wrong", + } + ).get(exc.status_code, exc.detail) + try: + return await templates.render_template( + db_session, + request, + "error.html", + {"title": title}, + status_code=exc.status_code, + ) + finally: + await db_session.close() + return await http_exception_handler(request, exc) + + +class ActivityPubResponse(JSONResponse): + media_type = "application/activity+json" + + +async def redirect_to_remote_instance( + request: Request, + db_session: AsyncSession, + url: str, +) -> templates.TemplateResponse: + """ + Similar to RedirectResponse, but uses a 200 response with HTML. + + Needed for remote redirects on form submission endpoints, + since our CSP policy disallows remote form submission. 
+ https://github.com/w3c/webappsec-csp/issues/8#issuecomment-810108984 + """ + return await templates.render_template( + db_session, + request, + "redirect_to_remote_instance.html", + { + "request": request, + "url": url, + }, + headers={"Refresh": "0;url=" + url}, + ) + + +@app.get(config.NavBarItems.NOTES_PATH) +async def index( + request: Request, + db_session: AsyncSession = Depends(get_db_session), + page: int | None = None, +) -> templates.TemplateResponse | ActivityPubResponse: + if is_activitypub_requested(request): + + return ActivityPubResponse(LOCAL_ACTOR.ap_actor) + + page = page or 1 + where = ( + models.OutboxObject.visibility == ap.VisibilityEnum.PUBLIC, + models.OutboxObject.is_deleted.is_(False), + models.OutboxObject.is_hidden_from_homepage.is_(False), + models.OutboxObject.ap_type.in_(["Announce", "Note", "Video", "Question"]), + ) + q = select(models.OutboxObject).where(*where) + total_count = await db_session.scalar( + select(func.count(models.OutboxObject.id)).where(*where) + ) + page_size = 20 + page_offset = (page - 1) * page_size + + outbox_objects_result = await db_session.scalars( + q.options( + joinedload(models.OutboxObject.outbox_object_attachments).options( + joinedload(models.OutboxObjectAttachment.upload) + ), + joinedload(models.OutboxObject.relates_to_inbox_object).options( + joinedload(models.InboxObject.actor), + ), + joinedload(models.OutboxObject.relates_to_outbox_object).options( + joinedload(models.OutboxObject.outbox_object_attachments).options( + joinedload(models.OutboxObjectAttachment.upload) + ), + ), + ) + .order_by(models.OutboxObject.is_pinned.desc()) + .order_by(models.OutboxObject.ap_published_at.desc()) + .offset(page_offset) + .limit(page_size) + ) + outbox_objects = outbox_objects_result.unique().all() + + return await templates.render_template( + db_session, + request, + "index.html", + { + "request": request, + "objects": outbox_objects, + "current_page": page, + "has_next_page": page_offset + len(outbox_objects) < total_count, + "has_previous_page": page > 1, + }, + ) + + +@app.get("/articles") +async def articles( + request: Request, + db_session: AsyncSession = Depends(get_db_session), + _: httpsig.HTTPSigInfo = Depends(httpsig.httpsig_checker), + page: int | None = None, +) -> templates.TemplateResponse | ActivityPubResponse: + # TODO: special ActivityPub collection for Article + + where = ( + models.OutboxObject.visibility == ap.VisibilityEnum.PUBLIC, + models.OutboxObject.is_deleted.is_(False), + models.OutboxObject.is_hidden_from_homepage.is_(False), + models.OutboxObject.ap_type == "Article", + ) + q = select(models.OutboxObject).where(*where) + + outbox_objects_result = await db_session.scalars( + q.options( + joinedload(models.OutboxObject.outbox_object_attachments).options( + joinedload(models.OutboxObjectAttachment.upload) + ), + joinedload(models.OutboxObject.relates_to_inbox_object).options( + joinedload(models.InboxObject.actor), + ), + joinedload(models.OutboxObject.relates_to_outbox_object).options( + joinedload(models.OutboxObject.outbox_object_attachments).options( + joinedload(models.OutboxObjectAttachment.upload) + ), + ), + ).order_by(models.OutboxObject.ap_published_at.desc()) + ) + outbox_objects = outbox_objects_result.unique().all() + + return await templates.render_template( + db_session, + request, + "articles.html", + { + "request": request, + "objects": outbox_objects, + }, + ) + + +async def _build_followx_collection( + db_session: AsyncSession, + model_cls: Type[models.Following | models.Follower], + 
path: str, + page: bool | None, + next_cursor: str | None, +) -> ap.RawObject: + total_items = await db_session.scalar(select(func.count(model_cls.id))) + + if not page and not next_cursor: + return { + "@context": ap.AS_CTX, + "id": ID + path, + "first": ID + path + "?page=true", + "type": "OrderedCollection", + "totalItems": total_items, + } + + q = select(model_cls).order_by(model_cls.created_at.desc()) # type: ignore + if next_cursor: + q = q.where( + model_cls.created_at < pagination.decode_cursor(next_cursor) # type: ignore + ) + q = q.limit(20) + + items = [followx for followx in (await db_session.scalars(q)).all()] + next_cursor = None + if ( + items + and await db_session.scalar( + select(func.count(model_cls.id)).where( + model_cls.created_at < items[-1].created_at + ) + ) + > 0 + ): + next_cursor = pagination.encode_cursor(items[-1].created_at) + + collection_page = { + "@context": ap.AS_CTX, + "id": ( + ID + path + "?page=true" + if not next_cursor + else ID + path + f"?next_cursor={next_cursor}" + ), + "partOf": ID + path, + "type": "OrderedCollectionPage", + "orderedItems": [item.ap_actor_id for item in items], + } + if next_cursor: + collection_page["next"] = ID + path + f"?next_cursor={next_cursor}" + + return collection_page + + +async def _empty_followx_collection( + db_session: AsyncSession, + model_cls: Type[models.Following | models.Follower], + path: str, +) -> ap.RawObject: + total_items = await db_session.scalar(select(func.count(model_cls.id))) + return { + "@context": ap.AS_CTX, + "id": ID + path, + "type": "OrderedCollection", + "totalItems": total_items, + } + + +@app.get("/followers") +async def followers( + request: Request, + page: bool | None = None, + next_cursor: str | None = None, + prev_cursor: str | None = None, + db_session: AsyncSession = Depends(get_db_session), + _: httpsig.HTTPSigInfo = Depends(httpsig.httpsig_checker), +) -> ActivityPubResponse | templates.TemplateResponse: + if is_activitypub_requested(request): + maybe_access_token_info = await indieauth.check_access_token( + request, + db_session, + ) + + if config.HIDES_FOLLOWERS and not maybe_access_token_info: + return ActivityPubResponse( + await _empty_followx_collection( + db_session=db_session, + model_cls=models.Follower, + path="/followers", + ) + ) + else: + return ActivityPubResponse( + await _build_followx_collection( + db_session=db_session, + model_cls=models.Follower, + path="/followers", + page=page, + next_cursor=next_cursor, + ) + ) + + if config.HIDES_FOLLOWERS and not is_current_user_admin(request): + raise HTTPException(status_code=404) + + # We only show the most recent 20 followers on the public website + followers_result = await db_session.scalars( + select(models.Follower) + .options(joinedload(models.Follower.actor)) + .order_by(models.Follower.created_at.desc()) + .limit(20) + ) + followers = followers_result.unique().all() + + actors_metadata = {} + if is_current_user_admin(request): + actors_metadata = await get_actors_metadata( + db_session, + [f.actor for f in followers], + ) + + return await templates.render_template( + db_session, + request, + "followers.html", + { + "followers": followers, + "actors_metadata": actors_metadata, + }, + ) + + +@app.get("/following") +async def following( + request: Request, + page: bool | None = None, + next_cursor: str | None = None, + prev_cursor: str | None = None, + db_session: AsyncSession = Depends(get_db_session), + _: httpsig.HTTPSigInfo = Depends(httpsig.httpsig_checker), +) -> ActivityPubResponse | 
templates.TemplateResponse: + if is_activitypub_requested(request): + maybe_access_token_info = await indieauth.check_access_token( + request, + db_session, + ) + + if config.HIDES_FOLLOWING and not maybe_access_token_info: + return ActivityPubResponse( + await _empty_followx_collection( + db_session=db_session, + model_cls=models.Following, + path="/following", + ) + ) + else: + return ActivityPubResponse( + await _build_followx_collection( + db_session=db_session, + model_cls=models.Following, + path="/following", + page=page, + next_cursor=next_cursor, + ) + ) + + if config.HIDES_FOLLOWING and not is_current_user_admin(request): + raise HTTPException(status_code=404) + + # We only show the most recent 20 follows on the public website + following = ( + ( + await db_session.scalars( + select(models.Following) + .options(joinedload(models.Following.actor)) + .order_by(models.Following.created_at.desc()) + .limit(20) + ) + ) + .unique() + .all() + ) + + actors_metadata = {} + if is_current_user_admin(request): + actors_metadata = await get_actors_metadata( + db_session, + [f.actor for f in following], + ) + + return await templates.render_template( + db_session, + request, + "following.html", + { + "following": following, + "actors_metadata": actors_metadata, + }, + ) + + +@app.get("/outbox") +async def outbox( + request: Request, + db_session: AsyncSession = Depends(get_db_session), + _: httpsig.HTTPSigInfo = Depends(httpsig.httpsig_checker), +) -> ActivityPubResponse: + maybe_access_token_info = await indieauth.check_access_token( + request, + db_session, + ) + + # Default restrictions unless the request is authenticated with an access token + restricted_where = [ + models.OutboxObject.visibility == ap.VisibilityEnum.PUBLIC, + models.OutboxObject.ap_type.in_(["Create", "Note", "Article", "Announce"]), + ] + + # By design, we only show the last 20 public activities in the outbox + outbox_objects = ( + await db_session.scalars( + select(models.OutboxObject) + .where( + models.OutboxObject.is_deleted.is_(False), + *([] if maybe_access_token_info else restricted_where), + ) + .order_by(models.OutboxObject.ap_published_at.desc()) + .limit(20) + ) + ).all() + + return ActivityPubResponse( + { + "@context": ap.AS_EXTENDED_CTX, + "id": f"{ID}/outbox", + "type": "OrderedCollection", + "totalItems": len(outbox_objects), + "orderedItems": [ + ap.remove_context(ap.wrap_object_if_needed(a.ap_object)) + for a in outbox_objects + ], + } + ) + + +@app.post("/outbox") +async def post_outbox( + request: Request, + db_session: AsyncSession = Depends(get_db_session), + access_token_info: indieauth.AccessTokenInfo = Depends( + indieauth.enforce_access_token + ), +) -> ActivityPubResponse: + payload = await request.json() + logger.info(f"{payload=}") + + if payload.get("type") == "Create": + assert payload["actor"] == ID + obj = payload["object"] + + to_and_cc = obj.get("to", []) + obj.get("cc", []) + if ap.AS_PUBLIC in obj.get("to", []) and ID + "/followers" in to_and_cc: + visibility = ap.VisibilityEnum.PUBLIC + elif ap.AS_PUBLIC in to_and_cc and ID + "/followers" in to_and_cc: + visibility = ap.VisibilityEnum.UNLISTED + else: + visibility = ap.VisibilityEnum.DIRECT + + object_id, outbox_object = await boxes.send_create( + db_session, + ap_type=obj["type"], + source=obj["content"], + uploads=[], + in_reply_to=obj.get("inReplyTo"), + visibility=visibility, + content_warning=obj.get("summary"), + is_sensitive=obj.get("sensitive", False), + ) + else: + raise ValueError("TODO") + + return ActivityPubResponse( + 
outbox_object.ap_object, + status_code=201, + headers={"Location": boxes.outbox_object_id(object_id)}, + ) + + +@app.get("/featured") +async def featured( + db_session: AsyncSession = Depends(get_db_session), + _: httpsig.HTTPSigInfo = Depends(httpsig.httpsig_checker), +) -> ActivityPubResponse: + outbox_objects = ( + await db_session.scalars( + select(models.OutboxObject) + .filter( + models.OutboxObject.visibility == ap.VisibilityEnum.PUBLIC, + models.OutboxObject.is_deleted.is_(False), + models.OutboxObject.is_pinned.is_(True), + ) + .order_by(models.OutboxObject.ap_published_at.desc()) + .limit(5) + ) + ).all() + return ActivityPubResponse( + { + "@context": ap.AS_EXTENDED_CTX, + "id": f"{ID}/featured", + "type": "OrderedCollection", + "totalItems": len(outbox_objects), + "orderedItems": [ap.remove_context(a.ap_object) for a in outbox_objects], + } + ) + + +async def _check_outbox_object_acl( + request: Request, + db_session: AsyncSession, + ap_object: models.OutboxObject, + httpsig_info: httpsig.HTTPSigInfo, +) -> None: + if templates.is_current_user_admin(request): + return None + + maybe_access_token_info = await indieauth.check_access_token( + request, + db_session, + ) + if maybe_access_token_info: + # TODO: check scopes + return None + + if ap_object.visibility in [ + ap.VisibilityEnum.PUBLIC, + ap.VisibilityEnum.UNLISTED, + ]: + return None + + elif ap_object.visibility == ap.VisibilityEnum.FOLLOWERS_ONLY: + # Is the signing actor a follower? + followers = await boxes.fetch_actor_collection( + db_session, BASE_URL + "/followers" + ) + if httpsig_info.signed_by_ap_actor_id in [actor.ap_id for actor in followers]: + return None + + elif ap_object.visibility == ap.VisibilityEnum.DIRECT: + # Is the signing actor targeted in the object audience? 
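+ # (the audience is the union of the object's "to" and "cc" fields)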
+ audience = ap_object.ap_object.get("to", []) + ap_object.ap_object.get("cc", []) + if httpsig_info.signed_by_ap_actor_id in audience: + return None + + raise HTTPException(status_code=404) + + +async def _fetch_likes( + db_session: AsyncSession, + outbox_object: models.OutboxObject, +) -> list[models.InboxObject]: + return ( + ( + await db_session.scalars( + select(models.InboxObject) + .where( + models.InboxObject.ap_type == "Like", + models.InboxObject.activity_object_ap_id == outbox_object.ap_id, + models.InboxObject.is_deleted.is_(False), + ) + .options(joinedload(models.InboxObject.actor)) + .order_by(models.InboxObject.ap_published_at.desc()) + .limit(10) + ) + ) + .unique() + .all() + ) + + +async def _fetch_shares( + db_session: AsyncSession, + outbox_object: models.OutboxObject, +) -> list[models.InboxObject]: + return ( + ( + await db_session.scalars( + select(models.InboxObject) + .filter( + models.InboxObject.ap_type == "Announce", + models.InboxObject.activity_object_ap_id == outbox_object.ap_id, + models.InboxObject.is_deleted.is_(False), + ) + .options(joinedload(models.InboxObject.actor)) + .order_by(models.InboxObject.ap_published_at.desc()) + .limit(10) + ) + ) + .unique() + .all() + ) + + +async def _fetch_webmentions( + db_session: AsyncSession, + outbox_object: models.OutboxObject, +) -> list[models.Webmention]: + return ( + await db_session.scalars( + select(models.Webmention) + .filter( + models.Webmention.outbox_object_id == outbox_object.id, + models.Webmention.is_deleted.is_(False), + ) + .limit(50) + ) + ).all() + + +@app.get("/o/{public_id}") +async def outbox_by_public_id( + public_id: str, + request: Request, + db_session: AsyncSession = Depends(get_db_session), + httpsig_info: httpsig.HTTPSigInfo = Depends(httpsig.httpsig_checker), +) -> ActivityPubResponse | templates.TemplateResponse | RedirectResponse: + maybe_object = ( + ( + await db_session.execute( + select(models.OutboxObject) + .options( + joinedload(models.OutboxObject.outbox_object_attachments).options( + joinedload(models.OutboxObjectAttachment.upload) + ) + ) + .where( + models.OutboxObject.public_id == public_id, + models.OutboxObject.is_deleted.is_(False), + ) + ) + ) + .unique() + .scalar_one_or_none() + ) + if not maybe_object: + raise HTTPException(status_code=404) + + await _check_outbox_object_acl(request, db_session, maybe_object, httpsig_info) + + if is_activitypub_requested(request): + return ActivityPubResponse(maybe_object.ap_object) + + if maybe_object.ap_type == "Article": + return RedirectResponse( + f"{BASE_URL}/articles/{public_id[:7]}/{maybe_object.slug}", + status_code=301, + ) + + replies_tree = await boxes.get_replies_tree( + db_session, + maybe_object, + is_current_user_admin=is_current_user_admin(request), + ) + + webmentions = await _fetch_webmentions(db_session, maybe_object) + likes = await _fetch_likes(db_session, maybe_object) + shares = await _fetch_shares(db_session, maybe_object) + return await templates.render_template( + db_session, + request, + "object.html", + { + "replies_tree": _merge_replies(replies_tree, webmentions), + "outbox_object": maybe_object, + "likes": _merge_faces_from_inbox_object_and_webmentions( + likes, + webmentions, + models.WebmentionType.LIKE, + ), + "shares": _merge_faces_from_inbox_object_and_webmentions( + shares, + webmentions, + models.WebmentionType.REPOST, + ), + "webmentions": _filter_webmentions(webmentions), + }, + ) + + +def _filter_webmentions( + webmentions: list[models.Webmention], +) -> list[models.Webmention]: + return [ + 
wm + for wm in webmentions + if wm.webmention_type + not in [ + models.WebmentionType.LIKE, + models.WebmentionType.REPOST, + models.WebmentionType.REPLY, + ] + ] + + +def _merge_faces_from_inbox_object_and_webmentions( + inbox_objects: list[models.InboxObject], + webmentions: list[models.Webmention], + webmention_type: models.WebmentionType, +) -> list[Face]: + wm_faces = [] + for wm in webmentions: + if wm.webmention_type != webmention_type: + continue + if face := Face.from_webmention(wm): + wm_faces.append(face) + + return merge_faces( + [Face.from_inbox_object(obj) for obj in inbox_objects] + wm_faces + ) + + +def _merge_replies( + reply_tree_node: boxes.ReplyTreeNode, + webmentions: list[models.Webmention], +) -> boxes.ReplyTreeNode: + # TODO: return None as we update the object in place + webmention_replies = [] + for wm in [ + wm for wm in webmentions if wm.webmention_type == models.WebmentionType.REPLY + ]: + if rep := WebmentionReply.from_webmention(wm): + webmention_replies.append( + boxes.ReplyTreeNode( + ap_object=None, + wm_reply=rep, + is_requested=False, + children=[], + ) + ) + + reply_tree_node.children = sorted( + reply_tree_node.children + webmention_replies, + key=lambda node: node.published_at, + reverse=True, + ) + return reply_tree_node + + +@app.get("/articles/{short_id}/{slug}") +async def article_by_slug( + short_id: str, + slug: str, + request: Request, + db_session: AsyncSession = Depends(get_db_session), + httpsig_info: httpsig.HTTPSigInfo = Depends(httpsig.httpsig_checker), +) -> ActivityPubResponse | templates.TemplateResponse | RedirectResponse: + maybe_object = await boxes.get_outbox_object_by_slug_and_short_id( + db_session, slug, short_id + ) + if not maybe_object: + raise HTTPException(status_code=404) + + await _check_outbox_object_acl(request, db_session, maybe_object, httpsig_info) + + if is_activitypub_requested(request): + return ActivityPubResponse(maybe_object.ap_object) + + replies_tree = await boxes.get_replies_tree( + db_session, + maybe_object, + is_current_user_admin=is_current_user_admin(request), + ) + + likes = await _fetch_likes(db_session, maybe_object) + shares = await _fetch_shares(db_session, maybe_object) + webmentions = await _fetch_webmentions(db_session, maybe_object) + return await templates.render_template( + db_session, + request, + "object.html", + { + "replies_tree": _merge_replies(replies_tree, webmentions), + "outbox_object": maybe_object, + "likes": _merge_faces_from_inbox_object_and_webmentions( + likes, + webmentions, + models.WebmentionType.LIKE, + ), + "shares": _merge_faces_from_inbox_object_and_webmentions( + shares, + webmentions, + models.WebmentionType.REPOST, + ), + "webmentions": _filter_webmentions(webmentions), + }, + ) + + +@app.get("/o/{public_id}/activity") +async def outbox_activity_by_public_id( + public_id: str, + request: Request, + db_session: AsyncSession = Depends(get_db_session), + httpsig_info: httpsig.HTTPSigInfo = Depends(httpsig.httpsig_checker), +) -> ActivityPubResponse: + maybe_object = ( + await db_session.execute( + select(models.OutboxObject).where( + models.OutboxObject.public_id == public_id, + models.OutboxObject.is_deleted.is_(False), + ) + ) + ).scalar_one_or_none() + if not maybe_object: + raise HTTPException(status_code=404) + + await _check_outbox_object_acl(request, db_session, maybe_object, httpsig_info) + + return ActivityPubResponse(ap.wrap_object(maybe_object.ap_object)) + + +@app.get("/t/{tag}") +async def tag_by_name( + tag: str, + request: Request, + db_session: 
AsyncSession = Depends(get_db_session), + _: httpsig.HTTPSigInfo = Depends(httpsig.httpsig_checker), +) -> ActivityPubResponse | templates.TemplateResponse: + where = [ + models.TaggedOutboxObject.tag == tag.lower(), + models.OutboxObject.visibility == ap.VisibilityEnum.PUBLIC, + models.OutboxObject.is_deleted.is_(False), + ] + tagged_count = await db_session.scalar( + select(func.count(models.OutboxObject.id)) + .join(models.TaggedOutboxObject) + .where(*where) + ) + if is_activitypub_requested(request): + if not tagged_count: + raise HTTPException(status_code=404) + + outbox_object_ids = await db_session.execute( + select(models.OutboxObject.ap_id) + .join( + models.TaggedOutboxObject, + models.TaggedOutboxObject.outbox_object_id == models.OutboxObject.id, + ) + .where(*where) + .order_by(models.OutboxObject.ap_published_at.desc()) + .limit(20) + ) + return ActivityPubResponse( + { + "@context": ap.AS_CTX, + "id": BASE_URL + f"/t/{tag.lower()}", + "type": "OrderedCollection", + "totalItems": tagged_count, + "orderedItems": [ + outbox_object.ap_id for outbox_object in outbox_object_ids + ], + } + ) + + outbox_objects_result = await db_session.scalars( + select(models.OutboxObject) + .where(*where) + .join( + models.TaggedOutboxObject, + models.TaggedOutboxObject.outbox_object_id == models.OutboxObject.id, + ) + .options( + joinedload(models.OutboxObject.outbox_object_attachments).options( + joinedload(models.OutboxObjectAttachment.upload) + ) + ) + .order_by(models.OutboxObject.ap_published_at.desc()) + .limit(20) + ) + outbox_objects = outbox_objects_result.unique().all() + + return await templates.render_template( + db_session, + request, + "index.html", + { + "request": request, + "objects": outbox_objects, + }, + status_code=200 if len(outbox_objects) else 404, + ) + + +@app.get("/e/{name}") +def emoji_by_name(name: str) -> ActivityPubResponse: + try: + emoji = EMOJIS_BY_NAME[f":{name}:"] + except KeyError: + raise HTTPException(status_code=404) + + return ActivityPubResponse({"@context": ap.AS_EXTENDED_CTX, **emoji}) + + +@app.get("/inbox") +async def get_inbox( + request: Request, + db_session: AsyncSession = Depends(get_db_session), + access_token_info: indieauth.AccessTokenInfo = Depends( + indieauth.enforce_access_token + ), + page: bool | None = None, + next_cursor: str | None = None, +) -> ActivityPubResponse: + where = [ + models.InboxObject.ap_type.in_( + ["Create", "Follow", "Like", "Announce", "Undo", "Update"] + ) + ] + total_items = await db_session.scalar( + select(func.count(models.InboxObject.id)).where(*where) + ) + + if not page and not next_cursor: + return ActivityPubResponse( + { + "@context": ap.AS_CTX, + "id": ID + "/inbox", + "first": ID + "/inbox?page=true", + "type": "OrderedCollection", + "totalItems": total_items, + } + ) + + q = ( + select(models.InboxObject) + .where(*where) + .order_by(models.InboxObject.created_at.desc()) + ) # type: ignore + if next_cursor: + q = q.where( + models.InboxObject.created_at + < pagination.decode_cursor(next_cursor) # type: ignore + ) + q = q.limit(20) + + items = [item for item in (await db_session.scalars(q)).all()] + next_cursor = None + if ( + items + and await db_session.scalar( + select(func.count(models.InboxObject.id)).where( + *where, models.InboxObject.created_at < items[-1].created_at + ) + ) + > 0 + ): + next_cursor = pagination.encode_cursor(items[-1].created_at) + + collection_page = { + "@context": ap.AS_CTX, + "id": ( + ID + "/inbox?page=true" + if not next_cursor + else ID + 
f"/inbox?next_cursor={next_cursor}" + ), + "partOf": ID + "/inbox", + "type": "OrderedCollectionPage", + "orderedItems": [item.ap_object for item in items], + } + if next_cursor: + collection_page["next"] = ID + f"/inbox?next_cursor={next_cursor}" + + return ActivityPubResponse(collection_page) + + +@app.post("/inbox") +async def inbox( + request: Request, + db_session: AsyncSession = Depends(get_db_session), + httpsig_info: httpsig.HTTPSigInfo = Depends(httpsig.enforce_httpsig), +) -> Response: + # logger.info(f"headers={request.headers}") + payload = await request.json() + logger.info(f"{payload=}") + await new_ap_incoming_activity(db_session, httpsig_info, payload) + return Response(status_code=202) + + +@app.get("/remote_follow") +async def get_remote_follow( + request: Request, + db_session: AsyncSession = Depends(get_db_session), +) -> templates.TemplateResponse: + return await templates.render_template( + db_session, + request, + "remote_follow.html", + {}, + ) + + +@app.post("/remote_follow") +async def post_remote_follow( + request: Request, + db_session: AsyncSession = Depends(get_db_session), + csrf_check: None = Depends(verify_csrf_token), + profile: str = Form(), +) -> templates.TemplateResponse: + if not profile.startswith("@"): + profile = f"@{profile}" + + remote_follow_template = await get_remote_follow_template(profile) + if not remote_follow_template: + # TODO(ts): error message to user + raise HTTPException(status_code=404) + + return await redirect_to_remote_instance( + request, + db_session, + remote_follow_template.format(uri=ID), + ) + + +@app.get("/remote_interaction") +async def remote_interaction( + request: Request, + ap_id: str, + db_session: AsyncSession = Depends(get_db_session), +) -> templates.TemplateResponse: + outbox_object = await boxes.get_outbox_object_by_ap_id( + db_session, + ap_id, + ) + if not outbox_object: + raise HTTPException(status_code=404) + + return await templates.render_template( + db_session, + request, + "remote_interact.html", + {"outbox_object": outbox_object}, + ) + + +@app.post("/remote_interaction") +async def post_remote_interaction( + request: Request, + db_session: AsyncSession = Depends(get_db_session), + csrf_check: None = Depends(verify_csrf_token), + profile: str = Form(), + ap_id: str = Form(), +) -> templates.TemplateResponse: + if not profile.startswith("@"): + profile = f"@{profile}" + + remote_follow_template = await get_remote_follow_template(profile) + if not remote_follow_template: + # TODO(ts): error message to user + raise HTTPException(status_code=404) + + return await redirect_to_remote_instance( + request, + db_session, + remote_follow_template.format(uri=ID), + ) + + +@app.get("/.well-known/webfinger") +async def wellknown_webfinger(resource: str) -> JSONResponse: + """Exposes/servers WebFinger data.""" + if resource not in [ + f"acct:{USERNAME}@{WEBFINGER_DOMAIN}", + ID, + f"acct:{USERNAME}@{DOMAIN}", + ]: + logger.info(f"Got invalid req for {resource}") + raise HTTPException(status_code=404) + + out = { + "subject": f"acct:{USERNAME}@{WEBFINGER_DOMAIN}", + "aliases": [ID], + "links": [ + { + "rel": "http://webfinger.net/rel/profile-page", + "type": "text/html", + "href": ID + "/", + }, + {"rel": "self", "type": "application/activity+json", "href": ID}, + { + "rel": "http://ostatus.org/schema/1.0/subscribe", + "template": BASE_URL + "/admin/lookup?query={uri}", + }, + ], + } + + return JSONResponse( + out, + media_type="application/jrd+json; charset=utf-8", + headers={"Access-Control-Allow-Origin": "*"}, + ) 
+ + +@app.get("/.well-known/nodeinfo") +async def well_known_nodeinfo() -> dict[str, Any]: + return { + "links": [ + { + "rel": "http://nodeinfo.diaspora.software/ns/schema/2.1", + "href": f"{BASE_URL}/nodeinfo", + } + ] + } + + +@app.get("/nodeinfo") +async def nodeinfo( + db_session: AsyncSession = Depends(get_db_session), +): + local_posts = await public_outbox_objects_count(db_session) + return JSONResponse( + { + "version": "2.1", + "software": { + "name": "microblogpub", + "version": config.VERSION, + "repository": "https://sr.ht/~tsileo/microblog.pub", + "homepage": "https://docs.microblog.pub", + }, + "protocols": ["activitypub"], + "services": {"inbound": [], "outbound": []}, + "openRegistrations": False, + "usage": {"users": {"total": 1}, "localPosts": local_posts}, + "metadata": { + "nodeName": LOCAL_ACTOR.handle, + }, + }, + media_type=( + "application/json; " + "profile=http://nodeinfo.diaspora.software/ns/schema/2.1#" + ), + ) + + +async def _proxy_get( + proxy_client: httpx.AsyncClient, + request: starlette.requests.Request, + url: str, + stream: bool, +) -> httpx.Response: + # Request the URL (and filter request headers) + proxy_req = proxy_client.build_request( + request.method, + url, + headers=[ + (k, v) + for (k, v) in request.headers.raw + if k.lower() + not in [ + b"host", + b"cookie", + b"x-forwarded-for", + b"x-forwarded-proto", + b"x-real-ip", + b"user-agent", + ] + ] + + [(b"user-agent", USER_AGENT.encode())], + ) + return await proxy_client.send(proxy_req, stream=stream) + + +def _filter_proxy_resp_headers( + proxy_resp: httpx.Response, + allowed_headers: list[str], +) -> dict[str, str]: + return { + k: v for (k, v) in proxy_resp.headers.items() if k.lower() in allowed_headers + } + + +def _strip_content_type(headers: dict[str, str]) -> dict[str, str]: + return {k: v for k, v in headers.items() if k.lower() != "content-type"} + + +def _add_cache_control(headers: dict[str, str]) -> dict[str, str]: + return {**headers, "Cache-Control": "max-age=31536000"} + + +@app.get("/proxy/media/{exp}/{sig}/{encoded_url}") +async def serve_proxy_media( + request: Request, + exp: int, + sig: str, + encoded_url: str, + background_tasks: fastapi.BackgroundTasks, +) -> StreamingResponse | PlainTextResponse: + # Decode the base64-encoded URL + url = base64.urlsafe_b64decode(encoded_url).decode() + check_url(url) + media.verify_proxied_media_sig(exp, url, sig) + + proxy_client = httpx.AsyncClient( + follow_redirects=True, + timeout=httpx.Timeout(timeout=10.0), + transport=httpx.AsyncHTTPTransport(retries=1), + ) + + async def _close_proxy_client(): + await proxy_client.aclose() + + background_tasks.add_task(_close_proxy_client) + proxy_resp = await _proxy_get(proxy_client, request, url, stream=True) + + if proxy_resp.status_code >= 300: + logger.info(f"failed to proxy {url}, got {proxy_resp.status_code}") + await proxy_resp.aclose() + return PlainTextResponse( + status_code=proxy_resp.status_code, + ) + + return StreamingResponse( + proxy_resp.aiter_raw(), + status_code=proxy_resp.status_code, + headers=_add_cache_control( + _filter_proxy_resp_headers( + proxy_resp, + [ + "content-encoding", + "content-length", + "content-type", + "content-range", + "accept-ranges", + "etag", + "expires", + "date", + "last-modified", + ], + ) + ), + background=BackgroundTask(proxy_resp.aclose), + ) + + +@app.get("/proxy/media/{exp}/{sig}/{encoded_url}/{size}") +async def serve_proxy_media_resized( + request: Request, + exp: int, + sig: str, + encoded_url: str, + size: int, + background_tasks: 
fastapi.BackgroundTasks, +) -> PlainTextResponse: + if size not in {50, 740}: + raise ValueError("Unsupported size") + + is_webp_supported = "image/webp" in request.headers.get("accept", "") + + # Decode the base64-encoded URL + url = base64.urlsafe_b64decode(encoded_url).decode() + check_url(url) + media.verify_proxied_media_sig(exp, url, sig) + + if (cached_resp := _RESIZED_CACHE.get((url, size))) and is_webp_supported: + resized_content, resized_mimetype, resp_headers = cached_resp + return PlainTextResponse( + resized_content, + media_type=resized_mimetype, + headers=resp_headers, + ) + + proxy_client = httpx.AsyncClient( + follow_redirects=True, + timeout=httpx.Timeout(timeout=10.0), + transport=httpx.AsyncHTTPTransport(retries=1), + ) + + async def _close_proxy_client(): + await proxy_client.aclose() + + background_tasks.add_task(_close_proxy_client) + proxy_resp = await _proxy_get(proxy_client, request, url, stream=False) + if proxy_resp.status_code >= 300: + logger.info(f"failed to proxy {url}, got {proxy_resp.status_code}") + await proxy_resp.aclose() + return PlainTextResponse( + status_code=proxy_resp.status_code, + ) + + # Filter the headers + proxy_resp_headers = _add_cache_control( + _filter_proxy_resp_headers( + proxy_resp, + [ + "content-type", + "etag", + "expires", + "last-modified", + ], + ) + ) + + try: + out = BytesIO(proxy_resp.content) + i = Image.open(out) + if getattr(i, "is_animated", False): + raise ValueError + i.thumbnail((size, size)) + is_webp = False + try: + resized_buf = BytesIO() + i.save(resized_buf, format="webp" if is_webp_supported else i.format) + is_webp = is_webp_supported + except Exception: + logger.exception("Failed to create thumbnail") + resized_buf = BytesIO() + i.save(resized_buf, format=i.format) + resized_buf.seek(0) + resized_content = resized_buf.read() + resized_mimetype = ( + "image/webp" if is_webp else i.get_format_mimetype() # type: ignore + ) + # Only cache images < 1MB + if len(resized_content) < 2**20: + _RESIZED_CACHE[(url, size)] = ( + resized_content, + resized_mimetype, + _strip_content_type(proxy_resp_headers), + ) + return PlainTextResponse( + resized_content, + media_type=resized_mimetype, + headers=_strip_content_type(proxy_resp_headers), + ) + except ValueError: + return PlainTextResponse( + proxy_resp.content, + headers=proxy_resp_headers, + ) + except Exception: + logger.exception(f"Failed to resize {url} on the fly") + return PlainTextResponse( + proxy_resp.content, + headers=proxy_resp_headers, + ) + + +@app.get("/attachments/{content_hash}/{filename}") +async def serve_attachment( + content_hash: str, + filename: str, + db_session: AsyncSession = Depends(get_db_session), +): + upload = ( + await db_session.execute( + select(models.Upload).where( + models.Upload.content_hash == content_hash, + ) + ) + ).scalar_one_or_none() + if not upload: + raise HTTPException(status_code=404) + + return FileResponse( + UPLOAD_DIR / content_hash, + media_type=upload.content_type, + headers={"Cache-Control": "max-age=31536000"}, + ) + + +@app.get("/attachments/thumbnails/{content_hash}/{filename}") +async def serve_attachment_thumbnail( + request: Request, + content_hash: str, + filename: str, + db_session: AsyncSession = Depends(get_db_session), +): + upload = ( + await db_session.execute( + select(models.Upload).where( + models.Upload.content_hash == content_hash, + ) + ) + ).scalar_one_or_none() + if not upload or not upload.has_thumbnail: + raise HTTPException(status_code=404) + + is_webp_supported = "image/webp" in 
request.headers.get("accept", "") + + if is_webp_supported: + return FileResponse( + UPLOAD_DIR / (content_hash + "_resized"), + media_type="image/webp", + headers={"Cache-Control": "max-age=31536000"}, + ) + else: + return FileResponse( + UPLOAD_DIR / content_hash, + media_type=upload.content_type, + headers={"Cache-Control": "max-age=31536000"}, + ) + + +@app.get("/robots.txt", response_class=PlainTextResponse) +async def robots_file(): + return """User-agent: * +Disallow: /followers +Disallow: /following +Disallow: /admin +Disallow: /remote_interaction +Disallow: /remote_follow""" + + +async def _get_outbox_for_feed(db_session: AsyncSession) -> list[models.OutboxObject]: + return ( + ( + await db_session.scalars( + select(models.OutboxObject) + .where( + models.OutboxObject.visibility == ap.VisibilityEnum.PUBLIC, + models.OutboxObject.is_deleted.is_(False), + models.OutboxObject.ap_type.in_(["Note", "Article", "Video"]), + ) + .options( + joinedload(models.OutboxObject.outbox_object_attachments).options( + joinedload(models.OutboxObjectAttachment.upload) + ) + ) + .order_by(models.OutboxObject.ap_published_at.desc()) + .limit(20) + ) + ) + .unique() + .all() + ) + + +@app.get("/feed.json") +async def json_feed( + db_session: AsyncSession = Depends(get_db_session), +) -> dict[str, Any]: + outbox_objects = await _get_outbox_for_feed(db_session) + data = [] + for outbox_object in outbox_objects: + if not outbox_object.ap_published_at: + raise ValueError(f"{outbox_object} has no published date") + data.append( + { + "id": outbox_object.public_id, + "url": outbox_object.url, + "content_html": outbox_object.content, + "content_text": outbox_object.source, + "date_published": outbox_object.ap_published_at.isoformat(), + "attachments": [ + {"url": a.url, "mime_type": a.media_type} + for a in outbox_object.attachments + ], + } + ) + result = { + "version": "https://jsonfeed.org/version/1.1", + "title": f"{LOCAL_ACTOR.display_name}'s microblog", + "home_page_url": LOCAL_ACTOR.url, + "feed_url": BASE_URL + "/feed.json", + "authors": [ + { + "name": LOCAL_ACTOR.display_name, + "url": LOCAL_ACTOR.url, + } + ], + "items": data, + } + if LOCAL_ACTOR.icon_url: + result["authors"][0]["avatar"] = LOCAL_ACTOR.icon_url # type: ignore + return result + + +async def _gen_rss_feed( + db_session: AsyncSession, + is_rss: bool, +): + fg = FeedGenerator() + fg.id(BASE_URL + "/feed.rss") + fg.title(f"{LOCAL_ACTOR.display_name}'s microblog") + fg.description(f"{LOCAL_ACTOR.display_name}'s microblog") + fg.author({"name": LOCAL_ACTOR.display_name}) + fg.link(href=LOCAL_ACTOR.url, rel="alternate") + if LOCAL_ACTOR.icon_url: + fg.logo(LOCAL_ACTOR.icon_url) + fg.language("en") + + outbox_objects = await _get_outbox_for_feed(db_session) + for outbox_object in outbox_objects: + if not outbox_object.ap_published_at: + raise ValueError(f"{outbox_object} has no published date") + + content = outbox_object.content + if content is None: + raise ValueError("Should never happen") + + if outbox_object.attachments: + for attachment in outbox_object.attachments: + if attachment.type == "Image" or ( + attachment.media_type and attachment.media_type.startswith("image") + ): + content += f'<img src="{attachment.url}">' + # TODO(ts): other attachment types + + fe = fg.add_entry() + fe.id(outbox_object.url) + if outbox_object.name is not None: + fe.title(outbox_object.name) + elif not is_rss: # Atom feeds require a title + fe.title(outbox_object.url) + + fe.link(href=outbox_object.url) + fe.description(content) + fe.content(content) + 
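+ # feedgen rejects naive datetimes, so the published timestamp is made + # explicitly UTC-aware before being handed over below: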
fe.published(outbox_object.ap_published_at.replace(tzinfo=timezone.utc)) + + return fg + + +@app.get("/feed.rss") +async def rss_feed( + db_session: AsyncSession = Depends(get_db_session), +) -> PlainTextResponse: + return PlainTextResponse( + (await _gen_rss_feed(db_session, is_rss=True)).rss_str(), + headers={"Content-Type": "application/rss+xml"}, + ) + + +@app.get("/feed.atom") +async def atom_feed( + db_session: AsyncSession = Depends(get_db_session), +) -> PlainTextResponse: + return PlainTextResponse( + (await _gen_rss_feed(db_session, is_rss=False)).atom_str(), + headers={"Content-Type": "application/atom+xml"}, + ) diff --git a/app/media.py b/app/media.py new file mode 100644 index 0000000..287042a --- /dev/null +++ b/app/media.py @@ -0,0 +1,49 @@ +import base64 +import time + +from app.config import BASE_URL +from app.config import hmac_sha256 + +SUPPORTED_RESIZE = [50, 740] +EXPIRY_PERIOD = 86400 +EXPIRY_LENGTH = 7 + + +class InvalidProxySignatureError(Exception): + pass + + +def proxied_media_sig(expires: int, url: str) -> str: + hm = hmac_sha256() + hm.update(f"{expires}".encode()) + hm.update(b"|") + hm.update(url.encode()) + return base64.urlsafe_b64encode(hm.digest()).decode() + + +def verify_proxied_media_sig(expires: int, url: str, sig: str) -> None: + now = int(time.time() / EXPIRY_PERIOD) + expected = proxied_media_sig(expires, url) + if now > expires or sig != expected: + raise InvalidProxySignatureError("invalid or expired media") + + +def proxied_media_url(url: str) -> str: + if url.startswith(BASE_URL): + return url + expires = int(time.time() / EXPIRY_PERIOD) + EXPIRY_LENGTH + sig = proxied_media_sig(expires, url) + + return ( + BASE_URL + + f"/proxy/media/{expires}/{sig}/" + + base64.urlsafe_b64encode(url.encode()).decode() + ) + + +def resized_media_url(url: str, size: int) -> str: + if size not in SUPPORTED_RESIZE: + raise ValueError(f"Unsupported resize {size}") + if url.startswith(BASE_URL): + return url + return proxied_media_url(url) + f"/{size}" diff --git a/app/micropub.py b/app/micropub.py new file mode 100644 index 0000000..91692e6 --- /dev/null +++ b/app/micropub.py @@ -0,0 +1,168 @@ +from typing import Any + +from fastapi import APIRouter +from fastapi import Depends +from fastapi import Request +from fastapi.responses import JSONResponse +from fastapi.responses import RedirectResponse +from loguru import logger + +from app import activitypub as ap +from app.boxes import get_outbox_object_by_ap_id +from app.boxes import send_create +from app.boxes import send_delete +from app.database import AsyncSession +from app.database import get_db_session +from app.indieauth import AccessTokenInfo +from app.indieauth import verify_access_token + +router = APIRouter() + + +@router.get("/micropub") +async def micropub_endpoint( + request: Request, + access_token_info: AccessTokenInfo = Depends(verify_access_token), + db_session: AsyncSession = Depends(get_db_session), +) -> dict[str, Any] | JSONResponse: + if request.query_params.get("q") == "config": + return {} + + elif request.query_params.get("q") == "source": + url = request.query_params.get("url") + outbox_object = await get_outbox_object_by_ap_id(db_session, url) + if not outbox_object: + return JSONResponse( + content={ + "error": "invalid_request", + "error_description": "No post with this URL", + }, + status_code=400, + ) + + extra_props: dict[str, list[str]] = {} + + return { + "type": ["h-entry"], + "properties": { + "published": [ + outbox_object.ap_published_at.isoformat() # type: ignore + ], + 
"content": [outbox_object.source], + **extra_props, + }, + } + + return {} + + +def _prop_get(dat: dict[str, Any], key: str) -> str: + val = dat[key] + if isinstance(val, list): + return val[0] + else: + return val + + +@router.post("/micropub") +async def post_micropub_endpoint( + request: Request, + access_token_info: AccessTokenInfo = Depends(verify_access_token), + db_session: AsyncSession = Depends(get_db_session), +) -> RedirectResponse | JSONResponse: + form_data = await request.form() + is_json = False + if not form_data: + form_data = await request.json() + is_json = True + + insufficient_scope_resp = JSONResponse( + status_code=401, content={"error": "insufficient_scope"} + ) + + if "action" in form_data: + if form_data["action"] in ["delete", "update"]: + outbox_object = await get_outbox_object_by_ap_id( + db_session, form_data["url"] + ) + if not outbox_object: + return JSONResponse( + content={ + "error": "invalid_request", + "error_description": "No post with this URL", + }, + status_code=400, + ) + + if form_data["action"] == "delete": + if "delete" not in access_token_info.scopes: + return insufficient_scope_resp + logger.info(f"Deleting object {outbox_object.ap_id}") + await send_delete(db_session, outbox_object.ap_id) # type: ignore + return JSONResponse(content={}, status_code=200) + + elif form_data["action"] == "update": + if "update" not in access_token_info.scopes: + return insufficient_scope_resp + + # TODO(ts): support update + # "replace": {"content": ["new content"]} + + logger.info(f"Updating object {outbox_object.ap_id}: {form_data}") + return JSONResponse(content={}, status_code=200) + else: + raise ValueError("Should never happen") + else: + return JSONResponse( + content={ + "error": "invalid_request", + "error_description": f'Unsupported action: {form_data["action"]}', + }, + status_code=400, + ) + + if "create" not in access_token_info.scopes: + return insufficient_scope_resp + + if is_json: + entry_type = _prop_get(form_data, "type") # type: ignore + else: + h = "entry" + if "h" in form_data: + h = form_data["h"] + entry_type = f"h-{h}" + + logger.info(f"Creating {entry_type=} with {access_token_info=}") + + if entry_type != "h-entry": + return JSONResponse( + content={ + "error": "invalid_request", + "error_description": "Only h-entry are supported", + }, + status_code=400, + ) + + # TODO(ts): support creating Article (with a name) + + if is_json: + content = _prop_get(form_data["properties"], "content") # type: ignore + else: + content = form_data["content"] + + public_id, _ = await send_create( + db_session, + "Note", + content, + uploads=[], + in_reply_to=None, + visibility=ap.VisibilityEnum.PUBLIC, + ) + + return JSONResponse( + content={}, + status_code=201, + headers={ + "Location": request.url_for("outbox_by_public_id", public_id=public_id) + }, + ) diff --git a/app/models.py b/app/models.py new file mode 100644 index 0000000..2c9c3de --- /dev/null +++ b/app/models.py @@ -0,0 +1,656 @@ +import enum +from datetime import datetime +from typing import Any +from typing import Optional +from typing import Union + +import pydantic +from loguru import logger +from sqlalchemy import JSON +from sqlalchemy import Boolean +from sqlalchemy import Column +from sqlalchemy import DateTime +from sqlalchemy import Enum +from sqlalchemy import ForeignKey +from sqlalchemy import Index +from sqlalchemy import Integer +from sqlalchemy import String +from sqlalchemy import Table +from sqlalchemy import UniqueConstraint +from sqlalchemy import text +from sqlalchemy.orm 
import Mapped +from sqlalchemy.orm import relationship + +from app import activitypub as ap +from app.actor import LOCAL_ACTOR +from app.actor import Actor as BaseActor +from app.ap_object import Attachment +from app.ap_object import Object as BaseObject +from app.config import BASE_URL +from app.database import Base +from app.database import metadata_obj +from app.utils import webmentions +from app.utils.datetime import now + + +class ObjectRevision(pydantic.BaseModel): + ap_object: ap.RawObject + source: str + updated_at: str + + +class Actor(Base, BaseActor): + __tablename__ = "actor" + + id = Column(Integer, primary_key=True, index=True) + created_at = Column(DateTime(timezone=True), nullable=False, default=now) + updated_at = Column(DateTime(timezone=True), nullable=False, default=now) + + ap_id: Mapped[str] = Column(String, unique=True, nullable=False, index=True) + ap_actor: Mapped[ap.RawObject] = Column(JSON, nullable=False) + ap_type = Column(String, nullable=False) + + handle = Column(String, nullable=True, index=True) + + is_blocked = Column(Boolean, nullable=False, default=False, server_default="0") + is_deleted = Column(Boolean, nullable=False, default=False, server_default="0") + + are_announces_hidden_from_stream = Column( + Boolean, nullable=False, default=False, server_default="0" + ) + + @property + def is_from_db(self) -> bool: + return True + + +class InboxObject(Base, BaseObject): + __tablename__ = "inbox" + + id = Column(Integer, primary_key=True, index=True) + created_at = Column(DateTime(timezone=True), nullable=False, default=now) + updated_at = Column(DateTime(timezone=True), nullable=False, default=now) + + actor_id = Column(Integer, ForeignKey("actor.id"), nullable=False) + actor: Mapped[Actor] = relationship(Actor, uselist=False) + + server = Column(String, nullable=False) + + is_hidden_from_stream = Column(Boolean, nullable=False, default=False) + + ap_actor_id = Column(String, nullable=False) + ap_type = Column(String, nullable=False, index=True) + ap_id: Mapped[str] = Column(String, nullable=False, unique=True, index=True) + ap_context = Column(String, nullable=True) + ap_published_at = Column(DateTime(timezone=True), nullable=False) + ap_object: Mapped[ap.RawObject] = Column(JSON, nullable=False) + + # Only set for activities + activity_object_ap_id = Column(String, nullable=True, index=True) + + visibility = Column(Enum(ap.VisibilityEnum), nullable=False) + conversation = Column(String, nullable=True) + + has_local_mention = Column( + Boolean, nullable=False, default=False, server_default="0" + ) + + # Used for Like, Announce and Undo activities + relates_to_inbox_object_id = Column( + Integer, + ForeignKey("inbox.id"), + nullable=True, + ) + relates_to_inbox_object: Mapped[Optional["InboxObject"]] = relationship( + "InboxObject", + foreign_keys=relates_to_inbox_object_id, + remote_side=id, + uselist=False, + ) + relates_to_outbox_object_id = Column( + Integer, + ForeignKey("outbox.id"), + nullable=True, + ) + relates_to_outbox_object: Mapped[Optional["OutboxObject"]] = relationship( + "OutboxObject", + foreign_keys=[relates_to_outbox_object_id], + uselist=False, + ) + + undone_by_inbox_object_id = Column(Integer, ForeignKey("inbox.id"), nullable=True) + + # Link the outbox AP ID to allow undo without any extra query + liked_via_outbox_object_ap_id = Column(String, nullable=True) + announced_via_outbox_object_ap_id = Column(String, nullable=True) + voted_for_answers: Mapped[list[str] | None] = Column(JSON, nullable=True) + + is_bookmarked = Column(Boolean, 
nullable=False, default=False) + + # Used to mark deleted objects, but also activities that were undone + is_deleted = Column(Boolean, nullable=False, default=False) + is_transient = Column(Boolean, nullable=False, default=False, server_default="0") + + replies_count: Mapped[int] = Column(Integer, nullable=False, default=0) + + og_meta: Mapped[list[dict[str, Any]] | None] = Column(JSON, nullable=True) + + @property + def relates_to_anybox_object(self) -> Union["InboxObject", "OutboxObject"] | None: + if self.relates_to_inbox_object_id: + return self.relates_to_inbox_object + elif self.relates_to_outbox_object_id: + return self.relates_to_outbox_object + else: + return None + + @property + def is_from_db(self) -> bool: + return True + + @property + def is_from_inbox(self) -> bool: + return True + + +class OutboxObject(Base, BaseObject): + __tablename__ = "outbox" + + id = Column(Integer, primary_key=True, index=True) + created_at = Column(DateTime(timezone=True), nullable=False, default=now) + updated_at = Column(DateTime(timezone=True), nullable=False, default=now) + + is_hidden_from_homepage = Column(Boolean, nullable=False, default=False) + + public_id = Column(String, nullable=False, index=True) + slug = Column(String, nullable=True, index=True) + + ap_type = Column(String, nullable=False, index=True) + ap_id: Mapped[str] = Column(String, nullable=False, unique=True, index=True) + ap_context = Column(String, nullable=True) + ap_object: Mapped[ap.RawObject] = Column(JSON, nullable=False) + + activity_object_ap_id = Column(String, nullable=True, index=True) + + # Source content for activities (like Notes) + source = Column(String, nullable=True) + revisions: Mapped[list[dict[str, Any]] | None] = Column(JSON, nullable=True) + + ap_published_at = Column(DateTime(timezone=True), nullable=False, default=now) + visibility = Column(Enum(ap.VisibilityEnum), nullable=False) + conversation = Column(String, nullable=True) + + likes_count = Column(Integer, nullable=False, default=0) + announces_count = Column(Integer, nullable=False, default=0) + replies_count: Mapped[int] = Column(Integer, nullable=False, default=0) + webmentions_count: Mapped[int] = Column( + Integer, nullable=False, default=0, server_default="0" + ) + # reactions: Mapped[list[dict[str, Any]] | None] = Column(JSON, nullable=True) + + og_meta: Mapped[list[dict[str, Any]] | None] = Column(JSON, nullable=True) + + # For the featured collection + is_pinned = Column(Boolean, nullable=False, default=False) + is_transient = Column(Boolean, nullable=False, default=False, server_default="0") + + # Never actually delete from the outbox + is_deleted = Column(Boolean, nullable=False, default=False) + + # Used for Create, Like, Announce and Undo activities + relates_to_inbox_object_id = Column( + Integer, + ForeignKey("inbox.id"), + nullable=True, + ) + relates_to_inbox_object: Mapped[Optional["InboxObject"]] = relationship( + "InboxObject", + foreign_keys=[relates_to_inbox_object_id], + uselist=False, + ) + relates_to_outbox_object_id = Column( + Integer, + ForeignKey("outbox.id"), + nullable=True, + ) + relates_to_outbox_object: Mapped[Optional["OutboxObject"]] = relationship( + "OutboxObject", + foreign_keys=[relates_to_outbox_object_id], + remote_side=id, + uselist=False, + ) + # For Follow activities + relates_to_actor_id = Column( + Integer, + ForeignKey("actor.id"), + nullable=True, + ) + relates_to_actor: Mapped[Optional["Actor"]] = relationship( + "Actor", + foreign_keys=[relates_to_actor_id], + uselist=False, + ) + + 
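+ # Set when one of our own Undo activities reverses this object + # (mirrors InboxObject.undone_by_inbox_object_id above)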
undone_by_outbox_object_id = Column(Integer, ForeignKey("outbox.id"), nullable=True) + + @property + def actor(self) -> BaseActor: + return LOCAL_ACTOR + + outbox_object_attachments: Mapped[list["OutboxObjectAttachment"]] = relationship( + "OutboxObjectAttachment", uselist=True, backref="outbox_object" + ) + + @property + def attachments(self) -> list[Attachment]: + out = [] + for attachment in self.outbox_object_attachments: + url = ( + BASE_URL + + f"/attachments/{attachment.upload.content_hash}/{attachment.filename}" + ) + out.append( + Attachment.parse_obj( + { + "type": "Document", + "mediaType": attachment.upload.content_type, + "name": attachment.alt or attachment.filename, + "url": url, + "width": attachment.upload.width, + "height": attachment.upload.height, + "proxiedUrl": url, + "resizedUrl": BASE_URL + + ( + "/attachments/thumbnails/" + f"{attachment.upload.content_hash}" + f"/{attachment.filename}" + ) + if attachment.upload.has_thumbnail + else None, + } + ) + ) + return out + + @property + def relates_to_anybox_object(self) -> Union["InboxObject", "OutboxObject"] | None: + if self.relates_to_inbox_object_id: + return self.relates_to_inbox_object + elif self.relates_to_outbox_object_id: + return self.relates_to_outbox_object + else: + return None + + @property + def is_from_db(self) -> bool: + return True + + @property + def is_from_outbox(self) -> bool: + return True + + @property + def url(self) -> str | None: + # XXX: rewrite old URL here for compat + if self.ap_type == "Article" and self.slug and self.public_id: + return f"{BASE_URL}/articles/{self.public_id[:7]}/{self.slug}" + return super().url + + +class Follower(Base): + __tablename__ = "follower" + + id = Column(Integer, primary_key=True, index=True) + created_at = Column(DateTime(timezone=True), nullable=False, default=now) + updated_at = Column(DateTime(timezone=True), nullable=False, default=now) + + actor_id = Column(Integer, ForeignKey("actor.id"), nullable=False, unique=True) + actor: Mapped[Actor] = relationship(Actor, uselist=False) + + inbox_object_id = Column(Integer, ForeignKey("inbox.id"), nullable=False) + inbox_object = relationship(InboxObject, uselist=False) + + ap_actor_id = Column(String, nullable=False, unique=True) + + +class Following(Base): + __tablename__ = "following" + + id = Column(Integer, primary_key=True, index=True) + created_at = Column(DateTime(timezone=True), nullable=False, default=now) + updated_at = Column(DateTime(timezone=True), nullable=False, default=now) + + actor_id = Column(Integer, ForeignKey("actor.id"), nullable=False, unique=True) + actor = relationship(Actor, uselist=False) + + outbox_object_id = Column(Integer, ForeignKey("outbox.id"), nullable=False) + outbox_object = relationship(OutboxObject, uselist=False) + + ap_actor_id = Column(String, nullable=False, unique=True) + + +class IncomingActivity(Base): + __tablename__ = "incoming_activity" + + id = Column(Integer, primary_key=True, index=True) + created_at = Column(DateTime(timezone=True), nullable=False, default=now) + + # An incoming activity can be a webmention + webmention_source = Column(String, nullable=True) + # or an AP object + sent_by_ap_actor_id = Column(String, nullable=True) + ap_id = Column(String, nullable=True, index=True) + ap_object: Mapped[ap.RawObject] = Column(JSON, nullable=True) + + tries: Mapped[int] = Column(Integer, nullable=False, default=0) + next_try = Column(DateTime(timezone=True), nullable=True, default=now) + + last_try = Column(DateTime(timezone=True), nullable=True) + + 
is_processed = Column(Boolean, nullable=False, default=False) + is_errored = Column(Boolean, nullable=False, default=False) + error = Column(String, nullable=True) + + +class OutgoingActivity(Base): + __tablename__ = "outgoing_activity" + + id = Column(Integer, primary_key=True, index=True) + created_at = Column(DateTime(timezone=True), nullable=False, default=now) + + recipient = Column(String, nullable=False) + + outbox_object_id = Column(Integer, ForeignKey("outbox.id"), nullable=True) + outbox_object = relationship(OutboxObject, uselist=False) + + # Can also reference an inbox object if it needs to be forwarded + inbox_object_id = Column(Integer, ForeignKey("inbox.id"), nullable=True) + inbox_object = relationship(InboxObject, uselist=False) + + # The source will be the outbox object URL + webmention_target = Column(String, nullable=True) + + tries = Column(Integer, nullable=False, default=0) + next_try = Column(DateTime(timezone=True), nullable=True, default=now) + + last_try = Column(DateTime(timezone=True), nullable=True) + last_status_code = Column(Integer, nullable=True) + last_response = Column(String, nullable=True) + + is_sent = Column(Boolean, nullable=False, default=False) + is_errored = Column(Boolean, nullable=False, default=False) + error = Column(String, nullable=True) + + @property + def anybox_object(self) -> OutboxObject | InboxObject: + if self.outbox_object_id: + return self.outbox_object # type: ignore + elif self.inbox_object_id: + return self.inbox_object # type: ignore + else: + raise ValueError("Should never happen") + + +class TaggedOutboxObject(Base): + __tablename__ = "tagged_outbox_object" + __table_args__ = ( + UniqueConstraint("outbox_object_id", "tag", name="uix_tagged_object"), + ) + + id = Column(Integer, primary_key=True, index=True) + + outbox_object_id = Column(Integer, ForeignKey("outbox.id"), nullable=False) + outbox_object = relationship(OutboxObject, uselist=False) + + tag = Column(String, nullable=False, index=True) + + +class Upload(Base): + __tablename__ = "upload" + + id = Column(Integer, primary_key=True, index=True) + created_at = Column(DateTime(timezone=True), nullable=False, default=now) + + content_type: Mapped[str] = Column(String, nullable=False) + content_hash = Column(String, nullable=False, unique=True) + + has_thumbnail = Column(Boolean, nullable=False) + + # Only set for images + blurhash = Column(String, nullable=True) + width = Column(Integer, nullable=True) + height = Column(Integer, nullable=True) + + @property + def is_image(self) -> bool: + return self.content_type.startswith("image") + + +class OutboxObjectAttachment(Base): + __tablename__ = "outbox_object_attachment" + + id = Column(Integer, primary_key=True, index=True) + created_at = Column(DateTime(timezone=True), nullable=False, default=now) + filename = Column(String, nullable=False) + alt = Column(String, nullable=True) + + outbox_object_id = Column(Integer, ForeignKey("outbox.id"), nullable=False) + + upload_id = Column(Integer, ForeignKey("upload.id"), nullable=False) + upload: Mapped["Upload"] = relationship(Upload, uselist=False) + + +class IndieAuthAuthorizationRequest(Base): + __tablename__ = "indieauth_authorization_request" + + id = Column(Integer, primary_key=True, index=True) + created_at = Column(DateTime(timezone=True), nullable=False, default=now) + + code = Column(String, nullable=False, unique=True, index=True) + scope = Column(String, nullable=False) + redirect_uri = Column(String, nullable=False) + client_id = Column(String, nullable=False) + 
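+ # PKCE (RFC 7636) parameters, when provided by the client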
+    code_challenge = Column(String, nullable=True)
+    code_challenge_method = Column(String, nullable=True)
+
+    is_used = Column(Boolean, nullable=False, default=False)
+
+
+class IndieAuthAccessToken(Base):
+    __tablename__ = "indieauth_access_token"
+
+    id = Column(Integer, primary_key=True, index=True)
+    created_at: Mapped[datetime] = Column(
+        DateTime(timezone=True), nullable=False, default=now
+    )
+
+    # Will be null for personal access tokens
+    indieauth_authorization_request_id = Column(
+        Integer, ForeignKey("indieauth_authorization_request.id"), nullable=True
+    )
+    indieauth_authorization_request = relationship(
+        IndieAuthAuthorizationRequest,
+        uselist=False,
+    )
+
+    access_token: Mapped[str] = Column(String, nullable=False, unique=True, index=True)
+    refresh_token = Column(String, nullable=True, unique=True, index=True)
+    expires_in: Mapped[int] = Column(Integer, nullable=False)
+    scope = Column(String, nullable=False)
+    is_revoked = Column(Boolean, nullable=False, default=False)
+    was_refreshed = Column(Boolean, nullable=False, default=False, server_default="0")
+
+
+class OAuthClient(Base):
+    __tablename__ = "oauth_client"
+
+    id = Column(Integer, primary_key=True, index=True)
+    created_at = Column(DateTime(timezone=True), nullable=False, default=now)
+
+    # Request
+    client_name = Column(String, nullable=False)
+    redirect_uris: Mapped[list[str]] = Column(JSON, nullable=True)
+
+    # Optional from request
+    client_uri = Column(String, nullable=True)
+    logo_uri = Column(String, nullable=True)
+    scope = Column(String, nullable=True)
+
+    # Response
+    client_id = Column(String, nullable=False, unique=True, index=True)
+    client_secret = Column(String, nullable=False, unique=True)
+
+
+@enum.unique
+class WebmentionType(str, enum.Enum):
+    UNKNOWN = "unknown"
+    LIKE = "like"
+    REPLY = "reply"
+    REPOST = "repost"
+
+
+class Webmention(Base):
+    __tablename__ = "webmention"
+    __table_args__ = (UniqueConstraint("source", "target", name="uix_source_target"),)
+
+    id = Column(Integer, primary_key=True, index=True)
+    created_at = Column(DateTime(timezone=True), nullable=False, default=now)
+
+    is_deleted = Column(Boolean, nullable=False, default=False)
+
+    source: Mapped[str] = Column(String, nullable=False, index=True, unique=True)
+    source_microformats: Mapped[dict[str, Any] | None] = Column(JSON, nullable=True)
+
+    target = Column(String, nullable=False, index=True)
+    outbox_object_id = Column(Integer, ForeignKey("outbox.id"), nullable=False)
+    outbox_object = relationship(OutboxObject, uselist=False)
+
+    webmention_type = Column(Enum(WebmentionType), nullable=True)
+
+    @property
+    def as_facepile_item(self) -> webmentions.Webmention | None:
+        if not self.source_microformats:
+            return None
+        try:
+            return webmentions.Webmention.from_microformats(
+                self.source_microformats["items"], self.source
+            )
+        except Exception:
+            # TODO: return a facepile with the unknown image
+            logger.warning(
+                f"Failed to generate facepile item for Webmention id={self.id}"
+            )
+            return None
+
+
+class PollAnswer(Base):
+    __tablename__ = "poll_answer"
+    __table_args__ = (
+        # Enforce a single answer for poll/actor/answer
+        UniqueConstraint(
+            "outbox_object_id",
+            "name",
+            "actor_id",
+            name="uix_outbox_object_id_name_actor_id",
+        ),
+        # Enforce an actor can only vote once on a "oneOf" Question
+        Index(
+            "uix_one_of_outbox_object_id_actor_id",
+            "outbox_object_id",
+            "actor_id",
+            unique=True,
+            sqlite_where=text('poll_type = "oneOf"'),
+        ),
+    )
+
+    id = Column(Integer, primary_key=True, index=True)
+    created_at =
Column(DateTime(timezone=True), nullable=False, default=now) + + outbox_object_id = Column(Integer, ForeignKey("outbox.id"), nullable=False) + outbox_object = relationship(OutboxObject, uselist=False) + + # oneOf|anyOf + poll_type = Column(String, nullable=False) + + inbox_object_id = Column(Integer, ForeignKey("inbox.id"), nullable=False) + inbox_object = relationship(InboxObject, uselist=False) + + actor_id = Column(Integer, ForeignKey("actor.id"), nullable=False) + actor = relationship(Actor, uselist=False) + + name = Column(String, nullable=False) + + +@enum.unique +class NotificationType(str, enum.Enum): + NEW_FOLLOWER = "new_follower" + PENDING_INCOMING_FOLLOWER = "pending_incoming_follower" + REJECTED_FOLLOWER = "rejected_follower" + UNFOLLOW = "unfollow" + + FOLLOW_REQUEST_ACCEPTED = "follow_request_accepted" + FOLLOW_REQUEST_REJECTED = "follow_request_rejected" + + MOVE = "move" + + LIKE = "like" + UNDO_LIKE = "undo_like" + + ANNOUNCE = "announce" + UNDO_ANNOUNCE = "undo_announce" + + MENTION = "mention" + + NEW_WEBMENTION = "new_webmention" + UPDATED_WEBMENTION = "updated_webmention" + DELETED_WEBMENTION = "deleted_webmention" + + # incoming + BLOCKED = "blocked" + UNBLOCKED = "unblocked" + + # outgoing + BLOCK = "block" + UNBLOCK = "unblock" + + +class Notification(Base): + __tablename__ = "notifications" + + id = Column(Integer, primary_key=True, index=True) + created_at = Column(DateTime(timezone=True), nullable=False, default=now) + notification_type = Column(Enum(NotificationType), nullable=True) + is_new = Column(Boolean, nullable=False, default=True) + + actor_id = Column(Integer, ForeignKey("actor.id"), nullable=True) + actor = relationship(Actor, uselist=False) + + outbox_object_id = Column(Integer, ForeignKey("outbox.id"), nullable=True) + outbox_object = relationship(OutboxObject, uselist=False) + + inbox_object_id = Column(Integer, ForeignKey("inbox.id"), nullable=True) + inbox_object = relationship(InboxObject, uselist=False) + + webmention_id = Column( + Integer, ForeignKey("webmention.id", name="fk_webmention_id"), nullable=True + ) + webmention = relationship(Webmention, uselist=False) + + is_accepted = Column(Boolean, nullable=True) + is_rejected = Column(Boolean, nullable=True) + + +outbox_fts = Table( + "outbox_fts", + # TODO(tsileo): use Base.metadata + metadata_obj, + Column("rowid", Integer), + Column("outbox_fts", String), + Column("summary", String, nullable=True), + Column("name", String, nullable=True), + Column("source", String), +) + +# db.execute(select(outbox_fts.c.rowid).where(outbox_fts.c.outbox_fts.op("MATCH")("toto AND omg"))).all() # noqa +# db.execute(select(models.OutboxObject).join(outbox_fts, outbox_fts.c.rowid == models.OutboxObject.id).where(outbox_fts.c.outbox_fts.op("MATCH")("toto2"))).scalars() # noqa +# db.execute(insert(outbox_fts).values({"outbox_fts": "delete", "rowid": 1, "source": dat[0].source})) # noqa diff --git a/app/outgoing_activities.py b/app/outgoing_activities.py new file mode 100644 index 0000000..022334d --- /dev/null +++ b/app/outgoing_activities.py @@ -0,0 +1,295 @@ +import asyncio +import email +import time +import traceback +from datetime import datetime +from datetime import timedelta +from typing import MutableMapping + +import httpx +from cachetools import TTLCache +from loguru import logger +from sqlalchemy import func +from sqlalchemy import select +from sqlalchemy.orm import joinedload + +from app import activitypub as ap +from app import config +from app import ldsig +from app import models +from app.actor 
import LOCAL_ACTOR
+from app.actor import _actor_hash
+from app.config import KEY_PATH
+from app.database import AsyncSession
+from app.key import Key
+from app.utils.datetime import now
+from app.utils.url import check_url
+from app.utils.workers import Worker
+
+_MAX_RETRIES = 16
+
+_LD_SIG_CACHE: MutableMapping[str, ap.RawObject] = TTLCache(maxsize=5, ttl=60 * 5)
+
+
+k = Key(config.ID, f"{config.ID}#main-key")
+k.load(KEY_PATH.read_text())
+
+
+def _is_local_actor_updated() -> bool:
+    """Returns True if the local actor was updated (i.e. the config file changed)"""
+    actor_hash = _actor_hash(LOCAL_ACTOR)
+    actor_hash_cache = config.ROOT_DIR / "data" / "local_actor_hash.dat"
+
+    if not actor_hash_cache.exists():
+        logger.info("Initializing local actor hash cache")
+        actor_hash_cache.write_bytes(actor_hash)
+        return False
+
+    previous_actor_hash = actor_hash_cache.read_bytes()
+    if previous_actor_hash == actor_hash:
+        logger.info("Local actor hasn't been updated")
+        return False
+
+    actor_hash_cache.write_bytes(actor_hash)
+    logger.info("Local actor has been updated")
+    return True
+
+
+async def _send_actor_update_if_needed(
+    db_session: AsyncSession,
+) -> None:
+    """Sending an Update for the local actor is handled here because, in
+    production, there may be multiple uvicorn workers, while this worker
+    always runs in a single process."""
+    if not _is_local_actor_updated():
+        return
+
+    logger.info("Will send an Update for the local actor")
+
+    from app.boxes import allocate_outbox_id
+    from app.boxes import compute_all_known_recipients
+    from app.boxes import outbox_object_id
+    from app.boxes import save_outbox_object
+
+    update_activity_id = allocate_outbox_id()
+    update_activity = {
+        "@context": ap.AS_EXTENDED_CTX,
+        "id": outbox_object_id(update_activity_id),
+        "type": "Update",
+        "to": [ap.AS_PUBLIC],
+        "actor": config.ID,
+        "object": ap.remove_context(LOCAL_ACTOR.ap_actor),
+    }
+    outbox_object = await save_outbox_object(
+        db_session, update_activity_id, update_activity
+    )
+
+    # Send the update to the followers collection and all the actors we have
+    # ever contacted
+    recipients = await compute_all_known_recipients(db_session)
+    for rcp in recipients:
+        await new_outgoing_activity(
+            db_session,
+            recipient=rcp,
+            outbox_object_id=outbox_object.id,
+        )
+
+    await db_session.commit()
+
+
+async def new_outgoing_activity(
+    db_session: AsyncSession,
+    recipient: str,
+    outbox_object_id: int | None = None,
+    inbox_object_id: int | None = None,
+    webmention_target: str | None = None,
+) -> models.OutgoingActivity:
+    if outbox_object_id is None and inbox_object_id is None:
+        raise ValueError("Must reference at least one inbox/outbox activity")
+    if webmention_target and outbox_object_id is None:
+        raise ValueError("Webmentions must reference an outbox activity")
+    if outbox_object_id and inbox_object_id:
+        raise ValueError("Cannot reference both inbox/outbox activities")
+
+    outgoing_activity = models.OutgoingActivity(
+        recipient=recipient,
+        outbox_object_id=outbox_object_id,
+        inbox_object_id=inbox_object_id,
+        webmention_target=webmention_target,
+    )
+
+    db_session.add(outgoing_activity)
+    await db_session.flush()
+    await db_session.refresh(outgoing_activity)
+    return outgoing_activity
+
+
+def _parse_retry_after(retry_after: str) -> datetime | None:
+    try:
+        # Retry-After: 120
+        seconds = int(retry_after)
+    except ValueError:
+        # Retry-After: Wed, 21 Oct 2015 07:28:00 GMT
+        dt_tuple = email.utils.parsedate_tz(retry_after)
+        if dt_tuple is None:
+            return None
+
+        seconds
= int(email.utils.mktime_tz(dt_tuple) - time.time()) + + return now() + timedelta(seconds=seconds) + + +def _exp_backoff(tries: int) -> datetime: + seconds = 2 * (2 ** (tries - 1)) + return now() + timedelta(seconds=seconds) + + +def _set_next_try( + outgoing_activity: models.OutgoingActivity, + next_try: datetime | None = None, +) -> None: + if not outgoing_activity.tries: + raise ValueError("Should never happen") + + if outgoing_activity.tries >= _MAX_RETRIES: + outgoing_activity.is_errored = True + outgoing_activity.next_try = None + else: + outgoing_activity.next_try = next_try or _exp_backoff(outgoing_activity.tries) + + +async def fetch_next_outgoing_activity( + db_session: AsyncSession, +) -> models.OutgoingActivity | None: + where = [ + models.OutgoingActivity.next_try <= now(), + models.OutgoingActivity.is_errored.is_(False), + models.OutgoingActivity.is_sent.is_(False), + ] + q_count = await db_session.scalar( + select(func.count(models.OutgoingActivity.id)).where(*where) + ) + if q_count > 0: + logger.info(f"{q_count} outgoing activities ready to process") + if not q_count: + # logger.debug("No activities to process") + return None + + next_activity = ( + await db_session.execute( + select(models.OutgoingActivity) + .where(*where) + .limit(1) + .options( + joinedload(models.OutgoingActivity.inbox_object), + joinedload(models.OutgoingActivity.outbox_object), + ) + .order_by(models.OutgoingActivity.next_try) + ) + ).scalar_one() + return next_activity + + +async def process_next_outgoing_activity( + db_session: AsyncSession, + next_activity: models.OutgoingActivity, +) -> None: + next_activity.tries = next_activity.tries + 1 # type: ignore + next_activity.last_try = now() + + logger.info(f"recipient={next_activity.recipient}") + + try: + if next_activity.webmention_target and next_activity.outbox_object: + webmention_payload = { + "source": next_activity.outbox_object.url, + "target": next_activity.webmention_target, + } + logger.info(f"{webmention_payload=}") + check_url(next_activity.recipient) + async with httpx.AsyncClient() as client: + resp = await client.post( + next_activity.recipient, # type: ignore + data=webmention_payload, + headers={ + "User-Agent": config.USER_AGENT, + }, + ) + resp.raise_for_status() + else: + payload = ap.wrap_object_if_needed(next_activity.anybox_object.ap_object) + + # Use LD sig if the activity may need to be forwarded by recipients + if next_activity.anybox_object.is_from_outbox and payload["type"] in [ + "Create", + "Update", + "Delete", + ]: + # But only if the object is public (to help with deniability/privacy) + if next_activity.outbox_object.visibility == ap.VisibilityEnum.PUBLIC: # type: ignore # noqa: E501 + if p := _LD_SIG_CACHE.get(payload["id"]): + payload = p + else: + ldsig.generate_signature(payload, k) + _LD_SIG_CACHE[payload["id"]] = payload + + logger.info(f"{payload=}") + + resp = await ap.post(next_activity.recipient, payload) # type: ignore + except httpx.HTTPStatusError as http_error: + logger.exception("Failed") + next_activity.last_status_code = http_error.response.status_code + next_activity.last_response = http_error.response.text + next_activity.error = traceback.format_exc() + + if http_error.response.status_code in [429, 503]: + retry_after: datetime | None = None + if retry_after_value := http_error.response.headers.get("Retry-After"): + retry_after = _parse_retry_after(retry_after_value) + _set_next_try(next_activity, retry_after) + elif http_error.response.status_code == 401: + _set_next_try(next_activity) + elif 
400 <= http_error.response.status_code < 500:
+            logger.info(f"status_code={http_error.response.status_code} not retrying")
+            next_activity.is_errored = True
+            next_activity.next_try = None
+        else:
+            _set_next_try(next_activity)
+    except Exception:
+        logger.exception("Failed")
+        next_activity.error = traceback.format_exc()
+        _set_next_try(next_activity)
+    else:
+        logger.info("Success")
+        next_activity.is_sent = True
+        next_activity.last_status_code = resp.status_code
+        next_activity.last_response = resp.text
+
+    await db_session.commit()
+    return None
+
+
+class OutgoingActivityWorker(Worker[models.OutgoingActivity]):
+    async def process_message(
+        self,
+        db_session: AsyncSession,
+        next_activity: models.OutgoingActivity,
+    ) -> None:
+        await process_next_outgoing_activity(db_session, next_activity)
+
+    async def get_next_message(
+        self,
+        db_session: AsyncSession,
+    ) -> models.OutgoingActivity | None:
+        return await fetch_next_outgoing_activity(db_session)
+
+    async def startup(self, db_session: AsyncSession) -> None:
+        await _send_actor_update_if_needed(db_session)
+
+
+async def loop() -> None:
+    await OutgoingActivityWorker().run_forever()
+
+
+if __name__ == "__main__":
+    asyncio.run(loop())
diff --git a/app/prune.py b/app/prune.py
new file mode 100644
index 0000000..75ca89b
--- /dev/null
+++ b/app/prune.py
@@ -0,0 +1,119 @@
+from datetime import timedelta
+
+from loguru import logger
+from sqlalchemy import and_
+from sqlalchemy import delete
+from sqlalchemy import func
+from sqlalchemy import not_
+from sqlalchemy import or_
+from sqlalchemy import select
+
+from app import activitypub as ap
+from app import models
+from app.config import BASE_URL
+from app.config import INBOX_RETENTION_DAYS
+from app.database import AsyncSession
+from app.database import async_session
+from app.utils.datetime import now
+
+
+async def prune_old_data(
+    db_session: AsyncSession,
+) -> None:
+    logger.info(f"Pruning old data with {INBOX_RETENTION_DAYS=}")
+    await _prune_old_incoming_activities(db_session)
+    await _prune_old_outgoing_activities(db_session)
+    await _prune_old_inbox_objects(db_session)
+
+    # TODO: delete actors with no remaining inbox objects
+
+    await db_session.commit()
+    # Reclaim disk space
+    await db_session.execute("VACUUM")  # type: ignore
+
+
+async def _prune_old_incoming_activities(
+    db_session: AsyncSession,
+) -> None:
+    result = await db_session.execute(
+        delete(models.IncomingActivity)
+        .where(
+            models.IncomingActivity.created_at
+            < now() - timedelta(days=INBOX_RETENTION_DAYS),
+            # Keep failed activities for debugging
+            models.IncomingActivity.is_errored.is_(False),
+        )
+        .execution_options(synchronize_session=False)
+    )
+    logger.info(f"Deleted {result.rowcount} old incoming activities")  # type: ignore
+
+
+async def _prune_old_outgoing_activities(
+    db_session: AsyncSession,
+) -> None:
+    result = await db_session.execute(
+        delete(models.OutgoingActivity)
+        .where(
+            models.OutgoingActivity.created_at
+            < now() - timedelta(days=INBOX_RETENTION_DAYS),
+            # Keep failed activities for debugging
+            models.OutgoingActivity.is_errored.is_(False),
+        )
+        .execution_options(synchronize_session=False)
+    )
+    logger.info(f"Deleted {result.rowcount} old outgoing activities")  # type: ignore
+
+
+async def _prune_old_inbox_objects(
+    db_session: AsyncSession,
+) -> None:
+    outbox_conversation = select(func.distinct(models.OutboxObject.conversation)).where(
+        models.OutboxObject.conversation.is_not(None),
+        models.OutboxObject.conversation.not_like(f"{BASE_URL}%"),
+    )
+    result = await db_session.execute(
+        delete(models.InboxObject)
+        .where(
+            # Keep bookmarked objects
+            models.InboxObject.is_bookmarked.is_(False),
+            # Keep liked objects
+            models.InboxObject.liked_via_outbox_object_ap_id.is_(None),
+            # Keep announced objects
+            models.InboxObject.announced_via_outbox_object_ap_id.is_(None),
+            # Keep objects mentioning the local actor
+            models.InboxObject.has_local_mention.is_(False),
+            # Keep objects related to local conversations (i.e. don't break the
+            # public website)
+            or_(
+                models.InboxObject.conversation.not_like(f"{BASE_URL}%"),
+                models.InboxObject.conversation.is_(None),
+                models.InboxObject.conversation.not_in(outbox_conversation),
+            ),
+            # Keep activities related to the outbox (like Like/Announce/Follow...)
+            or_(
+                # XXX: no `/` here because the local ID does not have one
+                models.InboxObject.activity_object_ap_id.not_like(f"{BASE_URL}%"),
+                models.InboxObject.activity_object_ap_id.is_(None),
+            ),
+            # Keep direct messages
+            not_(
+                and_(
+                    models.InboxObject.visibility == ap.VisibilityEnum.DIRECT,
+                    models.InboxObject.ap_type.in_(["Note"]),
+                )
+            ),
+            # Keep Move objects as they are linked to notifications
+            models.InboxObject.ap_type.not_in(["Move"]),
+            # Filter by retention days
+            models.InboxObject.ap_published_at
+            < now() - timedelta(days=INBOX_RETENTION_DAYS),
+        )
+        .execution_options(synchronize_session=False)
+    )
+    logger.info(f"Deleted {result.rowcount} old inbox objects")  # type: ignore
+
+
+async def run_prune_old_data() -> None:
+    """CLI entrypoint."""
+    async with async_session() as db_session:
+        await prune_old_data(db_session)
diff --git a/app/redirect.py b/app/redirect.py
new file mode 100644
index 0000000..137e80b
--- /dev/null
+++ b/app/redirect.py
@@ -0,0 +1,28 @@
+from fastapi import Request
+
+from app import templates
+from app.database import AsyncSession
+
+
+async def redirect(
+    request: Request,
+    db_session: AsyncSession,
+    url: str,
+) -> templates.TemplateResponse:
+    """
+    Similar to RedirectResponse, but uses a 200 response with HTML.
+
+    Needed for remote redirects on form submission endpoints,
+    since our CSP policy disallows remote form submission.
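+    The 200 response sets a "Refresh: 0;url=..." header, which performs the
+    actual client-side redirect.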
+ https://github.com/w3c/webappsec-csp/issues/8#issuecomment-810108984 + """ + return await templates.render_template( + db_session, + request, + "redirect.html", + { + "request": request, + "url": url, + }, + headers={"Refresh": "0;url=" + url}, + ) diff --git a/app/scss/_theme.scss b/app/scss/_theme.scss new file mode 120000 index 0000000..4036f4c --- /dev/null +++ b/app/scss/_theme.scss @@ -0,0 +1 @@ +../../data/_theme.scss \ No newline at end of file diff --git a/app/scss/main.scss b/app/scss/main.scss new file mode 100644 index 0000000..d2c5642 --- /dev/null +++ b/app/scss/main.scss @@ -0,0 +1,571 @@ +$font-stack: Helvetica, sans-serif; +$background: #ddd; +$light-background: #e6e6e6; +$text-color: #111; +$primary-color: #1d781d; +$secondary-color: #781D78; +$form-background-color: #ccc; +$form-text-color: #333; +$muted-color: #555; // solarized comment text +$primary-button-text-color: #fff; +$code-highlight-background: #f0f0f0; + +// Load custom theme +@import "theme.scss"; + +.primary-color { + color: $primary-color; +} + +#admin { + .admin-menu { + margin-bottom: 30px; + padding: 0 20px; + } +} + +.empty-state { + padding: 20px; +} + +.public-top-menu { + margin: 30px 0 0 0; +} + +.width-95 { + width: 95%; +} + +.bold { + font-weight: bold; +} + +.admin-new { + textarea { + font-size: 1.2em; + width: 95%; + } +} + +.show-more-wrapper { + .p-summary { + display: inline-block; + } + .show-more-btn { + margin-left: 5px; + } + summary { + display: inline-block; + } + summary::-webkit-details-marker { + display: none + } + &:not([open]) .show-more-btn::after { + content: 'show more'; + } + &[open] .show-more-btn::after { + content: 'show less'; + } +} +.sensitive-attachment { + display: inline-block; + .sensitive-attachment-state { + display: none; + } + .sensitive-attachment-state:checked ~ .sensitive-attachment-box div { + display:none; + } + .sensitive-attachment-box { + position: relative; + div { + position: absolute; + width: 100%; + height: 100%; + z-index: 10; + backdrop-filter: blur(2em); + } + } +} + + +blockquote { + border-left: 3px solid $secondary-color; + margin-left: 0; + padding-left: 1.5em; +} + +.muted { + color: $muted-color; +} + +.light-background { + background: $light-background; +} + + +body { + font-family: $font-stack; + font-size: 20px; + line-height: 32px; + background: $background; + color: $text-color; + margin: 0; + padding: 0; + display: flex; + min-height: 100vh; + flex-direction: column; +} +a { + text-decoration: none; +} + +dl { + display: flex; + dt { + width: 200px; + flex: 0 0 auto; + white-space: nowrap; + overflow: hidden; + text-overflow: ellipsis; + } + dd { + flex: 1 1 auto; + white-space: nowrap; + overflow: hidden; + text-overflow: ellipsis; + p { + display: inline; + } + } +} + +.shared-header { + margin-left: 20px; + margin-top: 30px; + margin-bottom: -20px; + strong { + color: $primary-color; + } + span { + color: $muted-color; + } +} + +div.highlight { + background: $code-highlight-background; + padding: 0 10px; + overflow: auto; + display: block; + margin: 20px 0; +} + +.box { + padding: 0 20px; +} + +code, pre { + color: $secondary-color; // #cb4b16; // #268bd2; // #2aa198; + font-family: monospace; +} + +.form { + input, select, textarea { + font-size: 20px; + border: 0; + padding: 5px; + background: $form-background-color; + color: $form-text-color; + &:focus { + outline: 1px solid $secondary-color; + } + } + input[type=submit] { + font-size: 20px; + outline: none; + background: $primary-color; + color: $primary-button-text-color; 
+ padding: 5px 12px; + cursor: pointer; + } +} + +header { + padding: 0 20px; + .title { + font-size: 1.3em; + text-decoration: none; + .handle { + font-size: 0.85em; + color: $muted-color; + } + } + .counter { + color: $muted-color; + } + .summary { + a:hover { + text-decoration: underline; + } + } +} +a { + color: $primary-color; + &:hover { + color: $secondary-color; + } +} +#main { + display: flex; + flex: 1; +} +main { + width: 100%; + max-width: 1000px; + margin: 30px auto; +} + +.main-flex { + display: flex; + flex: 1; +} + +.centered { + display: flex; + flex: 1; + justify-content: center; + align-items: center; + div { + display: block; + } +} + +footer { + width: 100%; + max-width: 1000px; + margin: 20px auto; + color: $muted-color; + p { + margin: 0; + } +} +.tiny-actor-icon { + max-width: 24px; + max-height: 24px; + position: relative; + top: 5px; +} +.actor-box { + display: flex; + column-gap: 20px; + margin:10px 0; + .icon-box { + flex: 0 0 50px; + } + .actor-handle { + font-size: 0.85em; + line-height: 1em; + color: $muted-color; + } + .actor-icon { + max-width: 50px; + } +} +#articles { + list-style-type: none; + margin: 30px 0; + padding: 0 20px; + li { + display: block; + span { + padding-right:10px; + } + } +} + +#notifications, #followers, #following { + ul { + list-style-type: none; + margin: 0; + padding: 0; + } + li { + display: block; + } +} + +@mixin admin-button() { + font-size: 20px; + line-height: 32px; + font-family: $font-stack; + background: $form-background-color; + color: $form-text-color; + border: 1px solid $background; + padding: 8px 10px 5px 10px; + cursor: pointer; + &:hover { + border: 1px solid $form-text-color; + } +} + +.show-sensitive-btn, .show-more-btn, .label-btn { + @include admin-button; + padding: 10px 5px; + margin: 20px 0; +} + +.show-hide-sensitive-btn { + display:inline-block; +} + +.no-margin-top { + margin-top: 0; +} + +.float-right { + float: right; +} + +ul.poll-items { + list-style-type: none; + padding: 0; + li { + display: block; + p { + margin: 20px 0 10px 0; + .poll-vote { + padding-left: 20px; + } + } + + .poll-bar { + width:100%;height:20px; + line { + stroke: $secondary-color; + stroke-width: 20px; + } + } + + } +} + +.attachment-wrapper { + .attachment-item { + margin-top: 20px; + } + img.attachment { + margin: 0; + } + a.attachment { + display: inline-block; + margin-bottom: 15px; + } + audio.attachment { + width: 480px; + } +} + +nav { + form { + margin: 15px 0; + } + input[type=submit], button { + @include admin-button; + } +} + +nav.flexbox { + ul { + display: flex; + flex-wrap: wrap; + align-items: center; + list-style-type: none; + margin: 0; + padding: 0; + + } + + ul li { + margin-right: 20px; + + &:last-child { + margin-right: 0px; + } + } + a:not(.label-btn) { + color: $primary-color; + text-decoration: none; + &:hover, &:active { + color: $secondary-color; + text-decoration: underline; + } + } + a.active:not(.label-btn) { + color: $secondary-color; + font-weight: bold; + } +} + +// after nav.flexbox to override default behavior +a.label-btn { + color: $form-text-color; + &:hover { + text-decoration: none; + color: $form-text-color; + } +} + +.ap-object { + margin: 15px 0; + padding: 20px; + nav { + color: $muted-color; + } + .in-reply-to { + display: inline; + color: $muted-color; + } + .e-content, .activity-og-meta { + a:hover { + text-decoration: underline; + } + } + .activity-attachment { + margin: 30px 0 20px 0; + img, audio, video { + max-width: calc(min(740px, 100%)); + } + } + img.inline-img { + display: 
block; + max-width: 740px; + } +} + +.activity-og-meta { + display: flex; + column-gap: 20px; + margin: 20px 0; + img { + max-width: 200px; + max-height: 100px; + } + small { + display: block; + } +} + +.ap-object-expanded { + border: 2px dashed $secondary-color; +} + +.error-box, .scolor { + color: $secondary-color; +} + +.actor-action { + margin-top:20px; + margin-bottom:-20px; + padding: 0 20px; + span { + color: $muted-color; + } + span.new { + color: $secondary-color; + } +} +.actor-metadata { + color: $muted-color; +} +.emoji, .custom-emoji { + max-width: 25px; +} + +.indieauth-box { + display: flex; + column-gap: 20px; + + .indieauth-logo { + flex: initial; + width: 100px; + img { + max-width: 100px; + } + } + .indieauth-details { + flex: 1; + div { + padding-left: 20px; + a { + font-size: 1.2em; + font-weight: 600; + } + } + } +} + +.public-interactions { + display: flex; + column-gap: 20px; + flex-wrap: wrap; + margin-top: 20px; + .interactions-block { + flex: 0 1 30%; + max-width: 50%; + .facepile-wrapper { + display: flex; + column-gap: 20px; + row-gap: 20px; + flex-wrap: wrap; + margin-top: 20px; + a { + height: 50px; + img { + max-width: 50px; + } + } + .and-x-more { + display: inline-block; + align-self: center; + } + } + } +} + +.error-title { + a { + text-decoration: underline; + } +} + +.ap-place { + h3 { + display: inline; + font-weight: normal; + } + h3::after { + content: ': '; + } +} + +.margin-top-20 { + margin-top: 20px; +} + +.video-wrapper { + position: relative; +} + +.video-gif-overlay { + display: none; +} + +.video-gif-mode + .video-gif-overlay { + display: block; + position: absolute; + top: 5px; + left: 5px; + padding: 0 3px; + font-size: 0.8em; + background: rgba(0,0,0,.5); + color: #fff; +} diff --git a/app/source.py b/app/source.py new file mode 100644 index 0000000..20e98ee --- /dev/null +++ b/app/source.py @@ -0,0 +1,218 @@ +import re +import typing + +from loguru import logger +from mistletoe import Document # type: ignore +from mistletoe.block_token import CodeFence # type: ignore +from mistletoe.html_renderer import HTMLRenderer # type: ignore +from mistletoe.span_token import SpanToken # type: ignore +from pygments.formatters import HtmlFormatter # type: ignore +from pygments.lexers import get_lexer_by_name as get_lexer # type: ignore +from pygments.util import ClassNotFound # type: ignore +from sqlalchemy import select + +from app import webfinger +from app.config import BASE_URL +from app.config import CODE_HIGHLIGHTING_THEME +from app.database import AsyncSession +from app.utils import emoji + +if typing.TYPE_CHECKING: + from app.actor import Actor + +_FORMATTER = HtmlFormatter(style=CODE_HIGHLIGHTING_THEME) +_HASHTAG_REGEX = re.compile(r"(#[\d\w]+)") +_MENTION_REGEX = re.compile(r"(@[\d\w_.+-]+@[\d\w-]+\.[\d\w\-.]+)") +_URL_REGEX = re.compile( + "(https?:\\/\\/(?:www\\.)?[-a-zA-Z0-9@:%._\\+~#=]{1,256}\\.[a-zA-Z0-9()]{1,6}\\b(?:[-a-zA-Z0-9()@:%_\\+.~#?&\\/=]*))" # noqa: E501 +) + + +class AutoLink(SpanToken): + parse_inner = False + precedence = 1 + pattern = _URL_REGEX + + def __init__(self, match_obj: re.Match) -> None: + self.target = match_obj.group() + + +class Mention(SpanToken): + parse_inner = False + precedence = 10 + pattern = _MENTION_REGEX + + def __init__(self, match_obj: re.Match) -> None: + self.target = match_obj.group() + + +class Hashtag(SpanToken): + parse_inner = False + precedence = 10 + pattern = _HASHTAG_REGEX + + def __init__(self, match_obj: re.Match) -> None: + self.target = match_obj.group() + + +class 
CustomRenderer(HTMLRenderer):
+    def __init__(
+        self,
+        mentioned_actors: dict[str, "Actor"] = {},
+        enable_mentionify: bool = True,
+        enable_hashtagify: bool = True,
+    ) -> None:
+        extra_tokens = []
+        if enable_mentionify:
+            extra_tokens.append(Mention)
+        if enable_hashtagify:
+            extra_tokens.append(Hashtag)
+        super().__init__(AutoLink, *extra_tokens)
+
+        self.tags: list[dict[str, str]] = []
+        self.mentioned_actors = mentioned_actors
+
+    def render_auto_link(self, token: AutoLink) -> str:
+        template = '<a href="{target}" rel="noopener">{inner}</a>'
+        target = self.escape_url(token.target)
+        return template.format(target=target, inner=target)
+
+    def render_mention(self, token: Mention) -> str:
+        mention = token.target
+        suffix = ""
+        if mention.endswith("."):
+            mention = mention[:-1]
+            suffix = "."
+        actor = self.mentioned_actors.get(mention)
+        if not actor:
+            return mention
+
+        self.tags.append(dict(type="Mention", href=actor.ap_id, name=mention))
+
+        link = f'<span class="h-card"><a href="{actor.url}" class="u-url mention">{actor.handle}</a></span>{suffix}'  # noqa: E501
+        return link
+
+    def render_hashtag(self, token: Hashtag) -> str:
+        tag = token.target[1:]
+        link = f'<a href="{BASE_URL}/t/{tag.lower()}" class="mention hashtag" rel="tag">#<span>{tag}</span></a>'  # noqa: E501
+        self.tags.append(
+            dict(
+                href=f"{BASE_URL}/t/{tag.lower()}",
+                name=token.target.lower(),
+                type="Hashtag",
+            )
+        )
+        return link
+
+    def render_block_code(self, token: CodeFence) -> str:
+        lexer_attr = ""
+        try:
+            lexer = get_lexer(token.language)
+            lexer_attr = f' data-microblogpub-lexer="{lexer.aliases[0]}"'
+        except ClassNotFound:
+            pass
+
+        code = token.children[0].content
+        return f"<pre><code{lexer_attr}>\n{code}\n</code></pre>"
+
+
+async def _prefetch_mentioned_actors(
+    db_session: AsyncSession,
+    content: str,
+) -> dict[str, "Actor"]:
+    from app import models
+    from app.actor import fetch_actor
+
+    actors = {}
+
+    for mention in re.findall(_MENTION_REGEX, content):
+        if mention in actors:
+            continue
+
+        # XXX: the regex catches stuff like `@toto@example.com.`
+        if mention.endswith("."):
+            mention = mention[:-1]
+
+        try:
+            _, username, domain = mention.split("@")
+            actor = (
+                await db_session.execute(
+                    select(models.Actor).where(
+                        models.Actor.handle == mention,
+                        models.Actor.is_deleted.is_(False),
+                    )
+                )
+            ).scalar_one_or_none()
+            if not actor:
+                actor_url = await webfinger.get_actor_url(mention)
+                if not actor_url:
+                    # FIXME(ts): raise an error?
+                    continue
+                actor = await fetch_actor(db_session, actor_url)
+
+            actors[mention] = actor
+        except Exception:
+            logger.exception(f"Failed to prefetch {mention}")
+
+    return actors
+
+
+def hashtagify(
+    content: str,
+) -> tuple[str, list[dict[str, str]]]:
+    tags = []
+    with CustomRenderer(
+        mentioned_actors={},
+        enable_mentionify=False,
+        enable_hashtagify=True,
+    ) as renderer:
+        rendered_content = renderer.render(Document(content))
+        tags.extend(renderer.tags)
+
+    # Handle custom emoji
+    tags.extend(emoji.tags(content))
+
+    return rendered_content, tags
+
+
+async def markdownify(
+    db_session: AsyncSession,
+    content: str,
+    enable_mentionify: bool = True,
+    enable_hashtagify: bool = True,
+) -> tuple[str, list[dict[str, str]], list["Actor"]]:
+    """
+    >>> content, tags, mentioned_actors = await markdownify(db_session, "Hello")
+
+    """
+    tags = []
+    mentioned_actors: dict[str, "Actor"] = {}
+    if enable_mentionify:
+        mentioned_actors = await _prefetch_mentioned_actors(db_session, content)
+
+    with CustomRenderer(
+        mentioned_actors=mentioned_actors,
+        enable_mentionify=enable_mentionify,
+        enable_hashtagify=enable_hashtagify,
+    ) as renderer:
+        rendered_content = renderer.render(Document(content))
+        tags.extend(renderer.tags)
+
+    # Handle custom emoji
+    tags.extend(emoji.tags(content))
+
+    return rendered_content, dedup_tags(tags), list(mentioned_actors.values())
+
+
+def dedup_tags(tags: list[dict[str, str]]) -> list[dict[str, str]]:
+    idx = set()
+    deduped_tags = []
+    for tag in tags:
+        tag_idx = (tag["type"], tag["name"])
+        if tag_idx in idx:
+            continue
+
+        idx.add(tag_idx)
+        deduped_tags.append(tag)
+
+    return deduped_tags
diff --git a/app/static/common-admin.js b/app/static/common-admin.js
new file mode 100644
index 0000000..10254cd
--- /dev/null
+++ b/app/static/common-admin.js
@@ -0,0 +1,11 @@
+document.addEventListener('DOMContentLoaded', (ev) => {
+  // Add confirm to "delete" button next to outbox objects
+  var forms = document.getElementsByClassName("object-delete-form")
+  for (var i = 0; i < forms.length; i++) {
+    forms[i].addEventListener('submit', (ev) => {
+      if (!confirm('Do you really want to delete this object?')) {
+        ev.preventDefault();
+      };
+    });
+  }
+});
diff --git a/app/static/common.js b/app/static/common.js
new file mode 100644
index 0000000..410b96f
--- /dev/null
+++ b/app/static/common.js
@@ -0,0 +1,32 @@
+function hasAudio (video) {
+  return video.mozHasAudio ||
+    Boolean(video.webkitAudioDecodedByteCount) ||
+    Boolean(video.audioTracks && video.audioTracks.length);
+}
+
+function setVideoInGIFMode(video) {
+  if (!hasAudio(video)) {
+    if (typeof video.loop == 'boolean' && video.duration <= 10.0) {
+      video.classList.add("video-gif-mode");
+      video.loop = true;
+      video.controls = false;
+      video.addEventListener("mouseover", () => {
+        video.play();
+      })
video.addEventListener("mouseleave", () => { + video.pause(); + }) + } + }; +} + +var items = document.getElementsByTagName("video") +for (var i = 0; i < items.length; i++) { + if (items[i].duration) { + setVideoInGIFMode(items[i]); + } else { + items[i].addEventListener("loadeddata", function() { + setVideoInGIFMode(this); + }); + } +} diff --git a/app/static/css/.gitignore b/app/static/css/.gitignore new file mode 100644 index 0000000..b3a5267 --- /dev/null +++ b/app/static/css/.gitignore @@ -0,0 +1 @@ +*.css diff --git a/data/mongodb/.gitignore b/app/static/emoji/.gitignore similarity index 100% rename from data/mongodb/.gitignore rename to app/static/emoji/.gitignore diff --git a/app/static/emoji/goose_honk.png b/app/static/emoji/goose_honk.png new file mode 100644 index 0000000..0665643 Binary files /dev/null and b/app/static/emoji/goose_honk.png differ diff --git a/app/static/new.js b/app/static/new.js new file mode 100644 index 0000000..065bad5 --- /dev/null +++ b/app/static/new.js @@ -0,0 +1,56 @@ +// The new post textarea +var ta = document.getElementsByTagName("textarea")[0]; +// Helper for inserting text (emojis) in the textarea +function insertAtCursor (textToInsert) { + ta.focus(); + const isSuccess = document.execCommand("insertText", false, textToInsert); + + // Firefox (non-standard method) + if (!isSuccess) { + // Credits to https://www.everythingfrontend.com/posts/insert-text-into-textarea-at-cursor-position.html + // get current text of the input + const value = ta.value; + // save selection start and end position + const start = ta.selectionStart; + const end = ta.selectionEnd; + // update the value with our text inserted + ta.value = value.slice(0, start) + textToInsert + value.slice(end); + // update cursor to be at the end of insertion + ta.selectionStart = ta.selectionEnd = start + textToInsert.length; + } +} +// Emoji click callback func +var ji = function (ev) { + insertAtCursor(ev.target.attributes.alt.value + " "); + ta.focus() + //console.log(document.execCommand('insertText', false /*no UI*/, ev.target.attributes.alt.value)); +} +// Enable the click for each emojis +var items = document.getElementsByClassName("ji") +for (var i = 0; i < items.length; i++) { + items[i].addEventListener('click', ji); +} + +// Add new input text dynamically to allow setting an alt text on attachments +var files = document.getElementById("files"); +var alts = document.getElementById("alts"); +files.addEventListener("change", function(e) { + // Reset the div content + alts.innerHTML = ""; + + // Add an input for each files + for (var i = 0; i < e.target.files.length; i++) { + var p = document.createElement("p"); + var altInput = document.createElement("input"); + altInput.setAttribute("type", "text"); + altInput.setAttribute("name", "alt_" + e.target.files[i].name); + altInput.setAttribute("placeholder", "Alt text for " + e.target.files[i].name); + altInput.setAttribute("style", "width:95%;") + p.appendChild(altInput); + alts.appendChild(p); + } +}); +// Focus at the end of the textarea +const end = ta.value.length; +ta.setSelectionRange(end, end); +ta.focus(); diff --git a/static/nopic.png b/app/static/nopic.png similarity index 100% rename from static/nopic.png rename to app/static/nopic.png diff --git a/data/poussetaches/.gitignore b/app/static/twemoji/.gitignore similarity index 100% rename from data/poussetaches/.gitignore rename to app/static/twemoji/.gitignore diff --git a/app/templates.py b/app/templates.py new file mode 100644 index 0000000..ea4fb26 --- /dev/null +++ 
b/app/templates.py @@ -0,0 +1,439 @@ +from datetime import datetime +from datetime import timezone +from functools import lru_cache +from typing import Any +from typing import Callable +from urllib.parse import urlparse + +import bleach +import emoji +import html2text +import humanize +from bs4 import BeautifulSoup # type: ignore +from dateutil.parser import parse +from fastapi import Request +from fastapi.templating import Jinja2Templates +from loguru import logger +from sqlalchemy import func +from sqlalchemy import select +from starlette.templating import _TemplateResponse as TemplateResponse + +from app import activitypub as ap +from app import config +from app import models +from app.actor import LOCAL_ACTOR +from app.ap_object import Attachment +from app.ap_object import Object +from app.config import BASE_URL +from app.config import CUSTOM_FOOTER +from app.config import DEBUG +from app.config import SESSION_TIMEOUT +from app.config import VERSION +from app.config import generate_csrf_token +from app.config import session_serializer +from app.database import AsyncSession +from app.media import proxied_media_url +from app.utils import privacy_replace +from app.utils.datetime import now +from app.utils.highlight import HIGHLIGHT_CSS +from app.utils.highlight import highlight + +_templates = Jinja2Templates( + directory=["data/templates", "app/templates"], # type: ignore # bad typing + trim_blocks=True, + lstrip_blocks=True, +) + + +H2T = html2text.HTML2Text() +H2T.ignore_links = True +H2T.ignore_images = True + + +def _filter_domain(text: str) -> str: + hostname = urlparse(text).hostname + if not hostname: + raise ValueError(f"No hostname for {text}") + return hostname + + +def _media_proxy_url(url: str | None) -> str: + if not url: + return BASE_URL + "/static/nopic.png" + return proxied_media_url(url) + + +def is_current_user_admin(request: Request) -> bool: + is_admin = False + session_cookie = request.cookies.get("session") + if session_cookie: + try: + loaded_session = session_serializer.loads( + session_cookie, + max_age=SESSION_TIMEOUT, + ) + except Exception: + logger.exception("Failed to validate session timeout") + else: + is_admin = loaded_session.get("is_logged_in") + + return is_admin + + +async def render_template( + db_session: AsyncSession, + request: Request, + template: str, + template_args: dict[str, Any] | None = None, + status_code: int = 200, + headers: dict[str, str] | None = None, +) -> TemplateResponse: + if template_args is None: + template_args = {} + + is_admin = False + is_admin = is_current_user_admin(request) + + return _templates.TemplateResponse( + template, + { + "request": request, + "debug": DEBUG, + "microblogpub_version": VERSION, + "is_admin": is_admin, + "csrf_token": generate_csrf_token(), + "highlight_css": HIGHLIGHT_CSS, + "visibility_enum": ap.VisibilityEnum, + "notifications_count": await db_session.scalar( + select(func.count(models.Notification.id)).where( + models.Notification.is_new.is_(True) + ) + ) + if is_admin + else 0, + "articles_count": await db_session.scalar( + select(func.count(models.OutboxObject.id)).where( + models.OutboxObject.visibility == ap.VisibilityEnum.PUBLIC, + models.OutboxObject.is_deleted.is_(False), + models.OutboxObject.is_hidden_from_homepage.is_(False), + models.OutboxObject.ap_type == "Article", + ) + ), + "local_actor": LOCAL_ACTOR, + "followers_count": await db_session.scalar( + select(func.count(models.Follower.id)) + ), + "following_count": await db_session.scalar( + 
select(func.count(models.Following.id)) + ), + "actor_types": ap.ACTOR_TYPES, + "custom_footer": CUSTOM_FOOTER, + **template_args, + }, + status_code=status_code, + headers=headers, + ) + + +# HTML/templates helper +ALLOWED_TAGS = [ + "a", + "abbr", + "acronym", + "b", + "br", + "blockquote", + "code", + "pre", + "em", + "i", + "li", + "ol", + "strong", + "sup", + "sub", + "del", + "ul", + "span", + "div", + "p", + "h1", + "h2", + "h3", + "h4", + "h5", + "h6", + "table", + "th", + "tr", + "td", + "thead", + "tbody", + "tfoot", + "colgroup", + "caption", + "img", + "div", + "span", +] + +ALLOWED_CSS_CLASSES = [ + # microformats + "h-card", + "u-url", + "mention", + # code highlighting + "highlight", + "codehilite", + "hll", + "c", + "err", + "g", + "k", + "l", + "n", + "o", + "x", + "p", + "ch", + "cm", + "cp", + "cpf", + "c1", + "cs", + "gd", + "ge", + "gr", + "gh", + "gi", + "go", + "gp", + "gs", + "gu", + "gt", + "kc", + "kd", + "kn", + "kp", + "kr", + "kt", + "ld", + "m", + "s", + "na", + "nb", + "nc", + "no", + "nd", + "ni", + "ne", + "nf", + "nl", + "nn", + "nx", + "py", + "nt", + "nv", + "ow", + "w", + "mb", + "mf", + "mh", + "mi", + "mo", + "sa", + "sb", + "sc", + "dl", + "sd", + "s2", + "se", + "sh", + "si", + "sx", + "sr", + "s1", + "ss", + "bp", + "fm", + "vc", + "vg", + "vi", + "vm", + "il", +] + + +def _allow_class(_tag: str, name: str, value: str) -> bool: + return name == "class" and value in ALLOWED_CSS_CLASSES + + +def _allow_img_attrs(_tag: str, name: str, value: str) -> bool: + if name in ["src", "alt", "title"]: + return True + if name == "class" and value == "inline-img": + return True + + return False + + +ALLOWED_ATTRIBUTES: dict[str, list[str] | Callable[[str, str, str], bool]] = { + "a": ["href", "title"], + "abbr": ["title"], + "acronym": ["title"], + "img": _allow_img_attrs, + "div": _allow_class, + "span": _allow_class, + "code": _allow_class, +} + + +def _allow_all_attributes(tag: Any, name: Any, value: Any) -> bool: + return True + + +@lru_cache(maxsize=256) +def _update_inline_imgs(content): + soup = BeautifulSoup(content, "html5lib") + imgs = soup.find_all("img") + if not imgs: + return content + + for img in imgs: + if not img.attrs.get("src"): + continue + + img.attrs["src"] = _media_proxy_url(img.attrs["src"]) + "/740" + img["class"] = "inline-img" + + return soup.find("body").decode_contents() + + +def _clean_html(html: str, note: Object) -> str: + if html is None: + logger.error(f"{html=} for {note.ap_id}/{note.ap_object}") + return "" + try: + return _emojify( + _replace_custom_emojis( + bleach.clean( + privacy_replace.replace_content( + _update_inline_imgs(highlight(html)) + ), + tags=ALLOWED_TAGS, + attributes=( + _allow_all_attributes + if note.ap_id.startswith(config.ID) + else ALLOWED_ATTRIBUTES + ), + strip=True, + ), + note, + ), + is_local=note.ap_id.startswith(BASE_URL), + ) + except Exception: + raise + + +def _clean_html_wm(html: str) -> str: + return bleach.clean( + html, + attributes=ALLOWED_ATTRIBUTES, + strip=True, + ) + + +def _timeago(original_dt: datetime) -> str: + dt = original_dt + if dt.tzinfo: + dt = dt.astimezone(timezone.utc).replace(tzinfo=None) + return humanize.naturaltime(dt, when=now().replace(tzinfo=None)) + + +def _has_media_type(attachment: Attachment, media_type_prefix: str) -> bool: + if attachment.media_type: + return attachment.media_type.startswith(media_type_prefix) + return False + + +def _format_date(dt: datetime) -> str: + return dt.strftime("%b %d, %Y, %H:%M") + + +def _pluralize(count: int, singular: str = "", 
plural: str = "s") -> str: + if count > 1: + return plural + else: + return singular + + +def _replace_custom_emojis(content: str, note: Object) -> str: + idx = {} + for tag in note.tags: + if tag.get("type") == "Emoji": + try: + idx[tag["name"]] = proxied_media_url(tag["icon"]["url"]) + except KeyError: + logger.warning(f"Failed to parse custom emoji {tag=}") + continue + + for emoji_name, emoji_url in idx.items(): + content = content.replace( + emoji_name, + f'{emoji_name}', # noqa: E501 + ) + + return content + + +def _html2text(content: str) -> str: + return H2T.handle(content) + + +def _replace_emoji(u: str, _) -> str: + filename = "-".join(hex(ord(c))[2:] for c in u) + return config.EMOJI_TPL.format(base_url=BASE_URL, filename=filename, raw=u) + + +def _emojify(text: str, is_local: bool) -> str: + if not is_local: + return text + + return emoji.replace_emoji( + text, + replace=_replace_emoji, + ) + + +def _parse_datetime(dt: str) -> datetime: + return parse(dt) + + +def _poll_item_pct(item: ap.RawObject, voters_count: int) -> int: + if voters_count == 0: + return 0 + + return int(item["replies"]["totalItems"] * 100 / voters_count) + + +_templates.env.filters["domain"] = _filter_domain +_templates.env.filters["media_proxy_url"] = _media_proxy_url +_templates.env.filters["clean_html"] = _clean_html +_templates.env.filters["clean_html_wm"] = _clean_html_wm +_templates.env.filters["timeago"] = _timeago +_templates.env.filters["format_date"] = _format_date +_templates.env.filters["has_media_type"] = _has_media_type +_templates.env.filters["html2text"] = _html2text +_templates.env.filters["emojify"] = _emojify +_templates.env.filters["pluralize"] = _pluralize +_templates.env.filters["parse_datetime"] = _parse_datetime +_templates.env.filters["poll_item_pct"] = _poll_item_pct +_templates.env.filters["privacy_replace_url"] = privacy_replace.replace_url +_templates.env.globals["JS_HASH"] = config.JS_HASH +_templates.env.globals["CSS_HASH"] = config.CSS_HASH +_templates.env.globals["BASE_URL"] = config.BASE_URL +_templates.env.globals["HIDES_FOLLOWERS"] = config.HIDES_FOLLOWERS +_templates.env.globals["HIDES_FOLLOWING"] = config.HIDES_FOLLOWING +_templates.env.globals["NAVBAR_ITEMS"] = config.NavBarItems +_templates.env.globals["ICON_URL"] = config.CONFIG.icon_url diff --git a/app/templates/admin_direct_messages.html b/app/templates/admin_direct_messages.html new file mode 100644 index 0000000..c6a2319 --- /dev/null +++ b/app/templates/admin_direct_messages.html @@ -0,0 +1,20 @@ +{%- import "utils.html" as utils with context -%} +{% extends "layout.html" %} + +{% block head %} +{{ local_actor.display_name }} - Direct messages +{% endblock %} + +{% block content %} + +{% for anybox_object, convo, actors in threads %} +
+ With {% for actor in actors %} + + {{ actor.handle }} + + {% endfor %} +
+ {{ utils.display_object(anybox_object) }} +{% endfor %} +{% endblock %} diff --git a/app/templates/admin_inbox.html b/app/templates/admin_inbox.html new file mode 100644 index 0000000..221b197 --- /dev/null +++ b/app/templates/admin_inbox.html @@ -0,0 +1,46 @@ +{%- import "utils.html" as utils with context -%} +{% extends "layout.html" %} + +{% block head %} +{{ local_actor.display_name }} - Inbox +{% endblock %} + +{% block content %} + +{% if show_filters %} +{{ utils.display_box_filters("admin_inbox") }} +{% endif %} + +{% if not inbox %} + +{% endif %} + +{% for inbox_object in inbox %} +{% if inbox_object.ap_type == "Announce" %} + {{ utils.actor_action(inbox_object, "shared", with_icon=True) }} + {{ utils.display_object(inbox_object.relates_to_anybox_object) }} +{% elif inbox_object.ap_type in ["Article", "Note", "Video", "Page", "Question"] %} +{{ utils.display_object(inbox_object) }} +{% elif inbox_object.ap_type == "Follow" %} + {{ utils.actor_action(inbox_object, "followed you") }} + {{ utils.display_actor(inbox_object.actor, actors_metadata) }} +{% elif inbox_object.ap_type == "Like" %} + {{ utils.actor_action(inbox_object, "liked one of your posts", with_icon=True) }} + {{ utils.display_object(inbox_object.relates_to_anybox_object) }} +{% else %} +

+ Implement {{ inbox_object.ap_type }} + {{ inbox_object.ap_object }} +

+{% endif %} +{% endfor %} + +{% if next_cursor %} + +{% endif %} + +{% endblock %} diff --git a/app/templates/admin_new.html b/app/templates/admin_new.html new file mode 100644 index 0000000..ac3b3db --- /dev/null +++ b/app/templates/admin_new.html @@ -0,0 +1,94 @@ +{%- import "utils.html" as utils with context -%} +{% extends "layout.html" %} + +{% block head %} +{{ local_actor.display_name }} - New +{% endblock %} + +{% block content %} + +{% if in_reply_to_object %} +
In reply to:
+{{ utils.display_object(in_reply_to_object) }} +{% endif %} + +
+ + + +
+ {{ utils.embed_csrf_token() }} + {{ utils.embed_redirect_url() }} +

+ +

+ + {% if request.query_params.type == "Article" %} +

+ +

+ {% endif %} + + {% for emoji in emojis %} + {{ emoji | emojify(True) | safe }} + {% endfor %} + {% for emoji in custom_emojis %} + {{ emoji.name }} + {% endfor %} + + + + {% if request.query_params.type == "Question" %} +

+ +

+

+ +

+ {% for i in ["1", "2", "3", "4"] %} +

+ +

+ {% endfor %} + {% endif %} + +

+ +

+

+ +

+ +

+ +

+
+

+ +

+
+
+ +{% endblock %} diff --git a/app/templates/admin_outbox.html b/app/templates/admin_outbox.html new file mode 100644 index 0000000..d0ad58b --- /dev/null +++ b/app/templates/admin_outbox.html @@ -0,0 +1,35 @@ +{%- import "utils.html" as utils with context -%} +{% extends "layout.html" %} + +{% block head %} +{{ local_actor.display_name }} - Outbox +{% endblock %} + +{% block content %} + +{{ utils.display_box_filters("admin_outbox") }} + +{% for outbox_object in outbox %} + + {% if outbox_object.ap_type == "Announce" %} +
You shared {{ outbox_object.ap_published_at | timeago }}
+ {{ utils.display_object(outbox_object.relates_to_anybox_object) }} + {% elif outbox_object.ap_type == "Like" %} +
You liked {{ outbox_object.ap_published_at | timeago }}
+ {{ utils.display_object(outbox_object.relates_to_anybox_object) }} + {% elif outbox_object.ap_type == "Follow" %} +
You followed {{ outbox_object.ap_published_at | timeago }}
+ {{ utils.display_actor(outbox_object.relates_to_actor, actors_metadata) }} + {% elif outbox_object.ap_type in ["Article", "Note", "Video", "Question"] %} + {{ utils.display_object(outbox_object) }} + {% endif %} + +{% endfor %} + +{% if next_cursor %} + +{% endif %} + +{% endblock %} diff --git a/app/templates/admin_profile.html b/app/templates/admin_profile.html new file mode 100644 index 0000000..1f5646c --- /dev/null +++ b/app/templates/admin_profile.html @@ -0,0 +1,29 @@ +{%- import "utils.html" as utils with context -%} + +{% block head %} +{{ local_actor.display_name }} - {{ actor.display_name }} +{% endblock %} + +{% extends "layout.html" %} +{% block content %} + {{ utils.display_actor(actor, actors_metadata, with_details=True) }} + {% for inbox_object in inbox_objects %} + {% if inbox_object.ap_type == "Announce" %} + {{ utils.actor_action(inbox_object, "shared", with_icon=True) }} + {{ utils.display_object(inbox_object.relates_to_anybox_object) }} + {% else %} + {{ utils.display_object(inbox_object) }} + {% endif %} + {% endfor %} + +{% if next_cursor %} + +{% endif %} + +{% endblock %} diff --git a/app/templates/admin_stream.html b/app/templates/admin_stream.html new file mode 100644 index 0000000..4fd8a79 --- /dev/null +++ b/app/templates/admin_stream.html @@ -0,0 +1,18 @@ +{%- import "utils.html" as utils with context -%} +{% extends "layout.html" %} + +{% block head %} +{{ local_actor.display_name }} - Stream +{% endblock %} + +{% block content %} + +{% for inbox_object in stream %} +{% if inbox_object.ap_type == "Announce" %} + {{ utils.display_object(inbox_object.relates_to_anybox_object) }} +{% elif inbox_object.ap_type in ["Article", "Note", "Video"] %} +{{ utils.display_object(inbox_object) }} +{% endif %} +{% endfor %} + +{% endblock %} diff --git a/app/templates/articles.html b/app/templates/articles.html new file mode 100644 index 0000000..459492d --- /dev/null +++ b/app/templates/articles.html @@ -0,0 +1,20 @@ +{%- import "utils.html" as utils with context -%} +{% extends "layout.html" %} + +{% block head %} +{{ local_actor.display_name }}'s articles +{% endblock %} + +{% block content %} +{% include "header.html" %} + +
    + +{% for outbox_object in objects %} +
  • + {{ outbox_object.name }} +
  • +{% endfor %} +
+ +{% endblock %} diff --git a/app/templates/custom_page.html b/app/templates/custom_page.html new file mode 100644 index 0000000..2f1c937 --- /dev/null +++ b/app/templates/custom_page.html @@ -0,0 +1,30 @@ +{%- import "utils.html" as utils with context -%} +{% extends "layout.html" %} + +{% block head %} +{{ title }} +{% if request.url.path == "/" %} + + + + + + + + + + + + + +{% endif %} +{% endblock %} + +{% block content %} +{% include "header.html" %} + +
+ {{ page_content | safe }} +
+ +{% endblock %} diff --git a/app/templates/error.html b/app/templates/error.html new file mode 100644 index 0000000..c45a567 --- /dev/null +++ b/app/templates/error.html @@ -0,0 +1,12 @@ +{%- import "utils.html" as utils with context -%} +{% extends "layout.html" %} +{% block main_tag %} class="main-flex"{% endblock %} +{% block head %} +{{ title }} +{% endblock %} + +{% block content %} +
+

{{ title | safe }}

+
+{% endblock %} diff --git a/app/templates/followers.html b/app/templates/followers.html new file mode 100644 index 0000000..dabd049 --- /dev/null +++ b/app/templates/followers.html @@ -0,0 +1,32 @@ +{%- import "utils.html" as utils with context -%} +{% extends "layout.html" %} + +{% block head %} +{{ local_actor.display_name }}'s followers + +{% endblock %} + +{% block content %} +{% include "header.html" %} +
+
    +{% for follower in followers %} +
  • {{ utils.display_actor(follower.actor, actors_metadata) }}
  • +{% endfor %} +
+ +{% set x_more = followers_count - followers | length %} +{% if x_more > 0 %} +
+

And {{ x_more }} more.

+
+{% endif %} + +{% if is_admin %} + +{% endif %} + +
+{% endblock %} diff --git a/app/templates/following.html b/app/templates/following.html new file mode 100644 index 0000000..3341725 --- /dev/null +++ b/app/templates/following.html @@ -0,0 +1,32 @@ +{%- import "utils.html" as utils with context -%} +{% extends "layout.html" %} + +{% block head %} +{{ local_actor.display_name }}'s follows + +{% endblock %} + +{% block content %} +{% include "header.html" %} +
+
    +{% for follow in following %} +
  • {{ utils.display_actor(follow.actor, actors_metadata) }}
  • +{% endfor %} +
+ +{% set x_more = following_count - following | length %} +{% if x_more > 0 %} +
+

And {{ x_more }} more.

+
+{% endif %} + +{% if is_admin %} + +{% endif %} + +
+{% endblock %} diff --git a/app/templates/header.html b/app/templates/header.html new file mode 100644 index 0000000..a15f051 --- /dev/null +++ b/app/templates/header.html @@ -0,0 +1,62 @@ + diff --git a/app/templates/index.html b/app/templates/index.html new file mode 100644 index 0000000..8db1638 --- /dev/null +++ b/app/templates/index.html @@ -0,0 +1,60 @@ +{%- import "utils.html" as utils with context -%} +{% extends "layout.html" %} + +{% block head %} +{{ local_actor.display_name }}'s microblog + + + + + + + + + + + + + +{% endblock %} + +{% block content %} +{% include "header.html" %} + +{% if objects %} + +
+ + {% for outbox_object in objects %} + {% if outbox_object.ap_type in ["Note", "Video", "Question"] %} + {{ utils.display_object(outbox_object) }} + {% elif outbox_object.ap_type == "Announce" %} +
+ +
+ {{ utils.display_object(outbox_object.relates_to_anybox_object, is_h_entry=False) }} +
+
+ {% endif %} + {% endfor %} +
+ + {% if has_previous_page or has_next_page %} +
+ {% if has_previous_page %} + Previous + {% endif %} + + {% if has_next_page %} + Next + {% endif %} +
+ {% endif %} + +{% else %} +
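+{# Empty state: shown when the outbox has no public objects yet. #}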
+

Nothing to see here yet!

+
+{% endif %} + +{% endblock %} diff --git a/app/templates/indieauth_flow.html b/app/templates/indieauth_flow.html new file mode 100644 index 0000000..fb4d15c --- /dev/null +++ b/app/templates/indieauth_flow.html @@ -0,0 +1,45 @@ +{%- import "utils.html" as utils with context -%} +{% extends "layout.html" %} +{% block content %} +
+
+ {% if client.logo %} + + {% endif %} +
+
+ {% if client.url %} + {{ client.name }} + {% else %} + {{ client.name }} + {% endif %} +

wants you to log in{% if me %} as {{ me }}{% endif %} with the following redirect URI: {{ redirect_uri }}.

+ + +
+ {{ utils.embed_csrf_token() }} + {% if scopes %} +

Scopes

+
    + {% for scope in scopes %} +
  • +
  • + {% endfor %} +
+ {% endif %} + + + + + + + + +
+
+
+
+
+{% endblock %} diff --git a/app/templates/layout.html b/app/templates/layout.html new file mode 100644 index 0000000..cdf5959 --- /dev/null +++ b/app/templates/layout.html @@ -0,0 +1,60 @@ + + + + + + + + + + + + +{% block head %}{% endblock %} + + +
+ +{% if is_admin %} +
+{% macro admin_link(url, text) %} +{% set url_for = BASE_URL + request.app.router.url_path_for(url) %} +{{ text }} +{% endmacro %} +
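+{# Usage sketch: {{ admin_link("admin_new", "New") }} renders a nav link whose URL is resolved from the app's route table; "admin_new" is an assumed route name, for illustration only. #}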
+ +
+ +
+{% endif %} +{% block content %}{% endblock %} + +
+ +
+
+ {% if custom_footer %} + {{ custom_footer | safe }} + {% else %} + Powered by microblog.pub {{ microblogpub_version }} and the ActivityPub protocol. Admin. + {% endif %} +
+
+{% if is_admin %} + +{% endif %} + + + diff --git a/app/templates/login.html b/app/templates/login.html new file mode 100644 index 0000000..8f3940a --- /dev/null +++ b/app/templates/login.html @@ -0,0 +1,21 @@ +{%- import "utils.html" as utils with context -%} +{% extends "layout.html" %} +{% block head %} + +{% endblock %} +{% block main_tag %} class="main-flex"{% endblock %} +{% block content %} +
+
+ {% if error %} +

Invalid password.

+ {% endif %} +
+ + + + +
+
+
+{% endblock %} diff --git a/app/templates/lookup.html b/app/templates/lookup.html new file mode 100644 index 0000000..0b077e2 --- /dev/null +++ b/app/templates/lookup.html @@ -0,0 +1,38 @@ +{%- import "utils.html" as utils with context -%} +{% extends "layout.html" %} + +{% block head %} +{{ local_actor.display_name }} - Lookup +{% endblock %} + +{% block content %} + +
+

Interact with an ActivityPub object via its URL or look for a user using @user@domain.tld

+ +
+ + +
+
+ + {% if error %} +
+ {% if error.value == "NOT_FOUND" %} +

The remote object is unavailable.

+ {% elif error.value == "UNAUTHORIZED" %} +

Missing permissions to fetch the remote object.

+ {% elif error.value == "TIMEOUT" %} +

Lookup timed out, please try refreshing the page.

+ {% else %} +

Unexpected error, please check the logs and report an issue if needed.

+ {% endif %} +
+ {% endif %} + + {% if ap_object and ap_object.ap_type in actor_types %} + {{ utils.display_actor(ap_object, actors_metadata, with_details=True) }} + {% elif ap_object %} + {{ utils.display_object(ap_object, actors_metadata=actors_metadata) }} + {% endif %} +{% endblock %} diff --git a/app/templates/notifications.html b/app/templates/notifications.html new file mode 100644 index 0000000..aaa541f --- /dev/null +++ b/app/templates/notifications.html @@ -0,0 +1,126 @@ +{%- import "utils.html" as utils with context -%} +{% extends "layout.html" %} + +{% block head %} +{{ local_actor.display_name }} - Notifications +{% endblock %} + +{% macro notif_actor_action(notif, text, with_icon=False) %} + +{% endmacro %} + +{% block content %} +
+

Notifications

+
+
+ {%- for notif in notifications %} +
+ {%- if notif.notification_type.value == "new_follower" %} + {{ notif_actor_action(notif, "followed you") }} + {{ utils.display_actor(notif.actor, actors_metadata) }} + {%- elif notif.notification_type.value == "pending_incoming_follower" %} + {{ notif_actor_action(notif, "sent a follow request") }} + {{ utils.display_actor(notif.actor, actors_metadata, pending_incoming_follow_notif=notif) }} + {% elif notif.notification_type.value == "rejected_follower" %} + {% elif notif.notification_type.value == "unfollow" %} + {{ notif_actor_action(notif, "unfollowed you") }} + {{ utils.display_actor(notif.actor, actors_metadata) }} + {%- elif notif.notification_type.value == "follow_request_accepted" %} + {{ notif_actor_action(notif, "accepted your follow request") }} + {{ utils.display_actor(notif.actor, actors_metadata) }} + {%- elif notif.notification_type.value == "follow_request_rejected" %} + {{ notif_actor_action(notif, "rejected your follow request") }} + {{ utils.display_actor(notif.actor, actors_metadata) }} + {% elif notif.notification_type.value == "blocked" %} + {{ notif_actor_action(notif, "blocked you") }} + {{ utils.display_actor(notif.actor, actors_metadata) }} + {% elif notif.notification_type.value == "unblocked" %} + {{ notif_actor_action(notif, "unblocked you") }} + {{ utils.display_actor(notif.actor, actors_metadata) }} + {% elif notif.notification_type.value == "block" %} + {{ notif_actor_action(notif, "was blocked") }} + {{ utils.display_actor(notif.actor, actors_metadata) }} + {% elif notif.notification_type.value == "unblock" %} + {{ notif_actor_action(notif, "was unblocked") }} + {{ utils.display_actor(notif.actor, actors_metadata) }} + {%- elif notif.notification_type.value == "move" and notif.inbox_object %} + {# for move notif, the actor is the target and the inbox object the Move activity #} + + {{ utils.display_actor(notif.actor) }} + {% elif notif.notification_type.value == "like" %} + {{ notif_actor_action(notif, "liked a post", with_icon=True) }} + {{ utils.display_object(notif.outbox_object) }} + {% elif notif.notification_type.value == "undo_like" %} + {{ notif_actor_action(notif, "unliked a post", with_icon=True) }} + {{ utils.display_object(notif.outbox_object) }} + {% elif notif.notification_type.value == "announce" %} + {{ notif_actor_action(notif, "shared a post", with_icon=True) }} + {{ utils.display_object(notif.outbox_object) }} + {% elif notif.notification_type.value == "undo_announce" %} + {{ notif_actor_action(notif, "unshared a post", with_icon=True) }} + {{ utils.display_object(notif.outbox_object) }} + {% elif notif.notification_type.value == "mention" %} + {{ notif_actor_action(notif, "mentioned you") }} + {{ utils.display_object(notif.inbox_object) }} + {% elif notif.notification_type.value == "new_webmention" %} +
+ new webmention from + {% set facepile_item = notif.webmention.as_facepile_item %} + {% if facepile_item %} + {{ facepile_item.actor_name }} + {% endif %} + {{ notif.webmention.source }} +
+ {{ utils.display_object(notif.outbox_object) }} + {% elif notif.notification_type.value == "updated_webmention" %} +
+ updated webmention from + {% set facepile_item = notif.webmention.as_facepile_item %} + {% if facepile_item %} + {{ facepile_item.actor_name }} + {% endif %} + {{ notif.webmention.source }} +
+ {{ utils.display_object(notif.outbox_object) }} + {% elif notif.notification_type.value == "deleted_webmention" %} +
+ deleted webmention from + {% set facepile_item = notif.webmention.as_facepile_item %} + {% if facepile_item %} + {{ facepile_item.actor_name }} + {% endif %} + {{ notif.webmention.source }} +
+ {{ utils.display_object(notif.outbox_object) }} + {% else %} +
+ Implement {{ notif.notification_type }} +
+ {%- endif %} +
+ {%- endfor %} +
+ +{% if next_cursor %} + +{% endif %} + +{% endblock %} diff --git a/app/templates/object.html b/app/templates/object.html new file mode 100644 index 0000000..d89accb --- /dev/null +++ b/app/templates/object.html @@ -0,0 +1,56 @@ +{%- import "utils.html" as utils with context -%} +{% extends "layout.html" %} + +{% block head %} +{% if outbox_object %} +{% if outbox_object.content %} + {% set excerpt = outbox_object.content | html2text | trim | truncate(50) %} +{% else %} + {% set excerpt = outbox_object.summary | html2text | trim | truncate(50) %} +{% endif %} +{% if outbox_object.name %}{{ outbox_object.name }}{% else %}{{ local_actor.display_name }}: "{{ excerpt }}"{% endif %} + + + + + + + + + + +{% endif %} +{% endblock %} + +{% block content %} + +{% if outbox_object %} +{% include "header.html" %} +{% endif %} + +{% macro display_replies_tree(replies_tree_node) %} + +{% if replies_tree_node.is_requested %} +{{ utils.display_object(replies_tree_node.ap_object, likes=likes, shares=shares, webmentions=webmentions, expanded=not replies_tree_node.is_root, is_object_page=True, is_h_entry=False) }} +{% else %} + {% if replies_tree_node.wm_reply %} + {# u-comment h-cite is displayed by default for webmention #} + {{ utils.display_webmention_reply(replies_tree_node.wm_reply) }} + {% else %} +
+ {{ utils.display_object(replies_tree_node.ap_object, is_h_entry=False) }} +
+ {% endif %} +{% endif %} + +{% for child in replies_tree_node.children %} + {{ display_replies_tree(child) }} +{% endfor %} + +{% endmacro %} + +
+{{ display_replies_tree(replies_tree) }} +
+ +{% endblock %} diff --git a/app/templates/redirect.html b/app/templates/redirect.html new file mode 100644 index 0000000..f8b0601 --- /dev/null +++ b/app/templates/redirect.html @@ -0,0 +1,15 @@ +{%- import "utils.html" as utils with context -%} +{% extends "layout.html" %} + +{% block head %} +{{ local_actor.display_name }}'s microblog - Redirect +{% endblock %} + +{% block content %} +{% include "header.html" %} + +
+

You are being redirected to: {{ url }}

+
+ +{% endblock %} diff --git a/app/templates/redirect_to_remote_instance.html b/app/templates/redirect_to_remote_instance.html new file mode 100644 index 0000000..f814777 --- /dev/null +++ b/app/templates/redirect_to_remote_instance.html @@ -0,0 +1,15 @@ +{%- import "utils.html" as utils with context -%} +{% extends "layout.html" %} + +{% block head %} +{{ local_actor.display_name }}'s microblog - Redirect +{% endblock %} + +{% block content %} +{% include "header.html" %} + +
+

You are being redirected to your instance: {{ url }}

+
+ +{% endblock %} diff --git a/app/templates/remote_follow.html b/app/templates/remote_follow.html new file mode 100644 index 0000000..ed25be0 --- /dev/null +++ b/app/templates/remote_follow.html @@ -0,0 +1,21 @@ +{%- import "utils.html" as utils with context -%} +{% extends "layout.html" %} + +{% block head %} +Remote follow {{ local_actor.display_name }} + +{% endblock %} + +{% block content %} +{% include "header.html" %} + +
+

Remotely follow {{ local_actor.display_name }}

+
+ {{ utils.embed_csrf_token() }} + + +
+
+ +{% endblock %} diff --git a/app/templates/remote_interact.html b/app/templates/remote_interact.html new file mode 100644 index 0000000..0fbfe26 --- /dev/null +++ b/app/templates/remote_interact.html @@ -0,0 +1,27 @@ +{%- import "utils.html" as utils with context -%} +{% extends "layout.html" %} + +{% block head %} +Interact from your instance + +{% endblock %} + +{% block content %} +{% include "header.html" %} + +
+

Interact with this object

+
+ +{{ utils.display_object(outbox_object) }} + +
+
+ {{ utils.embed_csrf_token() }} + + + +
+
+ +{% endblock %} diff --git a/app/templates/utils.html b/app/templates/utils.html new file mode 100644 index 0000000..d88a881 --- /dev/null +++ b/app/templates/utils.html @@ -0,0 +1,853 @@ +{% macro embed_csrf_token() %} +{% block embed_csrf_token scoped %} + +{% endblock %} +{% endmacro %} + +{% macro embed_redirect_url(permalink_id=None) %} +{% block embed_redirect_url scoped %} + +{% endblock %} +{% endmacro %} + +{% macro admin_block_button(actor) %} +{% block admin_block_button scoped %} +
+ {{ embed_csrf_token() }} + {{ embed_redirect_url() }} + + +
+{% endblock %} +{% endmacro %} + +{% macro admin_unblock_button(actor) %} +{% block admin_unblock_button scoped %} +
+ {{ embed_csrf_token() }} + {{ embed_redirect_url() }} + + +
+{% endblock %} +{% endmacro %} + +{% macro admin_hide_shares_button(actor) %} +{% block admin_hide_shares_button scoped %} +
+ {{ embed_csrf_token() }} + {{ embed_redirect_url() }} + + +
+{% endblock %} +{% endmacro %} + +{% macro admin_show_shares_button(actor) %} +{% block admin_show_shares_button scoped %} +
+ {{ embed_csrf_token() }} + {{ embed_redirect_url() }} + + +
+{% endblock %} +{% endmacro %} + + +{% macro admin_follow_button(actor) %} +{% block admin_follow_button scoped %} +
+ {{ embed_csrf_token() }} + {{ embed_redirect_url() }} + + +
+{% endblock %} +{% endmacro %} + +{% macro admin_accept_incoming_follow_button(notif) %} +{% block admin_accept_incoming_follow_button scoped %} +
+ {{ embed_csrf_token() }} + {{ embed_redirect_url() }} + + +
+{% endblock %} +{% endmacro %} + +{% macro admin_reject_incoming_follow_button(notif) %} +{% block admin_reject_incoming_follow_button scoped %} +
+ {{ embed_csrf_token() }} + {{ embed_redirect_url() }} + + +
+{% endblock %} +{% endmacro %} + +{% macro admin_like_button(ap_object_id, permalink_id) %} +{% block admin_like_button scoped %} +
+ {{ embed_csrf_token() }} + {{ embed_redirect_url(permalink_id) }} + + +
+{% endblock %} +{% endmacro %} + +{% macro admin_bookmark_button(ap_object_id, permalink_id) %} +{% block admin_bookmark_button scoped %} +
+ {{ embed_csrf_token() }} + {{ embed_redirect_url(permalink_id) }} + + +
+{% endblock %} +{% endmacro %} + +{% macro admin_unbookmark_button(ap_object_id, permalink_id) %} +{% block admin_unbookmark_button scoped %} +
+ {{ embed_csrf_token() }} + {{ embed_redirect_url(permalink_id) }} + + +
+{% endblock %} +{% endmacro %} + +{% macro admin_pin_button(ap_object_id, permalink_id) %} +{% block admin_pin_button scoped %} +
+ {{ embed_csrf_token() }} + {{ embed_redirect_url(permalink_id) }} + + +
+{% endblock %} +{% endmacro %} + +{% macro admin_unpin_button(ap_object_id, permalink_id) %} +{% block admin_unpin_button scoped %} +
+ {{ embed_csrf_token() }} + {{ embed_redirect_url(permalink_id) }} + + +
+{% endblock %} +{% endmacro %} + +{% macro admin_delete_button(ap_object) %} +{% block admin_delete_button scoped %} +
+ {{ embed_csrf_token() }} + + + +
+{% endblock %} +{% endmacro %} + +{% macro admin_force_delete_button(ap_object_id, permalink_id=None) %} +{% block admin_force_delete_button scoped %} +
+ {{ embed_csrf_token() }} + {{ embed_redirect_url(permalink_id) }} + + +
+{% endblock %} +{% endmacro %} + +{% macro admin_force_delete_webmention_button(webmention_id, permalink_id=None) %} +{% block admin_force_delete_webmention_button scoped %} +
+ {{ embed_csrf_token() }} + {{ embed_redirect_url(permalink_id) }} + + +
+{% endblock %} +{% endmacro %} + +{% macro admin_announce_button(ap_object_id, permalink_id=None) %} +{% block admin_announce_button scoped %} +
+ {{ embed_csrf_token() }} + {{ embed_redirect_url(permalink_id) }} + + +
+{% endblock %} +{% endmacro %} + +{% macro admin_undo_button(ap_object_id, action="undo", permalink_id=None) %} +{% block admin_undo_button scoped %} +
+ {{ embed_csrf_token() }} + {{ embed_redirect_url(permalink_id) }} + + +
+{% endblock %} +{% endmacro %} + +{% macro admin_reply_button(ap_object_id) %} +{% block admin_reply_button scoped %} +
+ + +
+{% endblock %} +{% endmacro %} + +{% macro admin_dm_button(actor_handle) %} +{% block admin_dm_button scoped %} +
+ + + +
+{% endblock %} +{% endmacro %} + +{% macro admin_mention_button(actor_handle) %} +{% block admin_mention_button scoped %} +
+ + +
+{% endblock %} +{% endmacro %} + + + +{% macro admin_profile_button(ap_actor_id) %} +{% block admin_profile_button scoped %} +
+ + +
+{% endblock %} +{% endmacro %} + +{% macro admin_expand_button(ap_object) %} +{% block admin_expand_button scoped %} +{# TODO turn these into a regular link and append permalink ID if it's a reply #} +
+ + +
+{% endblock %} +{% endmacro %} + +{% macro display_box_filters(route) %} +{% block display_box_filters scoped %} + +{% endblock %} +{% endmacro %} + +{% macro display_tiny_actor_icon(actor) %} +{% block display_tiny_actor_icon scoped %} + +{% endblock %} +{% endmacro %} + +{% macro actor_action(inbox_object, text, with_icon=False) %} +{% block actor_action scoped %} + + +{% endblock %} +{% endmacro %} + +{% macro display_actor(actor, actors_metadata={}, embedded=False, with_details=False, pending_incoming_follow_notif=None) %} +{% block display_actor scoped %} +{% set metadata = actors_metadata.get(actor.ap_id) %} + +{% if not embedded %} +
+{% endif %} + + + +{% if is_admin and metadata %} +
+ +
+{% endif %} + +{% if with_details %} + {% if actor.summary %} +
+ {{ actor.summary | clean_html(actor) | safe }} +
+ {% endif %} + + {% if actor.attachments %} +
+ {% for prop in actor.attachments %} +
+ {% if prop.type == "PropertyValue" %} +
{{ prop.name }}
+
{{ prop.value | clean_html(actor) | safe }}
+ {% endif %} +
+ {% endfor %} +
+ {% endif %} +{% endif %} + +{% if not embedded %} +
+{% endif %} + +{% endblock %} +{% endmacro %} + +{% macro display_og_meta(object) %} +{% block display_og_meta scoped %} +{% if object.og_meta %} +{% for og_meta in object.og_meta[:1] %} +
+ {% if og_meta.image %} +
+ +
+ {% endif %} +
+ {{ og_meta.title }} + {% if og_meta.site_name %} + {{ og_meta.site_name }} + {% endif %} +
+
+{% endfor %} +{% endif %} +{% endblock %} +{% endmacro %} + + +{% macro display_attachments(object) %} +{% block display_attachments scoped %} + + {% for attachment in object.attachments %} + {% if attachment.type != "PropertyValue" %} + {% set orientation = "unknown" %} + {% if attachment.width %} + {% set orientation = "portrait" if attachment.width < attachment.height else "landscape" %} + {% endif %} + {% if object.sensitive and (attachment.type == "Image" or (attachment | has_media_type("image")) or attachment.type == "Video" or (attachment | has_media_type("video"))) %} +
+ +
+
+ +
+
+ {% else %} +
+ {% endif %} + + {% if attachment.type == "Image" or (attachment | has_media_type("image")) %} + {% if attachment.url not in object.inlined_images %} + + {{ attachment.name }} + + {% endif %} + {% elif attachment.type == "Video" or (attachment | has_media_type("video")) %} +
+ +
GIF
+
+ {% elif attachment.type == "Audio" or (attachment | has_media_type("audio")) %} + + {% elif attachment.type == "Link" %} + {{ attachment.url | truncate(64, True) }} ({{ attachment.mimetype}}) + {% else %} + + {% if attachment.name %}{{ attachment.name }}{% else %}{{ attachment.url | truncate(64, True) }}{% endif %} + ({{ attachment.mimetype }}) + {% endif %} + {% if object.sensitive and (attachment.type == "Image" or (attachment | has_media_type("image")) or attachment.type == "Video" or (attachment | has_media_type("video"))) %} +
+
+
+
+ {% else %} +
+ {% endif %} + {% endif %} + {% endfor %} +{% endblock %} +{% endmacro %} + +{% macro display_webmention_reply(wm_reply) %} +{% block display_webmention_reply scoped %} + +
+ + +

in reply to + this object +

+ +
+
+ {{ wm_reply.content | clean_html_wm | safe }} +
+
+ + +
+ +{% endblock %} +{% endmacro %} + +{% macro display_object(object, likes=[], shares=[], webmentions=[], expanded=False, actors_metadata={}, is_object_page=False, is_h_entry=True) %} +{% block display_object scoped %} +{% set is_article_mode = object.is_from_outbox and object.ap_type == "Article" and is_object_page %} +{% if object.ap_type in ["Note", "Article", "Video", "Page", "Question", "Event"] %} +
+ + {% if is_article_mode %} + + + + + + {% else %} + {{ display_actor(object.actor, actors_metadata, embedded=True) }} + {% endif %} + + {% if object.in_reply_to %} +

in reply to + this object +

+ {% endif %} + + {% if object.ap_type in ["Article", "Event"] %} +

{{ object.name }}

+ {% endif %} + + {% if object.ap_type == "Event" %} + {% if object.ap_object.get("endTime") and object.ap_object.get("startTime") %} +

On {{ object.ap_object.startTime | parse_datetime | format_date }} + (ends {{ object.ap_object.endTime | parse_datetime | format_date }})

+ {% endif %} + {% endif %} + + {% if object.ap_object.get("location") %} + {% set loc = object.ap_object.get("location") %} + {% if loc.type == "Place" and loc.latitude and loc.longitude %} +
+

Location

+ {% if loc.name %}{{ loc.name }}{% endif %} + + + + {{loc.latitude}},{{loc.longitude}} + +
+ {% endif %} + {% endif %} + + {% if is_article_mode %} + + {% endif %} + + {% if object.summary %} +
+ +
+

{{ object.summary | clean_html(object) | safe }}

+
+ +
+ {% endif %} +
+
+ {{ object.content | clean_html(object) | safe }} +
+ + {% if object.ap_type == "Question" %} + {% set can_vote = is_admin and object.is_from_inbox and not object.is_poll_ended and not object.voted_for_answers %} + {% if can_vote %} +
+ {{ embed_csrf_token() }} + {{ embed_redirect_url(object.permalink_id) }} + + {% endif %} + + {% if object.poll_items %} +
    + {% for item in object.poll_items %} +
  • + {% set pct = item | poll_item_pct(object.poll_voters_count) %} +

    + {% if can_vote %} + + + {% endif %} + + {{ pct }}% ({{ item.replies.totalItems }} votes) +

    + + + +
  • + {% endfor %} +
+ {% endif %} + + {% if can_vote %} +

+ +

+
+ {% endif %} + + + {% endif %} + + {{ display_og_meta(object) }} + +
+ {% if object.summary %} +
+ {% endif %} + +
+ {{ display_attachments(object) }} +
+ + + + {% if is_admin %} + + {% endif %} + + + {% if likes or shares or webmentions %} +
+ {% if likes %} +
Likes +
+ {% for like in likes %} + + {{ like.name }} + + {% endfor %} + {% if object.likes_count > likes | length %} +
+ and {{ object.likes_count - likes | length }} more. +
+ {% endif %} +
+
+ {% endif %} + + {% if shares %} +
Shares +
+ {% for share in shares %} + + {{ share.name }} + + {% endfor %} + {% if object.announces_count > shares | length %} +
+ and {{ object.announces_count - shares | length }} more. +
+ {% endif %} +
+
+ {% endif %} + + {% if webmentions %} +
Webmentions +
+ {% for webmention in webmentions %} + {% set wm = webmention.as_facepile_item %} + {% if wm %} + + {{ wm.actor_name }} + + {% endif %} + {% endfor %} +
+
+ {% endif %} + +
+ {% endif %} + + +
+ {% endif %} +{% endblock %} +{% endmacro %} diff --git a/app/uploads.py b/app/uploads.py new file mode 100644 index 0000000..d5c2079 --- /dev/null +++ b/app/uploads.py @@ -0,0 +1,125 @@ +import hashlib +from shutil import COPY_BUFSIZE # type: ignore + +import blurhash # type: ignore +from fastapi import UploadFile +from loguru import logger +from PIL import Image +from PIL import ImageOps +from sqlalchemy import select + +from app import activitypub as ap +from app import models +from app.config import BASE_URL +from app.config import ROOT_DIR +from app.database import AsyncSession + +UPLOAD_DIR = ROOT_DIR / "data" / "uploads" + + +async def save_upload(db_session: AsyncSession, f: UploadFile) -> models.Upload: + # Compute the hash + h = hashlib.blake2b(digest_size=32) + while True: + buf = f.file.read(COPY_BUFSIZE) + if not buf: + break + h.update(buf) + + content_hash = h.hexdigest() + f.file.seek(0) + + existing_upload = ( + await db_session.execute( + select(models.Upload).where(models.Upload.content_hash == content_hash) + ) + ).scalar_one_or_none() + if existing_upload: + logger.info(f"Upload with {content_hash=} already exists") + return existing_upload + + logger.info(f"Creating new Upload with {content_hash=}") + dest_filename = UPLOAD_DIR / content_hash + + has_thumbnail = False + image_blurhash = None + width = None + height = None + + if f.content_type.startswith("image") and f.content_type != "image/gif": + with Image.open(f.file) as _original_image: + # Fix image orientation (as we will remove the info from the EXIF + # metadata) + original_image = ImageOps.exif_transpose(_original_image) + + # Re-creating the image drops the EXIF metadata + destination_image = Image.new( + original_image.mode, + original_image.size, + ) + destination_image.putdata(original_image.getdata()) + destination_image.save( + dest_filename, + format=_original_image.format, # type: ignore + ) + + with open(dest_filename, "rb") as dest_f: + image_blurhash = blurhash.encode(dest_f, x_components=4, y_components=3) + + try: + width, height = destination_image.size + destination_image.thumbnail((740, 740)) + destination_image.save( + UPLOAD_DIR / f"{content_hash}_resized", + format="webp", + ) + except Exception: + logger.exception( + f"Failed to create thumbnail for {f.filename}/{content_hash}" + ) + else: + has_thumbnail = True + logger.info("Thumbnail generated") + else: + with open(dest_filename, "wb") as dest: + while True: + buf = f.file.read(COPY_BUFSIZE) + if not buf: + break + dest.write(buf) + + new_upload = models.Upload( + content_type=f.content_type, + content_hash=content_hash, + has_thumbnail=has_thumbnail, + blurhash=image_blurhash, + width=width, + height=height, + ) + db_session.add(new_upload) + await db_session.commit() + + return new_upload + + +def upload_to_attachment( + upload: models.Upload, + filename: str, + alt_text: str | None, +) -> ap.RawObject: + extra_attachment_fields = {} + if upload.blurhash: + extra_attachment_fields.update( + { + "blurhash": upload.blurhash, + "height": upload.height, + "width": upload.width, + } + ) + return { + "type": "Document", + "mediaType": upload.content_type, + "name": alt_text or filename, + "url": BASE_URL + f"/attachments/{upload.content_hash}/{filename}", + **extra_attachment_fields, + } diff --git a/core/__init__.py b/app/utils/__init__.py similarity index 100% rename from core/__init__.py rename to app/utils/__init__.py diff --git a/app/utils/custom_index_handler.py b/app/utils/custom_index_handler.py new file mode 100644 index 
0000000..eb776e2 --- /dev/null +++ b/app/utils/custom_index_handler.py @@ -0,0 +1,32 @@ +from typing import Any +from typing import Awaitable +from typing import Callable + +from fastapi import Depends +from fastapi import Request +from fastapi.responses import JSONResponse + +from app.actor import LOCAL_ACTOR +from app.config import is_activitypub_requested +from app.database import AsyncSession +from app.database import get_db_session + +_Handler = Callable[[Request, AsyncSession], Awaitable[Any]] + + +def build_custom_index_handler(handler: _Handler) -> _Handler: + async def custom_index( + request: Request, + db_session: AsyncSession = Depends(get_db_session), + ) -> Any: + # Serve the AP actor if requested + if is_activitypub_requested(request): + return JSONResponse( + LOCAL_ACTOR.ap_actor, + media_type="application/activity+json", + ) + + # Defer to the custom handler + return await handler(request, db_session) + + return custom_index diff --git a/app/utils/datetime.py b/app/utils/datetime.py new file mode 100644 index 0000000..5bdcacb --- /dev/null +++ b/app/utils/datetime.py @@ -0,0 +1,16 @@ +from datetime import datetime +from datetime import timezone + +from dateutil.parser import isoparse + + +def parse_isoformat(isodate: str) -> datetime: + return isoparse(isodate).astimezone(timezone.utc) + + +def now() -> datetime: + return datetime.now(timezone.utc) + + +def as_utc(dt: datetime) -> datetime: + return dt.replace(tzinfo=timezone.utc) diff --git a/app/utils/emoji.py b/app/utils/emoji.py new file mode 100644 index 0000000..da4867c --- /dev/null +++ b/app/utils/emoji.py @@ -0,0 +1,51 @@ +import mimetypes +import re +import typing +from pathlib import Path + +if typing.TYPE_CHECKING: + from app.activitypub import RawObject + +EMOJI_REGEX = re.compile(r"(:[\d\w]+:)") + +EMOJIS: dict[str, "RawObject"] = {} +EMOJIS_BY_NAME: dict[str, "RawObject"] = {} + + +def _load_emojis(root_dir: Path, base_url: str) -> None: + if EMOJIS: + return + for dir_name, path in ( + (root_dir / "app" / "static" / "emoji", "static/emoji"), + (root_dir / "data" / "custom_emoji", "custom_emoji"), + ): + for emoji in dir_name.iterdir(): + mt = mimetypes.guess_type(emoji.name)[0] + if mt and mt.startswith("image/"): + name = emoji.name.split(".")[0] + if not re.match(EMOJI_REGEX, f":{name}:"): + continue + ap_emoji: "RawObject" = { + "type": "Emoji", + "name": f":{name}:", + "updated": "1970-01-01T00:00:00Z", # XXX: we don't track date + "id": f"{base_url}/e/{name}", + "icon": { + "mediaType": mt, + "type": "Image", + "url": f"{base_url}/{path}/{emoji.name}", + }, + } + EMOJIS[emoji.name] = ap_emoji + EMOJIS_BY_NAME[ap_emoji["name"]] = ap_emoji + + +def tags(content: str) -> list["RawObject"]: + tags = [] + added = set() + for e in re.findall(EMOJI_REGEX, content): + if e not in added and e in EMOJIS_BY_NAME: + tags.append(EMOJIS_BY_NAME[e]) + added.add(e) + + return tags diff --git a/app/utils/facepile.py b/app/utils/facepile.py new file mode 100644 index 0000000..a4a1595 --- /dev/null +++ b/app/utils/facepile.py @@ -0,0 +1,172 @@ +import datetime +from dataclasses import dataclass +from datetime import timezone +from typing import Any +from typing import Optional + +from loguru import logger + +from app import media +from app.models import InboxObject +from app.models import Webmention +from app.utils.datetime import parse_isoformat +from app.utils.url import must_make_abs + + +@dataclass +class Face: + ap_actor_id: str | None + url: str + name: str + picture_url: str + created_at: datetime.datetime + + 
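+    # A Face is a single avatar in a facepile: it is built either from an ActivityPub inbox object such as a Like (from_inbox_object) or from a webmention's parsed microformats (from_webmention).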
@classmethod + def from_inbox_object(cls, like: InboxObject) -> "Face": + return cls( + ap_actor_id=like.actor.ap_id, + url=like.actor.url, # type: ignore + name=like.actor.handle, # type: ignore + picture_url=like.actor.resized_icon_url, + created_at=like.created_at, # type: ignore + ) + + @classmethod + def from_webmention(cls, webmention: Webmention) -> Optional["Face"]: + items = webmention.source_microformats.get("items", []) # type: ignore + for item in items: + if item["type"][0] == "h-card": + try: + return cls( + ap_actor_id=None, + url=( + must_make_abs( + item["properties"]["url"][0], webmention.source + ) + if item["properties"].get("url") + else webmention.source + ), + name=item["properties"]["name"][0], + picture_url=media.resized_media_url( + must_make_abs( + item["properties"]["photo"][0], webmention.source + ), # type: ignore + 50, + ), + created_at=webmention.created_at, # type: ignore + ) + except Exception: + logger.exception( + f"Failed to build Face for webmention id={webmention.id}" + ) + break + elif item["type"][0] == "h-entry": + author = item["properties"]["author"][0] + try: + return cls( + ap_actor_id=None, + url=webmention.source, + name=author["properties"]["name"][0], + picture_url=media.resized_media_url( + must_make_abs( + author["properties"]["photo"][0], webmention.source + ), # type: ignore + 50, + ), + created_at=webmention.created_at, # type: ignore + ) + except Exception: + logger.exception( + f"Failed to build Face for webmention id={webmention.id}" + ) + break + + return None + + +def merge_faces(faces: list[Face]) -> list[Face]: + return sorted( + faces, + key=lambda f: f.created_at, + reverse=True, + )[:10] + + +def _parse_face(webmention: Webmention, items: list[dict[str, Any]]) -> Face | None: + for item in items: + if item["type"][0] == "h-card": + try: + return Face( + ap_actor_id=None, + url=( + must_make_abs(item["properties"]["url"][0], webmention.source) + if item["properties"].get("url") + else webmention.source + ), + name=item["properties"]["name"][0], + picture_url=media.resized_media_url( + must_make_abs( + item["properties"]["photo"][0], webmention.source + ), # type: ignore + 50, + ), + created_at=webmention.created_at, # type: ignore + ) + except Exception: + logger.exception( + f"Failed to build Face for webmention id={webmention.id}" + ) + break + + return None + + +@dataclass +class WebmentionReply: + face: Face + content: str + url: str + published_at: datetime.datetime + in_reply_to: str + webmention_id: int + + @classmethod + def from_webmention(cls, webmention: Webmention) -> Optional["WebmentionReply"]: + items = webmention.source_microformats.get("items", []) # type: ignore + for item in items: + if item["type"][0] == "h-entry": + try: + face = _parse_face(webmention, item["properties"].get("author", [])) + if not face: + logger.info( + "Failed to build WebmentionReply/Face for " + f"webmention id={webmention.id}" + ) + break + + if "published" in item["properties"]: + published_at = ( + parse_isoformat(item["properties"]["published"][0]) + .astimezone(timezone.utc) + .replace(tzinfo=None) + ) + else: + published_at = webmention.created_at # type: ignore + + return cls( + face=face, + content=item["properties"]["content"][0]["html"], + url=must_make_abs( + item["properties"]["url"][0], webmention.source + ), + published_at=published_at, + in_reply_to=webmention.target, # type: ignore + webmention_id=webmention.id, # type: ignore + ) + except Exception: + logger.exception( + f"Failed to build Face for webmention 
id={webmention.id}" + ) + break + + return None diff --git a/app/utils/favicon.py b/app/utils/favicon.py new file mode 100644 index 0000000..cc71132 --- /dev/null +++ b/app/utils/favicon.py @@ -0,0 +1,22 @@ +import sass # type: ignore +from PIL import Image +from PIL import ImageColor +from PIL import ImageDraw + + +def _get_primary_color() -> str: + """Small hack to get the theme primary color.""" + compiled = sass.compile( + string=( + "@import 'app/scss/main.scss';\n" + "#favicon-color { color: $primary-color; }" + ) + ) + return compiled[len(compiled) - 11 : -4] + + +def build_favicon() -> None: + """Builds a basic favicon with the theme primary color.""" + im = Image.new("RGB", (32, 32), ImageColor.getrgb(_get_primary_color())) + ImageDraw.Draw(im) + im.save("app/static/favicon.ico") diff --git a/app/utils/highlight.py b/app/utils/highlight.py new file mode 100644 index 0000000..2e60a36 --- /dev/null +++ b/app/utils/highlight.py @@ -0,0 +1,53 @@ +import base64 +import hashlib +from functools import lru_cache + +from bs4 import BeautifulSoup # type: ignore +from pygments import highlight as phighlight # type: ignore +from pygments.formatters import HtmlFormatter # type: ignore +from pygments.lexers import get_lexer_by_name # type: ignore +from pygments.lexers import guess_lexer # type: ignore + +from app.config import CODE_HIGHLIGHTING_THEME + +_FORMATTER = HtmlFormatter(style=CODE_HIGHLIGHTING_THEME) + +HIGHLIGHT_CSS = _FORMATTER.get_style_defs() +HIGHLIGHT_CSS_HASH = base64.b64encode( + hashlib.sha256(HIGHLIGHT_CSS.encode()).digest() +).decode() + + +@lru_cache(256) +def highlight(html: str) -> str: + soup = BeautifulSoup(html, "html5lib") + for code in soup.find_all("code"): + if not code.parent.name == "pre": + continue + + # Replace
<br> tags with line breaks (Mastodon sends code like this) + code_content = ( + code.encode_contents().decode().replace("<br>", "\n").replace("<br/>
", "\n") + ) + + # If this comes from a microblog.pub instance we may have the language + # in the class name + if "data-microblogpub-lexer" in code.attrs: + try: + lexer = get_lexer_by_name(code.attrs["data-microblogpub-lexer"]) + except Exception: + lexer = guess_lexer(code_content) + + # Replace the code with Pygment output + # XXX: the HTML escaping causes issue with Python type annotations + code_content = code_content.replace(") -> ", ") -> ") + code.parent.replaceWith( + BeautifulSoup( + phighlight(code_content, lexer, _FORMATTER), "html5lib" + ).body.next + ) + else: + code.name = "div" + code["class"] = code.get("class", []) + ["highlight"] + + return soup.body.encode_contents().decode() diff --git a/app/utils/indieauth.py b/app/utils/indieauth.py new file mode 100644 index 0000000..30dce0e --- /dev/null +++ b/app/utils/indieauth.py @@ -0,0 +1,53 @@ +from dataclasses import dataclass +from typing import Any +from urllib.parse import urlparse + +from app.utils import microformats +from app.utils.url import make_abs + + +@dataclass +class IndieAuthClient: + logo: str | None + name: str + url: str | None + + +def _get_prop(props: dict[str, Any], name: str, default=None) -> Any: + if name in props: + items = props.get(name) + if isinstance(items, list): + return items[0] + return items + return default + + +async def get_client_id_data(url: str) -> IndieAuthClient | None: + # Don't fetch localhost URL + if urlparse(url).hostname == "localhost": + return IndieAuthClient( + logo=None, + name=url, + url=url, + ) + + maybe_data_and_html = await microformats.fetch_and_parse(url) + if maybe_data_and_html is not None: + data: dict[str, Any] = maybe_data_and_html[0] + + for item in data["items"]: + if "h-x-app" in item["type"] or "h-app" in item["type"]: + props = item.get("properties", {}) + print(props) + logo = _get_prop(props, "logo") + return IndieAuthClient( + logo=make_abs(logo, url) if logo else None, + name=_get_prop(props, "name"), + url=_get_prop(props, "url", url), + ) + + return IndieAuthClient( + logo=None, + name=url, + url=url, + ) diff --git a/app/utils/mastodon.py b/app/utils/mastodon.py new file mode 100644 index 0000000..4afc7de --- /dev/null +++ b/app/utils/mastodon.py @@ -0,0 +1,32 @@ +from pathlib import Path + +from loguru import logger + +from app.webfinger import get_actor_url + + +def _load_mastodon_following_accounts_csv_file(path: str) -> list[str]: + handles = [] + for line in Path(path).read_text().splitlines()[1:]: + handle = line.split(",")[0] + handles.append(handle) + + return handles + + +async def get_actor_urls_from_following_accounts_csv_file( + path: str, +) -> list[tuple[str, str]]: + actor_urls = [] + for handle in _load_mastodon_following_accounts_csv_file(path): + try: + actor_url = await get_actor_url(handle) + except Exception: + logger.error("Failed to fetch actor URL for {handle=}") + else: + if actor_url: + actor_urls.append((handle, actor_url)) + else: + logger.info(f"No actor URL found for {handle=}") + + return actor_urls diff --git a/app/utils/microformats.py b/app/utils/microformats.py new file mode 100644 index 0000000..7e45c6b --- /dev/null +++ b/app/utils/microformats.py @@ -0,0 +1,34 @@ +from typing import Any + +import httpx +import mf2py # type: ignore +from loguru import logger + +from app import config + + +class URLNotFoundOrGone(Exception): + pass + + +async def fetch_and_parse(url: str) -> tuple[dict[str, Any], str]: + async with httpx.AsyncClient() as client: + resp = await client.get( + url, + headers={ + "User-Agent": 
config.USER_AGENT, + }, + follow_redirects=True, + ) + if resp.status_code in [404, 410]: + raise URLNotFoundOrGone + + try: + resp.raise_for_status() + except httpx.HTTPStatusError: + logger.error( + f"Failed to parse microformats for {url}: " f"got {resp.status_code}" + ) + raise + + return mf2py.parse(doc=resp.text), resp.text diff --git a/app/utils/opengraph.py b/app/utils/opengraph.py new file mode 100644 index 0000000..05e7752 --- /dev/null +++ b/app/utils/opengraph.py @@ -0,0 +1,182 @@ +import asyncio +import mimetypes +import re +import signal +from concurrent.futures import TimeoutError +from typing import Any +from urllib.parse import urlparse + +import httpx +from bs4 import BeautifulSoup # type: ignore +from loguru import logger +from pebble import concurrent # type: ignore +from pydantic import BaseModel + +from app import activitypub as ap +from app import ap_object +from app import config +from app.actor import LOCAL_ACTOR +from app.actor import fetch_actor +from app.database import AsyncSession +from app.models import InboxObject +from app.models import OutboxObject +from app.utils.url import is_url_valid +from app.utils.url import make_abs + + +class OpenGraphMeta(BaseModel): + url: str + title: str + image: str | None + description: str | None + site_name: str + + +@concurrent.process(timeout=5) +def _scrap_og_meta(url: str, html: str) -> OpenGraphMeta | None: + # Prevent SIGTERM from bubbling up to the worker + signal.signal(signal.SIGTERM, signal.SIG_IGN) + + soup = BeautifulSoup(html, "html5lib") + ogs = { + og.attrs["property"]: og.attrs.get("content") + for og in soup.html.head.findAll(property=re.compile(r"^og")) + } + # FIXME some pages have no <title> + raw = { + "url": url, + "title": soup.find("title").text.strip(), + "image": None, + "description": None, + "site_name": urlparse(url).hostname, + } + for field in OpenGraphMeta.__fields__.keys(): + og_field = f"og:{field}" + if ogs.get(og_field): + raw[field] = ogs.get(og_field, None) + + if "title" not in raw: + return None + + for maybe_rel in {"url", "image"}: + if u := raw.get(maybe_rel): + raw[maybe_rel] = make_abs(u, url) + + if not is_url_valid(raw[maybe_rel]): + logger.info(f"Invalid url {raw[maybe_rel]}") + if maybe_rel == "url": + raw["url"] = url + elif maybe_rel == "image": + raw["image"] = None + + return OpenGraphMeta.parse_obj(raw) + + +def scrap_og_meta(url: str, html: str) -> OpenGraphMeta | None: + return _scrap_og_meta(url, html).result() + + +async def external_urls( + db_session: AsyncSession, + ro: ap_object.RemoteObject | OutboxObject | InboxObject, +) -> set[str]: + note_host = urlparse(ro.ap_id).hostname + + tags_hrefs = set() + for tag in ro.tags: + if tag_href := tag.get("href"): + tags_hrefs.add(tag_href) + if tag.get("type") == "Mention": + if tag["href"] != LOCAL_ACTOR.ap_id: + try: + mentioned_actor = await fetch_actor(db_session, tag["href"]) + except (ap.FetchError, ap.NotAnObjectError): + tags_hrefs.add(tag["href"]) + continue + + tags_hrefs.add(mentioned_actor.url) + tags_hrefs.add(mentioned_actor.ap_id) + else: + tags_hrefs.add(LOCAL_ACTOR.ap_id) + tags_hrefs.add(LOCAL_ACTOR.url) + + urls = set() + if ro.content: + soup = BeautifulSoup(ro.content, "html5lib") + for link in soup.find_all("a"): + h = link.get("href") + if not h: + continue + + try: + ph = urlparse(h) + mimetype, _ = mimetypes.guess_type(h) + if ( + ph.scheme in {"http", "https"} + and ph.hostname != note_host + and is_url_valid(h) + and ( + not mimetype + or mimetype.split("/")[0] not in ["image", "video", "audio"] + ) + ): + 
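+                    # Keep only http(s) links that point off-host, pass the URL validity/SSRF check, and don't look like image/video/audio files.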
urls.add(h) + except Exception: + logger.exception(f"Failed to check {h}") + continue + + return urls - tags_hrefs + + +async def _og_meta_from_url(url: str) -> OpenGraphMeta | None: + async with httpx.AsyncClient() as client: + resp = await client.get( + url, + headers={ + "User-Agent": config.USER_AGENT, + }, + follow_redirects=True, + ) + + resp.raise_for_status() + + if not (ct := resp.headers.get("content-type")) or not ct.startswith("text/html"): + return None + + try: + return scrap_og_meta(url, resp.text) + except TimeoutError: + logger.info(f"Timed out when scraping OG meta for {url}") + return None + except Exception: + logger.info(f"Failed to scrape OG meta for {url}") + return None + + +async def og_meta_from_note( + db_session: AsyncSession, + ro: ap_object.RemoteObject, +) -> list[dict[str, Any]]: + og_meta = [] + urls = await external_urls(db_session, ro) + logger.debug(f"Looking for OG metadata in {urls=}") + for url in urls: + logger.debug(f"Processing {url}") + try: + maybe_og_meta = None + try: + maybe_og_meta = await asyncio.wait_for( + _og_meta_from_url(url), + timeout=5, + ) + except asyncio.TimeoutError: + logger.info(f"Timed out fetching {url}") + except Exception: + logger.exception(f"Failed to scrape OG meta for {url}") + + if maybe_og_meta: + og_meta.append(maybe_og_meta.dict()) + except httpx.HTTPError: + pass + + return og_meta diff --git a/app/utils/pagination.py b/app/utils/pagination.py new file mode 100644 index 0000000..3fe0551 --- /dev/null +++ b/app/utils/pagination.py @@ -0,0 +1,12 @@ +import base64 +from datetime import datetime + +from dateutil.parser import isoparse + + +def encode_cursor(val: datetime) -> str: + return base64.urlsafe_b64encode(val.isoformat().encode()).decode() + + +def decode_cursor(cursor: str) -> datetime: + return isoparse(base64.urlsafe_b64decode(cursor).decode()) diff --git a/app/utils/privacy_replace.py b/app/utils/privacy_replace.py new file mode 100644 index 0000000..e815e85 --- /dev/null +++ b/app/utils/privacy_replace.py @@ -0,0 +1,39 @@ +from urllib.parse import urlparse + +from bs4 import BeautifulSoup # type: ignore +from loguru import logger + +from app.config import PRIVACY_REPLACE + + +def replace_content(content: str) -> str: + if not PRIVACY_REPLACE: + return content + + soup = BeautifulSoup(content, "html5lib") + links = list(soup.find_all("a", href=True)) + if not links: + return content + + for link in links: + link.attrs["href"] = replace_url(link.attrs["href"]) + + return soup.find("body").decode_contents() + + +def replace_url(u: str) -> str: + if not PRIVACY_REPLACE: + return u + + try: + parsed_href = urlparse(u) + if not parsed_href.hostname: + raise ValueError("Missing hostname") + except Exception: + logger.warning(f"Failed to parse url={u}") + return u + + if new_netloc := PRIVACY_REPLACE.get(parsed_href.hostname.removeprefix("www.")): + return parsed_href._replace(netloc=new_netloc).geturl() + + return u diff --git a/app/utils/stats.py b/app/utils/stats.py new file mode 100644 index 0000000..c9e318b --- /dev/null +++ b/app/utils/stats.py @@ -0,0 +1,207 @@ +import asyncio +from dataclasses import dataclass + +import humanize +from sqlalchemy import case +from sqlalchemy import func +from sqlalchemy import or_ +from sqlalchemy import select +from sqlalchemy.orm import joinedload +from tabulate import tabulate + +from app import models +from app.config import ROOT_DIR +from app.database import AsyncSession +from app.database import async_session +from app.utils.datetime import now + +_DATA_DIR = ROOT_DIR / "data" 
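+# Usage sketch, e.g. from a REPL (assumes a populated data/ directory): +#   du = get_disk_usage_stats() +#   print(humanize.naturalsize(du.data_dir_size)) +#   print(humanize.naturalsize(du.upload_dir_size))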
+ + +@dataclass +class DiskUsageStats: + data_dir_size: int + upload_dir_size: int + + +def get_disk_usage_stats() -> DiskUsageStats: + du_stats = DiskUsageStats( + data_dir_size=0, + upload_dir_size=0, + ) + for f in _DATA_DIR.glob("**/*"): + if f.is_file(): + stat = f.stat() + du_stats.data_dir_size += stat.st_size + if str(f.parent).endswith("/data/uploads"): + du_stats.upload_dir_size += stat.st_size + + return du_stats + + +@dataclass +class OutgoingActivityStatsItem: + total_count: int + waiting_count: int + sent_count: int + errored_count: int + + +@dataclass +class OutgoingActivityStats: + total: OutgoingActivityStatsItem + from_inbox: OutgoingActivityStatsItem + from_outbox: OutgoingActivityStatsItem + + +async def get_outgoing_activity_stats( + db_session: AsyncSession, +) -> OutgoingActivityStats: + async def _get_stats(f) -> OutgoingActivityStatsItem: + row = ( + await db_session.execute( + select( + func.count(models.OutgoingActivity.id).label("total_count"), + func.sum( + case( + [ + ( + or_( + models.OutgoingActivity.next_try > now(), + models.OutgoingActivity.tries == 0, + ), + 1, + ), + ], + else_=0, + ) + ).label("waiting_count"), + func.sum( + case( + [ + (models.OutgoingActivity.is_sent.is_(True), 1), + ], + else_=0, + ) + ).label("sent_count"), + func.sum( + case( + [ + (models.OutgoingActivity.is_errored.is_(True), 1), + ], + else_=0, + ) + ).label("errored_count"), + ).where(f) + ) + ).one() + return OutgoingActivityStatsItem( + total_count=row.total_count or 0, + waiting_count=row.waiting_count or 0, + sent_count=row.sent_count or 0, + errored_count=row.errored_count or 0, + ) + + from_inbox = await _get_stats(models.OutgoingActivity.inbox_object_id.is_not(None)) + from_outbox = await _get_stats( + models.OutgoingActivity.outbox_object_id.is_not(None) + ) + + return OutgoingActivityStats( + from_inbox=from_inbox, + from_outbox=from_outbox, + total=OutgoingActivityStatsItem( + total_count=from_inbox.total_count + from_outbox.total_count, + waiting_count=from_inbox.waiting_count + from_outbox.waiting_count, + sent_count=from_inbox.sent_count + from_outbox.sent_count, + errored_count=from_inbox.errored_count + from_outbox.errored_count, + ), + ) + + +def print_stats() -> None: + async def _get_stats(): + async with async_session() as db_session: + outgoing_activity_stats = await get_outgoing_activity_stats(db_session) + + outgoing_activities = ( + ( + await db_session.scalars( + select(models.OutgoingActivity) + .options( + joinedload(models.OutgoingActivity.inbox_object), + joinedload(models.OutgoingActivity.outbox_object), + ) + .order_by(models.OutgoingActivity.last_try.desc()) + .limit(10) + ) + ) + .unique() + .all() + ) + + return outgoing_activity_stats, outgoing_activities + + outgoing_activity_stats, outgoing_activities = asyncio.run(_get_stats()) + disk_usage_stats = get_disk_usage_stats() + + print() + print( + tabulate( + [ + ( + "data/", + humanize.naturalsize(disk_usage_stats.data_dir_size), + ), + ( + "data/uploads/", + humanize.naturalsize(disk_usage_stats.upload_dir_size), + ), + ], + headers=["Disk usage", "size"], + ) + ) + print() + print( + tabulate( + [ + (name, s.total_count, s.waiting_count, s.sent_count, s.errored_count) + for (name, s) in [ + ("total", outgoing_activity_stats.total), + ("outbox", outgoing_activity_stats.from_outbox), + ("forwarded", outgoing_activity_stats.from_inbox), + ] + ], + headers=["Outgoing activities", "total", "waiting", "sent", "errored"], + ) + ) + print() + print("Outgoing activities log") + 
print("=======================") + print() + print( + tabulate( + [ + ( + row.anybox_object.ap_id, + humanize.naturaltime(row.last_try), + row.recipient, + row.tries, + row.last_status_code, + row.is_sent, + row.is_errored, + ) + for row in outgoing_activities + ], + headers=[ + "Object", + "last try", + "recipient", + "tries", + "status code", + "sent", + "errored", + ], + ) + ) + print() diff --git a/app/utils/text.py b/app/utils/text.py new file mode 100644 index 0000000..1505950 --- /dev/null +++ b/app/utils/text.py @@ -0,0 +1,8 @@ +import re +import unicodedata + + +def slugify(text: str) -> str: + value = unicodedata.normalize("NFKC", text) + value = re.sub(r"[^\w\s-]", "", value.lower()) + return re.sub(r"[-\s]+", "-", value).strip("-_") diff --git a/app/utils/url.py b/app/utils/url.py new file mode 100644 index 0000000..2a8979e --- /dev/null +++ b/app/utils/url.py @@ -0,0 +1,98 @@ +import functools +import ipaddress +import socket +from urllib.parse import urlparse + +from loguru import logger + +from app.config import BLOCKED_SERVERS +from app.config import DEBUG + + +def make_abs(url: str | None, parent: str) -> str | None: + if url is None: + return None + + if url.startswith("http"): + return url + + return ( + urlparse(parent)._replace(path=url, params="", query="", fragment="").geturl() + ) + + +def must_make_abs(url: str | None, parent: str) -> str: + abs_url = make_abs(url, parent) + if not abs_url: + raise ValueError("missing URL") + return abs_url + + +class InvalidURLError(Exception): + pass + + +@functools.lru_cache(maxsize=256) +def _getaddrinfo(hostname: str, port: int) -> str: + try: + ip_address = str(ipaddress.ip_address(hostname)) + except ValueError: + try: + ip_address = socket.getaddrinfo(hostname, port)[0][4][0] + logger.debug(f"DNS lookup: {hostname} -> {ip_address}") + except socket.gaierror: + logger.exception(f"failed to lookup addr info for {hostname}") + raise + + return ip_address + + +def is_url_valid(url: str) -> bool: + """Implements basic SSRF protection.""" + parsed = urlparse(url) + if parsed.scheme not in ["http", "https"]: + return False + + # XXX in debug mode, we want to allow requests to localhost to test the + # federation with local instances + if DEBUG: # pragma: no cover + return True + + if not parsed.hostname or parsed.hostname.lower() in ["localhost"]: + return False + + if is_hostname_blocked(parsed.hostname): + logger.warning(f"{parsed.hostname} is blocked") + return False + + if parsed.hostname.endswith(".onion"): + logger.warning(f"{url} is an onion service") + return False + + ip_address = _getaddrinfo( + parsed.hostname, parsed.port or (80 if parsed.scheme == "http" else 443) + ) + logger.debug(f"{ip_address=}") + + if ipaddress.ip_address(ip_address).is_private: + logger.info(f"rejecting private URL {url} -> {ip_address}") + return False + + return True + + +@functools.lru_cache(maxsize=512) +def check_url(url: str) -> None: + logger.debug(f"check_url {url=}") + if not is_url_valid(url): + raise InvalidURLError(f'"{url}" is invalid') + + return None + + +@functools.lru_cache(maxsize=256) +def is_hostname_blocked(hostname: str) -> bool: + for blocked_hostname in BLOCKED_SERVERS: + if hostname == blocked_hostname or hostname.endswith(f".{blocked_hostname}"): + return True + return False diff --git a/app/utils/version.py b/app/utils/version.py new file mode 100644 index 0000000..7b50bcb --- /dev/null +++ b/app/utils/version.py @@ -0,0 +1,12 @@ +import subprocess + + +def get_version_commit() -> str: + try: + return ( + 
subprocess.check_output(["git", "rev-parse", "--short=8", "v2"]) + .split()[0] + .decode() + ) + except Exception: + return "dev" diff --git a/app/utils/webmentions.py b/app/utils/webmentions.py new file mode 100644 index 0000000..5332550 --- /dev/null +++ b/app/utils/webmentions.py @@ -0,0 +1,92 @@ +from dataclasses import dataclass +from typing import Any +from typing import Optional + +import httpx +from bs4 import BeautifulSoup # type: ignore +from loguru import logger + +from app import config +from app.utils.datetime import now +from app.utils.url import check_url +from app.utils.url import is_url_valid +from app.utils.url import make_abs + + +async def _discover_webmention_endoint(url: str) -> str | None: + async with httpx.AsyncClient() as client: + try: + resp = await client.get( + url, + headers={ + "User-Agent": config.USER_AGENT, + }, + follow_redirects=True, + ) + resp.raise_for_status() + except Exception: + logger.exception(f"Failed to discover webmention endpoint for {url}") + return None + + for k, v in resp.links.items(): + if k and "webmention" in k: + return make_abs(resp.links[k].get("url"), url) + + soup = BeautifulSoup(resp.text, "html5lib") + wlinks = soup.find_all(["link", "a"], attrs={"rel": "webmention"}) + for wlink in wlinks: + if "href" in wlink.attrs: + return make_abs(wlink.attrs["href"], url) + + return None + + +async def discover_webmention_endpoint(url: str) -> str | None: + """Discover the Webmention endpoint of a given URL, if any. + + Passes all the tests at https://webmention.rocks! + + """ + check_url(url) + + wurl = await _discover_webmention_endoint(url) + if wurl is None: + return None + if not is_url_valid(wurl): + return None + return wurl + + +@dataclass +class Webmention: + actor_icon_url: str + actor_name: str + url: str + received_at: str + + @classmethod + def from_microformats( + cls, items: list[dict[str, Any]], url: str + ) -> Optional["Webmention"]: + for item in items: + if item["type"][0] == "h-card": + return cls( + actor_icon_url=make_abs( + item["properties"]["photo"][0], url + ), # type: ignore + actor_name=item["properties"]["name"][0], + url=url, + received_at=now().isoformat(), + ) + if item["type"][0] == "h-entry": + author = item["properties"]["author"][0] + return cls( + actor_icon_url=make_abs( + author["properties"]["photo"][0], url + ), # type: ignore + actor_name=author["properties"]["name"][0], + url=url, + received_at=now().isoformat(), + ) + + return None diff --git a/app/utils/workers.py b/app/utils/workers.py new file mode 100644 index 0000000..f1ef317 --- /dev/null +++ b/app/utils/workers.py @@ -0,0 +1,73 @@ +import asyncio +import signal +from typing import Generic +from typing import TypeVar + +from loguru import logger + +from app.database import AsyncSession +from app.database import async_session + +T = TypeVar("T") + + +class Worker(Generic[T]): + def __init__(self) -> None: + self._loop = asyncio.get_event_loop() + self._stop_event = asyncio.Event() + + async def process_message(self, db_session: AsyncSession, message: T) -> None: + raise NotImplementedError + + async def get_next_message(self, db_session: AsyncSession) -> T | None: + raise NotImplementedError + + async def startup(self, db_session: AsyncSession) -> None: + return None + + async def _main_loop(self, db_session: AsyncSession) -> None: + while not self._stop_event.is_set(): + next_message = await self.get_next_message(db_session) + if next_message: + await self.process_message(db_session, next_message) + await asyncio.sleep(0.5) + else: + 
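+ # No pending message: back off and poll again in a couple of seconds.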
await asyncio.sleep(2) + + async def _until_stopped(self) -> None: + await self._stop_event.wait() + + async def run_forever(self) -> None: + signals = (signal.SIGHUP, signal.SIGTERM, signal.SIGINT) + for s in signals: + self._loop.add_signal_handler( + s, + lambda s=s: asyncio.create_task(self._shutdown(s)), + ) + + async with async_session() as db_session: + await self.startup(db_session) + task = self._loop.create_task(self._main_loop(db_session)) + stop_task = self._loop.create_task(self._until_stopped()) + + done, pending = await asyncio.wait( + {task, stop_task}, return_when=asyncio.FIRST_COMPLETED + ) + logger.info(f"Waiting for tasks to finish {done=}/{pending=}") + tasks = [t for t in asyncio.all_tasks() if t is not asyncio.current_task()] + logger.info(f"Cancelling {len(tasks)} tasks") + [task.cancel() for task in tasks] + + try: + await asyncio.wait_for( + asyncio.gather(*tasks, return_exceptions=True), + timeout=15, + ) + except asyncio.TimeoutError: + logger.info("Tasks failed to cancel") + + logger.info("stopping loop") + + async def _shutdown(self, sig: signal.Signals) -> None: + logger.info(f"Caught {sig=}") + self._stop_event.set() diff --git a/app/utils/yunohost.py b/app/utils/yunohost.py new file mode 100644 index 0000000..8b03e85 --- /dev/null +++ b/app/utils/yunohost.py @@ -0,0 +1,56 @@ +"""Basic wizard for setting up microblog.pub configuration files.""" +import os +import sys +from pathlib import Path +from typing import Any + +import bcrypt +import tomli_w + +from app.key import generate_key + +_ROOT_DIR = Path().parent.parent.resolve() +_KEY_PATH = _ROOT_DIR / "data" / "key.pem" +_CONFIG_PATH = _ROOT_DIR / "data" / "profile.toml" + + +def setup_config_file( + domain: str, + username: str, + name: str, + summary: str, + password: str, +) -> None: + print("Generating microblog.pub config\n") + if _KEY_PATH.exists(): + sys.exit(2) + + generate_key(_KEY_PATH) + + config_file = _CONFIG_PATH + + if config_file.exists(): + # Spit out the relative path for the "config artifacts" + rconfig_file = "data/profile.toml" + print( + f"Existing setup detected, please delete {rconfig_file} " + "before restarting the wizard" + ) + sys.exit(2) + + dat: dict[str, Any] = {} + dat["domain"] = domain + dat["username"] = username + dat["admin_password"] = bcrypt.hashpw(password.encode(), bcrypt.gensalt()).decode() + dat["name"] = name + dat["summary"] = summary + dat["https"] = True + proto = "https" + dat["icon_url"] = f'{proto}://{dat["domain"]}/static/nopic.png' + dat["secret"] = os.urandom(16).hex() + + with config_file.open("w") as f: + f.write(tomli_w.dumps(dat)) + + print("Done") + sys.exit(0) diff --git a/app/webfinger.py b/app/webfinger.py new file mode 100644 index 0000000..d58bab3 --- /dev/null +++ b/app/webfinger.py @@ -0,0 +1,155 @@ +import xml.etree.ElementTree as ET +from typing import Any +from urllib.parse import urlparse + +import httpx +from loguru import logger + +from app import config +from app.utils.url import check_url + + +async def get_webfinger_via_host_meta(host: str) -> str | None: + resp: httpx.Response | None = None + is_404 = False + async with httpx.AsyncClient() as client: + for i, proto in enumerate(["https", "http"]): + try: + url = f"{proto}://{host}/.well-known/host-meta" + check_url(url) + resp = await client.get( + url, + headers={ + "User-Agent": config.USER_AGENT, + }, + follow_redirects=True, + ) + resp.raise_for_status() + break + except httpx.HTTPStatusError as http_error: + logger.exception("HTTP error") + if http_error.response.status_code in 
+ except httpx.HTTPStatusError as http_error: + logger.exception("HTTP error") + if http_error.response.status_code in [403, 404, 410]: + is_404 = True + continue + raise + except httpx.HTTPError: + logger.exception("req failed") + # If we tried https first and the domain is "http only" + if i == 0: + continue + break + + if is_404: + return None + + if resp: + tree = ET.fromstring(resp.text) + maybe_link = tree.find( + "./{http://docs.oasis-open.org/ns/xri/xrd-1.0}Link[@rel='lrdd']" + ) + if maybe_link is not None: + return maybe_link.attrib.get("template") + + return None + + +async def webfinger( + resource: str, + webfinger_url: str | None = None, +) -> dict[str, Any] | None: # noqa: C901 + """Mastodon-like WebFinger resolution to retrieve the activity stream Actor URL.""" + resource = resource.strip() + logger.info(f"performing webfinger resolution for {resource}") + urls = [] + host = None + if webfinger_url: + urls = [webfinger_url] + else: + if resource.startswith("http://"): + host = urlparse(resource).netloc + urls = [f"http://{host}/.well-known/webfinger"] + elif resource.startswith("https://"): + host = urlparse(resource).netloc + urls = [f"https://{host}/.well-known/webfinger"] + else: + protos = ["https", "http"] + _, host = resource.split("@", 1) + urls = [f"{proto}://{host}/.well-known/webfinger" for proto in protos] + + if resource.startswith("acct:"): + resource = resource[5:] + if resource.startswith("@"): + resource = resource[1:] + resource = "acct:" + resource + + is_404 = False + + resp: httpx.Response | None = None + async with httpx.AsyncClient() as client: + for i, url in enumerate(urls): + try: + check_url(url) + resp = await client.get( + url, + params={"resource": resource}, + headers={ + "User-Agent": config.USER_AGENT, + }, + follow_redirects=True, + ) + resp.raise_for_status() + break + except httpx.HTTPStatusError as http_error: + logger.exception("HTTP error") + if http_error.response.status_code in [403, 404, 410]: + is_404 = True + continue + raise + except httpx.HTTPError: + logger.exception("req failed") + # If we tried https first and the domain is "http only" + if i == 0: + continue + break + + if is_404: + if not webfinger_url and host: + if webfinger_url := (await get_webfinger_via_host_meta(host)): + return await webfinger( + resource, + webfinger_url=webfinger_url, + ) + return None + + if resp: + return resp.json() + else: + return None + + +async def get_remote_follow_template(resource: str) -> str | None: + data = await webfinger(resource) + if data is None: + return None + for link in data["links"]: + if link.get("rel") == "http://ostatus.org/schema/1.0/subscribe": + return link.get("template") + return None + + +async def get_actor_url(resource: str) -> str | None: + """Mastodon-like WebFinger resolution to retrieve the activity stream Actor URL. + + Returns: + the Actor URL or None if the resolution failed.
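+ + A minimal sketch of a typical call (the handle and resulting URL are + illustrative, not real accounts): + + actor_url = await get_actor_url("someone@example.com") + # -> "https://example.com/users/someone", or None on failure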
+ """ + data = await webfinger(resource) + if data is None: + return None + for link in data["links"]: + if ( + link.get("rel") == "self" + and link.get("type") == "application/activity+json" + ): + return link.get("href") + return None diff --git a/app/webmentions.py b/app/webmentions.py new file mode 100644 index 0000000..7c3ab72 --- /dev/null +++ b/app/webmentions.py @@ -0,0 +1,232 @@ +from urllib.parse import urlparse + +import httpx +from bs4 import BeautifulSoup # type: ignore +from fastapi import APIRouter +from fastapi import Depends +from fastapi import HTTPException +from fastapi import Request +from fastapi.responses import JSONResponse +from loguru import logger +from sqlalchemy import func +from sqlalchemy import select + +from app import models +from app.boxes import _get_outbox_announces_count +from app.boxes import _get_outbox_likes_count +from app.boxes import _get_outbox_replies_count +from app.boxes import get_outbox_object_by_ap_id +from app.boxes import get_outbox_object_by_slug_and_short_id +from app.boxes import is_notification_enabled +from app.database import AsyncSession +from app.database import get_db_session +from app.utils import microformats +from app.utils.facepile import Face +from app.utils.facepile import WebmentionReply +from app.utils.url import check_url +from app.utils.url import is_url_valid + +router = APIRouter() + + +def is_source_containing_target(source_html: str, target_url: str) -> bool: + soup = BeautifulSoup(source_html, "html5lib") + for link in soup.find_all("a"): + h = link.get("href") + if not is_url_valid(h): + continue + + if h == target_url: + return True + + return False + + +@router.post("/webmentions") +async def webmention_endpoint( + request: Request, + db_session: AsyncSession = Depends(get_db_session), +) -> JSONResponse: + form_data = await request.form() + try: + source = form_data["source"] + target = form_data["target"] + + if source == target: + raise ValueError("source URL is the same as target") + + check_url(source) + check_url(target) + parsed_target_url = urlparse(target) + except Exception: + logger.exception("Invalid webmention request") + raise HTTPException(status_code=400, detail="Invalid payload") + + logger.info(f"Received webmention {source=} {target=}") + + existing_webmention_in_db = ( + await db_session.execute( + select(models.Webmention).where( + models.Webmention.source == source, + models.Webmention.target == target, + ) + ) + ).scalar_one_or_none() + if existing_webmention_in_db: + logger.info("Found existing Webmention, will try to update or delete") + + mentioned_object = await get_outbox_object_by_ap_id(db_session, target) + + if not mentioned_object and parsed_target_url.path.startswith("/articles/"): + try: + _, _, short_id, slug = parsed_target_url.path.split("/") + mentioned_object = await get_outbox_object_by_slug_and_short_id( + db_session, slug, short_id + ) + except Exception: + logger.exception(f"Failed to match {target}") + + if not mentioned_object: + logger.info(f"Invalid target {target=}") + + if existing_webmention_in_db: + logger.info("Deleting existing Webmention") + existing_webmention_in_db.is_deleted = True + await db_session.commit() + raise HTTPException(status_code=400, detail="Invalid target") + + is_webmention_deleted = False + try: + data_and_html = await microformats.fetch_and_parse(source) + except microformats.URLNotFoundOrGone: + is_webmention_deleted = True + except httpx.HTTPError: + raise HTTPException(status_code=500, detail=f"Fetch to process {source}") + + data, 
+ data, html = data_and_html if not is_webmention_deleted else ({}, "") + is_target_found_in_source = is_source_containing_target(html, target) + + if is_webmention_deleted or not is_target_found_in_source: + logger.warning(f"target {target=} not found in source") + if existing_webmention_in_db: + logger.info("Deleting existing Webmention") + existing_webmention_in_db.is_deleted = True + await db_session.flush() + + # Revert side effects + await _handle_webmention_side_effects( + db_session, existing_webmention_in_db, mentioned_object + ) + + if is_notification_enabled(models.NotificationType.DELETED_WEBMENTION): + notif = models.Notification( + notification_type=models.NotificationType.DELETED_WEBMENTION, + outbox_object_id=mentioned_object.id, + webmention_id=existing_webmention_in_db.id, + ) + db_session.add(notif) + + await db_session.commit() + + if not is_target_found_in_source: + raise HTTPException( + status_code=400, + detail="target not found in source", + ) + else: + return JSONResponse(content={}, status_code=200) + + webmention_type = models.WebmentionType.UNKNOWN + webmention: models.Webmention + if existing_webmention_in_db: + # Undelete if needed + existing_webmention_in_db.is_deleted = False + existing_webmention_in_db.source_microformats = data + await db_session.flush() + webmention = existing_webmention_in_db + + if is_notification_enabled(models.NotificationType.UPDATED_WEBMENTION): + notif = models.Notification( + notification_type=models.NotificationType.UPDATED_WEBMENTION, + outbox_object_id=mentioned_object.id, + webmention_id=existing_webmention_in_db.id, + ) + db_session.add(notif) + else: + new_webmention = models.Webmention( + source=source, + target=target, + source_microformats=data, + outbox_object_id=mentioned_object.id, + webmention_type=webmention_type, + ) + db_session.add(new_webmention) + await db_session.flush() + webmention = new_webmention + + if is_notification_enabled(models.NotificationType.NEW_WEBMENTION): + notif = models.Notification( + notification_type=models.NotificationType.NEW_WEBMENTION, + outbox_object_id=mentioned_object.id, + webmention_id=new_webmention.id, + ) + db_session.add(notif) + + # Determine the webmention type + for item in data.get("items", []): + if target in item.get("properties", {}).get( + "in-reply-to", [] + ) and WebmentionReply.from_webmention(webmention): + webmention_type = models.WebmentionType.REPLY + break + elif target in item.get("properties", {}).get( + "like-of", [] + ) and Face.from_webmention(webmention): + webmention_type = models.WebmentionType.LIKE + break + elif target in item.get("properties", {}).get( + "repost-of", [] + ) and Face.from_webmention(webmention): + webmention_type = models.WebmentionType.REPOST + break + + if webmention_type != models.WebmentionType.UNKNOWN: + webmention.webmention_type = webmention_type + await db_session.flush() + + # Handle side effects + await _handle_webmention_side_effects(db_session, webmention, mentioned_object) + await db_session.commit() + + return JSONResponse(content={}, status_code=200) + + +async def _handle_webmention_side_effects( + db_session: AsyncSession, + webmention: models.Webmention, + mentioned_object: models.OutboxObject, +) -> None: + if webmention.webmention_type == models.WebmentionType.UNKNOWN: + # TODO: recount everything + mentioned_object.webmentions_count = await db_session.scalar( + select(func.count(models.Webmention.id)).where( + models.Webmention.is_deleted.is_(False), + models.Webmention.outbox_object_id == mentioned_object.id, + 
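# NOTE: only "unknown"-type webmentions are recounted by this query; + # likes, reposts and replies are recounted via the dedicated helpers below. + 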
models.Webmention.webmention_type == models.WebmentionType.UNKNOWN, + ) + ) + elif webmention.webmention_type == models.WebmentionType.LIKE: + mentioned_object.likes_count = await _get_outbox_likes_count( + db_session, mentioned_object + ) + elif webmention.webmention_type == models.WebmentionType.REPOST: + mentioned_object.announces_count = await _get_outbox_announces_count( + db_session, mentioned_object + ) + elif webmention.webmention_type == models.WebmentionType.REPLY: + mentioned_object.replies_count = await _get_outbox_replies_count( + db_session, mentioned_object + ) + else: + raise ValueError(f"Unhandled {webmention.webmention_type} webmention") diff --git a/blueprints/admin.py b/blueprints/admin.py deleted file mode 100644 index 4af4ef3..0000000 --- a/blueprints/admin.py +++ /dev/null @@ -1,661 +0,0 @@ -import json -from collections import defaultdict -from datetime import datetime -from datetime import timedelta -from datetime import timezone -from typing import Any -from typing import List -from urllib.parse import urlparse - -import flask -from flask import abort -from flask import current_app as app -from flask import redirect -from flask import render_template -from flask import request -from flask import session -from flask import url_for -from little_boxes import activitypub as ap -from little_boxes.webfinger import get_actor_url -from passlib.hash import bcrypt -from u2flib_server import u2f - -import config -from config import DB -from config import ID -from config import PASS -from core.activitypub import Box -from core.activitypub import _meta -from core.activitypub import post_to_outbox -from core.db import find_one_activity -from core.meta import by_object_id -from core.meta import by_object_visibility -from core.meta import by_remote_id -from core.meta import by_type -from core.meta import follow_request_accepted -from core.meta import in_outbox -from core.meta import not_deleted -from core.meta import not_poll_answer -from core.meta import not_undo -from core.shared import MY_PERSON -from core.shared import _build_thread -from core.shared import _Response -from core.shared import csrf -from core.shared import htmlify -from core.shared import login_required -from core.shared import noindex -from core.shared import p -from core.shared import paginated_query -from utils import now -from utils.emojis import EMOJIS_BY_NAME -from utils.lookup import lookup - -blueprint = flask.Blueprint("admin", __name__) - - -def verify_pass(pwd): - return bcrypt.verify(pwd, PASS) - - -@blueprint.route("/admin/update_actor") -@login_required -def admin_update_actor() -> _Response: - # FIXME(tsileo): make this a task, and keep track of our own actor_hash at startup - update = ap.Update( - actor=MY_PERSON.id, - object=MY_PERSON.to_dict(), - to=[MY_PERSON.followers], - cc=[ap.AS_PUBLIC], - published=now(), - ) - - post_to_outbox(update) - return "OK" - - -@blueprint.route("/admin/logout") -@login_required -def admin_logout() -> _Response: - session["logged_in"] = False - return redirect("/") - - -@blueprint.route("/login", methods=["POST", "GET"]) -@noindex -def admin_login() -> _Response: - if session.get("logged_in") is True: - return redirect(url_for("admin.admin_notifications")) - - devices = [doc["device"] for doc in DB.u2f.find()] - u2f_enabled = True if devices else False - if request.method == "POST": - csrf.protect() - # 1. 
Check regular password login flow - pwd = request.form.get("pass") - if pwd: - if verify_pass(pwd): - session.permanent = True - session["logged_in"] = True - return redirect( - request.args.get("redirect") or url_for("admin.admin_notifications") - ) - else: - abort(403) - # 2. Check for U2F payload, if any - elif devices: - resp = json.loads(request.form.get("resp")) # type: ignore - try: - u2f.complete_authentication(session["challenge"], resp) - except ValueError as exc: - print("failed", exc) - abort(403) - return - finally: - session["challenge"] = None - - session.permanent = True - session["logged_in"] = True - return redirect( - request.args.get("redirect") or url_for("admin.admin_notifications") - ) - else: - abort(401) - - payload = None - if devices: - payload = u2f.begin_authentication(ID, devices) - session["challenge"] = payload - - return htmlify( - render_template("login.html", u2f_enabled=u2f_enabled, payload=payload) - ) - - -@blueprint.route("/admin", methods=["GET"]) -@login_required -def admin_index() -> _Response: - q = { - "meta.deleted": False, - "meta.undo": False, - "type": ap.ActivityType.LIKE.value, - "box": Box.OUTBOX.value, - } - col_liked = DB.activities.count(q) - - return htmlify( - render_template( - "admin.html", - instances=list(DB.instances.find()), - inbox_size=DB.activities.count({"box": Box.INBOX.value}), - outbox_size=DB.activities.count({"box": Box.OUTBOX.value}), - col_liked=col_liked, - col_followers=DB.activities.count( - { - "box": Box.INBOX.value, - "type": ap.ActivityType.FOLLOW.value, - "meta.undo": False, - } - ), - col_following=DB.activities.count( - { - "box": Box.OUTBOX.value, - "type": ap.ActivityType.FOLLOW.value, - "meta.undo": False, - } - ), - ) - ) - - -@blueprint.route("/admin/indieauth", methods=["GET"]) -@login_required -def admin_indieauth() -> _Response: - return htmlify( - render_template( - "admin_indieauth.html", - indieauth_actions=DB.indieauth.find().sort("ts", -1).limit(100), - ) - ) - - -@blueprint.route("/admin/tasks", methods=["GET"]) -@login_required -def admin_tasks() -> _Response: - return htmlify( - render_template( - "admin_tasks.html", - success=p.get_success(), - dead=p.get_dead(), - waiting=p.get_waiting(), - cron=p.get_cron(), - ) - ) - - -@blueprint.route("/admin/lookup", methods=["GET"]) -@login_required -def admin_lookup() -> _Response: - data = None - meta = None - follower = None - following = None - if request.args.get("url"): - data = lookup(request.args.get("url")) # type: ignore - if data: - if not data.has_type(ap.ACTOR_TYPES): - meta = _meta(data) - else: - follower = find_one_activity( - { - "box": "inbox", - "type": ap.ActivityType.FOLLOW.value, - "meta.actor_id": data.id, - "meta.undo": False, - } - ) - following = find_one_activity( - { - **by_type(ap.ActivityType.FOLLOW), - **by_object_id(data.id), - **not_undo(), - **in_outbox(), - **follow_request_accepted(), - } - ) - - if data.has_type(ap.ActivityType.QUESTION): - p.push(data.id, "/task/fetch_remote_question") - - print(data) - app.logger.debug(data.to_dict()) - return htmlify( - render_template( - "lookup.html", - data=data, - meta=meta, - follower=follower, - following=following, - url=request.args.get("url"), - ) - ) - - -@blueprint.route("/admin/profile", methods=["GET"]) -@login_required -def admin_profile() -> _Response: - if not request.args.get("actor_id"): - abort(404) - - actor_id = request.args.get("actor_id") - actor = ap.fetch_remote_activity(actor_id) - q = { - "meta.actor_id": actor_id, - "box": "inbox", - **not_deleted(), 
- "type": {"$in": [ap.ActivityType.CREATE.value, ap.ActivityType.ANNOUNCE.value]}, - } - inbox_data, older_than, newer_than = paginated_query( - DB.activities, q, limit=int(request.args.get("limit", 25)) - ) - follower = find_one_activity( - { - "box": "inbox", - "type": ap.ActivityType.FOLLOW.value, - "meta.actor_id": actor.id, - "meta.undo": False, - } - ) - following = find_one_activity( - { - **by_type(ap.ActivityType.FOLLOW), - **by_object_id(actor.id), - **not_undo(), - **in_outbox(), - **follow_request_accepted(), - } - ) - - return htmlify( - render_template( - "stream.html", - actor_id=actor_id, - actor=actor.to_dict(), - inbox_data=inbox_data, - older_than=older_than, - newer_than=newer_than, - follower=follower, - following=following, - lists=list(DB.lists.find()), - ) - ) - - -@blueprint.route("/admin/thread") -@login_required -def admin_thread() -> _Response: - oid = request.args.get("oid") - if not oid: - abort(404) - - data = find_one_activity({**by_type(ap.ActivityType.CREATE), **by_object_id(oid)}) - if not data: - dat = DB.replies.find_one({**by_remote_id(oid)}) - data = { - "activity": {"object": dat["activity"]}, - "meta": dat["meta"], - "_id": dat["_id"], - } - - if not data: - abort(404) - if data["meta"].get("deleted", False): - abort(410) - thread = _build_thread(data) - - tpl = "note.html" - if request.args.get("debug"): - tpl = "note_debug.html" - return htmlify(render_template(tpl, thread=thread, note=data)) - - -@blueprint.route("/admin/new", methods=["GET"]) -@login_required -def admin_new() -> _Response: - reply_id = None - content = "" - thread: List[Any] = [] - print(request.args) - default_visibility = None # ap.Visibility.PUBLIC - if request.args.get("reply"): - data = DB.activities.find_one({"activity.object.id": request.args.get("reply")}) - if data: - reply = ap.parse_activity(data["activity"]) - else: - obj = ap.get_backend().fetch_iri(request.args.get("reply")) - data = dict(meta=_meta(ap.parse_activity(obj)), activity=dict(object=obj)) - data["_id"] = obj["id"] - data["remote_id"] = obj["id"] - reply = ap.parse_activity(data["activity"]["object"]) - # Fetch the post visibility, in case it's follower only - default_visibility = ap.get_visibility(reply) - # If it's public, we default the reply to unlisted - if default_visibility == ap.Visibility.PUBLIC: - default_visibility = ap.Visibility.UNLISTED - - reply_id = reply.id - if reply.ACTIVITY_TYPE == ap.ActivityType.CREATE: - reply_id = reply.get_object().id - - actor = reply.get_actor() - domain = urlparse(actor.id).netloc - # FIXME(tsileo): if reply of reply, fetch all participants - content = f"@{actor.preferredUsername}@{domain} " - if reply.has_type(ap.ActivityType.CREATE): - reply = reply.get_object() - for mention in reply.get_mentions(): - if mention.href in [actor.id, ID]: - continue - m = ap.fetch_remote_activity(mention.href) - if m.has_type(ap.ACTOR_TYPES): - d = urlparse(m.id).netloc - content += f"@{m.preferredUsername}@{d} " - - thread = _build_thread(data) - - return htmlify( - render_template( - "new.html", - reply=reply_id, - content=content, - thread=thread, - default_visibility=default_visibility, - visibility=ap.Visibility, - emojis=config.EMOJIS.split(" "), - custom_emojis=sorted( - [ap.Emoji(**dat) for name, dat in EMOJIS_BY_NAME.items()], - key=lambda e: e.name, - ), - ) - ) - - -@blueprint.route("/admin/direct_messages", methods=["GET"]) -@login_required -def admin_direct_messages() -> _Response: - all_dms = DB.activities.find( - { - **not_poll_answer(), - 
**by_type(ap.ActivityType.CREATE), - **by_object_visibility(ap.Visibility.DIRECT), - } - ).sort("meta.published", -1) - - # Group by threads - _threads = defaultdict(list) # type: ignore - for dm in all_dms: - # Skip poll answers - if dm["activity"].get("object", {}).get("name"): - continue - - _threads[dm["meta"].get("thread_root_parent", dm["meta"]["object_id"])].append( - dm - ) - - # Now build the data needed for the UI - threads = [] - for thread_root, thread in _threads.items(): - # We need the list of participants - participants = set() - for raw_activity in thread: - activity = ap.parse_activity(raw_activity["activity"]) - actor = activity.get_actor() - domain = urlparse(actor.id).netloc - if actor.id != ID: - participants.add(f"@{actor.preferredUsername}@{domain}") - if activity.has_type(ap.ActivityType.CREATE): - activity = activity.get_object() - for mention in activity.get_mentions(): - if mention.href in [actor.id, ID]: - continue - m = ap.fetch_remote_activity(mention.href) - if m.has_type(ap.ACTOR_TYPES) and m.id != ID: - d = urlparse(m.id).netloc - participants.add(f"@{m.preferredUsername}@{d}") - - if not participants: - continue - # Build the UI data for this conversation - oid = thread[-1]["meta"]["object_id"] - threads.append( - { - "participants": list(participants), - "oid": oid, - "last_reply": thread[0], - "len": len(thread), - } - ) - return htmlify(render_template("direct_messages.html", threads=threads)) - - -@blueprint.route("/admin/lists", methods=["GET"]) -@login_required -def admin_lists() -> _Response: - lists = list(DB.lists.find()) - - return htmlify(render_template("lists.html", lists=lists)) - - -@blueprint.route("/admin/notifications") -@login_required -def admin_notifications() -> _Response: - # Setup the cron for deleting old activities - - # FIXME(tsileo): put back to 12h - p.push({}, "/task/cleanup", schedule="@every 1h") - - # Trigger a cleanup if asked - if request.args.get("cleanup"): - p.push({}, "/task/cleanup") - - # FIXME(tsileo): show unfollow (performed by the current actor) and liked??? 
- mentions_query = { - "type": ap.ActivityType.CREATE.value, - "activity.object.tag.type": "Mention", - "activity.object.tag.name": f"@{config.USERNAME}@{config.DOMAIN}", - "meta.deleted": False, - } - replies_query = { - "type": ap.ActivityType.CREATE.value, - "activity.object.inReplyTo": {"$regex": f"^{config.BASE_URL}"}, - "meta.poll_answer": False, - } - announced_query = { - "type": ap.ActivityType.ANNOUNCE.value, - "activity.object": {"$regex": f"^{config.BASE_URL}"}, - } - new_followers_query = {"type": ap.ActivityType.FOLLOW.value} - unfollow_query = { - "type": ap.ActivityType.UNDO.value, - "activity.object.type": ap.ActivityType.FOLLOW.value, - } - likes_query = { - "type": ap.ActivityType.LIKE.value, - "activity.object": {"$regex": f"^{config.BASE_URL}"}, - } - followed_query = {"type": ap.ActivityType.ACCEPT.value} - rejected_query = {"type": ap.ActivityType.REJECT.value} - q = { - "box": Box.INBOX.value, - "$or": [ - mentions_query, - announced_query, - replies_query, - new_followers_query, - followed_query, - rejected_query, - unfollow_query, - likes_query, - ], - } - inbox_data, older_than, newer_than = paginated_query(DB.activities, q) - if not newer_than: - nstart = datetime.now(timezone.utc).isoformat() - else: - nstart = inbox_data[0]["_id"].generation_time.isoformat() - if not older_than: - nend = (datetime.now(timezone.utc) - timedelta(days=15)).isoformat() - else: - nend = inbox_data[-1]["_id"].generation_time.isoformat() - print(nstart, nend) - notifs = list( - DB.notifications.find({"datetime": {"$lte": nstart, "$gt": nend}}) - .sort("_id", -1) - .limit(50) - ) - print(inbox_data) - - nid = None - if inbox_data: - nid = inbox_data[0]["_id"] - - inbox_data.extend(notifs) - inbox_data = sorted( - inbox_data, reverse=True, key=lambda doc: doc["_id"].generation_time - ) - - return htmlify( - render_template( - "stream.html", - inbox_data=inbox_data, - older_than=older_than, - newer_than=newer_than, - nid=nid, - ) - ) - - -@blueprint.route("/admin/stream") -@login_required -def admin_stream() -> _Response: - q = {"meta.stream": True, "meta.deleted": False} - - tpl = "stream.html" - if request.args.get("debug"): - tpl = "stream_debug.html" - if request.args.get("debug_inbox"): - q = {} - - inbox_data, older_than, newer_than = paginated_query( - DB.activities, q, limit=int(request.args.get("limit", 25)) - ) - - return htmlify( - render_template( - tpl, inbox_data=inbox_data, older_than=older_than, newer_than=newer_than - ) - ) - - -@blueprint.route("/admin/list/<name>") -@login_required -def admin_list(name: str) -> _Response: - list_ = DB.lists.find_one({"name": name}) - if not list_: - abort(404) - - q = { - "meta.stream": True, - "meta.deleted": False, - "meta.actor_id": {"$in": list_["members"]}, - } - - tpl = "stream.html" - if request.args.get("debug"): - tpl = "stream_debug.html" - if request.args.get("debug_inbox"): - q = {} - - inbox_data, older_than, newer_than = paginated_query( - DB.activities, q, limit=int(request.args.get("limit", 25)) - ) - - return htmlify( - render_template( - tpl, - inbox_data=inbox_data, - older_than=older_than, - newer_than=newer_than, - list_name=name, - ) - ) - - -@blueprint.route("/admin/bookmarks") -@login_required -def admin_bookmarks() -> _Response: - q = {"meta.bookmarked": True} - - tpl = "stream.html" - if request.args.get("debug"): - tpl = "stream_debug.html" - if request.args.get("debug_inbox"): - q = {} - - inbox_data, older_than, newer_than = paginated_query( - DB.activities, q, limit=int(request.args.get("limit", 25)) - ) 
- - return htmlify( - render_template( - tpl, inbox_data=inbox_data, older_than=older_than, newer_than=newer_than - ) - ) - - -@blueprint.route("/u2f/register", methods=["GET", "POST"]) -@login_required -def u2f_register(): - # TODO(tsileo): ensure no duplicates - if request.method == "GET": - payload = u2f.begin_registration(ID) - session["challenge"] = payload - return htmlify(render_template("u2f.html", payload=payload)) - else: - resp = json.loads(request.form.get("resp")) - device, device_cert = u2f.complete_registration(session["challenge"], resp) - session["challenge"] = None - DB.u2f.insert_one({"device": device, "cert": device_cert}) - session["logged_in"] = False - return redirect("/login") - - -@blueprint.route("/authorize_follow", methods=["GET", "POST"]) -@login_required -def authorize_follow(): - if request.method == "GET": - return htmlify( - render_template( - "authorize_remote_follow.html", profile=request.args.get("profile") - ) - ) - - csrf.protect() - actor = get_actor_url(request.form.get("profile")) - if not actor: - abort(500) - - q = { - "box": Box.OUTBOX.value, - "type": ap.ActivityType.FOLLOW.value, - "meta.undo": False, - "activity.object": actor, - } - if DB.activities.count(q) > 0: - return redirect("/following") - - follow = ap.Follow( - actor=MY_PERSON.id, object=actor, to=[actor], cc=[ap.AS_PUBLIC], published=now() - ) - post_to_outbox(follow) - - return redirect("/following") diff --git a/blueprints/api.py b/blueprints/api.py deleted file mode 100644 index 1b3aaac..0000000 --- a/blueprints/api.py +++ /dev/null @@ -1,770 +0,0 @@ -import logging -import mimetypes -from datetime import datetime -from datetime import timedelta -from datetime import timezone -from functools import wraps -from io import BytesIO -from shutil import copyfileobj -from typing import Any -from typing import List - -import flask -from bson.objectid import ObjectId -from flask import abort -from flask import current_app as app -from flask import redirect -from flask import request -from flask import session -from itsdangerous import BadSignature -from little_boxes import activitypub as ap -from little_boxes.content_helper import parse_markdown -from little_boxes.errors import ActivityNotFoundError -from little_boxes.errors import NotFromOutboxError -from werkzeug.utils import secure_filename - -import config -from config import ADMIN_API_KEY -from config import BASE_URL -from config import DB -from config import DEBUG_MODE -from config import ID -from config import JWT -from config import MEDIA_CACHE -from config import _drop_db -from core import feed -from core.activitypub import accept_follow -from core.activitypub import activity_url -from core.activitypub import new_context -from core.activitypub import post_to_outbox -from core.db import update_one_activity -from core.meta import Box -from core.meta import MetaKey -from core.meta import _meta -from core.meta import by_object_id -from core.meta import by_type -from core.shared import MY_PERSON -from core.shared import _Response -from core.shared import csrf -from core.shared import jsonify -from core.shared import login_required -from core.tasks import Tasks -from utils import emojis -from utils import now - -_logger = logging.getLogger(__name__) - -blueprint = flask.Blueprint("api", __name__) - - -def without_id(l): - out = [] - for d in l: - if "_id" in d: - del d["_id"] - out.append(d) - return out - - -def _api_required() -> None: - if session.get("logged_in"): - if request.method not in ["GET", "HEAD"]: - # If a standard API 
request is made with a "login session", it must havw a CSRF token - csrf.protect() - return - - # Token verification - token = request.headers.get("Authorization", "").replace("Bearer ", "") - if not token: - # IndieAuth token - token = request.form.get("access_token", "") - - # Will raise a BadSignature on bad auth - payload = JWT.loads(token) - flask.g.jwt_payload = payload - app.logger.info(f"api call by {payload}") - - -def api_required(f): - @wraps(f) - def decorated_function(*args, **kwargs): - try: - _api_required() - except BadSignature: - abort(401) - - return f(*args, **kwargs) - - return decorated_function - - -def _user_api_arg(key: str, **kwargs) -> Any: - """Try to get the given key from the requests, try JSON body, form data and query arg.""" - if request.is_json: - oid = request.json.get(key) - else: - oid = request.args.get(key) or request.form.get(key) - - if not oid: - if "default" in kwargs: - app.logger.info(f'{key}={kwargs.get("default")}') - return kwargs.get("default") - - raise ValueError(f"missing {key}") - - app.logger.info(f"{key}={oid}") - return oid - - -def _user_api_get_note(from_outbox: bool = False) -> ap.BaseActivity: - oid = _user_api_arg("id") - app.logger.info(f"fetching {oid}") - note = ap.parse_activity(ap.get_backend().fetch_iri(oid)) - if from_outbox and not note.id.startswith(ID): - raise NotFromOutboxError( - f"cannot load {note.id}, id must be owned by the server" - ) - - return note - - -def _user_api_response(**kwargs) -> _Response: - _redirect = _user_api_arg("redirect", default=None) - if _redirect: - return redirect(_redirect) - - resp = jsonify(kwargs) - resp.status_code = 201 - return resp - - -@blueprint.route("/api/key") -@login_required -def api_user_key() -> _Response: - return jsonify({"api_key": ADMIN_API_KEY}) - - -@blueprint.route("/note/delete", methods=["POST"]) -@api_required -def api_delete() -> _Response: - """API endpoint to delete a Note activity.""" - note = _user_api_get_note(from_outbox=True) - - # Create the delete, same audience as the Create object - delete = ap.Delete( - context=new_context(note), - actor=ID, - object=ap.Tombstone(id=note.id).to_dict(embed=True), - to=note.to, - cc=note.cc, - published=now(), - ) - - delete_id = post_to_outbox(delete) - - return _user_api_response(activity=delete_id) - - -@blueprint.route("/boost", methods=["POST"]) -@api_required -def api_boost() -> _Response: - note = _user_api_get_note() - - # Ensures the note visibility allow us to build an Announce (in respect to the post visibility) - if ap.get_visibility(note) not in [ap.Visibility.PUBLIC, ap.Visibility.UNLISTED]: - abort(400) - - announce = ap.Announce( - actor=MY_PERSON.id, - object=note.id, - to=[MY_PERSON.followers, note.attributedTo], - cc=[ap.AS_PUBLIC], - published=now(), - context=new_context(note), - ) - announce_id = post_to_outbox(announce) - - return _user_api_response(activity=announce_id) - - -@blueprint.route("/ack_reply", methods=["POST"]) -@api_required -def api_ack_reply() -> _Response: - reply_iri = _user_api_arg("reply_iri") - obj = ap.fetch_remote_activity(reply_iri) - if obj.has_type(ap.ActivityType.CREATE): - obj = obj.get_object() - # TODO(tsileo): tweak the adressing? 
- update_one_activity( - {**by_type(ap.ActivityType.CREATE), **by_object_id(obj.id)}, - {"$set": {"meta.reply_acked": True}}, - ) - read = ap.Read( - actor=MY_PERSON.id, - object=obj.id, - to=[MY_PERSON.followers], - cc=[ap.AS_PUBLIC, obj.get_actor().id], - published=now(), - context=new_context(obj), - ) - - read_id = post_to_outbox(read) - return _user_api_response(activity=read_id) - - -@blueprint.route("/mark_notifications_as_read", methods=["POST"]) -@api_required -def api_mark_notification_as_read() -> _Response: - nid = ObjectId(_user_api_arg("nid")) - - DB.activities.update_many( - {_meta(MetaKey.NOTIFICATION_UNREAD): True, "_id": {"$lte": nid}}, - {"$set": {_meta(MetaKey.NOTIFICATION_UNREAD): False}}, - ) - - return _user_api_response() - - -@blueprint.route("/vote", methods=["POST"]) -@api_required -def api_vote() -> _Response: - oid = _user_api_arg("id") - app.logger.info(f"fetching {oid}") - note = ap.parse_activity(ap.get_backend().fetch_iri(oid)) - choice = _user_api_arg("choice") - - raw_note = dict( - attributedTo=MY_PERSON.id, - cc=[], - to=note.get_actor().id, - name=choice, - tag=[], - context=new_context(note), - inReplyTo=note.id, - ) - raw_note["@context"] = config.DEFAULT_CTX - - note = ap.Note(**raw_note) - create = note.build_create() - create_id = post_to_outbox(create) - - return _user_api_response(activity=create_id) - - -@blueprint.route("/like", methods=["POST"]) -@api_required -def api_like() -> _Response: - note = _user_api_get_note() - - to: List[str] = [] - cc: List[str] = [] - - note_visibility = ap.get_visibility(note) - - if note_visibility == ap.Visibility.PUBLIC: - to = [ap.AS_PUBLIC] - cc = [ID + "/followers", note.get_actor().id] - elif note_visibility == ap.Visibility.UNLISTED: - to = [ID + "/followers", note.get_actor().id] - cc = [ap.AS_PUBLIC] - else: - to = [note.get_actor().id] - - like = ap.Like( - object=note.id, - actor=MY_PERSON.id, - to=to, - cc=cc, - published=now(), - context=new_context(note), - ) - - like_id = post_to_outbox(like) - - return _user_api_response(activity=like_id) - - -@blueprint.route("/bookmark", methods=["POST"]) -@api_required -def api_bookmark() -> _Response: - note = _user_api_get_note() - - undo = _user_api_arg("undo", default=None) == "yes" - - # Try to bookmark the `Create` first - if not DB.activities.update_one( - {"activity.object.id": note.id}, {"$set": {"meta.bookmarked": not undo}} - ).modified_count: - # Then look for the `Announce` - DB.activities.update_one( - {"meta.object.id": note.id}, {"$set": {"meta.bookmarked": not undo}} - ) - - return _user_api_response() - - -@blueprint.route("/note/pin", methods=["POST"]) -@api_required -def api_pin() -> _Response: - note = _user_api_get_note(from_outbox=True) - - DB.activities.update_one( - {"activity.object.id": note.id, "box": Box.OUTBOX.value}, - {"$set": {"meta.pinned": True}}, - ) - - return _user_api_response(pinned=True) - - -@blueprint.route("/note/unpin", methods=["POST"]) -@api_required -def api_unpin() -> _Response: - note = _user_api_get_note(from_outbox=True) - - DB.activities.update_one( - {"activity.object.id": note.id, "box": Box.OUTBOX.value}, - {"$set": {"meta.pinned": False}}, - ) - - return _user_api_response(pinned=False) - - -@blueprint.route("/undo", methods=["POST"]) -@api_required -def api_undo() -> _Response: - oid = _user_api_arg("id") - doc = DB.activities.find_one( - { - "box": Box.OUTBOX.value, - "$or": [{"remote_id": activity_url(oid)}, {"remote_id": oid}], - } - ) - if not doc: - raise ActivityNotFoundError(f"cannot found 
{oid}") - - obj = ap.parse_activity(doc.get("activity")) - - undo = ap.Undo( - actor=MY_PERSON.id, - context=new_context(obj), - object=obj.to_dict(embed=True, embed_object_id_only=True), - published=now(), - to=obj.to, - cc=obj.cc, - ) - - # FIXME(tsileo): detect already undo-ed and make this API call idempotent - undo_id = post_to_outbox(undo) - - return _user_api_response(activity=undo_id) - - -@blueprint.route("/accept_follow", methods=["POST"]) -@api_required -def api_accept_follow() -> _Response: - oid = _user_api_arg("id") - doc = DB.activities.find_one({"box": Box.INBOX.value, "remote_id": oid}) - print(doc) - if not doc: - raise ActivityNotFoundError(f"cannot found {oid}") - - obj = ap.parse_activity(doc.get("activity")) - if not obj.has_type(ap.ActivityType.FOLLOW): - raise ValueError(f"{obj} is not a Follow activity") - - accept_id = accept_follow(obj) - - return _user_api_response(activity=accept_id) - - -@blueprint.route("/new_list", methods=["POST"]) -@api_required -def api_new_list() -> _Response: - name = _user_api_arg("name") - if not name: - raise ValueError("missing name") - - if not DB.lists.find_one({"name": name}): - DB.lists.insert_one({"name": name, "members": []}) - - return _user_api_response(name=name) - - -@blueprint.route("/delete_list", methods=["POST"]) -@api_required -def api_delete_list() -> _Response: - name = _user_api_arg("name") - if not name: - raise ValueError("missing name") - - if not DB.lists.find_one({"name": name}): - abort(404) - - DB.lists.delete_one({"name": name}) - - return _user_api_response() - - -@blueprint.route("/add_to_list", methods=["POST"]) -@api_required -def api_add_to_list() -> _Response: - list_name = _user_api_arg("list_name") - if not list_name: - raise ValueError("missing list_name") - - if not DB.lists.find_one({"name": list_name}): - raise ValueError(f"list {list_name} does not exist") - - actor_id = _user_api_arg("actor_id") - if not actor_id: - raise ValueError("missing actor_id") - - DB.lists.update_one({"name": list_name}, {"$addToSet": {"members": actor_id}}) - - return _user_api_response() - - -@blueprint.route("/remove_from_list", methods=["POST"]) -@api_required -def api_remove_from_list() -> _Response: - list_name = _user_api_arg("list_name") - if not list_name: - raise ValueError("missing list_name") - - if not DB.lists.find_one({"name": list_name}): - raise ValueError(f"list {list_name} does not exist") - - actor_id = _user_api_arg("actor_id") - if not actor_id: - raise ValueError("missing actor_id") - - DB.lists.update_one({"name": list_name}, {"$pull": {"members": actor_id}}) - - return _user_api_response() - - -@blueprint.route("/new_note", methods=["POST", "GET"]) # noqa: C901 too complex -@api_required -def api_new_note() -> _Response: - # Basic Micropub (https://www.w3.org/TR/micropub/) query configuration support - if request.method == "GET" and request.args.get("q") == "config": - return jsonify({}) - elif request.method == "GET": - abort(405) - - source = None - summary = None - location = None - - # Basic Micropub (https://www.w3.org/TR/micropub/) "create" support - is_micropub = False - # First, check if the Micropub specific fields are present - if ( - _user_api_arg("h", default=None) == "entry" - or _user_api_arg("type", default=[None])[0] == "h-entry" - ): - is_micropub = True - # Ensure the "create" scope is set - if "jwt_payload" not in flask.g or "create" not in flask.g.jwt_payload["scope"]: - abort(403) - - # Handle location sent via form-data - # `geo:28.5,9.0,0.0` - location = 
_user_api_arg("location", default="") - if location.startswith("geo:"): - slat, slng, *_ = location[4:].split(",") - location = { - "type": ap.ActivityType.PLACE.value, - "latitude": float(slat), - "longitude": float(slng), - } - - # Handle JSON microformats2 data - if _user_api_arg("type", default=None): - _logger.info(f"Micropub request: {request.json}") - try: - source = request.json["properties"]["content"][0] - except (ValueError, KeyError): - pass - - # Handle HTML - if isinstance(source, dict): - source = source.get("html") - - try: - summary = request.json["properties"]["name"][0] - except (ValueError, KeyError): - pass - - # Try to parse the name as summary if the payload is POSTed using form-data - if summary is None: - summary = _user_api_arg("name", default=None) - - # This step will also parse content from Micropub request - if source is None: - source = _user_api_arg("content", default=None) - - if not source: - raise ValueError("missing content") - - if summary is None: - summary = _user_api_arg("summary", default="") - - if not location: - if _user_api_arg("location_lat", default=None): - lat = float(_user_api_arg("location_lat")) - lng = float(_user_api_arg("location_lng")) - loc_name = _user_api_arg("location_name", default="") - location = { - "type": ap.ActivityType.PLACE.value, - "name": loc_name, - "latitude": lat, - "longitude": lng, - } - - # All the following fields are specific to the API (i.e. not Micropub related) - _reply, reply = None, None - try: - _reply = _user_api_arg("reply") - except ValueError: - pass - - visibility = ap.Visibility[ - _user_api_arg("visibility", default=ap.Visibility.PUBLIC.name) - ] - - content, tags = parse_markdown(source) - - # Check for custom emojis - tags = tags + emojis.tags(content) - - to: List[str] = [] - cc: List[str] = [] - - if visibility == ap.Visibility.PUBLIC: - to = [ap.AS_PUBLIC] - cc = [ID + "/followers"] - elif visibility == ap.Visibility.UNLISTED: - to = [ID + "/followers"] - cc = [ap.AS_PUBLIC] - elif visibility == ap.Visibility.FOLLOWERS_ONLY: - to = [ID + "/followers"] - cc = [] - - if _reply: - reply = ap.fetch_remote_activity(_reply) - if visibility == ap.Visibility.DIRECT: - to.append(reply.attributedTo) - else: - cc.append(reply.attributedTo) - - context = new_context(reply) - - for tag in tags: - if tag["type"] == "Mention": - to.append(tag["href"]) - - raw_note = dict( - attributedTo=MY_PERSON.id, - cc=list(set(cc) - set([MY_PERSON.id])), - to=list(set(to) - set([MY_PERSON.id])), - summary=summary, - content=content, - tag=tags, - source={"mediaType": "text/markdown", "content": source}, - inReplyTo=reply.id if reply else None, - context=context, - ) - - if location: - raw_note["location"] = location - - if request.files: - for f in request.files.keys(): - if not request.files[f].filename: - continue - - file = request.files[f] - rfilename = secure_filename(file.filename) - with BytesIO() as buf: - # bypass file.save(), because it can't save to a file-like object - copyfileobj(file.stream, buf, 16384) - oid = MEDIA_CACHE.save_upload(buf, rfilename) - mtype = mimetypes.guess_type(rfilename)[0] - - raw_note["attachment"] = [ - { - "mediaType": mtype, - "name": _user_api_arg("file_description", default=rfilename), - "type": "Document", - "url": f"{BASE_URL}/uploads/{oid}/{rfilename}", - } - ] - - note = ap.Note(**raw_note) - create = note.build_create() - create_id = post_to_outbox(create) - - # Return a 201 with the note URL in the Location header if this was a Micropub request - if is_micropub: - resp = 
flask.Response("", headers={"Location": create_id}) - resp.status_code = 201 - return resp - - return _user_api_response(activity=create_id) - - -@blueprint.route("/new_question", methods=["POST"]) -@api_required -def api_new_question() -> _Response: - source = _user_api_arg("content") - if not source: - raise ValueError("missing content") - - visibility = ap.Visibility[ - _user_api_arg("visibility", default=ap.Visibility.PUBLIC.name) - ] - - content, tags = parse_markdown(source) - tags = tags + emojis.tags(content) - - to: List[str] = [] - cc: List[str] = [] - - if visibility == ap.Visibility.PUBLIC: - to = [ap.AS_PUBLIC] - cc = [ID + "/followers"] - elif visibility == ap.Visibility.UNLISTED: - to = [ID + "/followers"] - cc = [ap.AS_PUBLIC] - elif visibility == ap.Visibility.FOLLOWERS_ONLY: - to = [ID + "/followers"] - cc = [] - - for tag in tags: - if tag["type"] == "Mention": - cc.append(tag["href"]) - - answers = [] - for i in range(4): - a = _user_api_arg(f"answer{i}", default=None) - if not a: - break - answers.append( - { - "type": ap.ActivityType.NOTE.value, - "name": a, - "replies": {"type": ap.ActivityType.COLLECTION.value, "totalItems": 0}, - } - ) - - open_for = int(_user_api_arg("open_for")) - choices = { - "endTime": ap.format_datetime( - datetime.now(timezone.utc) + timedelta(minutes=open_for) - ) - } - of = _user_api_arg("of") - if of == "anyOf": - choices["anyOf"] = answers - else: - choices["oneOf"] = answers - - raw_question = dict( - attributedTo=MY_PERSON.id, - cc=list(set(cc)), - to=list(set(to)), - context=new_context(), - content=content, - tag=tags, - source={"mediaType": "text/markdown", "content": source}, - inReplyTo=None, - **choices, - ) - - question = ap.Question(**raw_question) - create = question.build_create() - create_id = post_to_outbox(create) - - Tasks.update_question_outbox(create_id, open_for) - - return _user_api_response(activity=create_id) - - -@blueprint.route("/block", methods=["POST"]) -@api_required -def api_block() -> _Response: - actor = _user_api_arg("actor") - - existing = DB.activities.find_one( - { - "box": Box.OUTBOX.value, - "type": ap.ActivityType.BLOCK.value, - "activity.object": actor, - "meta.undo": False, - } - ) - if existing: - return _user_api_response(activity=existing["activity"]["id"]) - - block = ap.Block(actor=MY_PERSON.id, object=actor) - block_id = post_to_outbox(block) - - return _user_api_response(activity=block_id) - - -@blueprint.route("/follow", methods=["POST"]) -@api_required -def api_follow() -> _Response: - actor = _user_api_arg("actor") - - q = { - "box": Box.OUTBOX.value, - "type": ap.ActivityType.FOLLOW.value, - "meta.undo": False, - "activity.object": actor, - } - - existing = DB.activities.find_one(q) - if existing: - return _user_api_response(activity=existing["activity"]["id"]) - - follow = ap.Follow( - actor=MY_PERSON.id, - object=actor, - to=[actor], - cc=[ap.AS_PUBLIC], - published=now(), - context=new_context(), - ) - follow_id = post_to_outbox(follow) - - return _user_api_response(activity=follow_id) - - -@blueprint.route("/debug", methods=["GET", "DELETE"]) -@api_required -def api_debug() -> _Response: - """Endpoint used/needed for testing, only works in DEBUG_MODE.""" - if not DEBUG_MODE: - return jsonify({"message": "DEBUG_MODE is off"}) - - if request.method == "DELETE": - _drop_db() - return jsonify(dict(message="DB dropped")) - - return jsonify( - dict( - inbox=DB.activities.count({"box": Box.INBOX.value}), - outbox=DB.activities.count({"box": Box.OUTBOX.value}), - 
outbox_data=without_id(DB.activities.find({"box": Box.OUTBOX.value})), - ) - ) - - -@blueprint.route("/stream") -@api_required -def api_stream() -> _Response: - return jsonify( - feed.build_inbox_json_feed("/api/stream", request.args.get("cursor")) - ) diff --git a/blueprints/indieauth.py b/blueprints/indieauth.py deleted file mode 100644 index 68e64dd..0000000 --- a/blueprints/indieauth.py +++ /dev/null @@ -1,241 +0,0 @@ -import binascii -import os -from datetime import datetime -from datetime import timedelta -from urllib.parse import urlencode - -import flask -import mf2py -from flask import Response -from flask import abort -from flask import redirect -from flask import render_template -from flask import request -from flask import session -from flask import url_for -from itsdangerous import BadSignature - -from config import DB -from config import JWT -from core.shared import _get_ip -from core.shared import htmlify -from core.shared import jsonify -from core.shared import login_required - -blueprint = flask.Blueprint("indieauth", __name__) - - -def build_auth_resp(payload): - if request.headers.get("Accept") == "application/json": - return jsonify(payload) - return Response( - status=200, - headers={"Content-Type": "application/x-www-form-urlencoded"}, - response=urlencode(payload), - ) - - -def _get_prop(props, name, default=None): - if name in props: - items = props.get(name) - if isinstance(items, list): - return items[0] - return items - return default - - -def get_client_id_data(url): - # FIXME(tsileo): ensure not localhost via `little_boxes.urlutils.is_url_valid` - data = mf2py.parse(url=url) - for item in data["items"]: - if "h-x-app" in item["type"] or "h-app" in item["type"]: - props = item.get("properties", {}) - print(props) - return dict( - logo=_get_prop(props, "logo"), - name=_get_prop(props, "name"), - url=_get_prop(props, "url"), - ) - return dict(logo=None, name=url, url=url) - - -@blueprint.route("/indieauth/flow", methods=["POST"]) -@login_required -def indieauth_flow(): - auth = dict( - scope=" ".join(request.form.getlist("scopes")), - me=request.form.get("me"), - client_id=request.form.get("client_id"), - state=request.form.get("state"), - redirect_uri=request.form.get("redirect_uri"), - response_type=request.form.get("response_type"), - ts=datetime.now().timestamp(), - code=binascii.hexlify(os.urandom(8)).decode("utf-8"), - verified=False, - ) - - # XXX(tsileo): a whitelist for me values? 
- - # TODO(tsileo): redirect_uri checks - if not auth["redirect_uri"]: - abort(400) - - DB.indieauth.insert_one(auth) - - # FIXME(tsileo): fetch client ID and validate redirect_uri - red = f'{auth["redirect_uri"]}?code={auth["code"]}&state={auth["state"]}&me={auth["me"]}' - return redirect(red) - - -@blueprint.route("/indieauth", methods=["GET", "POST"]) -def indieauth_endpoint(): - if request.method == "GET": - if not session.get("logged_in"): - return redirect(url_for("admin.admin_login", redirect=request.url)) - - me = request.args.get("me") - # FIXME(tsileo): ensure me == ID - client_id = request.args.get("client_id") - redirect_uri = request.args.get("redirect_uri") - state = request.args.get("state", "") - response_type = request.args.get("response_type", "id") - scope = request.args.get("scope", "").split() - - print("STATE", state) - return htmlify( - render_template( - "indieauth_flow.html", - client=get_client_id_data(client_id), - scopes=scope, - redirect_uri=redirect_uri, - state=state, - response_type=response_type, - client_id=client_id, - me=me, - ) - ) - - # Auth verification via POST - code = request.form.get("code") - redirect_uri = request.form.get("redirect_uri") - client_id = request.form.get("client_id") - - ip, geoip = _get_ip() - - auth = DB.indieauth.find_one_and_update( - { - "code": code, - "redirect_uri": redirect_uri, - "client_id": client_id, - "verified": False, - }, - { - "$set": { - "verified": True, - "verified_by": "id", - "verified_at": datetime.now().timestamp(), - "ip_address": ip, - "geoip": geoip, - } - }, - ) - print(auth) - print(code, redirect_uri, client_id) - - # Ensure the code is recent - if (datetime.now() - datetime.fromtimestamp(auth["ts"])) > timedelta(minutes=5): - abort(400) - - if not auth: - abort(403) - return - - session["logged_in"] = True - me = auth["me"] - state = auth["state"] - scope = auth["scope"] - print("STATE", state) - return build_auth_resp({"me": me, "state": state, "scope": scope}) - - -@blueprint.route("/token", methods=["GET", "POST"]) -def token_endpoint(): - # Generate a new token with the returned access code - if request.method == "POST": - code = request.form.get("code") - me = request.form.get("me") - redirect_uri = request.form.get("redirect_uri") - client_id = request.form.get("client_id") - - now = datetime.now() - ip, geoip = _get_ip() - - # This query ensure code, client_id, redirect_uri and me are matching with the code request - auth = DB.indieauth.find_one_and_update( - { - "code": code, - "me": me, - "redirect_uri": redirect_uri, - "client_id": client_id, - "verified": False, - }, - { - "$set": { - "verified": True, - "verified_by": "code", - "verified_at": now.timestamp(), - "ip_address": ip, - "geoip": geoip, - } - }, - ) - - if not auth: - abort(403) - - scope = auth["scope"].split() - - # Ensure there's at least one scope - if not len(scope): - abort(400) - - # Ensure the code is recent - if (now - datetime.fromtimestamp(auth["ts"])) > timedelta(minutes=5): - abort(400) - - payload = dict(me=me, client_id=client_id, scope=scope, ts=now.timestamp()) - token = JWT.dumps(payload).decode("utf-8") - DB.indieauth.update_one( - {"_id": auth["_id"]}, - { - "$set": { - "token": token, - "token_expires": (now + timedelta(minutes=30)).timestamp(), - } - }, - ) - - return build_auth_resp( - {"me": me, "scope": auth["scope"], "access_token": token} - ) - - # Token verification - token = request.headers.get("Authorization").replace("Bearer ", "") - try: - payload = JWT.loads(token) - except BadSignature: - 
abort(403) - - # Check the token expritation (valid for 3 hours) - if (datetime.now() - datetime.fromtimestamp(payload["ts"])) > timedelta( - minutes=180 - ): - abort(401) - - return build_auth_resp( - { - "me": payload["me"], - "scope": " ".join(payload["scope"]), - "client_id": payload["client_id"], - } - ) diff --git a/blueprints/tasks.py b/blueprints/tasks.py deleted file mode 100644 index e47e218..0000000 --- a/blueprints/tasks.py +++ /dev/null @@ -1,760 +0,0 @@ -import json -import traceback -from datetime import datetime -from datetime import timezone -from typing import Any -from typing import Dict - -import flask -import requests -from bs4 import BeautifulSoup -from flask import current_app as app -from little_boxes import activitypub as ap -from little_boxes.activitypub import _to_list -from little_boxes.errors import ActivityGoneError -from little_boxes.errors import ActivityNotFoundError -from little_boxes.errors import NotAnActivityError -from requests.exceptions import HTTPError - -import config -from config import DB -from config import MEDIA_CACHE -from core import gc -from core.activitypub import SIG_AUTH -from core.activitypub import Box -from core.activitypub import _actor_hash -from core.activitypub import _add_answers_to_question -from core.activitypub import _cache_actor_icon -from core.activitypub import is_from_outbox -from core.activitypub import new_context -from core.activitypub import post_to_outbox -from core.activitypub import save_reply -from core.activitypub import update_cached_actor -from core.db import find_one_activity -from core.db import update_one_activity -from core.inbox import process_inbox -from core.meta import MetaKey -from core.meta import by_object_id -from core.meta import by_remote_id -from core.meta import by_type -from core.meta import inc -from core.meta import upsert -from core.notifications import _NewMeta -from core.notifications import set_inbox_flags -from core.outbox import process_outbox -from core.remote import track_failed_send -from core.remote import track_successful_send -from core.shared import MY_PERSON -from core.shared import _Response -from core.shared import back -from core.shared import p -from core.tasks import Tasks -from utils import now -from utils import opengraph -from utils.media import is_video -from utils.webmentions import discover_webmention_endpoint - -blueprint = flask.Blueprint("tasks", __name__) - - -class TaskError(Exception): - """Raised to log the error for poussetaches.""" - - def __init__(self): - self.message = traceback.format_exc() - - -@blueprint.route("/task/update_question", methods=["POST"]) -def task_update_question() -> _Response: - """Sends an Update.""" - task = p.parse(flask.request) - app.logger.info(f"task={task!r}") - iri = task.payload - try: - app.logger.info(f"Updating question {iri}") - cc = [config.ID + "/followers"] - doc = DB.activities.find_one({"box": Box.OUTBOX.value, "remote_id": iri}) - _add_answers_to_question(doc) - question = ap.Question(**doc["activity"]["object"]) - - raw_update = dict( - actor=question.id, - object=question.to_dict(embed=True), - attributedTo=MY_PERSON.id, - cc=list(set(cc)), - to=[ap.AS_PUBLIC], - ) - raw_update["@context"] = config.DEFAULT_CTX - - update = ap.Update(**raw_update) - print(update) - print(update.to_dict()) - post_to_outbox(update) - - except HTTPError as err: - app.logger.exception("request failed") - if 400 <= err.response.status_code <= 499: - app.logger.info("client error, no retry") - return "" - - raise TaskError() from err - 
except Exception as err: - app.logger.exception("task failed") - raise TaskError() from err - - return "" - - -@blueprint.route("/task/send_actor_update", methods=["POST"]) -def task_send_actor_update() -> _Response: - task = p.parse(flask.request) - app.logger.info(f"task={task!r}") - try: - update = ap.Update( - actor=MY_PERSON.id, - object=MY_PERSON.to_dict(), - to=[MY_PERSON.followers], - cc=[ap.AS_PUBLIC], - published=now(), - context=new_context(), - ) - - post_to_outbox(update) - except Exception as err: - app.logger.exception(f"failed to send actor update") - raise TaskError() from err - - return "" - - -@blueprint.route("/task/fetch_og_meta", methods=["POST"]) -def task_fetch_og_meta() -> _Response: - task = p.parse(flask.request) - app.logger.info(f"task={task!r}") - iri = task.payload - try: - activity = ap.fetch_remote_activity(iri) - app.logger.info(f"activity={activity!r}") - if activity.has_type(ap.ActivityType.CREATE): - note = activity.get_object() - links = opengraph.links_from_note(note.to_dict()) - og_metadata = opengraph.fetch_og_metadata(config.USER_AGENT, links) - for og in og_metadata: - if not og.get("image"): - continue - config.MEDIA_CACHE.cache_og_image(og["image"], iri) - - app.logger.debug(f"OG metadata {og_metadata!r}") - DB.activities.update_one( - {"remote_id": iri}, {"$set": {"meta.og_metadata": og_metadata}} - ) - - app.logger.info(f"OG metadata fetched for {iri}: {og_metadata}") - except (ActivityGoneError, ActivityNotFoundError): - app.logger.exception(f"dropping activity {iri}, skip OG metedata") - return "" - except requests.exceptions.HTTPError as http_err: - if 400 <= http_err.response.status_code < 500: - app.logger.exception("bad request, no retry") - return "" - app.logger.exception("failed to fetch OG metadata") - raise TaskError() from http_err - except Exception as err: - app.logger.exception(f"failed to fetch OG metadata for {iri}") - raise TaskError() from err - - return "" - - -@blueprint.route("/task/cache_object", methods=["POST"]) -def task_cache_object() -> _Response: - task = p.parse(flask.request) - app.logger.info(f"task={task!r}") - iri = task.payload - try: - activity = ap.fetch_remote_activity(iri) - app.logger.info(f"activity={activity!r}") - obj = activity.get_object() - Tasks.cache_emojis(obj) - - # Refetch the object actor (without cache) - obj_actor = ap.fetch_remote_activity(obj.get_actor().id, no_cache=True) - - cache = {MetaKey.OBJECT: obj.to_dict(embed=True)} - - if activity.get_actor().id != obj_actor.id: - # Cache the object actor - obj_actor_hash = _actor_hash(obj_actor) - cache[MetaKey.OBJECT_ACTOR] = obj_actor.to_dict(embed=True) - cache[MetaKey.OBJECT_ACTOR_ID] = obj_actor.id - cache[MetaKey.OBJECT_ACTOR_HASH] = obj_actor_hash - - # Update the actor cache for the other activities - update_cached_actor(obj_actor) - - update_one_activity(by_remote_id(activity.id), upsert(cache)) - - except (ActivityGoneError, ActivityNotFoundError, NotAnActivityError): - DB.activities.update_one({"remote_id": iri}, {"$set": {"meta.deleted": True}}) - app.logger.exception(f"flagging activity {iri} as deleted, no object caching") - except Exception as err: - app.logger.exception(f"failed to cache object for {iri}") - raise TaskError() from err - - return "" - - -@blueprint.route("/task/finish_post_to_outbox", methods=["POST"]) # noqa:C901 -def task_finish_post_to_outbox() -> _Response: - task = p.parse(flask.request) - app.logger.info(f"task={task!r}") - iri = task.payload - try: - activity = ap.fetch_remote_activity(iri) - 
app.logger.info(f"activity={activity!r}") - - recipients = activity.recipients() - - process_outbox(activity, {}) - - app.logger.info(f"recipients={recipients}") - activity = ap.clean_activity(activity.to_dict()) - - payload = json.dumps(activity) - for recp in recipients: - app.logger.debug(f"posting to {recp}") - Tasks.post_to_remote_inbox(payload, recp) - except (ActivityGoneError, ActivityNotFoundError): - app.logger.exception(f"no retry") - except Exception as err: - app.logger.exception(f"failed to post to remote inbox for {iri}") - raise TaskError() from err - - return "" - - -@blueprint.route("/task/finish_post_to_inbox", methods=["POST"]) # noqa: C901 -def task_finish_post_to_inbox() -> _Response: - task = p.parse(flask.request) - app.logger.info(f"task={task!r}") - iri = task.payload - try: - activity = ap.fetch_remote_activity(iri) - app.logger.info(f"activity={activity!r}") - - process_inbox(activity, {}) - - except (ActivityGoneError, ActivityNotFoundError, NotAnActivityError): - app.logger.exception(f"no retry") - except Exception as err: - app.logger.exception(f"failed to cfinish post to inbox for {iri}") - raise TaskError() from err - - return "" - - -def select_video_to_cache(links): - """Try to find the 360p version from a video urls, or return the smallest one.""" - videos = [] - for link in links: - if link.get("mimeType", "").startswith("video/") or is_video(link["href"]): - videos.append({"href": link["href"], "height": link["height"]}) - - if not videos: - app.logger.warning(f"failed to select a video from {links!r}") - return None - - videos = sorted(videos, key=lambda l: l["height"]) - for video in videos: - if video["height"] == 360: - return video - - return videos[0] - - -@blueprint.route( - "/task/cache_attachments", methods=["POST"] -) # noqa: C910 # too complex -def task_cache_attachments() -> _Response: - task = p.parse(flask.request) - app.logger.info(f"task={task!r}") - iri = task.payload - try: - activity = ap.fetch_remote_activity(iri) - app.logger.info(f"caching attachment for activity={activity!r}") - # Generates thumbnails for the actor's icon and the attachments if any - - if activity.has_type([ap.ActivityType.CREATE, ap.ActivityType.ANNOUNCE]): - obj = activity.get_object() - else: - obj = activity - - if obj.content: - content_html = BeautifulSoup(obj.content, "html5lib") - for img in content_html.find_all("img"): - src = img.attrs.get("src") - if src: - Tasks.cache_attachment({"url": src}, iri) - - if obj.has_type(ap.ActivityType.VIDEO): - if isinstance(obj.url, list): - # TODO: filter only videogt - link = select_video_to_cache(obj.url) - if link: - Tasks.cache_attachment({"url": link["href"]}, iri) - elif isinstance(obj.url, str): - Tasks.cache_attachment({"url": obj.url}, iri) - else: - app.logger.warning(f"failed to parse video link {obj!r} for {iri}") - - # Iter the attachments - for attachment in obj._data.get("attachment", []): - Tasks.cache_attachment(attachment, iri) - - app.logger.info(f"attachments cached for {iri}") - - except (ActivityGoneError, ActivityNotFoundError, NotAnActivityError): - app.logger.exception(f"dropping activity {iri}, no attachment caching") - except Exception as err: - app.logger.exception(f"failed to cache attachments for {iri}") - raise TaskError() from err - - return "" - - -@blueprint.route("/task/cache_attachment", methods=["POST"]) -def task_cache_attachment() -> _Response: - task = p.parse(flask.request) - app.logger.info(f"task={task!r}") - iri = task.payload["iri"] - attachment = 
task.payload["attachment"] - try: - app.logger.info(f"caching attachment {attachment!r} for {iri}") - - config.MEDIA_CACHE.cache_attachment(attachment, iri) - - app.logger.info(f"attachment {attachment!r} cached for {iri}") - except Exception as err: - app.logger.exception(f"failed to cache attachment {attachment!r} for {iri}") - raise TaskError() from err - - return "" - - -@blueprint.route("/task/send_webmention", methods=["POST"]) -def task_send_webmention() -> _Response: - task = p.parse(flask.request) - app.logger.info(f"task={task!r}") - note_url = task.payload["note_url"] - link = task.payload["link"] - remote_id = task.payload["remote_id"] - try: - app.logger.info(f"trying to send webmention source={note_url} target={link}") - webmention_endpoint = discover_webmention_endpoint(link) - if not webmention_endpoint: - app.logger.info("no webmention endpoint") - return "" - - resp = requests.post( - webmention_endpoint, - data={"source": note_url, "target": link}, - headers={"User-Agent": config.USER_AGENT}, - ) - app.logger.info(f"webmention endpoint resp={resp}/{resp.text}") - resp.raise_for_status() - except HTTPError as err: - app.logger.exception("request failed") - if 400 <= err.response.status_code <= 499: - app.logger.info("client error, no retry") - return "" - - raise TaskError() from err - except Exception as err: - app.logger.exception(f"failed to cache actor for {link}/{remote_id}/{note_url}") - raise TaskError() from err - - return "" - - -@blueprint.route("/task/cache_actor", methods=["POST"]) # noqa: C910 # too complex -def task_cache_actor() -> _Response: - task = p.parse(flask.request) - app.logger.info(f"task={task!r}") - iri = task.payload["iri"] - try: - activity = ap.fetch_remote_activity(iri) - app.logger.info(f"activity={activity!r}") - - # Reload the actor without caching (in case it got upated) - actor = ap.fetch_remote_activity(activity.get_actor().id, no_cache=True) - - # Fetch the Open Grah metadata if it's a `Create` - if activity.has_type(ap.ActivityType.CREATE): - obj = activity.get_object() - try: - links = opengraph.links_from_note(obj.to_dict()) - if links: - Tasks.fetch_og_meta(iri) - - # Send Webmentions only if it's from the outbox, and public - if ( - is_from_outbox(obj) - and ap.get_visibility(obj) == ap.Visibility.PUBLIC - ): - Tasks.send_webmentions(activity, links) - except Exception: - app.logger.exception("failed to cache links") - - if activity.has_type(ap.ActivityType.FOLLOW): - if actor.id == config.ID: - # It's a new following, cache the "object" (which is the actor we follow) - DB.activities.update_one( - by_remote_id(iri), - upsert({MetaKey.OBJECT: activity.get_object().to_dict(embed=True)}), - ) - - # Cache the actor info - update_cached_actor(actor) - - app.logger.info(f"actor cached for {iri}") - if not activity.has_type([ap.ActivityType.CREATE, ap.ActivityType.ANNOUNCE]): - return "" - - if activity.get_object()._data.get( - "attachment", [] - ) or activity.get_object().has_type(ap.ActivityType.VIDEO): - Tasks.cache_attachments(iri) - - except (ActivityGoneError, ActivityNotFoundError): - DB.activities.update_one({"remote_id": iri}, {"$set": {"meta.deleted": True}}) - app.logger.exception(f"flagging activity {iri} as deleted, no actor caching") - except Exception as err: - app.logger.exception(f"failed to cache actor for {iri}") - raise TaskError() from err - - return "" - - -@blueprint.route("/task/cache_actor_icon", methods=["POST"]) -def task_cache_actor_icon() -> _Response: - task = p.parse(flask.request) - 
app.logger.info(f"task={task!r}") - actor_iri = task.payload["actor_iri"] - icon_url = task.payload["icon_url"] - try: - MEDIA_CACHE.cache_actor_icon(icon_url) - except Exception as exc: - err = f"failed to cache actor icon {icon_url} for {actor_iri}" - app.logger.exception(err) - raise TaskError() from exc - - return "" - - -@blueprint.route("/task/cache_emoji", methods=["POST"]) -def task_cache_emoji() -> _Response: - task = p.parse(flask.request) - app.logger.info(f"task={task!r}") - iri = task.payload["iri"] - url = task.payload["url"] - try: - MEDIA_CACHE.cache_emoji(url, iri) - except Exception as exc: - err = f"failed to cache emoji {url} at {iri}" - app.logger.exception(err) - raise TaskError() from exc - - return "" - - -@blueprint.route("/task/forward_activity", methods=["POST"]) -def task_forward_activity() -> _Response: - task = p.parse(flask.request) - app.logger.info(f"task={task!r}") - iri = task.payload - try: - activity = ap.fetch_remote_activity(iri) - recipients = back.followers_as_recipients() - app.logger.debug(f"Forwarding {activity!r} to {recipients}") - activity = ap.clean_activity(activity.to_dict()) - payload = json.dumps(activity) - for recp in recipients: - app.logger.debug(f"forwarding {activity!r} to {recp}") - Tasks.post_to_remote_inbox(payload, recp) - except Exception as err: - app.logger.exception("task failed") - raise TaskError() from err - - return "" - - -@blueprint.route("/task/post_to_remote_inbox", methods=["POST"]) -def task_post_to_remote_inbox() -> _Response: - """Post an activity to a remote inbox.""" - task = p.parse(flask.request) - app.logger.info(f"task={task!r}") - payload, to = task.payload["payload"], task.payload["to"] - try: - app.logger.info("payload=%s", payload) - app.logger.info("generating sig") - signed_payload = json.loads(payload) - - app.logger.info("to=%s", to) - resp = requests.post( - to, - data=json.dumps(signed_payload), - auth=SIG_AUTH, - headers={ - "Content-Type": config.HEADERS[1], - "Accept": config.HEADERS[1], - "User-Agent": config.USER_AGENT, - }, - ) - app.logger.info("resp=%s", resp) - app.logger.info("resp_body=%s", resp.text) - resp.raise_for_status() - except HTTPError as err: - track_failed_send(to) - - app.logger.exception("request failed") - if 400 <= err.response.status_code <= 499: - app.logger.info("client error, no retry") - return "" - - raise TaskError() from err - except requests.RequestException: - track_failed_send(to) - - app.logger.exception("request failed") - - except Exception as err: - app.logger.exception("task failed") - raise TaskError() from err - - track_successful_send(to) - - return "" - - -@blueprint.route("/task/fetch_remote_question", methods=["POST"]) -def task_fetch_remote_question() -> _Response: - """Fetch a remote question for implementation that does not send Update.""" - task = p.parse(flask.request) - app.logger.info(f"task={task!r}") - iri = task.payload - try: - app.logger.info(f"Fetching remote question {iri}") - local_question = DB.activities.find_one( - { - "box": Box.INBOX.value, - "type": ap.ActivityType.CREATE.value, - "activity.object.id": iri, - } - ) - try: - remote_question = ap.get_backend().fetch_iri(iri, no_cache=True) - except (ActivityGoneError, ActivityNotFoundError): - app.logger.info("f{iri} not found, no retry") - return "" - - # FIXME(tsileo): compute and set `meta.object_visiblity` (also update utils.py to do it) - if ( - local_question - and ( - local_question["meta"].get("voted_for") - or local_question["meta"].get("subscribed") - ) - and not 
DB.notifications.find_one({"activity.id": remote_question["id"]}) - ): - DB.notifications.insert_one( - { - "type": "question_ended", - "datetime": datetime.now(timezone.utc).isoformat(), - "activity": remote_question, - } - ) - - # Update the Create if we received it in the inbox - if local_question: - DB.activities.update_one( - {"remote_id": local_question["remote_id"], "box": Box.INBOX.value}, - {"$set": {"activity.object": remote_question}}, - ) - - # Also update all the cached copies (Like, Announce...) - DB.activities.update_many( - {"meta.object.id": remote_question["id"]}, - {"$set": {"meta.object": remote_question}}, - ) - - except HTTPError as err: - app.logger.exception("request failed") - if 400 <= err.response.status_code <= 499: - app.logger.info("client error, no retry") - return "" - - raise TaskError() from err - except Exception as err: - app.logger.exception("task failed") - raise TaskError() from err - - return "" - - -@blueprint.route("/task/cleanup", methods=["POST"]) -def task_cleanup() -> _Response: - task = p.parse(flask.request) - app.logger.info(f"task={task!r}") - gc.perform() - return "" - - -def _is_local_reply(activity: ap.BaseActivity) -> bool: - for dest in _to_list(activity.to or []): - if dest.startswith(config.BASE_URL): - return True - - for dest in _to_list(activity.cc or []): - if dest.startswith(config.BASE_URL): - return True - - return False - - -@blueprint.route("/task/process_reply", methods=["POST"]) -def task_process_reply() -> _Response: - """Process `Announce`d posts from Pleroma relays in order to process replies of activities that are in the inbox.""" - task = p.parse(flask.request) - app.logger.info(f"task={task!r}") - iri = task.payload - try: - activity = ap.fetch_remote_activity(iri) - app.logger.info(f"checking for reply activity={activity!r}") - - # Some AP server always return Create when requesting an object - if activity.has_type(ap.ActivityType.CREATE): - activity = activity.get_object() - - in_reply_to = activity.get_in_reply_to() - if not in_reply_to: - # If it's not reply, we can drop it - app.logger.info(f"activity={activity!r} is not a reply, dropping it") - return "" - - root_reply = in_reply_to - - # Fetch the activity reply - reply = ap.fetch_remote_activity(in_reply_to) - if reply.has_type(ap.ActivityType.CREATE): - reply = reply.get_object() - - new_replies = [activity, reply] - - while 1: - in_reply_to = reply.get_in_reply_to() - if not in_reply_to: - break - - root_reply = in_reply_to - reply = ap.fetch_remote_activity(root_reply) - - if reply.has_type(ap.ActivityType.CREATE): - reply = reply.get_object() - - new_replies.append(reply) - - app.logger.info(f"root_reply={reply!r} for activity={activity!r}") - - # In case the activity was from the inbox - update_one_activity( - {**by_object_id(activity.id), **by_type(ap.ActivityType.CREATE)}, - upsert({MetaKey.THREAD_ROOT_PARENT: root_reply}), - ) - - for (new_reply_idx, new_reply) in enumerate(new_replies): - if find_one_activity( - {**by_object_id(new_reply.id), **by_type(ap.ActivityType.CREATE)} - ) or DB.replies.find_one(by_remote_id(new_reply.id)): - continue - - actor = new_reply.get_actor() - is_root_reply = new_reply_idx == len(new_replies) - 1 - if is_root_reply: - reply_flags: Dict[str, Any] = {} - else: - reply_actor = new_replies[new_reply_idx + 1].get_actor() - is_in_reply_to_self = actor.id == reply_actor.id - reply_flags = { - MetaKey.IN_REPLY_TO_SELF.value: is_in_reply_to_self, - MetaKey.IN_REPLY_TO.value: new_reply.get_in_reply_to(), - } - if not 
is_in_reply_to_self: - reply_flags[MetaKey.IN_REPLY_TO_ACTOR.value] = reply_actor.to_dict( - embed=True - ) - - # Save the reply with the cached actor and the thread flag/ID - save_reply( - new_reply, - { - **reply_flags, - MetaKey.THREAD_ROOT_PARENT.value: root_reply, - MetaKey.ACTOR.value: actor.to_dict(embed=True), - MetaKey.ACTOR_HASH.value: _actor_hash(actor), - }, - ) - - # Update the reply counters - if new_reply.get_in_reply_to(): - update_one_activity( - { - **by_object_id(new_reply.get_in_reply_to()), - **by_type(ap.ActivityType.CREATE), - }, - inc(MetaKey.COUNT_REPLY, 1), - ) - DB.replies.update_one( - by_remote_id(new_reply.get_in_reply_to()), - inc(MetaKey.COUNT_REPLY, 1), - ) - - # Cache the actor icon - _cache_actor_icon(actor) - # And cache the attachments - Tasks.cache_attachments(new_reply.id) - except (ActivityGoneError, ActivityNotFoundError): - app.logger.exception(f"dropping activity {iri}, skip processing") - return "" - except Exception as err: - app.logger.exception(f"failed to process new activity {iri}") - raise TaskError() from err - - return "" - - -@blueprint.route("/task/process_new_activity", methods=["POST"]) # noqa:c901 -def task_process_new_activity() -> _Response: - """Process an activity received in the inbox.""" - task = p.parse(flask.request) - app.logger.info(f"task={task!r}") - iri = task.payload - try: - activity = ap.fetch_remote_activity(iri) - app.logger.info(f"activity={activity!r}") - - flags: _NewMeta = {} - - set_inbox_flags(activity, flags) - app.logger.info(f"a={activity}, flags={flags!r}") - if flags: - DB.activities.update_one({"remote_id": activity.id}, {"$set": flags}) - - app.logger.info(f"new activity {iri} processed") - except (ActivityGoneError, ActivityNotFoundError): - app.logger.exception(f"dropping activity {iri}, skip processing") - return "" - except Exception as err: - app.logger.exception(f"failed to process new activity {iri}") - raise TaskError() from err - - return "" diff --git a/blueprints/well_known.py b/blueprints/well_known.py deleted file mode 100644 index bf152f2..0000000 --- a/blueprints/well_known.py +++ /dev/null @@ -1,95 +0,0 @@ -import mimetypes -from typing import Any - -import flask -from flask import abort -from flask import request -from little_boxes import activitypub as ap - -import config -from config import DB -from core.meta import Box -from core.shared import jsonify - -blueprint = flask.Blueprint("well_known", __name__) - - -@blueprint.route("/.well-known/webfinger") -def wellknown_webfinger() -> Any: - """Exposes/serves WebFinger data.""" - resource = request.args.get("resource") - if resource not in [f"acct:{config.USERNAME}@{config.DOMAIN}", config.ID]: - abort(404) - - out = { - "subject": f"acct:{config.USERNAME}@{config.DOMAIN}", - "aliases": [config.ID], - "links": [ - { - "rel": "http://webfinger.net/rel/profile-page", - "type": "text/html", - "href": config.ID, - }, - {"rel": "self", "type": "application/activity+json", "href": config.ID}, - { - "rel": "http://ostatus.org/schema/1.0/subscribe", - "template": config.BASE_URL + "/authorize_follow?profile={uri}", - }, - {"rel": "magic-public-key", "href": config.KEY.to_magic_key()}, - { - "href": config.ICON_URL, - "rel": "http://webfinger.net/rel/avatar", - "type": mimetypes.guess_type(config.ICON_URL)[0], - }, - ], - } - - return jsonify(out, "application/jrd+json; charset=utf-8") - - -@blueprint.route("/.well-known/nodeinfo") -def wellknown_nodeinfo() -> Any: - """Exposes the NodeInfo endpoint (http://nodeinfo.diaspora.software/).""" - 
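# This well-known document only advertises where the full NodeInfo document lives (the /nodeinfo endpoint below). - 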
return jsonify( - { - "links": [ - { - "rel": "http://nodeinfo.diaspora.software/ns/schema/2.1", - "href": f"{config.ID}/nodeinfo", - } - ] - } - ) - - -@blueprint.route("/nodeinfo") -def nodeinfo() -> Any: - """NodeInfo endpoint.""" - q = { - "box": Box.OUTBOX.value, - "meta.deleted": False, - "type": {"$in": [ap.ActivityType.CREATE.value, ap.ActivityType.ANNOUNCE.value]}, - } - - out = { - "version": "2.1", - "software": { - "name": "microblogpub", - "version": config.VERSION, - "repository": "https://github.com/tsileo/microblog.pub", - }, - "protocols": ["activitypub"], - "services": {"inbound": [], "outbound": []}, - "openRegistrations": False, - "usage": {"users": {"total": 1}, "localPosts": DB.activities.count(q)}, - "metadata": { - "nodeName": f"@{config.USERNAME}@{config.DOMAIN}", - "version": config.VERSION, - "versionDate": config.VERSION_DATE, - }, - } - - return jsonify( - out, - "application/json; profile=http://nodeinfo.diaspora.software/ns/schema/2.1#", - ) diff --git a/boussole.json b/boussole.json new file mode 100644 index 0000000..f6236b0 --- /dev/null +++ b/boussole.json @@ -0,0 +1,8 @@ +{ + "SOURCES_PATH": "app/scss", + "TARGET_PATH": "app/static/css", + "LIBRARY_PATHS": [], + "OUTPUT_STYLES": "nested", + "SOURCE_COMMENTS": false, + "EXCLUDES": [] +} diff --git a/config.py b/config.py deleted file mode 100644 index 205be92..0000000 --- a/config.py +++ /dev/null @@ -1,193 +0,0 @@ -import mimetypes -import os -import subprocess -from datetime import datetime -from enum import Enum -from pathlib import Path - -import yaml -from bleach import linkify -from itsdangerous import JSONWebSignatureSerializer -from little_boxes import strtobool -from little_boxes.activitypub import CTX_AS as AP_DEFAULT_CTX -from pymongo import MongoClient - -import sass -from utils.emojis import _load_emojis -from utils.key import KEY_DIR -from utils.key import get_key -from utils.key import get_secret_key -from utils.media import MediaCache - -ROOT_DIR = Path(__file__).parent.absolute() - - -class ThemeStyle(Enum): - LIGHT = "light" - DARK = "dark" - - -DEFAULT_THEME_STYLE = ThemeStyle.LIGHT.value - -DEFAULT_THEME_PRIMARY_COLOR = { - ThemeStyle.LIGHT: "#1d781d", # Green - ThemeStyle.DARK: "#33ff00", # Green -} - - -VERSION = ( - subprocess.check_output(["git", "describe", "--always"]).split()[0].decode("utf-8") - ) -VERSION_DATE = ( - subprocess.check_output(["git", "show", VERSION]) - .decode() - .splitlines()[2] - .split("Date:")[-1] - .strip() -) - -DEBUG_MODE = strtobool(os.getenv("MICROBLOGPUB_DEBUG", "false")) - -HEADERS = [ - "application/activity+json", - "application/ld+json;profile=https://www.w3.org/ns/activitystreams", - 'application/ld+json; profile="https://www.w3.org/ns/activitystreams"', - "application/ld+json", -] - - -with open(os.path.join(KEY_DIR, "me.yml")) as f: - conf = yaml.safe_load(f) - - USERNAME = conf["username"] - NAME = conf["name"] - DOMAIN = conf["domain"] - SCHEME = "https" if conf.get("https", True) else "http" - BASE_URL = SCHEME + "://" + DOMAIN - ID = BASE_URL - SUMMARY = conf["summary"] - ICON_URL = conf["icon_url"] - FAVICON_URL = conf.get("favicon_url", "/static/favicon.png") - PASS = conf["pass"] - - PROFILE_METADATA = conf.get("profile_metadata", {}) - HIDE_FOLLOWING = conf.get("hide_following", True) - - ALIASES = conf.get('aliases', []) - - # Theme-related config - theme_conf = conf.get("theme", {}) - THEME_STYLE = ThemeStyle(theme_conf.get("style", DEFAULT_THEME_STYLE)) - THEME_COLOR = theme_conf.get("color", DEFAULT_THEME_PRIMARY_COLOR[THEME_STYLE]) 
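- # e.g. a me.yml with theme: {style: "dark"} and no explicit color resolves to - # THEME_STYLE=ThemeStyle.DARK and THEME_COLOR="#33ff00"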
- - -DEFAULT_CTX = [ - AP_DEFAULT_CTX, - f"{BASE_URL}/microblogpub-0.1.jsonld", - {"@language": "und"}, -] - -SASS_DIR = os.path.join(os.path.dirname(os.path.abspath(__file__)), "sass") -theme_css = f"$primary-color: {THEME_COLOR};\n" -with open(os.path.join(SASS_DIR, f"{THEME_STYLE.value}.scss")) as f: - theme_css += f.read() - theme_css += "\n" -with open(os.path.join(SASS_DIR, "base_theme.scss")) as f: - raw_css = theme_css + f.read() - CSS = sass.compile(string=raw_css, output_style="compressed") - - -USER_AGENT = f"microblog.pub/{VERSION}; +{BASE_URL}" - -mongo_client = MongoClient( - host=[os.getenv("MICROBLOGPUB_MONGODB_HOST", "localhost:27017")] -) - -DB_NAME = "{}_{}".format(USERNAME, DOMAIN.replace(".", "_")) -DB = mongo_client[DB_NAME] -GRIDFS = mongo_client[f"{DB_NAME}_gridfs"] -MEDIA_CACHE = MediaCache(GRIDFS, USER_AGENT) - - -def _drop_db(): - if not DEBUG_MODE: - return - - mongo_client.drop_database(DB_NAME) - - -KEY = get_key(ID, ID + "#main-key", USERNAME, DOMAIN) - - -JWT_SECRET = get_secret_key("jwt") -JWT = JSONWebSignatureSerializer(JWT_SECRET) - - -def _admin_jwt_token() -> str: - return JWT.dumps( # type: ignore - {"me": "ADMIN", "ts": datetime.now().timestamp()} - ).decode( # type: ignore - "utf-8" - ) - - -ADMIN_API_KEY = get_secret_key("admin_api_key", _admin_jwt_token) - -attachments = [] -if PROFILE_METADATA: - for key, value in PROFILE_METADATA.items(): - attachments.append( - {"type": "PropertyValue", "name": key, "value": linkify(value)} - ) - -MANUALLY_APPROVES_FOLLOWERS = bool(conf.get("manually_approves_followers", False)) - -ME = { - "@context": DEFAULT_CTX, - "type": "Person", - "id": ID, - "following": ID + "/following", - "followers": ID + "/followers", - "featured": ID + "/featured", - "inbox": ID + "/inbox", - "outbox": ID + "/outbox", - "preferredUsername": USERNAME, - "name": NAME, - "summary": SUMMARY, - "endpoints": {}, - "url": ID, - "manuallyApprovesFollowers": MANUALLY_APPROVES_FOLLOWERS, - "attachment": attachments, - "icon": { - "mediaType": mimetypes.guess_type(ICON_URL)[0], - "type": "Image", - "url": ICON_URL, - }, - "publicKey": KEY.to_dict(), - "alsoKnownAs": ALIASES, -} - -# Default emojis, space-separated, update `me.yml` to customize emojis -EMOJIS = "😺 😸 😹 😻 😼 😽 🙀 😿 😾" -if conf.get("emojis"): - EMOJIS = conf["emojis"] - -# Emoji template for the FE -EMOJI_TPL = '<img src="/static/twemoji/{filename}.svg" alt="{raw}" class="emoji">' -if conf.get("emoji_tpl"): - EMOJI_TPL = conf["emoji_tpl"] - -# Hosts blacklist -BLACKLIST = conf.get("blacklist", []) - -# Outbound Webmentions support for public posts -DISABLE_WEBMENTIONS = bool(conf.get("disable_webmentions", False)) - -# Whether replies should be displayed in the stream or not -REPLIES_IN_STREAM = bool(conf.get("replies_in_stream", False)) - -# By default, we keep 14 days of inbox data; outbox is kept forever (along with bookmarked stuff, outbox replies, liked...) 
-DAYS_TO_KEEP = int(conf.get("days_to_keep", 14)) - -# Load custom emojis (stored in static/emojis) -_load_emojis(ROOT_DIR, BASE_URL) diff --git a/config/me.sample.yml b/config/me.sample.yml deleted file mode 100644 index 54e64eb..0000000 --- a/config/me.sample.yml +++ /dev/null @@ -1,20 +0,0 @@ -username: 'username' -name: 'Your Name' -domain: 'your-domain.tld' -https: true -summary: 'your summary' -icon_url: 'https://your-avatar-url' -pass: '<bcrypt password hash>' -#favicon_url: '/static/favicon.png' -#profile_metadata: -# name1: 'value' -# name2: 'value2' -#hide_following: true -#manually_approves_followers: false -#aliases: -# - "http://example.com/users/name" -# - ... -#theme: -# style: "light" #or "dark" -# color: "#1d781d" # (green, default for light) - # "#33ff00" (green, default for dark) diff --git a/core/activitypub.py b/core/activitypub.py deleted file mode 100644 index 83822af..0000000 --- a/core/activitypub.py +++ /dev/null @@ -1,867 +0,0 @@ -import binascii -import hashlib -import logging -import os -from datetime import datetime -from datetime import timezone -from typing import Any -from typing import Dict -from typing import List -from typing import Optional -from urllib.parse import urljoin -from urllib.parse import urlparse - -from bson.objectid import ObjectId -from flask import url_for -from little_boxes import activitypub as ap -from little_boxes import strtobool -from little_boxes.activitypub import _to_list -from little_boxes.activitypub import clean_activity -from little_boxes.activitypub import format_datetime -from little_boxes.backend import Backend -from little_boxes.errors import ActivityGoneError -from little_boxes.httpsig import HTTPSigAuth - -from config import BASE_URL -from config import DB -from config import DEFAULT_CTX -from config import ID -from config import KEY -from config import ME -from config import USER_AGENT -from core.db import find_one_activity -from core.db import update_many_activities -from core.db import update_one_activity -from core.meta import Box -from core.meta import FollowStatus -from core.meta import MetaKey -from core.meta import by_object_id -from core.meta import by_remote_id -from core.meta import by_type -from core.meta import flag -from core.meta import inc -from core.meta import upsert -from core.remote import server -from core.tasks import Tasks -from utils import now - -logger = logging.getLogger(__name__) - -_NewMeta = Dict[str, Any] - -SIG_AUTH = HTTPSigAuth(KEY) - -MY_PERSON = ap.Person(**ME) - -_LOCAL_NETLOC = urlparse(BASE_URL).netloc - - -def is_from_outbox(activity: ap.BaseActivity) -> bool: - return activity.id.startswith(BASE_URL) - - -def is_local_url(url: str) -> bool: - return urlparse(url).netloc == _LOCAL_NETLOC - - -def _remove_id(doc: ap.ObjectType) -> ap.ObjectType: - """Helper for removing MongoDB's `_id` field.""" - doc = doc.copy() - if "_id" in doc: - del doc["_id"] - return doc - - -def _answer_key(choice: str) -> str: - h = hashlib.new("sha1") - h.update(choice.encode()) - return h.hexdigest() - - -def _actor_url(actor: ap.ActivityType) -> str: - if isinstance(actor.url, dict): - if actor.url.get("type") == ap.ActivityType.LINK.value: - return actor.url["href"] - - raise ValueError(f"unknown actor url object type: {actor.url!r}") - - elif isinstance(actor.url, str): - return actor.url - - # Return the actor ID if we cannot get the URL - elif isinstance(actor.id, str): - return actor.id - - else: - raise ValueError(f"invalid actor URL: {actor.url!r}") - - -def _actor_hash(actor: ap.ActivityType, 
local: bool = False) -> str: - """Used to know when to update the meta actor cache, like an "actor version".""" - h = hashlib.new("sha1") - h.update(actor.id.encode()) - h.update((actor.name or "").encode()) - h.update((actor.preferredUsername or "").encode()) - h.update((actor.summary or "").encode()) - h.update(_actor_url(actor).encode()) - key = actor.get_key() - h.update(key.pubkey_pem.encode()) - h.update(key.key_id().encode()) - if isinstance(actor.icon, dict) and "url" in actor.icon: - h.update(actor.icon["url"].encode()) - if local: - # The local hash helps us detect when to send an Update - if actor.attachment: - for item in actor.attachment: - h.update(item["name"].encode()) - h.update(item["value"].encode()) - h.update(("1" if actor.manuallyApprovesFollowers else "0").encode()) - return h.hexdigest() - - -def _is_local_reply(create: ap.Create) -> bool: - for dest in _to_list(create.to or []): - if dest.startswith(BASE_URL): - return True - - for dest in _to_list(create.cc or []): - if dest.startswith(BASE_URL): - return True - - return False - - -def _meta(activity: ap.BaseActivity) -> _NewMeta: - visibility = ap.get_visibility(activity) - is_public = False - if visibility in [ap.Visibility.PUBLIC, ap.Visibility.UNLISTED]: - is_public = True - - object_id = None - try: - object_id = activity.get_object_id() - except Exception: # TODO(tsileo): should be ValueError, but replies trigger a KeyError on object - pass - - object_visibility = None - if activity.has_type( - [ap.ActivityType.CREATE, ap.ActivityType.ANNOUNCE, ap.ActivityType.LIKE] - ): - object_visibility = ap.get_visibility(activity.get_object()).name - - actor_id = activity.get_actor().id - - return { - MetaKey.UNDO.value: False, - MetaKey.DELETED.value: False, - MetaKey.PUBLIC.value: is_public, - MetaKey.SERVER.value: urlparse(activity.id).netloc, - MetaKey.VISIBILITY.value: visibility.name, - MetaKey.ACTOR_ID.value: actor_id, - MetaKey.OBJECT_ID.value: object_id, - MetaKey.OBJECT_VISIBILITY.value: object_visibility, - MetaKey.POLL_ANSWER.value: False, - MetaKey.PUBLISHED.value: activity.published if activity.published else now(), - } - - -def save(box: Box, activity: ap.BaseActivity) -> None: - """Custom helper for saving an activity to the DB.""" - # Set some "type"-related meta - meta = _meta(activity) - if box == Box.OUTBOX and activity.has_type(ap.ActivityType.FOLLOW): - meta[MetaKey.FOLLOW_STATUS.value] = FollowStatus.WAITING.value - elif activity.has_type(ap.ActivityType.CREATE): - mentions = [] - obj = activity.get_object() - for m in obj.get_mentions(): - mentions.append(m.href) - hashtags = [] - for h in obj.get_hashtags(): - hashtags.append(h.name[1:]) # Strip the # - meta.update( - {MetaKey.MENTIONS.value: mentions, MetaKey.HASHTAGS.value: hashtags} - ) - - DB.activities.insert_one( - { - "box": box.value, - "activity": activity.to_dict(), - "type": _to_list(activity.type), - "remote_id": activity.id, - "meta": meta, - } - ) - - -def outbox_is_blocked(actor_id: str) -> bool: - return bool( - DB.activities.find_one( - { - "box": Box.OUTBOX.value, - "type": ap.ActivityType.BLOCK.value, - "activity.object": actor_id, - "meta.undo": False, - } - ) - ) - - -def activity_url(item_id: str) -> str: - return urljoin(BASE_URL, url_for("outbox_detail", item_id=item_id)) - - -def post_to_inbox(activity: ap.BaseActivity) -> None: - # Check for Block activity - actor = activity.get_actor() - if outbox_is_blocked(actor.id): - logger.info( - f"actor {actor!r} is blocked, dropping the received activity {activity!r}" - ) - 
return - - # If the message is coming from a Pleroma relay, we process it as a possible reply to a stream activity - if ( - actor.has_type(ap.ActivityType.APPLICATION) - and actor.id.endswith("/relay") - and activity.has_type(ap.ActivityType.ANNOUNCE) - and not find_one_activity( - { - **by_object_id(activity.get_object_id()), - **by_type(ap.ActivityType.CREATE), - } - ) - and not DB.replies.find_one(by_remote_id(activity.get_object_id())) - ): - Tasks.process_reply(activity.get_object_id()) - return - - # Hubzilla sends an Update with the same ID as the actor, and it poisons the cache - if ( - activity.has_type(ap.ActivityType.UPDATE) - and activity.id == activity.get_object_id() - ): - # Start a task to update the cached actor - Tasks.cache_actor(activity.id) - return - - # Honk forwards activities in a Read, process them as replies - if activity.has_type(ap.ActivityType.READ): - Tasks.process_reply(activity.get_object_id()) - return - - # TODO(tsileo): support ignore from Honk - - # Hubzilla forwards activities in a Create, process them as possible replies - if activity.has_type(ap.ActivityType.CREATE) and server(activity.id) != server( - activity.get_object_id() - ): - Tasks.process_reply(activity.get_object_id()) - return - - if DB.activities.find_one({"box": Box.INBOX.value, "remote_id": activity.id}): - # The activity is already in the inbox - logger.info(f"received duplicate activity {activity!r}, dropping it") - return - - save(Box.INBOX, activity) - logger.info(f"spawning tasks for {activity!r}") - if not activity.has_type([ap.ActivityType.DELETE, ap.ActivityType.UPDATE]): - Tasks.cache_actor(activity.id) - Tasks.process_new_activity(activity.id) - Tasks.finish_post_to_inbox(activity.id) - - -def save_reply(activity: ap.BaseActivity, meta: Dict[str, Any] = {}) -> None: - visibility = ap.get_visibility(activity) - is_public = False - if visibility in [ap.Visibility.PUBLIC, ap.Visibility.UNLISTED]: - is_public = True - - published = activity.published if activity.published else now() - DB.replies.insert_one( - { - "activity": activity.to_dict(), - "type": _to_list(activity.type), - "remote_id": activity.id, - "meta": { - "undo": False, - "deleted": False, - "public": is_public, - "server": urlparse(activity.id).netloc, - "visibility": visibility.name, - "actor_id": activity.get_actor().id, - MetaKey.PUBLISHED.value: published, - **meta, - }, - } - ) - - -def new_context(parent: Optional[ap.BaseActivity] = None) -> str: - """`context` is here to group related activities; it's not meant to be resolved. 
We're just following the convention.""" - # Copy the context from the parent if any - if parent and (parent.context or parent.conversation): - if parent.context: - if isinstance(parent.context, str): - return parent.context - elif isinstance(parent.context, dict) and parent.context.get("id"): - return parent.context["id"] - return parent.conversation - - # Generate a new context - ctx_id = binascii.hexlify(os.urandom(12)).decode("utf-8") - return urljoin(BASE_URL, f"/contexts/{ctx_id}") - - -def post_to_outbox(activity: ap.BaseActivity) -> str: - if activity.has_type(ap.CREATE_TYPES): - activity = activity.build_create() - - # Assign the Create a random ID - obj_id = binascii.hexlify(os.urandom(12)).decode("utf-8") - uri = activity_url(obj_id) - activity._data["id"] = uri - if activity.has_type(ap.ActivityType.CREATE): - activity._data["object"]["id"] = urljoin( - BASE_URL, url_for("outbox_activity", item_id=obj_id) - ) - if "url" not in activity._data["object"]: - activity._data["object"]["url"] = urljoin( - BASE_URL, url_for("note_by_id", note_id=obj_id) - ) - activity.reset_object_cache() - - save(Box.OUTBOX, activity) - Tasks.cache_actor(activity.id) - Tasks.finish_post_to_outbox(activity.id) - return activity.id - - -class MicroblogPubBackend(Backend): - """Implements a Little Boxes backend, backed by MongoDB.""" - - def ap_context(self) -> Any: - return DEFAULT_CTX - - def base_url(self) -> str: - return BASE_URL - - def debug_mode(self) -> bool: - return strtobool(os.getenv("MICROBLOGPUB_DEBUG", "false")) - - def user_agent(self) -> str: - """Set up a custom user agent.""" - return USER_AGENT - - def followers(self) -> List[str]: - q = { - "box": Box.INBOX.value, - "type": ap.ActivityType.FOLLOW.value, - "meta.undo": False, - } - return [doc["activity"]["actor"] for doc in DB.activities.find(q)] - - def followers_as_recipients(self) -> List[str]: - q = { - "box": Box.INBOX.value, - "type": ap.ActivityType.FOLLOW.value, - "meta.undo": False, - } - recipients = [] - for doc in DB.activities.find(q): - recipients.append( - doc["meta"]["actor"]["sharedInbox"] or doc["meta"]["actor"]["inbox"] - ) - - return list(set(recipients)) - - def following(self) -> List[str]: - q = { - "box": Box.OUTBOX.value, - "type": ap.ActivityType.FOLLOW.value, - "meta.undo": False, - } - return [doc["activity"]["object"] for doc in DB.activities.find(q)] - - def parse_collection( - self, payload: Optional[Dict[str, Any]] = None, url: Optional[str] = None - ) -> List[str]: - """Resolve/fetch a `Collection`/`OrderedCollection`.""" - # Resolve internal collections via MongoDB directly - if url == ID + "/followers": - return self.followers() - elif url == ID + "/following": - return self.following() - - return super().parse_collection(payload, url) - - def _fetch_iri(self, iri: str) -> ap.ObjectType: # noqa: C901 - # Shortcut if the instance actor is fetched - if iri == ME["id"]: - return ME - - # Internal collections handling - # Followers - if iri == MY_PERSON.followers: - followers = [] - for data in DB.activities.find( - { - "box": Box.INBOX.value, - "type": ap.ActivityType.FOLLOW.value, - "meta.undo": False, - } - ): - followers.append(data["meta"]["actor_id"]) - return {"type": "Collection", "items": followers} - - # Following - if iri == MY_PERSON.following: - following = [] - for data in DB.activities.find( - { - "box": Box.OUTBOX.value, - "type": ap.ActivityType.FOLLOW.value, - "meta.undo": False, - } - ): - following.append(data["meta"]["object_id"]) - return {"type": "Collection", "items": following} - - 
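# Answering these internal collections straight from MongoDB avoids HTTP-fetching our own endpoints. - 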
# TODO(tsileo): handle the liked collection too - - # Check if the activity is owned by this server - if iri.startswith(BASE_URL): - is_a_note = False - if iri.endswith("/activity"): - iri = iri.replace("/activity", "") - is_a_note = True - data = DB.activities.find_one({"box": Box.OUTBOX.value, "remote_id": iri}) - if data and data["meta"]["deleted"]: - raise ActivityGoneError(f"{iri} is gone") - if data and is_a_note: - return data["activity"]["object"] - elif data: - return data["activity"] - else: - # Check if the activity is stored in the inbox - data = DB.activities.find_one({"remote_id": iri}) - if data: - if data["meta"]["deleted"]: - raise ActivityGoneError(f"{iri} is gone") - return data["activity"] - # Check if we're looking for an object wrapped in a Create - obj = DB.activities.find_one({"meta.object_id": iri, "type": "Create"}) - if obj: - if obj["meta"]["deleted"]: - raise ActivityGoneError(f"{iri} is gone") - cached_object = obj["meta"].get("object") - if cached_object: - return cached_object - - embedded_object = obj["activity"]["object"] - if isinstance(embedded_object, dict): - return embedded_object - - # TODO(tsileo): also check the REPLIES box - - # Check if it's cached because it's a follower - # Remove extra info (like the key hash if any) - cleaned_iri = iri.split("#")[0] - actor = DB.activities.find_one( - {"meta.actor_id": cleaned_iri, "meta.actor": {"$exists": True}} - ) - - # "type" check is here to skip old metadata for "old/buggy" followers - if ( - actor - and actor["meta"].get("actor") - and "type" in actor["meta"]["actor"] - ): - return actor["meta"]["actor"] - - # Check if it's cached because it's a following - actor2 = DB.activities.find_one( - { - "meta.object_id": cleaned_iri, - "type": ap.ActivityType.FOLLOW.value, - "meta.undo": False, - } - ) - if ( - actor2 - and actor2["meta"].get("object") - and "type" in actor2["meta"]["object"] - ): - return actor2["meta"]["object"] - - reply = DB.replies.find_one(by_remote_id(iri)) - if reply: - return reply["activity"] - - # Fetch the URL via HTTP - logger.info(f"dereference {iri} via HTTP") - return super().fetch_iri(iri) - - def fetch_iri(self, iri: str, **kwargs: Any) -> ap.ObjectType: - if not kwargs.pop("no_cache", False): - # Fetch the activity by checking the local DB first - data = self._fetch_iri(iri) - logger.debug(f"_fetch_iri({iri!r}) == {data!r}") - else: - # Pass the SIG_AUTH to enable "authenticated fetch" - data = super().fetch_iri(iri, auth=SIG_AUTH) - logger.debug(f"fetch_iri({iri!r}) == {data!r}") - - return data - - -def embed_collection(total_items, first_page_id): - """Helper creating a root OrderedCollection with a link to the first page.""" - return { - "type": ap.ActivityType.ORDERED_COLLECTION.value, - "totalItems": total_items, - "first": f"{first_page_id}?page=first", - "id": first_page_id, - } - - -def simple_build_ordered_collection(col_name, data): - return { - "@context": DEFAULT_CTX, - "id": BASE_URL + "/" + col_name, - "totalItems": len(data), - "type": ap.ActivityType.ORDERED_COLLECTION.value, - "orderedItems": data, - } - - -def build_ordered_collection( - col, q=None, cursor=None, map_func=None, limit=50, col_name=None, first_page=False -): - """Helper for building an OrderedCollection from a MongoDB query (with pagination support).""" - col_name = col_name or col.name - if q is None: - q = {} - - if cursor: - q["_id"] = {"$lt": ObjectId(cursor)} - data = list(col.find(q, limit=limit).sort("_id", -1)) - - if not data: - # Returns an empty page if there's a cursor - if 
cursor: - return { - "@context": DEFAULT_CTX, - "type": ap.ActivityType.ORDERED_COLLECTION_PAGE.value, - "id": BASE_URL + "/" + col_name + "?cursor=" + cursor, - "partOf": BASE_URL + "/" + col_name, - "totalItems": 0, - "orderedItems": [], - } - return { - "@context": DEFAULT_CTX, - "id": BASE_URL + "/" + col_name, - "totalItems": 0, - "type": ap.ActivityType.ORDERED_COLLECTION.value, - "orderedItems": [], - } - - start_cursor = str(data[0]["_id"]) - next_page_cursor = str(data[-1]["_id"]) - total_items = col.find(q).count() - - data = [_remove_id(doc) for doc in data] - if map_func: - data = [map_func(doc) for doc in data] - - # No cursor, this is the first page and we return an OrderedCollection - if not cursor: - resp = { - "@context": DEFAULT_CTX, - "id": f"{BASE_URL}/{col_name}", - "totalItems": total_items, - "type": ap.ActivityType.ORDERED_COLLECTION.value, - "first": { - "id": f"{BASE_URL}/{col_name}?cursor={start_cursor}", - "orderedItems": data, - "partOf": f"{BASE_URL}/{col_name}", - "totalItems": total_items, - "type": ap.ActivityType.ORDERED_COLLECTION_PAGE.value, - }, - } - - if len(data) == limit: - resp["first"]["next"] = ( - BASE_URL + "/" + col_name + "?cursor=" + next_page_cursor - ) - - if first_page: - return resp["first"] - - return resp - - # If there's a cursor, then we return an OrderedCollectionPage - resp = { - "@context": DEFAULT_CTX, - "type": ap.ActivityType.ORDERED_COLLECTION_PAGE.value, - "id": BASE_URL + "/" + col_name + "?cursor=" + start_cursor, - "totalItems": total_items, - "partOf": BASE_URL + "/" + col_name, - "orderedItems": data, - } - if len(data) == limit: - resp["next"] = BASE_URL + "/" + col_name + "?cursor=" + next_page_cursor - - if first_page: - # This is already a page, there's no "first" key here - return resp - - # XXX(tsileo): implement prev with prev=<first item cursor>? 
- - return resp - - -def _add_answers_to_question(raw_doc: Dict[str, Any]) -> None: - activity = raw_doc["activity"] - if ( - ap._has_type(activity["type"], ap.ActivityType.CREATE) - and "object" in activity - and ap._has_type(activity["object"]["type"], ap.ActivityType.QUESTION) - ): - for choice in activity["object"].get("oneOf", activity["object"].get("anyOf")): - choice["replies"] = { - "type": ap.ActivityType.COLLECTION.value, - "totalItems": raw_doc["meta"] - .get("question_answers", {}) - .get(_answer_key(choice["name"]), 0), - } - now = datetime.now(timezone.utc) - if format_datetime(now) >= activity["object"]["endTime"]: - activity["object"]["closed"] = activity["object"]["endTime"] - - -def add_extra_collection(raw_doc: Dict[str, Any]) -> Dict[str, Any]: - if not ap._has_type(raw_doc["activity"]["type"], ap.ActivityType.CREATE.value): - return raw_doc - - raw_doc["activity"]["object"]["replies"] = embed_collection( - raw_doc.get("meta", {}).get(MetaKey.COUNT_REPLY.value, 0), - f'{raw_doc["remote_id"]}/replies', - ) - - raw_doc["activity"]["object"]["likes"] = embed_collection( - raw_doc.get("meta", {}).get(MetaKey.COUNT_LIKE.value, 0), - f'{raw_doc["remote_id"]}/likes', - ) - - raw_doc["activity"]["object"]["shares"] = embed_collection( - raw_doc.get("meta", {}).get(MetaKey.COUNT_BOOST.value, 0), - f'{raw_doc["remote_id"]}/shares', - ) - - return raw_doc - - -def remove_context(activity: Dict[str, Any]) -> Dict[str, Any]: - if "@context" in activity: - del activity["@context"] - return activity - - -def activity_from_doc(raw_doc: Dict[str, Any], embed: bool = False) -> Dict[str, Any]: - raw_doc = add_extra_collection(raw_doc) - activity = clean_activity(raw_doc["activity"]) - - # Handle Questions - # TODO(tsileo): what about object embedded by ID/URL? 
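- # (The helper below assumes the object is embedded as a dict; an object referenced only by IRI is what the TODO above is about.)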
- _add_answers_to_question(raw_doc) - if embed: - return remove_context(activity) - return activity - - -def _cache_actor_icon(actor: ap.BaseActivity) -> None: - if actor.icon: - if isinstance(actor.icon, dict) and "url" in actor.icon: - Tasks.cache_actor_icon(actor.icon["url"], actor.id) - else: - logger.warning(f"failed to parse icon {actor.icon} for {actor!r}") - - -def update_cached_actor(actor: ap.BaseActivity) -> None: - actor_hash = _actor_hash(actor) - update_many_activities( - { - **flag(MetaKey.ACTOR_ID, actor.id), - **flag(MetaKey.ACTOR_HASH, {"$ne": actor_hash}), - }, - upsert( - {MetaKey.ACTOR: actor.to_dict(embed=True), MetaKey.ACTOR_HASH: actor_hash} - ), - ) - update_many_activities( - { - **flag(MetaKey.OBJECT_ACTOR_ID, actor.id), - **flag(MetaKey.OBJECT_ACTOR_HASH, {"$ne": actor_hash}), - }, - upsert( - { - MetaKey.OBJECT_ACTOR: actor.to_dict(embed=True), - MetaKey.OBJECT_ACTOR_HASH: actor_hash, - } - ), - ) - DB.replies.update_many( - { - **flag(MetaKey.ACTOR_ID, actor.id), - **flag(MetaKey.ACTOR_HASH, {"$ne": actor_hash}), - }, - upsert( - {MetaKey.ACTOR: actor.to_dict(embed=True), MetaKey.ACTOR_HASH: actor_hash} - ), - ) - # TODO(tsileo): Also update following (it's in the object) - # DB.activities.update_many( - # {"meta.object_id": actor.id}, {"$set": {"meta.object": actor.to_dict(embed=True)}} - # ) - _cache_actor_icon(actor) - Tasks.cache_emojis(actor) - - -def handle_question_reply(create: ap.Create, question: ap.Question) -> None: - choice = create.get_object().name - - # Ensure it's a valid choice - if choice not in [c["name"] for c in question._data.get("oneOf", question.anyOf)]: - logger.info("invalid choice") - return - - # Hash the choice/answer (so we can use it as a key) - answer_key = _answer_key(choice) - - is_single_choice = bool(question._data.get("oneOf", [])) - dup_query = { - "activity.object.actor": create.get_actor().id, - "meta.answer_to": question.id, - **({} if is_single_choice else {"meta.poll_answer_choice": choice}), - } - - print(f"dup_q={dup_query}") - # Check for duplicate votes - if DB.activities.find_one(dup_query): - logger.info("duplicate response") - return - - # Update the DB - - DB.activities.update_one( - {**by_object_id(question.id), **by_type(ap.ActivityType.CREATE)}, - { - "$inc": { - "meta.question_replies": 1, - f"meta.question_answers.{answer_key}": 1, - } - }, - ) - - DB.activities.update_one( - by_remote_id(create.id), - { - "$set": { - "meta.poll_answer_to": question.id, - "meta.poll_answer_choice": choice, - "meta.stream": False, - "meta.poll_answer": True, - } - }, - ) - - return None - - -def handle_replies(create: ap.Create) -> None: - """Go up to the root reply, store unknown replies in the `threads` DB and set the "meta.thread_root_parent" - key to make it easy to query a whole thread.""" - in_reply_to = create.get_object().get_in_reply_to() - if not in_reply_to: - return - - reply = ap.fetch_remote_activity(in_reply_to) - if reply.has_type(ap.ActivityType.CREATE): - reply = reply.get_object() - # FIXME(tsileo): can be a 403 too, in this case what to do? 
not error at least - - # Ensure that this is a local reply to a question, with direct "to" addressing - if ( - reply.id.startswith(BASE_URL) - and reply.has_type(ap.ActivityType.QUESTION.value) - and _is_local_reply(create) - and not create.is_public() - ): - return handle_question_reply(create, reply) - elif ( - create.id.startswith(BASE_URL) - and reply.has_type(ap.ActivityType.QUESTION.value) - and not create.is_public() - ): - # Keep track of our own votes - DB.activities.update_one( - {"activity.object.id": reply.id, "box": "inbox"}, - { - "$set": { - f"meta.poll_answers_sent.{_answer_key(create.get_object().name)}": True - } - }, - ) - # Mark our reply as a poll answer, to "hide" it from the UI - update_one_activity( - by_remote_id(create.id), - upsert({MetaKey.POLL_ANSWER: True, MetaKey.POLL_ANSWER_TO: reply.id}), - ) - return None - - in_reply_to_data = {MetaKey.IN_REPLY_TO: in_reply_to} - # Update the activity to save some data about the reply - if reply.get_actor().id == create.get_actor().id: - in_reply_to_data.update({MetaKey.IN_REPLY_TO_SELF: True}) - else: - in_reply_to_data.update( - {MetaKey.IN_REPLY_TO_ACTOR: reply.get_actor().to_dict(embed=True)} - ) - update_one_activity(by_remote_id(create.id), upsert(in_reply_to_data)) - - # It's a regular reply, try to increment the reply counter - creply = DB.activities.find_one_and_update( - {**by_object_id(in_reply_to), **by_type(ap.ActivityType.CREATE)}, - inc(MetaKey.COUNT_REPLY, 1), - ) - if not creply: - # Maybe it's a reply to a reply? - DB.replies.find_one_and_update( - by_remote_id(in_reply_to), inc(MetaKey.COUNT_REPLY, 1) - ) - - # Spawn a task to process it (and determine if it needs to be saved) - Tasks.process_reply(create.get_object_id()) - - -def accept_follow(activity: ap.BaseActivity) -> str: - actor_id = activity.get_actor().id - accept = ap.Accept( - actor=ID, - context=new_context(activity), - object={ - "type": "Follow", - "id": activity.id, - "object": activity.get_object_id(), - "actor": actor_id, - }, - to=[actor_id], - published=now(), - ) - update_one_activity( - by_remote_id(activity.id), - upsert({MetaKey.FOLLOW_STATUS: FollowStatus.ACCEPTED.value}), - ) - return post_to_outbox(accept) diff --git a/core/db.py b/core/db.py deleted file mode 100644 index ee12047..0000000 --- a/core/db.py +++ /dev/null @@ -1,38 +0,0 @@ -from enum import Enum -from enum import unique -from typing import Any -from typing import Dict -from typing import Iterable -from typing import Optional - -from config import DB - -_Q = Dict[str, Any] -_D = Dict[str, Any] -_Doc = Optional[_D] - - -@unique -class CollectionName(Enum): - ACTIVITIES = "activities" - REMOTE = "remote" - - -def find_one_activity(q: _Q) -> _Doc: - return DB[CollectionName.ACTIVITIES.value].find_one(q) - - -def find_activities(q: _Q) -> Iterable[_D]: - return DB[CollectionName.ACTIVITIES.value].find(q) - - -def update_one_activity(q: _Q, update: _Q) -> bool: - return DB[CollectionName.ACTIVITIES.value].update_one(q, update).matched_count == 1 - - -def update_many_activities(q: _Q, update: _Q) -> None: - DB[CollectionName.ACTIVITIES.value].update_many(q, update) - - -def update_one_remote(filter_: _Q, update: _Q, upsert: bool = False) -> None: - DB[CollectionName.REMOTE.value].update_one(filter_, update, upsert) diff --git a/core/feed.py b/core/feed.py deleted file mode 100644 index 93b6f65..0000000 --- a/core/feed.py +++ /dev/null @@ -1,125 +0,0 @@ -from typing import Any -from typing import Dict -from typing import Optional - -from feedgen.feed import 
FeedGenerator -from html2text import html2text -from little_boxes import activitypub as ap - -from config import ID -from config import ME -from config import USERNAME -from core.db import DB -from core.meta import Box - - -def gen_feed(): - fg = FeedGenerator() - fg.id(f"{ID}") - fg.title(f"{USERNAME} notes") - fg.author({"name": USERNAME}) - fg.link(href=ID, rel="alternate") - fg.description(f"{USERNAME} notes") - fg.logo(ME.get("icon", {}).get("url")) - fg.language("en") - for item in DB.activities.find( - { - "box": Box.OUTBOX.value, - "type": "Create", - "meta.deleted": False, - "meta.public": True, - }, - limit=10, - ).sort("_id", -1): - fe = fg.add_entry() - fe.id(item["activity"]["object"].get("url")) - fe.link(href=item["activity"]["object"].get("url")) - fe.title(item["activity"]["object"]["content"]) - fe.description(item["activity"]["object"]["content"]) - return fg - - -def json_feed(path: str) -> Dict[str, Any]: - """JSON Feed (https://jsonfeed.org/) document.""" - data = [] - for item in DB.activities.find( - { - "box": Box.OUTBOX.value, - "type": "Create", - "meta.deleted": False, - "meta.public": True, - }, - limit=10, - ).sort("_id", -1): - data.append( - { - "id": item["activity"]["id"], - "url": item["activity"]["object"].get("url"), - "content_html": item["activity"]["object"]["content"], - "content_text": html2text(item["activity"]["object"]["content"]), - "date_published": item["activity"]["object"].get("published"), - } - ) - return { - "version": "https://jsonfeed.org/version/1", - "user_comment": ( - "This is a microblog feed. You can add this to your feed reader using the following URL: " - + ID - + path - ), - "title": USERNAME, - "home_page_url": ID, - "feed_url": ID + path, - "author": { - "name": USERNAME, - "url": ID, - "avatar": ME.get("icon", {}).get("url"), - }, - "items": data, - } - - -def build_inbox_json_feed( - path: str, request_cursor: Optional[str] = None -) -> Dict[str, Any]: - """Build a JSON feed from the inbox activities.""" - data = [] - cursor = None - - q: Dict[str, Any] = { - "type": "Create", - "meta.deleted": False, - "box": Box.INBOX.value, - } - if request_cursor: - q["_id"] = {"$lt": request_cursor} - - for item in DB.activities.find(q, limit=50).sort("_id", -1): - actor = ap.get_backend().fetch_iri(item["activity"]["actor"]) - data.append( - { - "id": item["activity"]["id"], - "url": item["activity"]["object"].get("url"), - "content_html": item["activity"]["object"]["content"], - "content_text": html2text(item["activity"]["object"]["content"]), - "date_published": item["activity"]["object"].get("published"), - "author": { - "name": actor.get("name", actor.get("preferredUsername")), - "url": actor.get("url"), - "avatar": actor.get("icon", {}).get("url"), - }, - } - ) - cursor = str(item["_id"]) - - resp = { - "version": "https://jsonfeed.org/version/1", - "title": f"{USERNAME}'s stream", - "home_page_url": ID, - "feed_url": ID + path, - "items": data, - } - if cursor and len(data) == 50: - resp["next_url"] = ID + path + "?cursor=" + cursor - - return resp diff --git a/core/gc.py b/core/gc.py deleted file mode 100644 index aeee1c4..0000000 --- a/core/gc.py +++ /dev/null @@ -1,245 +0,0 @@ -import logging -from datetime import datetime -from datetime import timedelta -from time import perf_counter -from typing import Any -from typing import Dict -from typing import List - -from little_boxes import activitypub as ap -from little_boxes.errors import ActivityGoneError -from little_boxes.errors import RemoteServerUnavailableError - -from 
config import DAYS_TO_KEEP -from config import ID -from config import ME -from config import MEDIA_CACHE -from core import activitypub -from core.meta import Box -from core.meta import MetaKey -from core.meta import _meta -from core.meta import by_type -from core.meta import in_inbox -from utils.migrations import DB - -back = activitypub.MicroblogPubBackend() -ap.use_backend(back) - -MY_PERSON = ap.Person(**ME) - -logger = logging.getLogger(__name__) - - -def threads_of_interest() -> List[str]: - out = set() - - # Fetch all the threads we've participated in - for data in DB.activities.find( - { - "meta.thread_root_parent": {"$exists": True}, - "box": Box.OUTBOX.value, - "type": ap.ActivityType.CREATE.value, - } - ): - out.add(data["meta"]["thread_root_parent"]) - - # Fetch all threads related to bookmarked activities - for data in DB.activities.find({"meta.bookmarked": True}): - # Keep the replies - out.add(data["meta"]["object_id"]) - # And the whole thread if any - if "thread_root_parent" in data["meta"]: - out.add(data["meta"]["thread_root_parent"]) - - return list(out) - - -def _keep(data: Dict[str, Any]) -> None: - DB.activities.update_one({"_id": data["_id"]}, {"$set": {"meta.gc_keep": True}}) - - -def perform() -> None: # noqa: C901 - start = perf_counter() - d = (datetime.utcnow() - timedelta(days=DAYS_TO_KEEP)).strftime("%Y-%m-%d") - toi = threads_of_interest() - logger.info(f"thread_of_interest={toi!r}") - - delete_deleted = DB.activities.delete_many( - { - **in_inbox(), - **by_type(ap.ActivityType.DELETE), - _meta(MetaKey.PUBLISHED): {"$lt": d}, - } - ).deleted_count - logger.info(f"{delete_deleted} Delete deleted") - - create_deleted = 0 - create_count = 0 - # Go over the old Create activities - for data in DB.activities.find( - { - "box": Box.INBOX.value, - "type": ap.ActivityType.CREATE.value, - _meta(MetaKey.PUBLISHED): {"$lt": d}, - "meta.gc_keep": {"$exists": False}, - } - ).limit(500): - try: - logger.info(f"data={data!r}") - create_count += 1 - remote_id = data["remote_id"] - meta = data["meta"] - - # This activity has been bookmarked, keep it - if meta.get("bookmarked"): - _keep(data) - continue - - obj = None - if not meta.get("deleted"): - try: - activity = ap.parse_activity(data["activity"]) - logger.info(f"activity={activity!r}") - obj = activity.get_object() - except (RemoteServerUnavailableError, ActivityGoneError): - logger.exception( - f"failed to load {remote_id}, this activity will be deleted" - ) - - # This activity mentions the server actor, keep it - if obj and obj.has_mention(ID): - _keep(data) - continue - - # This activity is a direct reply to one of the server actor's activities, keep it - if obj: - in_reply_to = obj.get_in_reply_to() - if in_reply_to and in_reply_to.startswith(ID): - _keep(data) - continue - - # This activity is part of a thread we want to keep, keep it - if obj and in_reply_to and meta.get("thread_root_parent"): - thread_root_parent = meta["thread_root_parent"] - if thread_root_parent.startswith(ID) or thread_root_parent in toi: - _keep(data) - continue - - # This activity was boosted or liked, keep it - if meta.get("boosted") or meta.get("liked"): - _keep(data) - continue - - # TODO(tsileo): remove after tests - if meta.get("keep"): - logger.warning( - f"{activity!r} would not have been deleted, skipping for now" - ) - _keep(data) - continue - - # Delete the cached attachment - for grid_item in MEDIA_CACHE.fs.find({"remote_id": remote_id}): - MEDIA_CACHE.fs.delete(grid_item._id) - - # Delete the activity - DB.activities.delete_one({"_id": 
data["_id"]}) - create_deleted += 1 - except Exception: - logger.exception(f"failed to process {data!r}") - - for data in DB.replies.find( - {_meta(MetaKey.PUBLISHED): {"$lt": d}, "meta.gc_keep": {"$exists": False}} - ).limit(500): - try: - logger.info(f"data={data!r}") - create_count += 1 - remote_id = data["remote_id"] - meta = data["meta"] - - # This activity has been bookmarked, keep it - if meta.get("bookmarked"): - _keep(data) - continue - - obj = ap.parse_activity(data["activity"]) - # This activity is a direct reply of one the server actor activity, keep it - in_reply_to = obj.get_in_reply_to() - - # This activity is part of a thread we want to keep, keep it - if in_reply_to and meta.get("thread_root_parent"): - thread_root_parent = meta["thread_root_parent"] - if thread_root_parent.startswith(ID) or thread_root_parent in toi: - _keep(data) - continue - - # This activity was boosted or liked, keep it - if meta.get("boosted") or meta.get("liked"): - _keep(data) - continue - - # Delete the cached attachment - for grid_item in MEDIA_CACHE.fs.find({"remote_id": remote_id}): - MEDIA_CACHE.fs.delete(grid_item._id) - - # Delete the activity - DB.replies.delete_one({"_id": data["_id"]}) - create_deleted += 1 - except Exception: - logger.exception(f"failed to process {data!r}") - - after_gc_create = perf_counter() - time_to_gc_create = after_gc_create - start - logger.info( - f"{time_to_gc_create:.2f} seconds to analyze {create_count} Create, {create_deleted} deleted" - ) - - announce_count = 0 - announce_deleted = 0 - # Go over the old Create activities - for data in DB.activities.find( - { - "box": Box.INBOX.value, - "type": ap.ActivityType.ANNOUNCE.value, - _meta(MetaKey.PUBLISHED): {"$lt": d}, - "meta.gc_keep": {"$exists": False}, - } - ).limit(500): - try: - announce_count += 1 - remote_id = data["remote_id"] - meta = data["meta"] - activity = ap.parse_activity(data["activity"]) - logger.info(f"activity={activity!r}") - - # This activity has been bookmarked, keep it - if meta.get("bookmarked"): - _keep(data) - continue - - object_id = activity.get_object_id() - - # This announce is for a local activity (i.e. 
from the outbox), keep it - if object_id.startswith(ID): - _keep(data) - continue - - for grid_item in MEDIA_CACHE.fs.find({"remote_id": remote_id}): - MEDIA_CACHE.fs.delete(grid_item._id) - - # TODO(tsileo): here for legacy reason, this needs to be removed at some point - for grid_item in MEDIA_CACHE.fs.find({"remote_id": object_id}): - MEDIA_CACHE.fs.delete(grid_item._id) - - # Delete the activity - DB.activities.delete_one({"_id": data["_id"]}) - - announce_deleted += 1 - except Exception: - logger.exception(f"failed to process {data!r}") - - after_gc_announce = perf_counter() - time_to_gc_announce = after_gc_announce - after_gc_create - logger.info( - f"{time_to_gc_announce:.2f} seconds to analyze {announce_count} Announce, {announce_deleted} deleted" - ) diff --git a/core/inbox.py b/core/inbox.py deleted file mode 100644 index 7aa2127..0000000 --- a/core/inbox.py +++ /dev/null @@ -1,263 +0,0 @@ -import logging -from functools import singledispatch -from typing import Any -from typing import Dict - -from little_boxes import activitypub as ap -from little_boxes.errors import NotAnActivityError - -import config -from core.activitypub import _answer_key -from core.activitypub import accept_follow -from core.activitypub import handle_replies -from core.activitypub import update_cached_actor -from core.db import DB -from core.db import update_one_activity -from core.meta import FollowStatus -from core.meta import MetaKey -from core.meta import by_object_id -from core.meta import by_remote_id -from core.meta import by_type -from core.meta import in_inbox -from core.meta import inc -from core.meta import upsert -from core.tasks import Tasks - -_logger = logging.getLogger(__name__) - -_NewMeta = Dict[str, Any] - - -@singledispatch -def process_inbox(activity: ap.BaseActivity, new_meta: _NewMeta) -> None: - _logger.warning(f"skipping {activity!r}") - return None - - -@process_inbox.register -def _delete_process_inbox(delete: ap.Delete, new_meta: _NewMeta) -> None: - _logger.info(f"process_inbox activity={delete!r}") - obj_id = delete.get_object_id() - _logger.debug(f"delete object={obj_id}") - try: - # FIXME(tsileo): call the DB here instead? 
like for the oubox - obj = ap.fetch_remote_activity(obj_id) - _logger.info(f"inbox_delete handle_replies obj={obj!r}") - in_reply_to = obj.get_in_reply_to() if obj.inReplyTo else None - if obj.has_type(ap.CREATE_TYPES): - post_query = {**by_object_id(obj_id), **by_type(ap.ActivityType.CREATE)} - in_reply_to = ap._get_id( - DB.activities.find_one(post_query)["activity"]["object"].get( - "inReplyTo" - ) - ) - if in_reply_to: - DB.activities.update_one( - {**by_object_id(in_reply_to), **by_type(ap.ActivityType.CREATE)}, - inc(MetaKey.COUNT_REPLY, -1), - ) - DB.replies.update_one( - by_remote_id(in_reply_to), inc(MetaKey.COUNT_REPLY, -1) - ) - except Exception: - _logger.exception(f"failed to handle delete replies for {obj_id}") - - update_one_activity( - {**by_object_id(obj_id), **by_type(ap.ActivityType.CREATE)}, - upsert({MetaKey.DELETED: True}), - ) - - # Foce undo other related activities - DB.activities.update(by_object_id(obj_id), upsert({MetaKey.UNDO: True})) - - -@process_inbox.register -def _update_process_inbox(update: ap.Update, new_meta: _NewMeta) -> None: - _logger.info(f"process_inbox activity={update!r}") - obj = update.get_object() - if obj.ACTIVITY_TYPE == ap.ActivityType.NOTE: - update_one_activity( - {"activity.object.id": obj.id}, {"$set": {"activity.object": obj.to_dict()}} - ) - elif obj.has_type(ap.ActivityType.QUESTION): - choices = obj._data.get("oneOf", obj.anyOf) - total_replies = 0 - _set = {} - for choice in choices: - answer_key = _answer_key(choice["name"]) - cnt = choice["replies"]["totalItems"] - total_replies += cnt - _set[f"meta.question_answers.{answer_key}"] = cnt - - _set["meta.question_replies"] = total_replies - - update_one_activity({**in_inbox(), **by_object_id(obj.id)}, {"$set": _set}) - # Also update the cached copies of the question (like Announce and Like) - DB.activities.update_many( - by_object_id(obj.id), upsert({MetaKey.OBJECT: obj.to_dict()}) - ) - - elif obj.has_type(ap.ACTOR_TYPES): - actor = ap.fetch_remote_activity(obj.id, no_cache=True) - update_cached_actor(actor) - - else: - raise ValueError(f"don't know how to update {obj!r}") - - -@process_inbox.register -def _create_process_inbox(create: ap.Create, new_meta: _NewMeta) -> None: - _logger.info(f"process_inbox activity={create!r}") - # If it's a `Quesiion`, trigger an async task for updating it later (by fetching the remote and updating the - # local copy) - obj = create.get_object() - if obj.has_type(ap.ActivityType.QUESTION): - Tasks.fetch_remote_question(obj) - - Tasks.cache_emojis(obj) - - handle_replies(create) - - -@process_inbox.register -def _announce_process_inbox(announce: ap.Announce, new_meta: _NewMeta) -> None: - _logger.info(f"process_inbox activity={announce!r}") - # TODO(tsileo): actually drop it without storing it and better logging, also move the check somewhere else - # or remove it? 
- try: - obj = announce.get_object() - except NotAnActivityError: - _logger.exception( - f'received an Annouce referencing an OStatus notice ({announce._data["object"]}), dropping the message' - ) - return - - if obj.has_type(ap.ActivityType.QUESTION): - Tasks.fetch_remote_question(obj) - - # Cache the announced object - Tasks.cache_object(announce.id) - - # Process the reply of the announced object if any - in_reply_to = obj.get_in_reply_to() - if in_reply_to: - reply = ap.fetch_remote_activity(in_reply_to) - if reply.has_type(ap.ActivityType.CREATE): - reply = reply.get_object() - - in_reply_to_data = {MetaKey.IN_REPLY_TO: in_reply_to} - # Update the activity to save some data about the reply - if reply.get_actor().id == obj.get_actor().id: - in_reply_to_data.update({MetaKey.IN_REPLY_TO_SELF: True}) - else: - in_reply_to_data.update( - {MetaKey.IN_REPLY_TO_ACTOR: reply.get_actor().to_dict(embed=True)} - ) - update_one_activity(by_remote_id(announce.id), upsert(in_reply_to_data)) - # Spawn a task to process it (and determine if it needs to be saved) - Tasks.process_reply(reply.id) - - update_one_activity( - {**by_type(ap.ActivityType.CREATE), **by_object_id(obj.id)}, - inc(MetaKey.COUNT_BOOST, 1), - ) - - -@process_inbox.register -def _like_process_inbox(like: ap.Like, new_meta: _NewMeta) -> None: - _logger.info(f"process_inbox activity={like!r}") - obj = like.get_object() - # Update the meta counter if the object is published by the server - update_one_activity( - {**by_type(ap.ActivityType.CREATE), **by_object_id(obj.id)}, - inc(MetaKey.COUNT_LIKE, 1), - ) - - -@process_inbox.register -def _emoji_reaction_process_inbox( - emoji_reaction: ap.EmojiReaction, new_meta: _NewMeta -) -> None: - _logger.info(f"process_inbox activity={emoji_reaction!r}") - obj = emoji_reaction.get_object() - # Try to update an existing emoji reaction counter entry for the activity emoji - if not update_one_activity( - { - **by_type(ap.ActivityType.CREATE), - **by_object_id(obj.id), - "meta.emoji_reactions.emoji": emoji_reaction.content, - }, - {"$inc": {"meta.emoji_reactions.$.count": 1}}, - ): - # Bootstrap the current emoji counter - update_one_activity( - {**by_type(ap.ActivityType.CREATE), **by_object_id(obj.id)}, - { - "$push": { - "meta.emoji_reactions": { - "emoji": emoji_reaction.content, - "count": 1, - } - } - }, - ) - - -@process_inbox.register -def _follow_process_inbox(activity: ap.Follow, new_meta: _NewMeta) -> None: - _logger.info(f"process_inbox activity={activity!r}") - # Reply to a Follow with an Accept if we're not manully approving them - if not config.MANUALLY_APPROVES_FOLLOWERS: - accept_follow(activity) - else: - update_one_activity( - by_remote_id(activity.id), - upsert({MetaKey.FOLLOW_STATUS: FollowStatus.WAITING.value}), - ) - - -def _update_follow_status(follow_id: str, status: FollowStatus) -> None: - _logger.info(f"{follow_id} is {status}") - update_one_activity( - by_remote_id(follow_id), upsert({MetaKey.FOLLOW_STATUS: status.value}) - ) - - -@process_inbox.register -def _accept_process_inbox(activity: ap.Accept, new_meta: _NewMeta) -> None: - _logger.info(f"process_inbox activity={activity!r}") - # Set a flag on the follow - follow = activity.get_object_id() - _update_follow_status(follow, FollowStatus.ACCEPTED) - - -@process_inbox.register -def _reject_process_inbox(activity: ap.Reject, new_meta: _NewMeta) -> None: - _logger.info(f"process_inbox activity={activity!r}") - follow = activity.get_object_id() - _update_follow_status(follow, FollowStatus.REJECTED) - - 
-@process_inbox.register -def _undo_process_inbox(activity: ap.Undo, new_meta: _NewMeta) -> None: - _logger.info(f"process_inbox activity={activity!r}") - # Fetch the object that's been undo'ed - obj = activity.get_object() - - # Set the undo flag on the mentionned activity - update_one_activity(by_remote_id(obj.id), upsert({MetaKey.UNDO: True})) - - # Handle cached counters - if obj.has_type(ap.ActivityType.LIKE): - # Update the meta counter if the object is published by the server - update_one_activity( - {**by_object_id(obj.get_object_id()), **by_type(ap.ActivityType.CREATE)}, - inc(MetaKey.COUNT_LIKE, -1), - ) - elif obj.has_type(ap.ActivityType.ANNOUNCE): - announced = obj.get_object() - # Update the meta counter if the object is published by the server - update_one_activity( - {**by_type(ap.ActivityType.CREATE), **by_object_id(announced.id)}, - inc(MetaKey.COUNT_BOOST, -1), - ) diff --git a/core/indexes.py b/core/indexes.py deleted file mode 100644 index 9df3485..0000000 --- a/core/indexes.py +++ /dev/null @@ -1,97 +0,0 @@ -import pymongo - -from config import DB -from config import MEDIA_CACHE -from core.meta import MetaKey -from core.meta import _meta - - -def create_indexes(): - if "trash" not in DB.collection_names(): - DB.create_collection("trash", capped=True, size=50 << 20) # 50 MB - - if "activities" in DB.collection_names(): - DB.command("compact", "activities") - - try: - MEDIA_CACHE.fs._GridFS__database.command("compact", "fs.files") - MEDIA_CACHE.fs._GridFS__database.command("compact", "fs.chunks") - except Exception: - pass - - DB.activities.create_index([(_meta(MetaKey.NOTIFICATION), pymongo.ASCENDING)]) - DB.activities.create_index( - [(_meta(MetaKey.NOTIFICATION_UNREAD), pymongo.ASCENDING)] - ) - DB.activities.create_index([("remote_id", pymongo.ASCENDING)]) - DB.activities.create_index([("meta.actor_id", pymongo.ASCENDING)]) - DB.activities.create_index([("meta.object_id", pymongo.ASCENDING)]) - DB.activities.create_index([("meta.mentions", pymongo.ASCENDING)]) - DB.activities.create_index([("meta.hashtags", pymongo.ASCENDING)]) - DB.activities.create_index([("meta.thread_root_parent", pymongo.ASCENDING)]) - DB.activities.create_index( - [ - ("meta.thread_root_parent", pymongo.ASCENDING), - ("meta.deleted", pymongo.ASCENDING), - ] - ) - DB.activities.create_index( - [("activity.object.id", pymongo.ASCENDING), ("meta.deleted", pymongo.ASCENDING)] - ) - DB.activities.create_index( - [("meta.object_id", pymongo.ASCENDING), ("type", pymongo.ASCENDING)] - ) - - # Index for the block query - DB.activities.create_index( - [ - ("box", pymongo.ASCENDING), - ("type", pymongo.ASCENDING), - ("meta.undo", pymongo.ASCENDING), - ] - ) - - # Index for count queries - DB.activities.create_index( - [ - ("box", pymongo.ASCENDING), - ("type", pymongo.ASCENDING), - ("meta.undo", pymongo.ASCENDING), - ("meta.deleted", pymongo.ASCENDING), - ] - ) - - DB.activities.create_index([("box", pymongo.ASCENDING)]) - - # Outbox query - DB.activities.create_index( - [ - ("box", pymongo.ASCENDING), - ("type", pymongo.ASCENDING), - ("meta.undo", pymongo.ASCENDING), - ("meta.deleted", pymongo.ASCENDING), - ("meta.public", pymongo.ASCENDING), - ] - ) - - DB.activities.create_index( - [ - ("type", pymongo.ASCENDING), - ("activity.object.type", pymongo.ASCENDING), - ("activity.object.inReplyTo", pymongo.ASCENDING), - ("meta.deleted", pymongo.ASCENDING), - ] - ) - - # For the is_actor_icon_cached query - MEDIA_CACHE.fs._GridFS__files.create_index([("url", 1), ("kind", 1)]) - - # Replies index - 
DB.replies.create_index([("remote_id", pymongo.ASCENDING)]) - DB.replies.create_index([("meta.thread_root_parent", pymongo.ASCENDING)]) - DB.replies.create_index( - [ - ("meta.thread_root_parent", pymongo.ASCENDING), - ("meta.deleted", pymongo.ASCENDING), - ] - ) diff --git a/core/jsonld.py b/core/jsonld.py deleted file mode 100644 index d9219ed..0000000 --- a/core/jsonld.py +++ /dev/null @@ -1,18 +0,0 @@ -MICROBLOGPUB = { - "@context": [ - "https://www.w3.org/ns/activitystreams", - "https://w3id.org/security/v1", - { - "Hashtag": "as:Hashtag", - "PropertyValue": "schema:PropertyValue", - "manuallyApprovesFollowers": "as:manuallyApprovesFollowers", - "ostatus": "http://ostatus.org#", - "schema": "http://schema.org", - "sensitive": "as:sensitive", - "toot": "http://joinmastodon.org/ns#", - "totalItems": "as:totalItems", - "value": "schema:value", - "Emoji": "toot:Emoji", - }, - ] -} diff --git a/core/meta.py b/core/meta.py deleted file mode 100644 index 1190cfe..0000000 --- a/core/meta.py +++ /dev/null @@ -1,169 +0,0 @@ -from datetime import datetime -from enum import Enum -from enum import unique -from typing import Any -from typing import Dict -from typing import List -from typing import Union - -from little_boxes import activitypub as ap - -_SubQuery = Dict[str, Any] - - -@unique -class Box(Enum): - INBOX = "inbox" - OUTBOX = "outbox" - REPLIES = "replies" - - -@unique -class FollowStatus(Enum): - WAITING = "waiting" - ACCEPTED = "accepted" - REJECTED = "rejected" - - -@unique -class MetaKey(Enum): - NOTIFICATION = "notification" - NOTIFICATION_UNREAD = "notification_unread" - NOTIFICATION_FOLLOWS_BACK = "notification_follows_back" - POLL_ANSWER = "poll_answer" - POLL_ANSWER_TO = "poll_answer_to" - STREAM = "stream" - ACTOR_ID = "actor_id" - ACTOR = "actor" - ACTOR_HASH = "actor_hash" - UNDO = "undo" - PUBLISHED = "published" - GC_KEEP = "gc_keep" - OBJECT = "object" - OBJECT_ID = "object_id" - OBJECT_ACTOR = "object_actor" - OBJECT_ACTOR_ID = "object_actor_id" - OBJECT_ACTOR_HASH = "object_actor_hash" - PUBLIC = "public" - - PINNED = "pinned" - HASHTAGS = "hashtags" - MENTIONS = "mentions" - - FOLLOW_STATUS = "follow_status" - - THREAD_ROOT_PARENT = "thread_root_parent" - - IN_REPLY_TO = "in_reply_to" - IN_REPLY_TO_SELF = "in_reply_to_self" - IN_REPLY_TO_ACTOR = "in_reply_to_actor" - - SERVER = "server" - VISIBILITY = "visibility" - OBJECT_VISIBILITY = "object_visibility" - - DELETED = "deleted" - BOOSTED = "boosted" - LIKED = "liked" - - COUNT_LIKE = "count_like" - COUNT_BOOST = "count_boost" - COUNT_REPLY = "count_reply" - - EMOJI_REACTIONS = "emoji_reactions" - - -def _meta(mk: MetaKey) -> str: - return f"meta.{mk.value}" - - -def flag(mk: MetaKey, val: Any) -> _SubQuery: - return {_meta(mk): val} - - -def by_remote_id(remote_id: str) -> _SubQuery: - return {"remote_id": remote_id} - - -def in_inbox() -> _SubQuery: - return {"box": Box.INBOX.value} - - -def in_outbox() -> _SubQuery: - return {"box": Box.OUTBOX.value} - - -def by_type(type_: Union[ap.ActivityType, List[ap.ActivityType]]) -> _SubQuery: - if isinstance(type_, list): - return {"type": {"$in": [t.value for t in type_]}} - - return {"type": type_.value} - - -def follow_request_accepted() -> _SubQuery: - return flag(MetaKey.FOLLOW_STATUS, FollowStatus.ACCEPTED.value) - - -def not_poll_answer() -> _SubQuery: - return flag(MetaKey.POLL_ANSWER, False) - - -def not_in_reply_to() -> _SubQuery: - return {"activity.object.inReplyTo": None} - - -def not_undo() -> _SubQuery: - return flag(MetaKey.UNDO, False) - - -def not_deleted() 
-> _SubQuery: - return flag(MetaKey.DELETED, False) - - -def pinned() -> _SubQuery: - return flag(MetaKey.PINNED, True) - - -def by_actor(actor: ap.BaseActivity) -> _SubQuery: - return flag(MetaKey.ACTOR_ID, actor.id) - - -def by_actor_id(actor_id: str) -> _SubQuery: - return flag(MetaKey.ACTOR_ID, actor_id) - - -def by_object_id(object_id: str) -> _SubQuery: - return flag(MetaKey.OBJECT_ID, object_id) - - -def is_public() -> _SubQuery: - return flag(MetaKey.PUBLIC, True) - - -def by_visibility(vis: ap.Visibility) -> _SubQuery: - return flag(MetaKey.VISIBILITY, vis.name) - - -def by_object_visibility(vis: ap.Visibility) -> _SubQuery: - return flag(MetaKey.OBJECT_VISIBILITY, vis.name) - - -def by_hashtag(ht: str) -> _SubQuery: - return flag(MetaKey.HASHTAGS, ht) - - -def inc(mk: MetaKey, val: int) -> _SubQuery: - return {"$inc": flag(mk, val)} - - -def upsert(data: Dict[MetaKey, Any]) -> _SubQuery: - sq: Dict[str, Any] = {} - - for mk, val in data.items(): - sq[_meta(mk)] = val - - return {"$set": sq} - - -def published_after(dt: datetime) -> _SubQuery: - return flag(MetaKey.PUBLISHED, {"$gt": ap.format_datetime(dt)}) diff --git a/core/migrations.py b/core/migrations.py deleted file mode 100644 index 3f57c46..0000000 --- a/core/migrations.py +++ /dev/null @@ -1,378 +0,0 @@ -"""Migrations that will be run automatically at startup.""" -from typing import Any -from typing import Dict -from urllib.parse import urlparse - -from little_boxes import activitypub as ap - -from config import ID -from core import activitypub -from core.db import DB -from core.db import find_activities -from core.db import update_one_activity -from core.meta import FollowStatus -from core.meta import MetaKey -from core.meta import _meta -from core.meta import by_actor_id -from core.meta import by_object_id -from core.meta import by_remote_id -from core.meta import by_type -from core.meta import in_inbox -from core.meta import in_outbox -from core.meta import not_deleted -from core.meta import not_undo -from core.meta import upsert -from utils.migrations import Migration -from utils.migrations import logger -from utils.migrations import perform # noqa: just here for export - -back = activitypub.MicroblogPubBackend() -ap.use_backend(back) - - -class _1_MetaMigration(Migration): - """Add new metadata to simplify querying.""" - - def __guess_visibility(self, data: Dict[str, Any]) -> ap.Visibility: - to = data.get("to", []) - cc = data.get("cc", []) - if ap.AS_PUBLIC in to: - return ap.Visibility.PUBLIC - elif ap.AS_PUBLIC in cc: - return ap.Visibility.UNLISTED - else: - # Uses a bit of heuristic here, it's too expensive to fetch the actor, so assume the followers - # collection has "/collection" in it (which is true for most software), and at worst, we will - # classify it as "DIRECT" which behave the same as "FOLLOWERS_ONLY" (i.e. 
no Announce) - followers_only = False - for item in to: - if "/followers" in item: - followers_only = True - break - if not followers_only: - for item in cc: - if "/followers" in item: - followers_only = True - break - if followers_only: - return ap.Visibility.FOLLOWERS_ONLY - - return ap.Visibility.DIRECT - - def migrate(self) -> None: # noqa: C901 # too complex - for data in DB.activities.find(): - logger.info(f"before={data}") - obj = data["activity"].get("object") - set_meta: Dict[str, Any] = {} - - # Set `meta.object_id` (str) - if not data["meta"].get("object_id"): - set_meta["meta.object_id"] = None - if obj: - if isinstance(obj, str): - set_meta["meta.object_id"] = data["activity"]["object"] - elif isinstance(obj, dict): - obj_id = obj.get("id") - if obj_id: - set_meta["meta.object_id"] = obj_id - - # Set `meta.object_visibility` (str) - if not data["meta"].get("object_visibility"): - set_meta["meta.object_visibility"] = None - object_id = data["meta"].get("object_id") or set_meta.get( - "meta.object_id" - ) - if object_id: - obj = data["meta"].get("object") or data["activity"].get("object") - if isinstance(obj, dict): - set_meta["meta.object_visibility"] = self.__guess_visibility( - obj - ).name - - # Set `meta.actor_id` (str) - if not data["meta"].get("actor_id"): - set_meta["meta.actor_id"] = None - actor = data["activity"].get("actor") - if actor: - if isinstance(actor, str): - set_meta["meta.actor_id"] = data["activity"]["actor"] - elif isinstance(actor, dict): - actor_id = actor.get("id") - if actor_id: - set_meta["meta.actor_id"] = actor_id - - # Set `meta.poll_answer` (bool) - if not data["meta"].get("poll_answer"): - set_meta["meta.poll_answer"] = False - if obj: - if isinstance(obj, dict): - if ( - obj.get("name") - and not obj.get("content") - and obj.get("inReplyTo") - ): - set_meta["meta.poll_answer"] = True - - # Set `meta.visibility` (str) - if not data["meta"].get("visibility"): - set_meta["meta.visibility"] = self.__guess_visibility( - data["activity"] - ).name - - if not data["meta"].get("server"): - set_meta["meta.server"] = urlparse(data["remote_id"]).netloc - - logger.info(f"meta={set_meta}\n") - if set_meta: - DB.activities.update_one({"_id": data["_id"]}, {"$set": set_meta}) - - -class _2_FollowMigration(Migration): - """Add new metadata to update the cached actor in Follow activities.""" - - def migrate(self) -> None: - actor_cache: Dict[str, Dict[str, Any]] = {} - for data in DB.activities.find({"type": ap.ActivityType.FOLLOW.value}): - try: - if data["meta"]["actor_id"] == ID: - # It's a "following" - actor = actor_cache.get(data["meta"]["object_id"]) - if not actor: - actor = ap.parse_activity( - ap.get_backend().fetch_iri( - data["meta"]["object_id"], no_cache=True - ) - ).to_dict(embed=True) - if not actor: - raise ValueError(f"missing actor {data!r}") - actor_cache[actor["id"]] = actor - DB.activities.update_one( - {"_id": data["_id"]}, {"$set": {"meta.object": actor}} - ) - - else: - # It's a "followers" - actor = actor_cache.get(data["meta"]["actor_id"]) - if not actor: - actor = ap.parse_activity( - ap.get_backend().fetch_iri( - data["meta"]["actor_id"], no_cache=True - ) - ).to_dict(embed=True) - if not actor: - raise ValueError(f"missing actor {data!r}") - actor_cache[actor["id"]] = actor - DB.activities.update_one( - {"_id": data["_id"]}, {"$set": {"meta.actor": actor}} - ) - except Exception: - logger.exception(f"failed to process actor {data!r}") - - -class _20190830_MetaPublishedMigration(Migration): - """Add the `meta.published` field to old 
activities.""" - - def migrate(self) -> None: - for data in find_activities({"meta.published": {"$exists": False}}): - try: - raw = data["activity"] - # If the activity has its own `published` field, we'll use it - if "published" in raw: - published = raw["published"] - else: - # Otherwise, we take the date we received the activity as the published time - published = ap.format_datetime(data["_id"].generation_time) - - # Set the field in the DB - update_one_activity( - {"_id": data["_id"]}, - {"$set": {_meta(MetaKey.PUBLISHED): published}}, - ) - - except Exception: - logger.exception(f"failed to process activity {data!r}") - - -class _20190830_FollowFollowBackMigration(Migration): - """Add the new meta flags for tracking accepted/rejected status and following/follows back info.""" - - def migrate(self) -> None: - for data in find_activities({**by_type(ap.ActivityType.ACCEPT), **in_inbox()}): - try: - update_one_activity( - { - **by_type(ap.ActivityType.FOLLOW), - **by_remote_id(data["meta"]["object_id"]), - }, - upsert({MetaKey.FOLLOW_STATUS: FollowStatus.ACCEPTED.value}), - ) - # Check if we are following this actor - follow_query = { - **in_inbox(), - **by_type(ap.ActivityType.FOLLOW), - **by_actor_id(data["meta"]["actor_id"]), - **not_undo(), - } - raw_follow = DB.activities.find_one(follow_query) - if raw_follow: - DB.activities.update_many( - follow_query, - {"$set": {_meta(MetaKey.NOTIFICATION_FOLLOWS_BACK): True}}, - ) - - except Exception: - logger.exception(f"failed to process activity {data!r}") - - for data in find_activities({**by_type(ap.ActivityType.REJECT), **in_inbox()}): - try: - update_one_activity( - { - **by_type(ap.ActivityType.FOLLOW), - **by_remote_id(data["meta"]["object_id"]), - }, - upsert({MetaKey.FOLLOW_STATUS: FollowStatus.REJECTED.value}), - ) - except Exception: - logger.exception(f"failed to process activity {data!r}") - - DB.activities.update_many( - { - **by_type(ap.ActivityType.FOLLOW), - **in_inbox(), - "meta.follow_status": {"$exists": False}, - }, - {"$set": {"meta.follow_status": "waiting"}}, - ) - - -class _20190901_FollowFollowBackMigrationFix(Migration): - """Add the new meta flags for tracking accepted/rejected status and following/follows back info.""" - - def migrate(self) -> None: - for data in find_activities({**by_type(ap.ActivityType.ACCEPT), **in_inbox()}): - try: - update_one_activity( - { - **by_type(ap.ActivityType.FOLLOW), - **by_remote_id(data["meta"]["object_id"]), - }, - upsert({MetaKey.FOLLOW_STATUS: FollowStatus.ACCEPTED.value}), - ) - # Check if we are following this actor - follow_query = { - **in_inbox(), - **by_type(ap.ActivityType.FOLLOW), - **by_object_id(data["meta"]["actor_id"]), - **not_undo(), - } - raw_follow = DB.activities.find_one(follow_query) - if raw_follow: - DB.activities.update_many( - follow_query, - {"$set": {_meta(MetaKey.NOTIFICATION_FOLLOWS_BACK): True}}, - ) - - except Exception: - logger.exception(f"failed to process activity {data!r}") - - for data in find_activities({**by_type(ap.ActivityType.FOLLOW), **in_outbox()}): - try: - print(data) - follow_query = { - **in_inbox(), - **by_type(ap.ActivityType.FOLLOW), - **by_actor_id(data["meta"]["object_id"]), - **not_undo(), - } - raw_accept = DB.activities.find_one(follow_query) - print(raw_accept) - if raw_accept: - DB.activities.update_many( - by_remote_id(data["remote_id"]), - {"$set": {_meta(MetaKey.NOTIFICATION_FOLLOWS_BACK): True}}, - ) - - except Exception: - logger.exception(f"failed to process activity {data!r}") - - -class 
_20190901_MetaHashtagsAndMentions(Migration): - def migrate(self) -> None: - for data in find_activities( - {**by_type(ap.ActivityType.CREATE), **not_deleted()} - ): - try: - activity = ap.parse_activity(data["activity"]) - mentions = [] - obj = activity.get_object() - for m in obj.get_mentions(): - mentions.append(m.href) - hashtags = [] - for h in obj.get_hashtags(): - hashtags.append(h.name[1:]) # Strip the # - - update_one_activity( - by_remote_id(data["remote_id"]), - upsert({MetaKey.MENTIONS: mentions, MetaKey.HASHTAGS: hashtags}), - ) - - except Exception: - logger.exception(f"failed to process activity {data!r}") - - -class _20190906_RedoFollowFollowBack(_20190901_FollowFollowBackMigrationFix): - """Add the new meta flags for tracking accepted/rejected status and following/follows back info.""" - - -class _20190906_InReplyToMigration(Migration): - def migrate(self) -> None: - for data in find_activities( - {**by_type(ap.ActivityType.CREATE), **not_deleted()} - ): - try: - in_reply_to = data["activity"]["object"].get("inReplyTo") - if in_reply_to: - update_one_activity( - by_remote_id(data["remote_id"]), - upsert({MetaKey.IN_REPLY_TO: in_reply_to}), - ) - except Exception: - logger.exception(f"failed to process activity {data!r}") - - for data in DB.replies.find({**not_deleted()}): - try: - in_reply_to = data["activity"].get("inReplyTo") - if in_reply_to: - DB.replies.update_one( - by_remote_id(data["remote_id"]), - upsert({MetaKey.IN_REPLY_TO: in_reply_to}), - ) - except Exception: - logger.exception(f"failed to process activity {data!r}") - - -class _20191020_ManuallyApprovesFollowerSupportMigrationn(Migration): - def migrate(self) -> None: - DB.activities.update_many( - { - **by_type(ap.ActivityType.FOLLOW), - **in_inbox(), - "meta.follow_status": {"$exists": False}, - }, - {"$set": {"meta.follow_status": "accepted"}}, - ) - - -class _20191106_PlaceTagToLocation(Migration): - def migrate(self) -> None: - for data in find_activities({"activity.object.tag.type": "Place"}): - for tag in data["activity"]["object"]["tag"]: - if tag["type"] == "Place": - break - DB.activities.update_one( - {"_id": data["_id"]}, - { - "$pull": {"activity.object.tag": {"type": "Place"}}, - "$set": {"activity.object.location": tag}, - }, - ) diff --git a/core/notifications.py b/core/notifications.py deleted file mode 100644 index a792a6a..0000000 --- a/core/notifications.py +++ /dev/null @@ -1,216 +0,0 @@ -import logging -from datetime import datetime -from datetime import timedelta -from datetime import timezone -from functools import singledispatch -from typing import Any -from typing import Dict - -from little_boxes import activitypub as ap - -from config import DB -from config import REPLIES_IN_STREAM -from core.activitypub import is_from_outbox -from core.activitypub import is_local_url -from core.db import find_one_activity -from core.meta import MetaKey -from core.meta import _meta -from core.meta import by_actor -from core.meta import by_object_id -from core.meta import by_type -from core.meta import flag -from core.meta import in_inbox -from core.meta import not_undo -from core.meta import published_after -from core.tasks import Tasks - -_logger = logging.getLogger(__name__) - -_NewMeta = Dict[str, Any] - - -def _flag_as_notification(activity: ap.BaseActivity, new_meta: _NewMeta) -> None: - new_meta.update( - {_meta(MetaKey.NOTIFICATION): True, _meta(MetaKey.NOTIFICATION_UNREAD): True} - ) - return None - - -def _set_flag(meta: _NewMeta, meta_key: MetaKey, value: Any = True) -> None: - 
meta.update({_meta(meta_key): value}) - return None - - -@singledispatch -def set_inbox_flags(activity: ap.BaseActivity, new_meta: _NewMeta) -> None: - _logger.warning(f"skipping {activity!r}") - return None - - -@set_inbox_flags.register -def _accept_set_inbox_flags(activity: ap.Accept, new_meta: _NewMeta) -> None: - """Handle notifications for "accepted" following requests.""" - _logger.info(f"set_inbox_flags activity={activity!r}") - # Check if this actor already follow us back - follows_back = False - follow_query = { - **in_inbox(), - **by_type(ap.ActivityType.FOLLOW), - **by_actor(activity.get_actor()), - **not_undo(), - } - raw_follow = DB.activities.find_one(follow_query) - if raw_follow: - follows_back = True - - DB.activities.update_many( - follow_query, {"$set": {_meta(MetaKey.NOTIFICATION_FOLLOWS_BACK): True}} - ) - - # This Accept will be a "You started following $actor" notification - _flag_as_notification(activity, new_meta) - _set_flag(new_meta, MetaKey.GC_KEEP) - _set_flag(new_meta, MetaKey.NOTIFICATION_FOLLOWS_BACK, follows_back) - return None - - -@set_inbox_flags.register -def _reject_set_inbox_flags(activity: ap.Reject, new_meta: _NewMeta) -> None: - """Handle notifications for "rejected" following requests.""" - _logger.info(f"set_inbox_flags activity={activity!r}") - # This Accept will be a "You started following $actor" notification - _flag_as_notification(activity, new_meta) - _set_flag(new_meta, MetaKey.GC_KEEP) - return None - - -@set_inbox_flags.register -def _follow_set_inbox_flags(activity: ap.Follow, new_meta: _NewMeta) -> None: - """Handle notification for new followers.""" - _logger.info(f"set_inbox_flags activity={activity!r}") - # Check if we're already following this actor - follows_back = False - accept_query = { - **in_inbox(), - **by_type(ap.ActivityType.ACCEPT), - **by_actor(activity.get_actor()), - **not_undo(), - } - raw_accept = DB.activities.find_one(accept_query) - if raw_accept: - follows_back = True - - DB.activities.update_many( - accept_query, {"$set": {_meta(MetaKey.NOTIFICATION_FOLLOWS_BACK): True}} - ) - - # This Follow will be a "$actor started following you" notification - _flag_as_notification(activity, new_meta) - _set_flag(new_meta, MetaKey.GC_KEEP) - _set_flag(new_meta, MetaKey.NOTIFICATION_FOLLOWS_BACK, follows_back) - return None - - -@set_inbox_flags.register -def _like_set_inbox_flags(activity: ap.Like, new_meta: _NewMeta) -> None: - _logger.info(f"set_inbox_flags activity={activity!r}") - # Is it a Like of local acitivty/from the outbox - if is_from_outbox(activity.get_object()): - # Flag it as a notification - _flag_as_notification(activity, new_meta) - - # Cache the object (for display on the notifcation page) - Tasks.cache_object(activity.id) - - # Also set the "keep mark" for the GC (as we want to keep it forever) - _set_flag(new_meta, MetaKey.GC_KEEP) - - return None - - -@set_inbox_flags.register -def _announce_set_inbox_flags(activity: ap.Announce, new_meta: _NewMeta) -> None: - _logger.info(f"set_inbox_flags activity={activity!r}") - obj = activity.get_object() - # Is it a Annnounce/boost of local acitivty/from the outbox - if is_from_outbox(obj): - # Flag it as a notification - _flag_as_notification(activity, new_meta) - - # Also set the "keep mark" for the GC (as we want to keep it forever) - _set_flag(new_meta, MetaKey.GC_KEEP) - - # Dedup boosts (it's annoying to see the same note multipe times on the same page) - if not find_one_activity( - { - **in_inbox(), - **by_type([ap.ActivityType.CREATE, 
ap.ActivityType.ANNOUNCE]), - **by_object_id(obj.id), - **flag(MetaKey.STREAM, True), - **published_after(datetime.now(timezone.utc) - timedelta(hours=12)), - } - ): - # Display it in the stream only it not there already (only looking at the last 12 hours) - _set_flag(new_meta, MetaKey.STREAM) - - return None - - -@set_inbox_flags.register -def _undo_set_inbox_flags(activity: ap.Undo, new_meta: _NewMeta) -> None: - _logger.info(f"set_inbox_flags activity={activity!r}") - obj = activity.get_object() - - if obj.has_type(ap.ActivityType.FOLLOW): - # Flag it as a noticiation (for the "$actor unfollowed you" - _flag_as_notification(activity, new_meta) - - # Also set the "keep mark" for the GC (as we want to keep it forever) - _set_flag(new_meta, MetaKey.GC_KEEP) - - return None - - -@set_inbox_flags.register -def _create_set_inbox_flags(activity: ap.Create, new_meta: _NewMeta) -> None: - _logger.info(f"set_inbox_flags activity={activity!r}") - obj = activity.get_object() - - _set_flag(new_meta, MetaKey.POLL_ANSWER, False) - - in_reply_to = obj.get_in_reply_to() - - # Check if it's a local reply - if in_reply_to and is_local_url(in_reply_to): - # TODO(tsileo): fetch the reply to check for poll answers more precisely - # reply_of = ap.fetch_remote_activity(in_reply_to) - - # Ensure it's not a poll answer - if obj.name and not obj.content: - _set_flag(new_meta, MetaKey.POLL_ANSWER) - return None - - # Flag it as a notification - _flag_as_notification(activity, new_meta) - - # Also set the "keep mark" for the GC (as we want to keep it forever) - _set_flag(new_meta, MetaKey.GC_KEEP) - - return None - - # Check for mention - for mention in obj.get_mentions(): - if mention.href and is_local_url(mention.href): - # Flag it as a notification - _flag_as_notification(activity, new_meta) - - # Also set the "keep mark" for the GC (as we want to keep it forever) - _set_flag(new_meta, MetaKey.GC_KEEP) - - if not in_reply_to or ( - REPLIES_IN_STREAM and obj.get_actor().id in ap.get_backend().following() - ): - # A good candidate for displaying in the stream - _set_flag(new_meta, MetaKey.STREAM) - - return None diff --git a/core/outbox.py b/core/outbox.py deleted file mode 100644 index 2399a66..0000000 --- a/core/outbox.py +++ /dev/null @@ -1,149 +0,0 @@ -import logging -from datetime import datetime -from functools import singledispatch -from typing import Any -from typing import Dict - -from little_boxes import activitypub as ap - -from core.activitypub import handle_replies -from core.db import find_one_activity -from core.db import update_many_activities -from core.db import update_one_activity -from core.meta import MetaKey -from core.meta import by_object_id -from core.meta import by_type -from core.meta import inc -from core.meta import upsert -from core.tasks import Tasks - -_logger = logging.getLogger(__name__) - -_NewMeta = Dict[str, Any] - - -@singledispatch -def process_outbox(activity: ap.BaseActivity, new_meta: _NewMeta) -> None: - _logger.warning(f"skipping {activity!r}") - return None - - -@process_outbox.register -def _delete_process_outbox(delete: ap.Delete, new_meta: _NewMeta) -> None: - _logger.info(f"process_outbox activity={delete!r}") - obj_id = delete.get_object_id() - - # Flag everything referencing the deleted object as deleted (except the Delete activity itself) - update_many_activities( - {**by_object_id(obj_id), "remote_id": {"$ne": delete.id}}, - upsert({MetaKey.DELETED: True, MetaKey.UNDO: True}), - ) - - # If the deleted activity was in DB, decrease some threads-related counter - 
data = find_one_activity( - {**by_object_id(obj_id), **by_type(ap.ActivityType.CREATE)} - ) - _logger.info(f"found local copy of deleted activity: {data}") - if data: - obj = ap.parse_activity(data["activity"]).get_object() - _logger.info(f"obj={obj!r}") - in_reply_to = obj.get_in_reply_to() - if in_reply_to: - update_one_activity( - {**by_type(ap.ActivityType.CREATE), **by_object_id(in_reply_to)}, - {"$inc": {"meta.count_reply": -1, "meta.count_direct_reply": -1}}, - ) - - -@process_outbox.register -def _update_process_outbox(update: ap.Update, new_meta: _NewMeta) -> None: - _logger.info(f"process_outbox activity={update!r}") - - obj = update._data["object"] - - update_prefix = "activity.object." - to_update: Dict[str, Any] = {"$set": dict(), "$unset": dict()} - to_update["$set"][f"{update_prefix}updated"] = ( - datetime.utcnow().replace(microsecond=0).isoformat() + "Z" - ) - for k, v in obj.items(): - if k in ["id", "type"]: - continue - if v is None: - to_update["$unset"][f"{update_prefix}{k}"] = "" - else: - to_update["$set"][f"{update_prefix}{k}"] = v - - if len(to_update["$unset"]) == 0: - del to_update["$unset"] - - _logger.info(f"updating note from outbox {obj!r} {to_update}") - update_one_activity({"activity.object.id": obj["id"]}, to_update) - # FIXME(tsileo): should send an Update (but not a partial one, to all the note's recipients - # (create a new Update with the result of the update, and send it without saving it?) - - -@process_outbox.register -def _create_process_outbox(create: ap.Create, new_meta: _NewMeta) -> None: - _logger.info(f"process_outbox activity={create!r}") - handle_replies(create) - - -@process_outbox.register -def _announce_process_outbox(announce: ap.Announce, new_meta: _NewMeta) -> None: - _logger.info(f"process_outbox activity={announce!r}") - - obj = announce.get_object() - if obj.has_type(ap.ActivityType.QUESTION): - Tasks.fetch_remote_question(obj) - - Tasks.cache_object(announce.id) - - update_one_activity( - {**by_object_id(obj.id), **by_type(ap.ActivityType.CREATE)}, - upsert({MetaKey.BOOSTED: announce.id}), - ) - - -@process_outbox.register -def _like_process_outbox(like: ap.Like, new_meta: _NewMeta) -> None: - _logger.info(f"process_outbox activity={like!r}") - - obj = like.get_object() - if obj.has_type(ap.ActivityType.QUESTION): - Tasks.fetch_remote_question(obj) - - # Cache the object for display on the "Liked" public page - Tasks.cache_object(like.id) - - update_one_activity( - {**by_object_id(obj.id), **by_type(ap.ActivityType.CREATE)}, - {**inc(MetaKey.COUNT_LIKE, 1), **upsert({MetaKey.LIKED: like.id})}, - ) - - -@process_outbox.register -def _undo_process_outbox(undo: ap.Undo, new_meta: _NewMeta) -> None: - _logger.info(f"process_outbox activity={undo!r}") - obj = undo.get_object() - update_one_activity({"remote_id": obj.id}, {"$set": {"meta.undo": True}}) - - # Undo Like - if obj.has_type(ap.ActivityType.LIKE): - liked = obj.get_object_id() - update_one_activity( - {**by_object_id(liked), **by_type(ap.ActivityType.CREATE)}, - {**inc(MetaKey.COUNT_LIKE, -1), **upsert({MetaKey.LIKED: False})}, - ) - - elif obj.has_type(ap.ActivityType.ANNOUNCE): - announced = obj.get_object_id() - update_one_activity( - {**by_object_id(announced), **by_type(ap.ActivityType.CREATE)}, - upsert({MetaKey.BOOSTED: False}), - ) - - # Undo Follow (undo new following) - elif obj.has_type(ap.ActivityType.FOLLOW): - pass - # do nothing diff --git a/core/remote.py b/core/remote.py deleted file mode 100644 index 820a89d..0000000 --- a/core/remote.py +++ /dev/null @@ 
-1,38 +0,0 @@ -from urllib.parse import urlparse - -from core.db import _Q -from core.db import update_one_remote -from utils import now - - -def server(url: str) -> str: - return urlparse(url).netloc - - -def _update(url: str, replace: _Q) -> None: - update_one_remote({"server": server(url)}, replace, upsert=True) - - -# TODO(tsileo): track receive (and the user agent to help debug issues) - - -def track_successful_send(url: str) -> None: - now_ = now() - _update( - url, - { - "$inc": {"successful_send": 1}, - "$set": { - "last_successful_contact": now_, - "last_successful_send": now_, - "last_contact": now_, - }, - }, - ) - return None - - -def track_failed_send(url: str) -> None: - now_ = now() - _update(url, {"$inc": {"failed_send": 1}, "$set": {"last_contact": now_}}) - return None diff --git a/core/shared.py b/core/shared.py deleted file mode 100644 index b12339b..0000000 --- a/core/shared.py +++ /dev/null @@ -1,267 +0,0 @@ -import gzip -import json -import os -from functools import lru_cache -from functools import wraps -from typing import Any - -import flask -from bson.objectid import ObjectId -from flask import Response -from flask import current_app as app -from flask import redirect -from flask import request -from flask import session -from flask import url_for -from flask_wtf.csrf import CSRFProtect -from little_boxes import activitypub as ap -from poussetaches import PousseTaches - -import config -from config import DB -from config import ME -from core import activitypub -from core.db import find_activities -from core.meta import MetaKey -from core.meta import by_object_id -from core.meta import by_type -from core.meta import flag -from core.meta import not_deleted - -# _Response = Union[flask.Response, werkzeug.wrappers.Response, str, Any] -_Response = Any - -p = PousseTaches( - os.getenv("MICROBLOGPUB_POUSSETACHES_HOST", "http://localhost:7991"), - os.getenv("MICROBLOGPUB_INTERNAL_HOST", "http://localhost:5000"), -) - - -csrf = CSRFProtect() - - -back = activitypub.MicroblogPubBackend() -ap.use_backend(back) - -MY_PERSON = ap.Person(**ME) - - -@lru_cache(512) -def build_resp(resp): - """Encode the response to gzip if supported by the client.""" - headers = {"Cache-Control": "max-age=0, private, must-revalidate"} - accept_encoding = request.headers.get("Accept-Encoding", "") - if "gzip" in accept_encoding.lower(): - return ( - gzip.compress(resp.encode(), compresslevel=6), - {**headers, "Vary": "Accept-Encoding", "Content-Encoding": "gzip"}, - ) - - return resp, headers - - -def jsonify(data, content_type="application/json"): - resp, headers = build_resp(json.dumps(data)) - return Response(headers={**headers, "Content-Type": content_type}, response=resp) - - -def htmlify(data): - resp, headers = build_resp(data) - return Response( - response=resp, headers={**headers, "Content-Type": "text/html; charset=utf-8"} - ) - - -def activitypubify(**data): - if "@context" not in data: - data["@context"] = config.DEFAULT_CTX - resp, headers = build_resp(json.dumps(data)) - return Response( - response=resp, headers={**headers, "Content-Type": "application/activity+json"} - ) - - -def is_api_request(): - h = request.headers.get("Accept") - if h is None: - return False - h = h.split(",")[0] - if h in config.HEADERS or h == "application/json": - return True - return False - - -def add_response_headers(headers={}): - """This decorator adds the headers passed in to the response""" - - def decorator(f): - @wraps(f) - def decorated_function(*args, **kwargs): - resp = 
flask.make_response(f(*args, **kwargs)) - h = resp.headers - for header, value in headers.items(): - h[header] = value - return resp - - return decorated_function - - return decorator - - -def noindex(f): - """This decorator passes X-Robots-Tag: noindex, nofollow""" - return add_response_headers({"X-Robots-Tag": "noindex, nofollow"})(f) - - -def login_required(f): - @wraps(f) - def decorated_function(*args, **kwargs): - if not session.get("logged_in"): - return redirect(url_for("admin.admin_login", next=request.url)) - return f(*args, **kwargs) - - return decorated_function - - -def _get_ip(): - """Guess the IP address from the request. Only used for security purpose (failed logins or bad payload). - - Geoip will be returned if the "broxy" headers are set (it does Geoip - using an offline database and append these special headers). - """ - ip = request.headers.get("X-Forwarded-For", request.remote_addr) - geoip = None - if request.headers.get("Broxy-Geoip-Country"): - geoip = ( - request.headers.get("Broxy-Geoip-Country") - + "/" - + request.headers.get("Broxy-Geoip-Region") - ) - return ip, geoip - - -def _build_thread(data, include_children=True, query=None): # noqa: C901 - if query is None: - query = {} - data["_requested"] = True - app.logger.info(f"_build_thread({data!r})") - root_id = ( - data["meta"].get(MetaKey.THREAD_ROOT_PARENT.value) - or data["meta"].get(MetaKey.OBJECT_ID.value) - or data["remote_id"] - ) - - replies = [data] - for dat in find_activities( - { - **by_object_id(root_id), - **not_deleted(), - **by_type(ap.ActivityType.CREATE), - **query, - } - ): - replies.append(dat) - - for dat in find_activities( - { - **flag(MetaKey.THREAD_ROOT_PARENT, root_id), - **not_deleted(), - **by_type(ap.ActivityType.CREATE), - **query, - } - ): - replies.append(dat) - - for dat in DB.replies.find( - {**flag(MetaKey.THREAD_ROOT_PARENT, root_id), **not_deleted(), **query} - ): - # Make a Note/Question/... looks like a Create - dat["meta"].update( - {MetaKey.OBJECT_VISIBILITY.value: dat["meta"][MetaKey.VISIBILITY.value]} - ) - dat = { - "activity": {"object": dat["activity"]}, - "meta": dat["meta"], - "_id": dat["_id"], - } - replies.append(dat) - - replies = sorted(replies, key=lambda d: d["meta"]["published"]) - - # Index all the IDs in order to build a tree - idx = {} - replies2 = [] - for rep in replies: - rep_id = rep["activity"]["object"]["id"] - if rep_id in idx: - continue - idx[rep_id] = rep.copy() - idx[rep_id]["_nodes"] = [] - replies2.append(rep) - - # Build the tree - for rep in replies2: - rep_id = rep["activity"]["object"]["id"] - if rep_id == root_id: - continue - reply_of = ap._get_id(rep["activity"]["object"].get("inReplyTo")) - try: - idx[reply_of]["_nodes"].append(rep) - except KeyError: - app.logger.info(f"{reply_of} is not there! skipping {rep}") - - # Flatten the tree - thread = [] - - def _flatten(node, level=0): - node["_level"] = level - thread.append(node) - - for snode in sorted( - idx[node["activity"]["object"]["id"]]["_nodes"], - key=lambda d: d["activity"]["object"]["published"], - ): - _flatten(snode, level=level + 1) - - try: - _flatten(idx[root_id]) - except KeyError: - app.logger.info(f"{root_id} is not there! 
skipping") - - return thread - - -def paginated_query(db, q, limit=25, sort_key="_id"): - older_than = newer_than = None - query_sort = -1 - first_page = not request.args.get("older_than") and not request.args.get( - "newer_than" - ) - - query_older_than = request.args.get("older_than") - query_newer_than = request.args.get("newer_than") - - if query_older_than: - q["_id"] = {"$lt": ObjectId(query_older_than)} - elif query_newer_than: - q["_id"] = {"$gt": ObjectId(query_newer_than)} - query_sort = 1 - - outbox_data = list(db.find(q, limit=limit + 1).sort(sort_key, query_sort)) - outbox_len = len(outbox_data) - outbox_data = sorted( - outbox_data[:limit], key=lambda x: str(x[sort_key]), reverse=True - ) - - if query_older_than: - newer_than = str(outbox_data[0]["_id"]) - if outbox_len == limit + 1: - older_than = str(outbox_data[-1]["_id"]) - elif query_newer_than: - older_than = str(outbox_data[-1]["_id"]) - if outbox_len == limit + 1: - newer_than = str(outbox_data[0]["_id"]) - elif first_page and outbox_len == limit + 1: - older_than = str(outbox_data[-1]["_id"]) - - return outbox_data, older_than, newer_than diff --git a/core/tasks.py b/core/tasks.py deleted file mode 100644 index c4f9ca8..0000000 --- a/core/tasks.py +++ /dev/null @@ -1,127 +0,0 @@ -import os -from datetime import datetime -from datetime import timezone -from typing import Any -from typing import Dict -from typing import Set - -from little_boxes import activitypub as ap -from poussetaches import PousseTaches - -from config import DISABLE_WEBMENTIONS -from config import MEDIA_CACHE -from utils import parse_datetime - -p = PousseTaches( - os.getenv("MICROBLOGPUB_POUSSETACHES_HOST", "http://localhost:7991"), - os.getenv("MICROBLOGPUB_INTERNAL_HOST", "http://localhost:5000"), -) - - -class Tasks: - @staticmethod - def cache_object(iri: str) -> None: - p.push(iri, "/task/cache_object") - - @staticmethod - def cache_actor(iri: str, also_cache_attachments: bool = True) -> None: - p.push( - {"iri": iri, "also_cache_attachments": also_cache_attachments}, - "/task/cache_actor", - ) - - @staticmethod - def cache_actor_icon(icon_url: str, actor_iri: str) -> None: - if MEDIA_CACHE.is_actor_icon_cached(icon_url): - return None - - p.push({"icon_url": icon_url, "actor_iri": actor_iri}, "/task/cache_actor_icon") - - @staticmethod - def cache_emoji(url: str, iri: str) -> None: - if MEDIA_CACHE.is_emoji_cached(iri): - return None - - p.push({"url": url, "iri": iri}, "/task/cache_emoji") - - @staticmethod - def send_webmentions(activity: ap.Create, links: Set[str]) -> None: - if DISABLE_WEBMENTIONS: - return None - - for link in links: - p.push( - { - "link": link, - "note_url": activity.get_object().get_url(), - "remote_id": activity.id, - }, - "/task/send_webmention", - ) - - @staticmethod - def cache_emojis(activity: ap.BaseActivity) -> None: - for emoji in activity.get_emojis(): - try: - Tasks.cache_emoji(emoji.get_icon_url(), emoji.id) - except KeyError: - # TODO(tsileo): log invalid emoji - pass - - @staticmethod - def post_to_remote_inbox(payload: str, recp: str) -> None: - p.push({"payload": payload, "to": recp}, "/task/post_to_remote_inbox") - - @staticmethod - def forward_activity(iri: str) -> None: - p.push(iri, "/task/forward_activity") - - @staticmethod - def fetch_og_meta(iri: str) -> None: - p.push(iri, "/task/fetch_og_meta") - - @staticmethod - def process_reply(iri: str) -> None: - p.push(iri, "/task/process_reply") - - @staticmethod - def process_new_activity(iri: str) -> None: - p.push(iri, "/task/process_new_activity") 
- - @staticmethod - def cache_attachments(iri: str) -> None: - p.push(iri, "/task/cache_attachments") - - @staticmethod - def cache_attachment(attachment: Dict[str, Any], iri: str) -> None: - p.push({"iri": iri, "attachment": attachment}, "/task/cache_attachment") - - @staticmethod - def finish_post_to_inbox(iri: str) -> None: - p.push(iri, "/task/finish_post_to_inbox") - - @staticmethod - def finish_post_to_outbox(iri: str) -> None: - p.push(iri, "/task/finish_post_to_outbox") - - @staticmethod - def send_actor_update() -> None: - p.push({}, "/task/send_actor_update", delay=2) - - @staticmethod - def update_question_outbox(iri: str, open_for: int) -> None: - p.push( - iri, "/task/update_question", delay=open_for - ) # XXX: delay expects minutes - - @staticmethod - def fetch_remote_question(question) -> None: - now = datetime.now(timezone.utc) - dt = parse_datetime(question.closed or question.endTime) - minutes = int((dt - now).total_seconds() / 60) - - if minutes > 0: - # Only push the task if the poll is not ended yet - p.push( - question.id, "/task/fetch_remote_question", delay=minutes - ) # XXX: delay expects minutes diff --git a/data/.gitignore b/data/.gitignore index d6b7ef3..7db6e4c 100644 --- a/data/.gitignore +++ b/data/.gitignore @@ -1,2 +1,3 @@ * +!uploads/ !.gitignore diff --git a/data/_theme.scss b/data/_theme.scss new file mode 100644 index 0000000..9fc2396 --- /dev/null +++ b/data/_theme.scss @@ -0,0 +1 @@ +// override vars for theming here diff --git a/static/emojis/.gitignore b/data/custom_emoji/.gitignore similarity index 100% rename from static/emojis/.gitignore rename to data/custom_emoji/.gitignore diff --git a/data/templates/app b/data/templates/app new file mode 120000 index 0000000..6a6e8a4 --- /dev/null +++ b/data/templates/app @@ -0,0 +1 @@ +../../app/templates/ \ No newline at end of file diff --git a/data/tests.toml b/data/tests.toml new file mode 100644 index 0000000..909b4a3 --- /dev/null +++ b/data/tests.toml @@ -0,0 +1,17 @@ +domain = "localhost:8000" +username = "test" +# toto +admin_password = "$2b$12$OwCyZM33uXQUVrChgER.h.qgFJ4fBp6tdFwArR3Lm1LV8NgMvIxVa" +name = "test" +summary = "<p>Hello</p>" +https = false +id = "http://localhost:8000" +icon_url = "https://localhost:8000/static/nopic.png" +secret = "1dd4079e0474d1a519052b8fe3cb5fa6" +debug = true + +# In-mem DB +sqlalchemy_database = "file:pytest?mode=memory&cache=shared&uri=true" +# sqlalchemy_database_url = "data/pytest.db" +key_path = "tests/test.key" +media_db_path = "tests/media.db" diff --git a/static/media/.gitignore b/data/uploads/.gitignore similarity index 100% rename from static/media/.gitignore rename to data/uploads/.gitignore diff --git a/dev-requirements.txt b/dev-requirements.txt deleted file mode 100644 index 7db7fab..0000000 --- a/dev-requirements.txt +++ /dev/null @@ -1,8 +0,0 @@ -git+https://github.com/tsileo/little-boxes.git -pytest -requests -html2text -pyyaml -flake8 -mypy -black diff --git a/docker-compose-dev.yml b/docker-compose-dev.yml deleted file mode 100644 index 730f287..0000000 --- a/docker-compose-dev.yml +++ /dev/null @@ -1,16 +0,0 @@ -version: '3' -services: - mongo: - image: "mongo:latest" - volumes: - - "./data:/data/db" - ports: - - "27017:27017" - poussetaches: - image: "poussetaches/poussetaches:latest" - volumes: - - "${DATA_DIR}/poussetaches:/app/poussetaches_data" - environment: - - POUSSETACHES_AUTH_KEY=${POUSSETACHES_AUTH_KEY} - ports: - - "7991:7991" diff --git a/docker-compose.yml b/docker-compose.yml index f04be1b..c1a7edf 100644 --- a/docker-compose.yml 
+++ b/docker-compose.yml @@ -1,27 +1,13 @@ -version: '2' +version: "3" + services: - web: - image: 'microblogpub:latest' + server: + image: microblogpub/microblogpub:latest + container_name: microblogpub + user: 1000:1000 + restart: always + volumes: + - ./data:/app/data + - ./app/static:/app/app/static ports: - - "127.0.0.1:${WEB_PORT}:5005" - volumes: - - "${CONFIG_DIR}:/app/config" - - "./static:/app/static" - environment: - - MICROBLOGPUB_MONGODB_HOST=mongo:27017 - - MICROBLOGPUB_INTERNAL_HOST=http://${COMPOSE_PROJECT_NAME}_web_1:5005 - - MICROBLOGPUB_POUSSETACHES_HOST=http://${COMPOSE_PROJECT_NAME}_poussetaches_1:7991 - - POUSSETACHES_AUTH_KEY=${POUSSETACHES_AUTH_KEY} - - COMPOSE_PROJECT_NAME=${COMPOSE_PROJECT_NAME} - mongo: - image: "mongo:3" - volumes: - - "${DATA_DIR}/mongodb:/data/db" - poussetaches: - image: "poussetaches/poussetaches:latest" - volumes: - - "${DATA_DIR}/poussetaches:/app/poussetaches_data" - environment: - - POUSSETACHES_AUTH_KEY=${POUSSETACHES_AUTH_KEY} -# ports: -# - "127.0.0.1:${POUSSETACHES_PORT}:7991" + - "8000:8000" diff --git a/docs/activitypub.md b/docs/activitypub.md deleted file mode 100644 index 2ca78d9..0000000 --- a/docs/activitypub.md +++ /dev/null @@ -1,9 +0,0 @@ -## ActivityPub - -_microblog.pub_ implements an [ActivityPub](http://activitypub.rocks/) server, it implements both the client to server API and the federated server to server API. - -Activities are verified using HTTP Signatures or by fetching the content on the remote server directly. - -WebFinger is also required. - -TODO diff --git a/docs/api.md b/docs/api.md deleted file mode 100644 index 90e2321..0000000 --- a/docs/api.md +++ /dev/null @@ -1,216 +0,0 @@ -## API - -Your admin API key can be found at `config/admin_api_key.key`. - -## ActivityPub API - -### GET / - -Returns the actor profile, with links to all the "standard" collections. - -### GET /tags/:tag - -Special collection that reference notes with the given tag. - -### GET /stream - -Special collection that returns the stream/inbox as displayed in the UI. - -## User API - -The user API is used by the admin UI (and requires a CSRF token when used with a regular user session), but it can also be accessed with an API key. - -All the examples are using [HTTPie](https://httpie.org/). - -### POST /api/note/delete{?id} - -Deletes the given note `id` (the note must from the instance outbox). - -Answers a **201** (Created) status code. - -You can pass the `id` via JSON, form data or query argument. - -#### Example - -```shell -$ http POST https://microblog.pub/api/note/delete Authorization:'Bearer <token>' id=http://microblob.pub/outbox/<note_id>/activity -``` - -#### Response - -```json -{ - "activity": "https://microblog.pub/outbox/<delete_id>" -} -``` - -### POST /api/note/pin{?id} - -Adds the given note `id` (the note must from the instance outbox) to the featured collection (and pins it on the homepage). - -Answers a **201** (Created) status code. - -You can pass the `id` via JSON, form data or query argument. - -#### Example - -```shell -$ http POST https://microblog.pub/api/note/pin Authorization:'Bearer <token>' id=http://microblob.pub/outbox/<note_id>/activity -``` - -#### Response - -```json -{ - "pinned": true -} -``` - -### POST /api/note/unpin{?id} - -Removes the given note `id` (the note must from the instance outbox) from the featured collection (and un-pins it). - -Answers a **201** (Created) status code. - -You can pass the `id` via JSON, form data or query argument. 
- -#### Example - -```shell -$ http POST https://microblog.pub/api/note/unpin Authorization:'Bearer <token>' id=http://microblob.pub/outbox/<note_id>/activity -``` - -#### Response - -```json -{ - "pinned": false -} -``` - -### POST /api/like{?id} - -Likes the given activity. - -Answers a **201** (Created) status code. - -You can pass the `id` via JSON, form data or query argument. - -#### Example - -```shell -$ http POST https://microblog.pub/api/like Authorization:'Bearer <token>' id=http://activity-iri.tld -``` - -#### Response - -```json -{ - "activity": "https://microblog.pub/outbox/<like_id>" -} -``` - -### POST /api/boost{?id} - -Boosts/Announces the given activity. - -Answers a **201** (Created) status code. - -You can pass the `id` via JSON, form data or query argument. - -#### Example - -```shell -$ http POST https://microblog.pub/api/boost Authorization:'Bearer <token>' id=http://activity-iri.tld -``` - -#### Response - -```json -{ - "activity": "https://microblog.pub/outbox/<announce_id>" -} -``` - -### POST /api/block{?actor} - -Blocks the given actor, all activities from this actor will be dropped after that. - -Answers a **201** (Created) status code. - -You can pass the `id` via JSON, form data or query argument. - -#### Example - -```shell -$ http POST https://microblog.pub/api/block Authorization:'Bearer <token>' actor=http://actor-iri.tld/ -``` - -#### Response - -```json -{ - "activity": "https://microblog.pub/outbox/<block_id>" -} -``` - -### POST /api/follow{?actor} - -Follows the given actor. - -Answers a **201** (Created) status code. - -You can pass the `id` via JSON, form data or query argument. - -#### Example - -```shell -$ http POST https://microblog.pub/api/follow Authorization:'Bearer <token>' actor=http://actor-iri.tld/ -``` - -#### Response - -```json -{ - "activity": "https://microblog.pub/outbox/<follow_id>" -} -``` - -### POST /api/new_note{?content,reply} - -Creates a new note. `reply` is the IRI of the "replied" note if any. - -Answers a **201** (Created) status code. - -You can pass the `content` and `reply` via JSON, form data or query argument. - -#### Example - -```shell -$ http POST https://microblog.pub/api/new_note Authorization:'Bearer <token>' content=hello -``` - -#### Response - -```json -{ - "activity": "https://microblog.pub/outbox/<create_id>" -} -``` - - -### GET /api/stream - - -#### Example - -```shell -$ http GET https://microblog.pub/api/stream Authorization:'Bearer <token>' -``` - -#### Response - -```json -``` - - diff --git a/docs/developer_guide.md b/docs/developer_guide.md new file mode 100644 index 0000000..ef777ba --- /dev/null +++ b/docs/developer_guide.md @@ -0,0 +1,81 @@ +# Developer's guide + +This guide assumes you have some knowledge of [ActivityPub](https://activitypub.rocks/). + +[TOC] + +## Architecture + +Microblog.pub is a "modern" Python application with "old-school" server-rendered templates. + + - [Poetry](https://python-poetry.org/) is used for dependency management. + - Most of the code is asynchronous, using [asyncio](https://docs.python.org/3/library/asyncio.html). + - SQLite3 for data storage + +The server has 3 components: + + - The web server (powered by [FastAPI](https://fastapi.tiangolo.com/) and [Jinja2](https://jinja.palletsprojects.com/en/3.1.x/) templates) + - One process that takes care of sending "outgoing activities" + - One process that takes care of processing "incoming activities" + +### Tasks + +The project uses [Invoke](https://www.pyinvoke.org/) to manage tasks (a Python powered Makefile). 
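+
+For illustration, an Invoke task is just a decorated Python function (a minimal sketch; `hello` is a hypothetical task, not one the project defines):
+
+```python
+from invoke import task
+
+
+@task
+def hello(ctx):
+    # Tasks receive a context object they can use to run shell commands.
+    ctx.run("echo hello from an invoke task")
+```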
+
+You can find the task definitions in `tasks.py` and list the tasks using:
+
+```bash
+inv -l
+```
+
+### Media storage
+
+The uploads are stored in the `data/` directory, using a simple content-addressed storage system (the hash of the file contents is used as the blob filename).
+File metadata is stored in the database.
+
+## Installation
+
+Running a local version requires:
+
+ - Python 3.10+
+ - SQLite 3.35+
+
+You can follow the [Python developer version of the install instructions](https://docs.microblog.pub/installing.html#python-developer-edition).
+
+## Documentation
+
+The documentation is managed as Markdown files in `docs/` and the online documentation is built using a homegrown Python script (`scripts/build_docs.py`).
+
+You can build the documentation locally by running:
+
+```bash
+inv build-docs
+```
+
+And check out the result by starting a static server using the Python standard library:
+
+```bash
+cd docs/dist
+python -m http.server 8001
+```
+
+## Contributing
+
+Contributions/patches are welcome, but please start a discussion in a [ticket](https://todo.sr.ht/~tsileo/microblog.pub) or a [thread in the mailing list](https://lists.sr.ht/~tsileo/microblog.pub-devel) before working on anything consequential.
+
+### Patches
+
+Please ensure your code passes the code quality checks:
+
+```bash
+inv autoformat
+inv lint
+```
+
+And that the test suite passes:
+
+```bash
+inv tests
+```
+
+Please also consider adding new test cases if needed.
diff --git a/docs/head.html b/docs/head.html
deleted file mode 100644
index 86fc9c4..0000000
--- a/docs/head.html
+++ /dev/null
@@ -1,18 +0,0 @@
-<style>
-body {
-  background: #eee;
-  color: #111;
-}
-code {
-  color: #111;
-}
-.purple, a {
-  color: #1d781d;
-}
-h1 { font-weight: bold; }
-pre {
-  background: #e6e6e6;
-  padding: 10px;
-  overflow: auto;
-}
-</style>
diff --git a/docs/install.md b/docs/install.md
new file mode 100644
index 0000000..7912835
--- /dev/null
+++ b/docs/install.md
@@ -0,0 +1,262 @@
+# Installing
+
+[TOC]
+
+## Docker edition
+
+Assuming Docker and [Docker Compose](https://docs.docker.com/compose/install/) are already installed.
+
+For now, there's no image published on Docker Hub, which means you will have to build the image locally.
+
+Clone the repository, replacing `your-domain.tld` with your own domain.
+
+Note that if you want to serve static assets via your reverse proxy (like nginx), clone it in a place
+where it is accessible by your reverse proxy user.
+
+```bash
+git clone https://git.sr.ht/~tsileo/microblog.pub your-domain.tld
+```
+
+Build the Docker image locally.
+
+```bash
+make build
+```
+
+Run the configuration wizard.
+
+```bash
+make config
+```
+
+Update `data/profile.toml` and add this line in order to process headers from the reverse proxy:
+
+```toml
+trusted_hosts = ["*"]
+```
+
+Start the app with Docker Compose; it will listen on port 8000 by default.
+The port can be tweaked in the `docker-compose.yml` file.
+
+```bash
+docker compose up -d
+```
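+
+At this point you can check that the app answers locally (a minimal sanity check, assuming the default port 8000 from `docker-compose.yml`):
+
+```python
+# Fetch the homepage and print the HTTP status code.
+import urllib.request
+
+with urllib.request.urlopen("http://localhost:8000") as resp:
+    print(resp.status)  # expect 200 once the server is up
+```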
+
+Set up a reverse proxy (see the [Reverse Proxy section](/installing.html#reverse-proxy)).
+
+### Updating
+
+To update microblogpub, pull the latest changes, rebuild the Docker image, and restart the process with `docker compose`.
+
+```bash
+git pull
+make build
+docker compose stop
+docker compose up -d
+```
+
+As you probably already know, Docker can (and will) eat a lot of disk space; when updating, you should [prune old images](https://docs.docker.com/config/pruning/#prune-images) from time to time:
+
+```bash
+docker image prune -a --filter "until=24h"
+```
+
+## Python developer edition
+
+Assuming you have a working **Python 3.10+** environment.
+
+Set up [Poetry](https://python-poetry.org/docs/master/#installing-with-the-official-installer).
+
+```bash
+curl -sSL https://install.python-poetry.org | python3 -
+```
+
+Clone the repository.
+
+```bash
+git clone https://git.sr.ht/~tsileo/microblog.pub testing.microblog.pub
+```
+
+Install the dependencies.
+
+```bash
+poetry install
+```
+
+Set up the config.
+
+```bash
+poetry run inv configuration-wizard
+```
+
+Set up the database.
+
+```bash
+poetry run inv migrate-db
+```
+
+Grab your virtualenv path.
+
+```bash
+poetry env info
+```
+
+Run the server and worker processes with supervisord.
+
+```bash
+VENV_DIR=/home/ubuntu/.cache/pypoetry/virtualenvs/microblogpub-chx-y1oE-py3.10 poetry run supervisord -c misc/supervisord.conf -n
+```
+
+Set up a reverse proxy (see the next section).
+
+### Updating
+
+To update microblogpub locally, pull the remote changes and run the `update` task to regenerate the CSS and run any DB migrations.
+
+```bash
+git pull
+poetry run inv update
+```
+
+## Reverse proxy
+
+You will also want to set up a reverse proxy like NGINX; see the [uvicorn documentation](https://www.uvicorn.org/deployment/#running-behind-nginx).
+
+If you don't have a reverse proxy set up yet, [NGINX + certbot](https://www.nginx.com/blog/using-free-ssltls-certificates-from-lets-encrypt-with-nginx/) is recommended.
+
+```nginx
+server {
+    client_max_body_size 4G;
+
+    location / {
+      proxy_set_header Host $http_host;
+      proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
+      proxy_set_header X-Forwarded-Proto $scheme;
+      proxy_set_header Upgrade $http_upgrade;
+      proxy_set_header Connection $connection_upgrade;
+      proxy_redirect off;
+      proxy_buffering off;
+      proxy_pass http://localhost:8000;
+    }
+
+    # [...]
+}
+
+# This should be outside the `server` block
+map $http_upgrade $connection_upgrade {
+    default upgrade;
+    '' close;
+}
+```
+
+Optionally, you can serve static files using NGINX directly, with an additional `location` block.
+This will require the NGINX user to have access to the `static/` directory.
+
+```nginx
+server {
+    # [...]
+
+    location / {
+        # [...]
+    }
+
+    location /static {
+        # path for static files
+        rewrite ^/static/(.*) /$1 break;
+        root /path/to/your-domain.tld/app/static/;
+        expires 1y;
+    }
+
+    # [...]
+}
+```
+
+### NGINX config tips
+
+Enable HTTP/2 (which is disabled by default):
+
+```nginx
+server {
+    # [...]
+    listen [::]:443 ssl http2;
+}
+```
+
+Tweak `/etc/nginx/nginx.conf` and add gzip compression for ActivityPub responses:
+
+```nginx
+http {
+    # [...]
+    gzip_types text/plain text/css application/json application/javascript application/activity+json application/octet-stream;
+}
+```
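+
+Once the reverse proxy is in place, you can verify that ActivityPub requests reach the app (a minimal sketch using only the standard library; replace `your-domain.tld` with your actual domain):
+
+```python
+# Fetch the actor document served at the root URL via content negotiation.
+import json
+import urllib.request
+
+req = urllib.request.Request(
+    "https://your-domain.tld",
+    headers={"Accept": "application/activity+json"},
+)
+with urllib.request.urlopen(req) as resp:
+    actor = json.load(resp)
+
+print(actor["id"])  # should print your ActivityPub actor ID
+```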
+
+
+## (Advanced) Running on a subdomain
+
+It is possible to run microblogpub on a subdomain (`sub.domain.tld`) while being reachable from the root domain (`domain.tld`) using the `name@domain.tld` handle.
+
+This requires forwarding/proxying requests from the root domain to the subdomain, for example using NGINX:
+
+```nginx
+location /.well-known/webfinger {
+    add_header Access-Control-Allow-Origin '*';
+    return 301 https://sub.domain.tld$request_uri;
+}
+```
+
+And updating `data/profile.toml` to specify the root domain as the webfinger domain:
+
+```toml
+webfinger_domain = "domain.tld"
+```
+
+Once configured correctly, people will be able to follow you using `name@domain.tld`, while using `sub.domain.tld` for the web interface (you can verify the setup with the WebFinger check at the end of this page).
+
+
+## (Advanced) Running from a subpath
+
+It is possible to configure microblogpub to run from a subpath.
+To achieve this, do the following configuration _between_ the config and start steps,
+i.e. _after_ you run `make config` or `poetry run inv configuration-wizard`,
+but _before_ you run `docker compose up` or `poetry run supervisord`.
+Changing these settings on an instance that already has posts or has been seen by other instances will likely break links to those posts and federation (i.e. links to your instance, posts, and profile from other instances).
+
+The following steps explain how to configure the instance to be available at `https://example.com/subdir`.
+Change them to match your actual domain and subdir.
+
+* Edit the `data/profile.toml` file and add this line:
+
+      id = "https://example.com/subdir"
+
+* Edit the `misc/*-supervisord.conf` file which is relevant to you (it depends on how you start microblogpub - if in doubt, make the same change in all of them): in the `[program:uvicorn]` section, in the line which starts with `command`, add this argument at the very end: ` --root-path /subdir`
+
+The above two steps are enough to configure microblogpub.
+Next, you also need to configure the reverse proxy.
+It might differ slightly if you plan to have other services running on the same domain, but for the [NGINX config shown above](#reverse-proxy), the following changes are enough:
+
+* Add the subdir to the location, so the location block starts like this:
+
+      location /subdir {
+
+* Add `/` at the end of the `proxy_pass` directive, like this:
+
+      proxy_pass http://localhost:8000/;
+
+These two changes will instruct NGINX that requests sent to `https://example.com/subdir/...` should be forwarded to `http://localhost:8000/...`.
+
+* Inside the `server` block, add redirects for the well-known URLs (add these lines after `client_max_body_size`; remember to replace `subdir` with your actual subdir!):
+
+      location /.well-known/webfinger { return 301 /subdir$request_uri; }
+      location /.well-known/nodeinfo { return 301 /subdir$request_uri; }
+      location /.well-known/oauth-authorization-server { return 301 /subdir$request_uri; }
+
+* Optionally, [check the robots.txt from a running microblogpub instance](https://microblog.pub/robots.txt) and integrate it into the robots.txt file at the root of your server - remember to prepend `subdir` to the URLs, so for example `Disallow: /admin` becomes `Disallow: /subdir/admin`.
+
+## YunoHost edition
+
+[YunoHost](https://yunohost.org/) support is available (although it is not an official package for now): <https://git.sr.ht/~tsileo/microblog.pub_ynh>.
+
+## Available tutorials/guides
+
+ - [Opalstack](https://community.opalstack.com/d/1055-howto-install-and-run-microblogpub-on-opalstack), thanks to [@defulmere@mastodon.social](https://mastodon.online/@defulmere).
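+
+## Checking WebFinger
+
+Whichever edition you installed (and especially with the subdomain or subpath setups above), you can confirm that WebFinger resolution works end to end (a minimal sketch, not an official task; replace the handle with your own):
+
+```python
+# Resolve a handle the way other servers do: query the root domain's
+# /.well-known/webfinger endpoint (urllib follows any 301 redirect).
+import json
+import urllib.request
+
+handle = "name@domain.tld"  # hypothetical handle
+_, domain = handle.split("@")
+url = f"https://{domain}/.well-known/webfinger?resource=acct:{handle}"
+with urllib.request.urlopen(url) as resp:
+    data = json.load(resp)
+
+print(data["subject"])  # expect "acct:name@domain.tld"
+```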
diff --git a/docs/static/codehilite.css b/docs/static/codehilite.css new file mode 100644 index 0000000..9a05b3a --- /dev/null +++ b/docs/static/codehilite.css @@ -0,0 +1,74 @@ +pre { line-height: 125%; } +td.linenos .normal { color: inherit; background-color: transparent; padding-left: 5px; padding-right: 5px; } +span.linenos { color: inherit; background-color: transparent; padding-left: 5px; padding-right: 5px; } +td.linenos .special { color: #000000; background-color: #ffffc0; padding-left: 5px; padding-right: 5px; } +span.linenos.special { color: #000000; background-color: #ffffc0; padding-left: 5px; padding-right: 5px; } +.codehilite .hll { background-color: #ffffcc } +.codehilite { background: #f8f8f8; } +.codehilite .c { color: #3D7B7B; font-style: italic } /* Comment */ +.codehilite .err { border: 1px solid #FF0000 } /* Error */ +.codehilite .k { color: #008000; font-weight: bold } /* Keyword */ +.codehilite .o { color: #666666 } /* Operator */ +.codehilite .ch { color: #3D7B7B; font-style: italic } /* Comment.Hashbang */ +.codehilite .cm { color: #3D7B7B; font-style: italic } /* Comment.Multiline */ +.codehilite .cp { color: #9C6500 } /* Comment.Preproc */ +.codehilite .cpf { color: #3D7B7B; font-style: italic } /* Comment.PreprocFile */ +.codehilite .c1 { color: #3D7B7B; font-style: italic } /* Comment.Single */ +.codehilite .cs { color: #3D7B7B; font-style: italic } /* Comment.Special */ +.codehilite .gd { color: #A00000 } /* Generic.Deleted */ +.codehilite .ge { font-style: italic } /* Generic.Emph */ +.codehilite .gr { color: #E40000 } /* Generic.Error */ +.codehilite .gh { color: #000080; font-weight: bold } /* Generic.Heading */ +.codehilite .gi { color: #008400 } /* Generic.Inserted */ +.codehilite .go { color: #717171 } /* Generic.Output */ +.codehilite .gp { color: #000080; font-weight: bold } /* Generic.Prompt */ +.codehilite .gs { font-weight: bold } /* Generic.Strong */ +.codehilite .gu { color: #800080; font-weight: bold } /* Generic.Subheading */ +.codehilite .gt { color: #0044DD } /* Generic.Traceback */ +.codehilite .kc { color: #008000; font-weight: bold } /* Keyword.Constant */ +.codehilite .kd { color: #008000; font-weight: bold } /* Keyword.Declaration */ +.codehilite .kn { color: #008000; font-weight: bold } /* Keyword.Namespace */ +.codehilite .kp { color: #008000 } /* Keyword.Pseudo */ +.codehilite .kr { color: #008000; font-weight: bold } /* Keyword.Reserved */ +.codehilite .kt { color: #B00040 } /* Keyword.Type */ +.codehilite .m { color: #666666 } /* Literal.Number */ +.codehilite .s { color: #BA2121 } /* Literal.String */ +.codehilite .na { color: #687822 } /* Name.Attribute */ +.codehilite .nb { color: #008000 } /* Name.Builtin */ +.codehilite .nc { color: #0000FF; font-weight: bold } /* Name.Class */ +.codehilite .no { color: #880000 } /* Name.Constant */ +.codehilite .nd { color: #AA22FF } /* Name.Decorator */ +.codehilite .ni { color: #717171; font-weight: bold } /* Name.Entity */ +.codehilite .ne { color: #CB3F38; font-weight: bold } /* Name.Exception */ +.codehilite .nf { color: #0000FF } /* Name.Function */ +.codehilite .nl { color: #767600 } /* Name.Label */ +.codehilite .nn { color: #0000FF; font-weight: bold } /* Name.Namespace */ +.codehilite .nt { color: #008000; font-weight: bold } /* Name.Tag */ +.codehilite .nv { color: #19177C } /* Name.Variable */ +.codehilite .ow { color: #AA22FF; font-weight: bold } /* Operator.Word */ +.codehilite .w { color: #bbbbbb } /* Text.Whitespace */ +.codehilite .mb { color: #666666 } /* Literal.Number.Bin */ 
+.codehilite .mf { color: #666666 } /* Literal.Number.Float */
+.codehilite .mh { color: #666666 } /* Literal.Number.Hex */
+.codehilite .mi { color: #666666 } /* Literal.Number.Integer */
+.codehilite .mo { color: #666666 } /* Literal.Number.Oct */
+.codehilite .sa { color: #BA2121 } /* Literal.String.Affix */
+.codehilite .sb { color: #BA2121 } /* Literal.String.Backtick */
+.codehilite .sc { color: #BA2121 } /* Literal.String.Char */
+.codehilite .dl { color: #BA2121 } /* Literal.String.Delimiter */
+.codehilite .sd { color: #BA2121; font-style: italic } /* Literal.String.Doc */
+.codehilite .s2 { color: #BA2121 } /* Literal.String.Double */
+.codehilite .se { color: #AA5D1F; font-weight: bold } /* Literal.String.Escape */
+.codehilite .sh { color: #BA2121 } /* Literal.String.Heredoc */
+.codehilite .si { color: #A45A77; font-weight: bold } /* Literal.String.Interpol */
+.codehilite .sx { color: #008000 } /* Literal.String.Other */
+.codehilite .sr { color: #A45A77 } /* Literal.String.Regex */
+.codehilite .s1 { color: #BA2121 } /* Literal.String.Single */
+.codehilite .ss { color: #19177C } /* Literal.String.Symbol */
+.codehilite .bp { color: #008000 } /* Name.Builtin.Pseudo */
+.codehilite .fm { color: #0000FF } /* Name.Function.Magic */
+.codehilite .vc { color: #19177C } /* Name.Variable.Class */
+.codehilite .vg { color: #19177C } /* Name.Variable.Global */
+.codehilite .vi { color: #19177C } /* Name.Variable.Instance */
+.codehilite .vm { color: #19177C } /* Name.Variable.Magic */
+.codehilite .il { color: #666666 } /* Literal.Number.Integer.Long */
diff --git a/docs/static/favicon.ico b/docs/static/favicon.ico
new file mode 100644
index 0000000..19ac509
Binary files /dev/null and b/docs/static/favicon.ico differ
diff --git a/docs/templates/layout.html b/docs/templates/layout.html
new file mode 100644
index 0000000..07845e4
--- /dev/null
+++ b/docs/templates/layout.html
@@ -0,0 +1,109 @@
+<!DOCTYPE HTML>
+<html lang="en">
+<head>
+<title>microblog.pub - A self-hosted, single-user, ActivityPub powered microblog.</title>
+<!-- [...] (meta tags and stylesheet links) -->
+</head>
+<body>
+<div id="main">
+<header>
+<h1>microblog.pub</h1>
+<p>A self-hosted, single-user, ActivityPub powered microblog.</p>
+</header>
+<nav><!-- [...] (documentation navigation links) --></nav>
+<main>
+{{ content | safe }}
+</main>
+<footer>
+Last updated {{ last_updated }} for {{ version }}
+</footer>
+</div>
+</body>
+</html>
diff --git a/docs/user_guide.md b/docs/user_guide.md
new file mode 100644
index 0000000..d76de58
--- /dev/null
+++ b/docs/user_guide.md
@@ -0,0 +1,564 @@
+# User's guide
+
+[TOC]
+
+## ActivityPub
+
+Using microblog.pub efficiently requires knowing a bit about how [ActivityPub](https://activitypub.rocks/) works.
+
+Skimming over the [Overview section of the ActivityPub specification](https://www.w3.org/TR/activitypub/#Overview) should be enough.
+
+Also, you should know that the **Fediverse** is a common name used to describe all the interconnected/federated instances of servers supporting ActivityPub (like Mastodon, Pleroma, PeerTube, PixelFed...).
+
+## Configuration
+
+### Profile
+
+Your initial profile configuration is generated via the setup wizard.
+
+You can manually edit the configuration file stored in `data/profile.toml` ([TOML](https://toml.io/en/)); note that the following config items cannot be updated (without breaking federation):
+
+ - `domain`
+ - `username`
+
+as these two config items define your ActivityPub handle `@handle@domain`.
+
+You can tweak your profile by adjusting these items:
+
+ - `name`: The name shown with your profile.
+ - `summary`: The summary or 'bio' part of your profile, written in Markdown.
+ - `icon_url`: Your profile image or avatar.
+ - `image_url`: This provides a 'header' or 'banner' image. Note that it is not shown by the default Microblog.pub templates. It will be used by Mastodon (which uses a 3:1 ratio image) and Pleroma. Pixelfed and PeerTube, for example, don't show these images by default.
+
+Whenever one of these config items is updated, an `Update` activity will be sent to all known servers to update your remote profile.
+
+The server will need to be restarted for the changes to take effect.
+
+Before restarting the server, you can ensure you haven't made any mistakes by running the [configuration checking task](/user_guide.html#configuration-checking).
+
+Note that currently `image_url` is not used anywhere in microblog.pub itself, but other clients/servers do occasionally use it when showing remote profiles as a background image.
+Also, this image _can_ be used in microblog.pub - just add this (assuming the `local_actor` template variable):
+
+```html
+<img src="{{ local_actor.image_url }}">
+```
+
+to an appropriate place of your template (most likely, `header.html`).
+For more information, see the section about [custom templates](/user_guide.html#custom-templates) further in this document.
+
+### Profile metadata
+
+You can add metadata to your profile with the `metadata` config item.
+
+Markdown is supported in the `value` field.
+
+Be aware that most other software, like Mastodon, will limit the number of key/value pairs to 4.
+
+```toml
+metadata = [
+    {key = "Documentation", value = "[https://docs.microblog.pub](https://docs.microblog.pub)"},
+    {key = "Source code", value = "[https://sr.ht/~tsileo/microblog.pub/](https://sr.ht/~tsileo/microblog.pub/)"},
+]
+```
+
+### Manually approving followers
+
+If you wish to manually approve followers, add this config item to `profile.toml`:
+
+```toml
+manually_approves_followers = true
+```
+
+The default value is `false`.
+
+### Hiding followers
+
+If you wish to hide your followers, add this config item to `profile.toml`:
+
+```toml
+hides_followers = true
+```
+
+The default value is `false`.
+
+### Hiding who you are following
+
+If you wish to hide who you are following, add this config item to `profile.toml`:
+
+```toml
+hides_following = true
+```
+
+The default value is `false`.
+
+### Privacy replace
+
+You can define domains to be rewritten to more "privacy friendly" alternatives, like [Invidious](https://invidious.io/)
+or [Nitter](https://nitter.net/about).
+
+To do so, add these extra config items. This is a sample config that rewrites URLs for Twitter, YouTube, Reddit and Medium:
+
+```toml
+privacy_replace = [
+    {domain = "youtube.com", replace_by = "yewtu.be"},
+    {domain = "youtu.be", replace_by = "yewtu.be"},
+    {domain = "twitter.com", replace_by = "nitter.fdn.fr"},
+    {domain = "medium.com", replace_by = "scribe.rip"},
+    {domain = "reddit.com", replace_by = "teddit.net"},
+]
+```
+
+### Disabling certain notification types
+
+All notifications are enabled by default.
+
+You can disable specific notifications by adding them to the `disabled_notifications` list.
+
+This example disables likes and shares notifications:
+
+```toml
+disabled_notifications = ["like", "announce"]
+```
+
+#### Available notification types
+
+ - `new_follower`
+ - `rejected_follower`
+ - `unfollow`
+ - `follow_request_accepted`
+ - `follow_request_rejected`
+ - `move`
+ - `like`
+ - `undo_like`
+ - `announce`
+ - `undo_announce`
+ - `mention`
+ - `new_webmention`
+ - `updated_webmention`
+ - `deleted_webmention`
+ - `blocked`
+ - `unblocked`
+ - `block`
+ - `unblock`
+
+### Customization
+
+#### Default emoji
+
+If you don't like cats, or need more emoji, you can add your favorite emoji in `profile.toml` and they will replace the default ones:
+
+```toml
+emoji = "🙂🐹📌"
+```
+
+You can copy/paste them from [getemoji.com](https://getemoji.com/).
+
+#### Custom emoji
+
+You can add custom emoji in the `data/custom_emoji` directory and they will be picked up automatically.
+Do not use exotic characters in filenames - only letters, numbers, and the underscore symbol `_` are allowed.
+
+#### Custom CSS
+
+The CSS is written with [SCSS](https://sass-lang.com/documentation/syntax).
+
+You can override colors by editing `data/_theme.scss`:
+
+```scss
+$primary-color: #e14eea;
+$secondary-color: #32cd32;
+```
+
+See `app/scss/main.scss` to see what variables can be overridden.
+
+You will need to [recompile the CSS](#recompiling-css-files) after making any CSS changes (for the actual CSS files to be updated) and restart microblog.pub (for the CSS link in HTML documents to be updated with a new checksum - otherwise, browsers that downloaded the old CSS will keep using it).
+
+#### Custom favicon
+
+By default, the microblog.pub favicon is a square of the `$primary-color` CSS color (see the above section on how to redefine CSS colors).
+You can change it to any icon you like - just save the desired file as `data/favicon.ico`.
+After that, run the "[recompile CSS](#recompiling-css-files)" task to copy it to `app/static/favicon.ico`.
+
+#### Custom templates
+
+If you'd like to customize your instance's theme beyond CSS, you can modify the app's HTML by placing templates in `data/templates` which override the defaults in `app/templates`.
+
+Templates are written using the [Jinja](https://jinja.palletsprojects.com/en/latest/templates/) templating language.
+Moreover, `utils.html` has scoped blocks around the body of every macro.
+This allows macros to be overridden individually in `data/templates/utils.html`, without copying the whole file.
+For example, to only override the display of a specific actor's name/icon, you can create a `data/templates/utils.html` file with the following content:
+
+```jinja
+{% extends "app/utils.html" %}
+
+{% block display_actor %}
+  {% if actor.ap_id == "https://me.example.com" %}
+    {# custom display for this actor goes here #}
+  {% else %}
+    {{ super() }}
+  {% endif %}
+{% endblock %}
+```
+
+#### Custom Content Security Policy (CSP)
+
+You can override the default Content Security Policy by adding a line in `data/profile.toml`:
+
+```toml
+custom_content_security_policy = "default-src 'self'; style-src 'self' 'sha256-{HIGHLIGHT_CSS_HASH}'; frame-ancestors 'none'; base-uri 'self'; form-action 'self';"
+```
+
+This example outputs the default CSP; note that `{HIGHLIGHT_CSS_HASH}` will be dynamically replaced with the correct value (the hash of the CSS needed for syntax highlighting).
+
+#### Code highlighting theme
+
+You can switch to one of the [styles supported by Pygments](https://pygments.org/styles/) by adding a line in `data/profile.toml`:
+
+```toml
+code_highlighting_theme = "solarized-dark"
+```
+
+### Blocking servers
+
+In addition to blocking "single actors" via the admin interface, you can also prevent any communication with entire servers.
+
+Add a `blocked_servers` config item into `profile.toml`.
+
+The `reason` field is just there to help you document/remember why a server was blocked.
+
+You should unfollow any account from a server before blocking it.
+
+```toml
+blocked_servers = [
+    {hostname = "bad.tld", reason = "Bot spam"},
+]
+```
+
+## Public website
+
+Public notes will be visible on the homepage.
+
+Only the last 20 followers/follows you have will be shown on the public website.
+
+And only the last 20 interactions (likes/shares/webmentions) will be displayed, to keep things simple/clean.
+
+## Admin section
+
+You can log in to the admin section by clicking on the `Admin` link in the footer or by visiting `https://yourdomain.tld/admin/login`.
+The password is the one set during the initial configuration.
+
+### Lookup
+
+The `Lookup` section allows you to interact with any remote objects/content on the Fediverse.
+
+The lookup supports:
+
+ - a profile page, like `https://testing.microblog.pub`
+ - a content page, like `https://testing.microblog.pub/o/4bccd2e31fad43a7896b5a33f0b8ded9`
+ - a username handle, like `@testing@testing.microblog.pub`
+ - an ActivityPub ID, like `https://testing.microblog.pub/o/4bccd2e31fad43a7896b5a33f0b8ded9`
+
+## Authoring notes
+
+Notes are authored in [Markdown](https://commonmark.org/). There is no imposed character limit.
+
+If you fill in the content warning, the note will be automatically marked as sensitive.
+
+You can add attachments/upload files.
+When attaching pictures, EXIF metadata (like GPS location) will be removed automatically before being stored.
+
+Consider marking attachments as sensitive using the checkbox if needed.
+
+### Fenced code blocks
+
+You can include code blocks in notes, using the triple backtick syntax.
+
+The code will be highlighted using [Pygments](https://pygments.org/).
+
+Example:
+
+~~~
+Hello
+
+```python
+print("I will be highlighted")
+```
+
+~~~
+
+## Webmentions
+
+Public notes that link to a "Webmention-compatible" website will trigger an outgoing webmention.
+Most websites that support Webmention will display your profile on the mentioned page.
+
+## Interactions
+
+microblog.pub supports the most common interactions found on the Fediverse.
+
+### Shares
+
+Sharing (or announcing) an object will relay it to your followers and notify the author.
+It will also be displayed on the homepage.
+
+Most receiving servers will increment the number of shares.
+
+Receiving a share will trigger a notification and increment the shares counter on the object, and the actor's avatar will be displayed on the object permalink.
+
+### Likes
+
+Liking an object will notify the author.
+
+Unlike sharing, liked objects are not displayed on the homepage.
+
+Most receiving servers will increment the number of likes.
+
+Receiving a like will trigger a notification and increment the likes counter on the object, and the actor's avatar will be displayed on the object permalink.
+
+### Bookmarks
+
+Bookmarks allow you to like objects without notifying the author.
+
+It is basically a "private like" that allows you to easily access bookmarked objects later.
+
+It will also prevent bookmarked objects from being pruned.
+
+### Webmentions
+
+Sending webmentions to ping mentioned websites is done automatically once a public note is authored.
+
+Receiving a webmention will trigger a notification and increment the webmentions counter on the object, and the source page will be displayed on the object permalink.
+
+## Backup and restore
+
+All the data generated by the server is located in the `data/` directory:
+
+ - Configuration files
+ - Server secrets
+ - SQLite3 database
+ - Theme modifications
+ - Custom emoji
+ - Uploaded media
+
+Restoring is as easy as adding your backed-up `data/` directory into a fresh deployment.
+
+## Moving from another instance
+
+If you want to move followers from your existing account, first check your existing software's documentation to ensure account moves are supported.
+
+For Mastodon, you can look at [Moving or leaving accounts](https://docs.joinmastodon.org/user/moving/).
+
+If you wish to move **to** another instance, see [Moving to another instance](/user_guide.html#moving-to-another-instance).
+
+First you need to grab the "ActivityPub actor URL" for your existing account:
+
+### Python edition
+
+```bash
+# For a Python install
+poetry run inv webfinger username@instance-you-want-to-move-from.tld
+```
+
+Then edit the config (see below).
+
+### Docker edition
+
+```bash
+# For a Docker install
+make account=username@instance-you-want-to-move-from.tld webfinger
+```
+
+Then edit the config (see below).
+
+### Edit the config
+
+Add a reference to your old/existing account in `profile.toml`:
+
+```toml
+also_known_as = "https://instance-you-want-to-move-from.tld/users/username"
+```
+
+Restart the server, and you should be able to complete the move from your existing account.
+
+Note that if you already have a redirect in place on Mastodon, you may have to remove it before initiating the migration.
+
+## Import follows from Mastodon
+
+You can import the list of follows/following accounts from Mastodon.
+
+It requires downloading the "Follows" CSV file from your Mastodon instance via "Settings" / "Import and export" / "Data export".
+
+Then you need to run the import task:
+
+### Python edition
+
+```bash
+# For a Python install
+poetry run inv import-mastodon-following-accounts following_accounts.csv
+```
+
+### Docker edition
+
+```bash
+# For a Docker install
+make path=following_accounts.csv import-mastodon-following-accounts
+```
+
+## Tasks
+
+### Configuration checking
+
+You can confirm that your configuration file (`data/profile.toml`) is valid using the `check-config` task.
+
+#### Python edition
+
+```bash
+poetry run inv check-config
+```
+
+#### Docker edition
+
+```bash
+make check-config
+```
+
+### Recompiling CSS files
+
+You can ensure your custom theme is valid by recompiling the CSS manually using the `compile-scss` task.
+
+#### Python edition
+
+```bash
+poetry run inv compile-scss
+```
+
+#### Docker edition
+
+```bash
+make compile-scss
+```
+
+
+### Password reset
+
+If you have lost your password, you can generate a new one using the `reset-password` task.
+
+#### Python edition
+
+```bash
+# shutdown supervisord
+poetry run inv reset-password
+# edit data/profile.toml
+# restart supervisord
+```
+
+#### Docker edition
+
+```bash
+docker compose stop
+make reset-password
+# edit data/profile.toml
+docker compose up -d
+```
+
+### Pruning old data
+
+You should prune old data from time to time to free disk space.
+
+The default retention for the inbox data is 15 days.
+
+It's configurable via the `inbox_retention_days` config item in `profile.toml`:
+
+```toml
+inbox_retention_days = 30
+```
+
+Data owned by the server will never be deleted (at least for now), nor will:
+
+ - bookmarked objects
+ - liked objects
+ - shared objects
+ - inbox objects mentioning the local actor
+ - objects related to local conversations (i.e. direct messages, replies)
+
+For now, it's recommended to make a backup before running the task in case it deletes unwanted data.
+
+You should shut down the server before running the task.
+
+#### Python edition
+
+```bash
+# shutdown supervisord
+cp -r data/microblogpub.db data/microblogpub.db.bak
+poetry run inv prune-old-data
+# relaunch supervisord and ensure it works as expected
+rm data/microblogpub.db.bak
+```
+
+#### Docker edition
+
+```bash
+docker compose stop
+cp -r data/microblogpub.db data/microblogpub.db.bak
+make prune-old-data
+docker compose up -d
+rm data/microblogpub.db.bak
+```
+
+### Moving to another instance
+
+If you want to migrate to another instance, you have the ability to move your existing followers to your new account.
+
+Your new account should reference the existing one; refer to your software's documentation (for example, [Moving or leaving accounts from the Mastodon docs](https://docs.joinmastodon.org/user/moving/)).
+
+If you wish to move **from** another instance, see [Moving from another instance](/user_guide.html#moving-from-another-instance).
+
+Execute the Move task:
+
+#### Python edition
+
+```bash
+# For a Python install
+poetry run inv move-to username@domain.tld
+```
+
+#### Docker edition
+
+```bash
+# For a Docker install
+make account=username@domain.tld move-to
+```
+
+### Deleting the instance
+
+If you want to delete your instance, you can ask other instances to delete your remote profile.
+
+Note that this is a best-effort delete as some instances may not delete your data.
+
+The command won't remove any local data; it just broadcasts account deletion messages to all known servers.
+
+After executing the command, you should let the server run until all the outgoing delete tasks are sent.
+
+Once deleted, you won't be able to use your instance anymore, but you will be able to perform a fresh re-install of any ActivityPub software.
+
+#### Python edition
+
+```bash
+# For a Python install
+poetry run inv self-destruct
+```
+
+#### Docker edition
+
+```bash
+# For a Docker install
+make self-destruct
+```
+
+## Troubleshooting
+
+If the server is not (re)starting, you can:
+
+ - [Ensure that the configuration is valid](/user_guide.html#configuration-checking).
+ - [Verify that there are no syntax errors in the custom theme by recompiling the CSS](/user_guide.html#recompiling-css-files).
+ - Look at the log files (in `data/uvicorn.log`, `data/incoming.log` and `data/outgoing.log`).
+ - If the CSS is not working, ensure your reverse proxy is serving the static files correctly.
diff --git a/misc/docker-supervisord.conf b/misc/docker-supervisord.conf
new file mode 100644
index 0000000..f10cb9a
--- /dev/null
+++ b/misc/docker-supervisord.conf
@@ -0,0 +1,29 @@
+[supervisord]
+nodaemon=true
+logfile=/dev/null
+logfile_maxbytes=0
+pidfile=data/supervisord.pid
+
+[program:uvicorn]
+command=uvicorn app.main:app --no-server-header --host 0.0.0.0
+numprocs=1
+autorestart=true
+redirect_stderr=true
+stdout_logfile=data/uvicorn.log
+stdout_logfile_maxbytes=50MB
+
+[program:incoming_worker]
+command=inv process-incoming-activities
+numprocs=1
+autorestart=true
+redirect_stderr=true
+stdout_logfile=data/incoming.log
+stdout_logfile_maxbytes=50MB
+
+[program:outgoing_worker]
+command=inv process-outgoing-activities
+numprocs=1
+autorestart=true
+redirect_stderr=true
+stdout_logfile=data/outgoing.log
+stdout_logfile_maxbytes=50MB
diff --git a/misc/docker_start.sh b/misc/docker_start.sh
new file mode 100755
index 0000000..438e87e
--- /dev/null
+++ b/misc/docker_start.sh
@@ -0,0 +1,3 @@
+#!/bin/sh
+inv update --no-update-deps
+exec supervisord -n -c misc/docker-supervisord.conf
diff --git a/misc/supervisord.conf b/misc/supervisord.conf
new file mode 100644
index 0000000..c3123c6
--- /dev/null
+++ b/misc/supervisord.conf
@@ -0,0 +1,25 @@
+[supervisord]
+
+[program:uvicorn]
+command=%(ENV_VENV_DIR)s/bin/uvicorn app.main:app --no-server-header
+numprocs=1
+autorestart=true
+redirect_stderr=true
+stdout_logfile=uvicorn.log
+stdout_logfile_maxbytes=50MB
+
+[program:incoming_worker]
+command=%(ENV_VENV_DIR)s/bin/inv process-incoming-activities
+numprocs=1
+autorestart=true
+redirect_stderr=true
+stdout_logfile=incoming_worker.log
+stdout_logfile_maxbytes=50MB
+
+[program:outgoing_worker]
+command=%(ENV_VENV_DIR)s/bin/inv process-outgoing-activities
+numprocs=1
+autorestart=true
+redirect_stderr=true
+stdout_logfile=outgoing_worker.log
+stdout_logfile_maxbytes=50MB
diff --git a/misc/ynh-supervisord.conf b/misc/ynh-supervisord.conf
new file mode 100644
index 0000000..ce19ca6
--- /dev/null
+++ b/misc/ynh-supervisord.conf
@@ -0,0 +1,26 @@
+[supervisord]
+
+[program:uvicorn]
+command=%(ENV_VENV_DIR)s/bin/uvicorn app.main:app --no-server-header
+numprocs=1
+autorestart=true
+process_name=uvicorn-%(process_num)d
+redirect_stderr=true
+stdout_logfile=%(ENV_LOG_PATH)s/uvicorn.log
+stdout_logfile_maxbytes=0
+
+[program:incoming_worker]
+command=%(ENV_VENV_DIR)s/bin/inv process-incoming-activities
+numprocs=1
+autorestart=true
+redirect_stderr=true
+stdout_logfile=%(ENV_LOG_PATH)s/incoming.log
+stdout_logfile_maxbytes=0
+
+[program:outgoing_worker]
+command=%(ENV_VENV_DIR)s/bin/inv process-outgoing-activities
+numprocs=1
+autorestart=true
+redirect_stderr=true
+stdout_logfile=%(ENV_LOG_PATH)s/outgoing.log
+stdout_logfile_maxbytes=0 diff --git a/poetry.lock b/poetry.lock new file mode 100644 index 0000000..6c1603e --- /dev/null +++ b/poetry.lock @@ -0,0 +1,2464 @@ +# This file is automatically @generated by Poetry 1.5.1 and should not be changed by hand. + +[[package]] +name = "aiosqlite" +version = "0.17.0" +description = "asyncio bridge to the standard sqlite3 module" +optional = false +python-versions = ">=3.6" +files = [ + {file = "aiosqlite-0.17.0-py3-none-any.whl", hash = "sha256:6c49dc6d3405929b1d08eeccc72306d3677503cc5e5e43771efc1e00232e8231"}, + {file = "aiosqlite-0.17.0.tar.gz", hash = "sha256:f0e6acc24bc4864149267ac82fb46dfb3be4455f99fe21df82609cc6e6baee51"}, +] + +[package.dependencies] +typing_extensions = ">=3.7.2" + +[[package]] +name = "alembic" +version = "1.11.1" +description = "A database migration tool for SQLAlchemy." +optional = false +python-versions = ">=3.7" +files = [ + {file = "alembic-1.11.1-py3-none-any.whl", hash = "sha256:dc871798a601fab38332e38d6ddb38d5e734f60034baeb8e2db5b642fccd8ab8"}, + {file = "alembic-1.11.1.tar.gz", hash = "sha256:6a810a6b012c88b33458fceb869aef09ac75d6ace5291915ba7fae44de372c01"}, +] + +[package.dependencies] +Mako = "*" +SQLAlchemy = ">=1.3.0" +typing-extensions = ">=4" + +[package.extras] +tz = ["python-dateutil"] + +[[package]] +name = "anyio" +version = "3.7.0" +description = "High level compatibility layer for multiple asynchronous event loop implementations" +optional = false +python-versions = ">=3.7" +files = [ + {file = "anyio-3.7.0-py3-none-any.whl", hash = "sha256:eddca883c4175f14df8aedce21054bfca3adb70ffe76a9f607aef9d7fa2ea7f0"}, + {file = "anyio-3.7.0.tar.gz", hash = "sha256:275d9973793619a5374e1c89a4f4ad3f4b0a5510a2b5b939444bee8f4c4d37ce"}, +] + +[package.dependencies] +exceptiongroup = {version = "*", markers = "python_version < \"3.11\""} +idna = ">=2.8" +sniffio = ">=1.1" + +[package.extras] +doc = ["Sphinx (>=6.1.0)", "packaging", "sphinx-autodoc-typehints (>=1.2.0)", "sphinx-rtd-theme", "sphinxcontrib-jquery"] +test = ["anyio[trio]", "coverage[toml] (>=4.5)", "hypothesis (>=4.0)", "mock (>=4)", "psutil (>=5.9)", "pytest (>=7.0)", "pytest-mock (>=3.6.1)", "trustme", "uvloop (>=0.17)"] +trio = ["trio (<0.22)"] + +[[package]] +name = "asgiref" +version = "3.7.2" +description = "ASGI specs, helper code, and adapters" +optional = false +python-versions = ">=3.7" +files = [ + {file = "asgiref-3.7.2-py3-none-any.whl", hash = "sha256:89b2ef2247e3b562a16eef663bc0e2e703ec6468e2fa8a5cd61cd449786d4f6e"}, + {file = "asgiref-3.7.2.tar.gz", hash = "sha256:9e0ce3aa93a819ba5b45120216b23878cf6e8525eb3848653452b4192b92afed"}, +] + +[package.dependencies] +typing-extensions = {version = ">=4", markers = "python_version < \"3.11\""} + +[package.extras] +tests = ["mypy (>=0.800)", "pytest", "pytest-asyncio"] + +[[package]] +name = "bcrypt" +version = "3.2.2" +description = "Modern password hashing for your software and your servers" +optional = false +python-versions = ">=3.6" +files = [ + {file = "bcrypt-3.2.2-cp36-abi3-macosx_10_10_universal2.whl", hash = "sha256:7180d98a96f00b1050e93f5b0f556e658605dd9f524d0b0e68ae7944673f525e"}, + {file = "bcrypt-3.2.2-cp36-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_24_aarch64.whl", hash = "sha256:61bae49580dce88095d669226d5076d0b9d927754cedbdf76c6c9f5099ad6f26"}, + {file = "bcrypt-3.2.2-cp36-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:88273d806ab3a50d06bc6a2fc7c87d737dd669b76ad955f449c43095389bc8fb"}, + {file = 
"bcrypt-3.2.2-cp36-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:6d2cb9d969bfca5bc08e45864137276e4c3d3d7de2b162171def3d188bf9d34a"}, + {file = "bcrypt-3.2.2-cp36-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2b02d6bfc6336d1094276f3f588aa1225a598e27f8e3388f4db9948cb707b521"}, + {file = "bcrypt-3.2.2-cp36-abi3-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:a2c46100e315c3a5b90fdc53e429c006c5f962529bc27e1dfd656292c20ccc40"}, + {file = "bcrypt-3.2.2-cp36-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:7d9ba2e41e330d2af4af6b1b6ec9e6128e91343d0b4afb9282e54e5508f31baa"}, + {file = "bcrypt-3.2.2-cp36-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:cd43303d6b8a165c29ec6756afd169faba9396a9472cdff753fe9f19b96ce2fa"}, + {file = "bcrypt-3.2.2-cp36-abi3-win32.whl", hash = "sha256:4e029cef560967fb0cf4a802bcf4d562d3d6b4b1bf81de5ec1abbe0f1adb027e"}, + {file = "bcrypt-3.2.2-cp36-abi3-win_amd64.whl", hash = "sha256:7ff2069240c6bbe49109fe84ca80508773a904f5a8cb960e02a977f7f519b129"}, + {file = "bcrypt-3.2.2.tar.gz", hash = "sha256:433c410c2177057705da2a9f2cd01dd157493b2a7ac14c8593a16b3dab6b6bfb"}, +] + +[package.dependencies] +cffi = ">=1.1" + +[package.extras] +tests = ["pytest (>=3.2.1,!=3.3.0)"] +typecheck = ["mypy"] + +[[package]] +name = "beautifulsoup4" +version = "4.12.2" +description = "Screen-scraping library" +optional = false +python-versions = ">=3.6.0" +files = [ + {file = "beautifulsoup4-4.12.2-py3-none-any.whl", hash = "sha256:bd2520ca0d9d7d12694a53d44ac482d181b4ec1888909b035a3dbf40d0f57d4a"}, + {file = "beautifulsoup4-4.12.2.tar.gz", hash = "sha256:492bbc69dca35d12daac71c4db1bfff0c876c00ef4a2ffacce226d4638eb72da"}, +] + +[package.dependencies] +soupsieve = ">1.2" + +[package.extras] +html5lib = ["html5lib"] +lxml = ["lxml"] + +[[package]] +name = "black" +version = "22.12.0" +description = "The uncompromising code formatter." 
+optional = false +python-versions = ">=3.7" +files = [ + {file = "black-22.12.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9eedd20838bd5d75b80c9f5487dbcb06836a43833a37846cf1d8c1cc01cef59d"}, + {file = "black-22.12.0-cp310-cp310-win_amd64.whl", hash = "sha256:159a46a4947f73387b4d83e87ea006dbb2337eab6c879620a3ba52699b1f4351"}, + {file = "black-22.12.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d30b212bffeb1e252b31dd269dfae69dd17e06d92b87ad26e23890f3efea366f"}, + {file = "black-22.12.0-cp311-cp311-win_amd64.whl", hash = "sha256:7412e75863aa5c5411886804678b7d083c7c28421210180d67dfd8cf1221e1f4"}, + {file = "black-22.12.0-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c116eed0efb9ff870ded8b62fe9f28dd61ef6e9ddd28d83d7d264a38417dcee2"}, + {file = "black-22.12.0-cp37-cp37m-win_amd64.whl", hash = "sha256:1f58cbe16dfe8c12b7434e50ff889fa479072096d79f0a7f25e4ab8e94cd8350"}, + {file = "black-22.12.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:77d86c9f3db9b1bf6761244bc0b3572a546f5fe37917a044e02f3166d5aafa7d"}, + {file = "black-22.12.0-cp38-cp38-win_amd64.whl", hash = "sha256:82d9fe8fee3401e02e79767016b4907820a7dc28d70d137eb397b92ef3cc5bfc"}, + {file = "black-22.12.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:101c69b23df9b44247bd88e1d7e90154336ac4992502d4197bdac35dd7ee3320"}, + {file = "black-22.12.0-cp39-cp39-win_amd64.whl", hash = "sha256:559c7a1ba9a006226f09e4916060982fd27334ae1998e7a38b3f33a37f7a2148"}, + {file = "black-22.12.0-py3-none-any.whl", hash = "sha256:436cc9167dd28040ad90d3b404aec22cedf24a6e4d7de221bec2730ec0c97bcf"}, + {file = "black-22.12.0.tar.gz", hash = "sha256:229351e5a18ca30f447bf724d007f890f97e13af070bb6ad4c0a441cd7596a2f"}, +] + +[package.dependencies] +click = ">=8.0.0" +mypy-extensions = ">=0.4.3" +pathspec = ">=0.9.0" +platformdirs = ">=2" +tomli = {version = ">=1.1.0", markers = "python_full_version < \"3.11.0a7\""} + +[package.extras] +colorama = ["colorama (>=0.4.3)"] +d = ["aiohttp (>=3.7.4)"] +jupyter = ["ipython (>=7.8.0)", "tokenize-rt (>=3.2.0)"] +uvloop = ["uvloop (>=0.15.2)"] + +[[package]] +name = "bleach" +version = "5.0.1" +description = "An easy safelist-based HTML-sanitizing tool." 
+optional = false +python-versions = ">=3.7" +files = [ + {file = "bleach-5.0.1-py3-none-any.whl", hash = "sha256:085f7f33c15bd408dd9b17a4ad77c577db66d76203e5984b1bd59baeee948b2a"}, + {file = "bleach-5.0.1.tar.gz", hash = "sha256:0d03255c47eb9bd2f26aa9bb7f2107732e7e8fe195ca2f64709fcf3b0a4a085c"}, +] + +[package.dependencies] +six = ">=1.9.0" +webencodings = "*" + +[package.extras] +css = ["tinycss2 (>=1.1.0,<1.2)"] +dev = ["Sphinx (==4.3.2)", "black (==22.3.0)", "build (==0.8.0)", "flake8 (==4.0.1)", "hashin (==0.17.0)", "mypy (==0.961)", "pip-tools (==6.6.2)", "pytest (==7.1.2)", "tox (==3.25.0)", "twine (==4.0.1)", "wheel (==0.37.1)"] + +[[package]] +name = "blurhash-python" +version = "1.2.0" +description = "BlurHash encoder implementation for Python" +optional = false +python-versions = "*" +files = [ + {file = "blurhash-python-1.2.0.tar.gz", hash = "sha256:4caf57039c386ef53fb17598cd90a5c3eca1c76f529c35b4d67166070b9d2de2"}, + {file = "blurhash_python-1.2.0-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:ecbdd7c4f67787e23ce906940024e64cacc872a21a5e9eb60bcd94bfdb2c5c13"}, + {file = "blurhash_python-1.2.0-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:82210d27e744a1aab3fac59f95f34b9df50aaf300cf132d72588588ce137c746"}, + {file = "blurhash_python-1.2.0-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:7631977ab489eb8d4338347e3d954aca80e2922f21a66c65b12cc1f4e130da71"}, + {file = "blurhash_python-1.2.0-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:b154189c9eb34f72696d724ffd317c6d27f1f68d47e3752b88c3a89ccc44b13f"}, + {file = "blurhash_python-1.2.0-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:c44df545af61fc8bb6a16723329a08a47f625278e17cfae6e81487c6d1903ffd"}, +] + +[package.dependencies] +cffi = "*" +Pillow = "*" +six = "*" + +[package.extras] +testing = ["pytest"] + +[[package]] +name = "boussole" +version = "2.1.0" +description = "Commandline interface to build Sass projects using libsass-python" +optional = false +python-versions = ">=3.6" +files = [ + {file = "boussole-2.1.0.tar.gz", hash = "sha256:33b7b6eb5390181fb9a74dce979d984c440e1df58f26dceebf1e5cf75f284bab"}, +] + +[package.dependencies] +click = ">=5.1" +colorama = "*" +colorlog = "*" +libsass = ">=0.19.4" +pyaml = "*" +watchdog = ">=0.8.3" + +[package.extras] +dev = ["packaging", "pytest"] +doc = ["Sphinx", "livereload", "sphinx-rtd-theme"] +quality = ["flake8", "tox", "twine"] + +[[package]] +name = "brotli" +version = "1.0.9" +description = "Python bindings for the Brotli compression library" +optional = false +python-versions = "*" +files = [ + {file = "Brotli-1.0.9-cp27-cp27m-macosx_10_9_x86_64.whl", hash = "sha256:268fe94547ba25b58ebc724680609c8ee3e5a843202e9a381f6f9c5e8bdb5c70"}, + {file = "Brotli-1.0.9-cp27-cp27m-manylinux1_i686.whl", hash = "sha256:c2415d9d082152460f2bd4e382a1e85aed233abc92db5a3880da2257dc7daf7b"}, + {file = "Brotli-1.0.9-cp27-cp27m-manylinux1_x86_64.whl", hash = "sha256:5913a1177fc36e30fcf6dc868ce23b0453952c78c04c266d3149b3d39e1410d6"}, + {file = "Brotli-1.0.9-cp27-cp27m-win32.whl", hash = "sha256:afde17ae04d90fbe53afb628f7f2d4ca022797aa093e809de5c3cf276f61bbfa"}, + {file = "Brotli-1.0.9-cp27-cp27mu-manylinux1_i686.whl", hash = "sha256:7cb81373984cc0e4682f31bc3d6be9026006d96eecd07ea49aafb06897746452"}, + {file = "Brotli-1.0.9-cp27-cp27mu-manylinux1_x86_64.whl", hash = 
"sha256:db844eb158a87ccab83e868a762ea8024ae27337fc7ddcbfcddd157f841fdfe7"}, + {file = "Brotli-1.0.9-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:9744a863b489c79a73aba014df554b0e7a0fc44ef3f8a0ef2a52919c7d155031"}, + {file = "Brotli-1.0.9-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:a72661af47119a80d82fa583b554095308d6a4c356b2a554fdc2799bc19f2a43"}, + {file = "Brotli-1.0.9-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7ee83d3e3a024a9618e5be64648d6d11c37047ac48adff25f12fa4226cf23d1c"}, + {file = "Brotli-1.0.9-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:19598ecddd8a212aedb1ffa15763dd52a388518c4550e615aed88dc3753c0f0c"}, + {file = "Brotli-1.0.9-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:44bb8ff420c1d19d91d79d8c3574b8954288bdff0273bf788954064d260d7ab0"}, + {file = "Brotli-1.0.9-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:e23281b9a08ec338469268f98f194658abfb13658ee98e2b7f85ee9dd06caa91"}, + {file = "Brotli-1.0.9-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:3496fc835370da351d37cada4cf744039616a6db7d13c430035e901443a34daa"}, + {file = "Brotli-1.0.9-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:b83bb06a0192cccf1eb8d0a28672a1b79c74c3a8a5f2619625aeb6f28b3a82bb"}, + {file = "Brotli-1.0.9-cp310-cp310-win32.whl", hash = "sha256:26d168aac4aaec9a4394221240e8a5436b5634adc3cd1cdf637f6645cecbf181"}, + {file = "Brotli-1.0.9-cp310-cp310-win_amd64.whl", hash = "sha256:622a231b08899c864eb87e85f81c75e7b9ce05b001e59bbfbf43d4a71f5f32b2"}, + {file = "Brotli-1.0.9-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:cc0283a406774f465fb45ec7efb66857c09ffefbe49ec20b7882eff6d3c86d3a"}, + {file = "Brotli-1.0.9-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:11d3283d89af7033236fa4e73ec2cbe743d4f6a81d41bd234f24bf63dde979df"}, + {file = "Brotli-1.0.9-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3c1306004d49b84bd0c4f90457c6f57ad109f5cc6067a9664e12b7b79a9948ad"}, + {file = "Brotli-1.0.9-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b1375b5d17d6145c798661b67e4ae9d5496920d9265e2f00f1c2c0b5ae91fbde"}, + {file = "Brotli-1.0.9-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:cab1b5964b39607a66adbba01f1c12df2e55ac36c81ec6ed44f2fca44178bf1a"}, + {file = "Brotli-1.0.9-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:8ed6a5b3d23ecc00ea02e1ed8e0ff9a08f4fc87a1f58a2530e71c0f48adf882f"}, + {file = "Brotli-1.0.9-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:cb02ed34557afde2d2da68194d12f5719ee96cfb2eacc886352cb73e3808fc5d"}, + {file = "Brotli-1.0.9-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:b3523f51818e8f16599613edddb1ff924eeb4b53ab7e7197f85cbc321cdca32f"}, + {file = "Brotli-1.0.9-cp311-cp311-win32.whl", hash = "sha256:ba72d37e2a924717990f4d7482e8ac88e2ef43fb95491eb6e0d124d77d2a150d"}, + {file = "Brotli-1.0.9-cp311-cp311-win_amd64.whl", hash = "sha256:3ffaadcaeafe9d30a7e4e1e97ad727e4f5610b9fa2f7551998471e3736738679"}, + {file = "Brotli-1.0.9-cp35-cp35m-macosx_10_6_intel.whl", hash = "sha256:c83aa123d56f2e060644427a882a36b3c12db93727ad7a7b9efd7d7f3e9cc2c4"}, + {file = "Brotli-1.0.9-cp35-cp35m-manylinux1_i686.whl", hash = "sha256:6b2ae9f5f67f89aade1fab0f7fd8f2832501311c363a21579d02defa844d9296"}, + {file = "Brotli-1.0.9-cp35-cp35m-manylinux1_x86_64.whl", hash = 
"sha256:68715970f16b6e92c574c30747c95cf8cf62804569647386ff032195dc89a430"}, + {file = "Brotli-1.0.9-cp35-cp35m-win32.whl", hash = "sha256:defed7ea5f218a9f2336301e6fd379f55c655bea65ba2476346340a0ce6f74a1"}, + {file = "Brotli-1.0.9-cp35-cp35m-win_amd64.whl", hash = "sha256:88c63a1b55f352b02c6ffd24b15ead9fc0e8bf781dbe070213039324922a2eea"}, + {file = "Brotli-1.0.9-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:503fa6af7da9f4b5780bb7e4cbe0c639b010f12be85d02c99452825dd0feef3f"}, + {file = "Brotli-1.0.9-cp36-cp36m-manylinux1_i686.whl", hash = "sha256:40d15c79f42e0a2c72892bf407979febd9cf91f36f495ffb333d1d04cebb34e4"}, + {file = "Brotli-1.0.9-cp36-cp36m-manylinux1_x86_64.whl", hash = "sha256:93130612b837103e15ac3f9cbacb4613f9e348b58b3aad53721d92e57f96d46a"}, + {file = "Brotli-1.0.9-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:87fdccbb6bb589095f413b1e05734ba492c962b4a45a13ff3408fa44ffe6479b"}, + {file = "Brotli-1.0.9-cp36-cp36m-musllinux_1_1_aarch64.whl", hash = "sha256:6d847b14f7ea89f6ad3c9e3901d1bc4835f6b390a9c71df999b0162d9bb1e20f"}, + {file = "Brotli-1.0.9-cp36-cp36m-musllinux_1_1_i686.whl", hash = "sha256:495ba7e49c2db22b046a53b469bbecea802efce200dffb69b93dd47397edc9b6"}, + {file = "Brotli-1.0.9-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:4688c1e42968ba52e57d8670ad2306fe92e0169c6f3af0089be75bbac0c64a3b"}, + {file = "Brotli-1.0.9-cp36-cp36m-win32.whl", hash = "sha256:61a7ee1f13ab913897dac7da44a73c6d44d48a4adff42a5701e3239791c96e14"}, + {file = "Brotli-1.0.9-cp36-cp36m-win_amd64.whl", hash = "sha256:1c48472a6ba3b113452355b9af0a60da5c2ae60477f8feda8346f8fd48e3e87c"}, + {file = "Brotli-1.0.9-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:3b78a24b5fd13c03ee2b7b86290ed20efdc95da75a3557cc06811764d5ad1126"}, + {file = "Brotli-1.0.9-cp37-cp37m-manylinux1_i686.whl", hash = "sha256:9d12cf2851759b8de8ca5fde36a59c08210a97ffca0eb94c532ce7b17c6a3d1d"}, + {file = "Brotli-1.0.9-cp37-cp37m-manylinux1_x86_64.whl", hash = "sha256:6c772d6c0a79ac0f414a9f8947cc407e119b8598de7621f39cacadae3cf57d12"}, + {file = "Brotli-1.0.9-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:29d1d350178e5225397e28ea1b7aca3648fcbab546d20e7475805437bfb0a130"}, + {file = "Brotli-1.0.9-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:7bbff90b63328013e1e8cb50650ae0b9bac54ffb4be6104378490193cd60f85a"}, + {file = "Brotli-1.0.9-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:ec1947eabbaf8e0531e8e899fc1d9876c179fc518989461f5d24e2223395a9e3"}, + {file = "Brotli-1.0.9-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:12effe280b8ebfd389022aa65114e30407540ccb89b177d3fbc9a4f177c4bd5d"}, + {file = "Brotli-1.0.9-cp37-cp37m-win32.whl", hash = "sha256:f909bbbc433048b499cb9db9e713b5d8d949e8c109a2a548502fb9aa8630f0b1"}, + {file = "Brotli-1.0.9-cp37-cp37m-win_amd64.whl", hash = "sha256:97f715cf371b16ac88b8c19da00029804e20e25f30d80203417255d239f228b5"}, + {file = "Brotli-1.0.9-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:e16eb9541f3dd1a3e92b89005e37b1257b157b7256df0e36bd7b33b50be73bcb"}, + {file = "Brotli-1.0.9-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:160c78292e98d21e73a4cc7f76a234390e516afcd982fa17e1422f7c6a9ce9c8"}, + {file = "Brotli-1.0.9-cp38-cp38-manylinux1_i686.whl", hash = "sha256:b663f1e02de5d0573610756398e44c130add0eb9a3fc912a09665332942a2efb"}, + {file = "Brotli-1.0.9-cp38-cp38-manylinux1_x86_64.whl", hash = "sha256:5b6ef7d9f9c38292df3690fe3e302b5b530999fa90014853dcd0d6902fb59f26"}, + {file = 
"Brotli-1.0.9-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8a674ac10e0a87b683f4fa2b6fa41090edfd686a6524bd8dedbd6138b309175c"}, + {file = "Brotli-1.0.9-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:e2d9e1cbc1b25e22000328702b014227737756f4b5bf5c485ac1d8091ada078b"}, + {file = "Brotli-1.0.9-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:b336c5e9cf03c7be40c47b5fd694c43c9f1358a80ba384a21969e0b4e66a9b17"}, + {file = "Brotli-1.0.9-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:85f7912459c67eaab2fb854ed2bc1cc25772b300545fe7ed2dc03954da638649"}, + {file = "Brotli-1.0.9-cp38-cp38-win32.whl", hash = "sha256:35a3edbe18e876e596553c4007a087f8bcfd538f19bc116917b3c7522fca0429"}, + {file = "Brotli-1.0.9-cp38-cp38-win_amd64.whl", hash = "sha256:269a5743a393c65db46a7bb982644c67ecba4b8d91b392403ad8a861ba6f495f"}, + {file = "Brotli-1.0.9-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:2aad0e0baa04517741c9bb5b07586c642302e5fb3e75319cb62087bd0995ab19"}, + {file = "Brotli-1.0.9-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:5cb1e18167792d7d21e21365d7650b72d5081ed476123ff7b8cac7f45189c0c7"}, + {file = "Brotli-1.0.9-cp39-cp39-manylinux1_i686.whl", hash = "sha256:16d528a45c2e1909c2798f27f7bf0a3feec1dc9e50948e738b961618e38b6a7b"}, + {file = "Brotli-1.0.9-cp39-cp39-manylinux1_x86_64.whl", hash = "sha256:56d027eace784738457437df7331965473f2c0da2c70e1a1f6fdbae5402e0389"}, + {file = "Brotli-1.0.9-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9bf919756d25e4114ace16a8ce91eb340eb57a08e2c6950c3cebcbe3dff2a5e7"}, + {file = "Brotli-1.0.9-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:e4c4e92c14a57c9bd4cb4be678c25369bf7a092d55fd0866f759e425b9660806"}, + {file = "Brotli-1.0.9-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:e48f4234f2469ed012a98f4b7874e7f7e173c167bed4934912a29e03167cf6b1"}, + {file = "Brotli-1.0.9-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:9ed4c92a0665002ff8ea852353aeb60d9141eb04109e88928026d3c8a9e5433c"}, + {file = "Brotli-1.0.9-cp39-cp39-win32.whl", hash = "sha256:cfc391f4429ee0a9370aa93d812a52e1fee0f37a81861f4fdd1f4fb28e8547c3"}, + {file = "Brotli-1.0.9-cp39-cp39-win_amd64.whl", hash = "sha256:854c33dad5ba0fbd6ab69185fec8dab89e13cda6b7d191ba111987df74f38761"}, + {file = "Brotli-1.0.9-pp37-pypy37_pp73-macosx_10_9_x86_64.whl", hash = "sha256:9749a124280a0ada4187a6cfd1ffd35c350fb3af79c706589d98e088c5044267"}, + {file = "Brotli-1.0.9-pp37-pypy37_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:73fd30d4ce0ea48010564ccee1a26bfe39323fde05cb34b5863455629db61dc7"}, + {file = "Brotli-1.0.9-pp37-pypy37_pp73-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:02177603aaca36e1fd21b091cb742bb3b305a569e2402f1ca38af471777fb019"}, + {file = "Brotli-1.0.9-pp37-pypy37_pp73-win_amd64.whl", hash = "sha256:76ffebb907bec09ff511bb3acc077695e2c32bc2142819491579a695f77ffd4d"}, + {file = "Brotli-1.0.9-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = "sha256:b43775532a5904bc938f9c15b77c613cb6ad6fb30990f3b0afaea82797a402d8"}, + {file = "Brotli-1.0.9-pp38-pypy38_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:5bf37a08493232fbb0f8229f1824b366c2fc1d02d64e7e918af40acd15f3e337"}, + {file = "Brotli-1.0.9-pp38-pypy38_pp73-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:330e3f10cd01da535c70d09c4283ba2df5fb78e915bea0a28becad6e2ac010be"}, + 
{file = "Brotli-1.0.9-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:e1abbeef02962596548382e393f56e4c94acd286bd0c5afba756cffc33670e8a"}, + {file = "Brotli-1.0.9-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:3148362937217b7072cf80a2dcc007f09bb5ecb96dae4617316638194113d5be"}, + {file = "Brotli-1.0.9-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:336b40348269f9b91268378de5ff44dc6fbaa2268194f85177b53463d313842a"}, + {file = "Brotli-1.0.9-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3b8b09a16a1950b9ef495a0f8b9d0a87599a9d1f179e2d4ac014b2ec831f87e7"}, + {file = "Brotli-1.0.9-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:c8e521a0ce7cf690ca84b8cc2272ddaf9d8a50294fd086da67e517439614c755"}, + {file = "Brotli-1.0.9.zip", hash = "sha256:4d1b810aa0ed773f81dceda2cc7b403d01057458730e309856356d4ef4188438"}, +] + +[[package]] +name = "bs4" +version = "0.0.1" +description = "Dummy package for Beautiful Soup" +optional = false +python-versions = "*" +files = [ + {file = "bs4-0.0.1.tar.gz", hash = "sha256:36ecea1fd7cc5c0c6e4a1ff075df26d50da647b75376626cc186e2212886dd3a"}, +] + +[package.dependencies] +beautifulsoup4 = "*" + +[[package]] +name = "cachetools" +version = "5.3.1" +description = "Extensible memoizing collections and decorators" +optional = false +python-versions = ">=3.7" +files = [ + {file = "cachetools-5.3.1-py3-none-any.whl", hash = "sha256:95ef631eeaea14ba2e36f06437f36463aac3a096799e876ee55e5cdccb102590"}, + {file = "cachetools-5.3.1.tar.gz", hash = "sha256:dce83f2d9b4e1f732a8cd44af8e8fab2dbe46201467fc98b3ef8f269092bf62b"}, +] + +[[package]] +name = "certifi" +version = "2023.5.7" +description = "Python package for providing Mozilla's CA Bundle." +optional = false +python-versions = ">=3.6" +files = [ + {file = "certifi-2023.5.7-py3-none-any.whl", hash = "sha256:c6c2e98f5c7869efca1f8916fed228dd91539f9f1b444c314c06eef02980c716"}, + {file = "certifi-2023.5.7.tar.gz", hash = "sha256:0f0d56dc5a6ad56fd4ba36484d6cc34451e1c6548c61daad8c320169f91eddc7"}, +] + +[[package]] +name = "cffi" +version = "1.15.1" +description = "Foreign Function Interface for Python calling C code." 
+optional = false +python-versions = "*" +files = [ + {file = "cffi-1.15.1-cp27-cp27m-macosx_10_9_x86_64.whl", hash = "sha256:a66d3508133af6e8548451b25058d5812812ec3798c886bf38ed24a98216fab2"}, + {file = "cffi-1.15.1-cp27-cp27m-manylinux1_i686.whl", hash = "sha256:470c103ae716238bbe698d67ad020e1db9d9dba34fa5a899b5e21577e6d52ed2"}, + {file = "cffi-1.15.1-cp27-cp27m-manylinux1_x86_64.whl", hash = "sha256:9ad5db27f9cabae298d151c85cf2bad1d359a1b9c686a275df03385758e2f914"}, + {file = "cffi-1.15.1-cp27-cp27m-win32.whl", hash = "sha256:b3bbeb01c2b273cca1e1e0c5df57f12dce9a4dd331b4fa1635b8bec26350bde3"}, + {file = "cffi-1.15.1-cp27-cp27m-win_amd64.whl", hash = "sha256:e00b098126fd45523dd056d2efba6c5a63b71ffe9f2bbe1a4fe1716e1d0c331e"}, + {file = "cffi-1.15.1-cp27-cp27mu-manylinux1_i686.whl", hash = "sha256:d61f4695e6c866a23a21acab0509af1cdfd2c013cf256bbf5b6b5e2695827162"}, + {file = "cffi-1.15.1-cp27-cp27mu-manylinux1_x86_64.whl", hash = "sha256:ed9cb427ba5504c1dc15ede7d516b84757c3e3d7868ccc85121d9310d27eed0b"}, + {file = "cffi-1.15.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:39d39875251ca8f612b6f33e6b1195af86d1b3e60086068be9cc053aa4376e21"}, + {file = "cffi-1.15.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:285d29981935eb726a4399badae8f0ffdff4f5050eaa6d0cfc3f64b857b77185"}, + {file = "cffi-1.15.1-cp310-cp310-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3eb6971dcff08619f8d91607cfc726518b6fa2a9eba42856be181c6d0d9515fd"}, + {file = "cffi-1.15.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:21157295583fe8943475029ed5abdcf71eb3911894724e360acff1d61c1d54bc"}, + {file = "cffi-1.15.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5635bd9cb9731e6d4a1132a498dd34f764034a8ce60cef4f5319c0541159392f"}, + {file = "cffi-1.15.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2012c72d854c2d03e45d06ae57f40d78e5770d252f195b93f581acf3ba44496e"}, + {file = "cffi-1.15.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dd86c085fae2efd48ac91dd7ccffcfc0571387fe1193d33b6394db7ef31fe2a4"}, + {file = "cffi-1.15.1-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:fa6693661a4c91757f4412306191b6dc88c1703f780c8234035eac011922bc01"}, + {file = "cffi-1.15.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:59c0b02d0a6c384d453fece7566d1c7e6b7bae4fc5874ef2ef46d56776d61c9e"}, + {file = "cffi-1.15.1-cp310-cp310-win32.whl", hash = "sha256:cba9d6b9a7d64d4bd46167096fc9d2f835e25d7e4c121fb2ddfc6528fb0413b2"}, + {file = "cffi-1.15.1-cp310-cp310-win_amd64.whl", hash = "sha256:ce4bcc037df4fc5e3d184794f27bdaab018943698f4ca31630bc7f84a7b69c6d"}, + {file = "cffi-1.15.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:3d08afd128ddaa624a48cf2b859afef385b720bb4b43df214f85616922e6a5ac"}, + {file = "cffi-1.15.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:3799aecf2e17cf585d977b780ce79ff0dc9b78d799fc694221ce814c2c19db83"}, + {file = "cffi-1.15.1-cp311-cp311-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a591fe9e525846e4d154205572a029f653ada1a78b93697f3b5a8f1f2bc055b9"}, + {file = "cffi-1.15.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3548db281cd7d2561c9ad9984681c95f7b0e38881201e157833a2342c30d5e8c"}, + {file = "cffi-1.15.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:91fc98adde3d7881af9b59ed0294046f3806221863722ba7d8d120c575314325"}, + 
{file = "cffi-1.15.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:94411f22c3985acaec6f83c6df553f2dbe17b698cc7f8ae751ff2237d96b9e3c"}, + {file = "cffi-1.15.1-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:03425bdae262c76aad70202debd780501fabeaca237cdfddc008987c0e0f59ef"}, + {file = "cffi-1.15.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:cc4d65aeeaa04136a12677d3dd0b1c0c94dc43abac5860ab33cceb42b801c1e8"}, + {file = "cffi-1.15.1-cp311-cp311-win32.whl", hash = "sha256:a0f100c8912c114ff53e1202d0078b425bee3649ae34d7b070e9697f93c5d52d"}, + {file = "cffi-1.15.1-cp311-cp311-win_amd64.whl", hash = "sha256:04ed324bda3cda42b9b695d51bb7d54b680b9719cfab04227cdd1e04e5de3104"}, + {file = "cffi-1.15.1-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:50a74364d85fd319352182ef59c5c790484a336f6db772c1a9231f1c3ed0cbd7"}, + {file = "cffi-1.15.1-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e263d77ee3dd201c3a142934a086a4450861778baaeeb45db4591ef65550b0a6"}, + {file = "cffi-1.15.1-cp36-cp36m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:cec7d9412a9102bdc577382c3929b337320c4c4c4849f2c5cdd14d7368c5562d"}, + {file = "cffi-1.15.1-cp36-cp36m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:4289fc34b2f5316fbb762d75362931e351941fa95fa18789191b33fc4cf9504a"}, + {file = "cffi-1.15.1-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:173379135477dc8cac4bc58f45db08ab45d228b3363adb7af79436135d028405"}, + {file = "cffi-1.15.1-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:6975a3fac6bc83c4a65c9f9fcab9e47019a11d3d2cf7f3c0d03431bf145a941e"}, + {file = "cffi-1.15.1-cp36-cp36m-win32.whl", hash = "sha256:2470043b93ff09bf8fb1d46d1cb756ce6132c54826661a32d4e4d132e1977adf"}, + {file = "cffi-1.15.1-cp36-cp36m-win_amd64.whl", hash = "sha256:30d78fbc8ebf9c92c9b7823ee18eb92f2e6ef79b45ac84db507f52fbe3ec4497"}, + {file = "cffi-1.15.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:198caafb44239b60e252492445da556afafc7d1e3ab7a1fb3f0584ef6d742375"}, + {file = "cffi-1.15.1-cp37-cp37m-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:5ef34d190326c3b1f822a5b7a45f6c4535e2f47ed06fec77d3d799c450b2651e"}, + {file = "cffi-1.15.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8102eaf27e1e448db915d08afa8b41d6c7ca7a04b7d73af6514df10a3e74bd82"}, + {file = "cffi-1.15.1-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5df2768244d19ab7f60546d0c7c63ce1581f7af8b5de3eb3004b9b6fc8a9f84b"}, + {file = "cffi-1.15.1-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a8c4917bd7ad33e8eb21e9a5bbba979b49d9a97acb3a803092cbc1133e20343c"}, + {file = "cffi-1.15.1-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0e2642fe3142e4cc4af0799748233ad6da94c62a8bec3a6648bf8ee68b1c7426"}, + {file = "cffi-1.15.1-cp37-cp37m-win32.whl", hash = "sha256:e229a521186c75c8ad9490854fd8bbdd9a0c9aa3a524326b55be83b54d4e0ad9"}, + {file = "cffi-1.15.1-cp37-cp37m-win_amd64.whl", hash = "sha256:a0b71b1b8fbf2b96e41c4d990244165e2c9be83d54962a9a1d118fd8657d2045"}, + {file = "cffi-1.15.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:320dab6e7cb2eacdf0e658569d2575c4dad258c0fcc794f46215e1e39f90f2c3"}, + {file = "cffi-1.15.1-cp38-cp38-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1e74c6b51a9ed6589199c787bf5f9875612ca4a8a0785fb2d4a84429badaf22a"}, + 
{file = "cffi-1.15.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a5c84c68147988265e60416b57fc83425a78058853509c1b0629c180094904a5"}, + {file = "cffi-1.15.1-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3b926aa83d1edb5aa5b427b4053dc420ec295a08e40911296b9eb1b6170f6cca"}, + {file = "cffi-1.15.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:87c450779d0914f2861b8526e035c5e6da0a3199d8f1add1a665e1cbc6fc6d02"}, + {file = "cffi-1.15.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4f2c9f67e9821cad2e5f480bc8d83b8742896f1242dba247911072d4fa94c192"}, + {file = "cffi-1.15.1-cp38-cp38-win32.whl", hash = "sha256:8b7ee99e510d7b66cdb6c593f21c043c248537a32e0bedf02e01e9553a172314"}, + {file = "cffi-1.15.1-cp38-cp38-win_amd64.whl", hash = "sha256:00a9ed42e88df81ffae7a8ab6d9356b371399b91dbdf0c3cb1e84c03a13aceb5"}, + {file = "cffi-1.15.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:54a2db7b78338edd780e7ef7f9f6c442500fb0d41a5a4ea24fff1c929d5af585"}, + {file = "cffi-1.15.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:fcd131dd944808b5bdb38e6f5b53013c5aa4f334c5cad0c72742f6eba4b73db0"}, + {file = "cffi-1.15.1-cp39-cp39-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7473e861101c9e72452f9bf8acb984947aa1661a7704553a9f6e4baa5ba64415"}, + {file = "cffi-1.15.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6c9a799e985904922a4d207a94eae35c78ebae90e128f0c4e521ce339396be9d"}, + {file = "cffi-1.15.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3bcde07039e586f91b45c88f8583ea7cf7a0770df3a1649627bf598332cb6984"}, + {file = "cffi-1.15.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:33ab79603146aace82c2427da5ca6e58f2b3f2fb5da893ceac0c42218a40be35"}, + {file = "cffi-1.15.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5d598b938678ebf3c67377cdd45e09d431369c3b1a5b331058c338e201f12b27"}, + {file = "cffi-1.15.1-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:db0fbb9c62743ce59a9ff687eb5f4afbe77e5e8403d6697f7446e5f609976f76"}, + {file = "cffi-1.15.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:98d85c6a2bef81588d9227dde12db8a7f47f639f4a17c9ae08e773aa9c697bf3"}, + {file = "cffi-1.15.1-cp39-cp39-win32.whl", hash = "sha256:40f4774f5a9d4f5e344f31a32b5096977b5d48560c5592e2f3d2c4374bd543ee"}, + {file = "cffi-1.15.1-cp39-cp39-win_amd64.whl", hash = "sha256:70df4e3b545a17496c9b3f41f5115e69a4f2e77e94e1d2a8e1070bc0c38c8a3c"}, + {file = "cffi-1.15.1.tar.gz", hash = "sha256:d400bfb9a37b1351253cb402671cea7e89bdecc294e8016a707f6d1d8ac934f9"}, +] + +[package.dependencies] +pycparser = "*" + +[[package]] +name = "charset-normalizer" +version = "3.1.0" +description = "The Real First Universal Charset Detector. Open, modern and actively maintained alternative to Chardet." 
+optional = false +python-versions = ">=3.7.0" +files = [ + {file = "charset-normalizer-3.1.0.tar.gz", hash = "sha256:34e0a2f9c370eb95597aae63bf85eb5e96826d81e3dcf88b8886012906f509b5"}, + {file = "charset_normalizer-3.1.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:e0ac8959c929593fee38da1c2b64ee9778733cdf03c482c9ff1d508b6b593b2b"}, + {file = "charset_normalizer-3.1.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:d7fc3fca01da18fbabe4625d64bb612b533533ed10045a2ac3dd194bfa656b60"}, + {file = "charset_normalizer-3.1.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:04eefcee095f58eaabe6dc3cc2262f3bcd776d2c67005880894f447b3f2cb9c1"}, + {file = "charset_normalizer-3.1.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:20064ead0717cf9a73a6d1e779b23d149b53daf971169289ed2ed43a71e8d3b0"}, + {file = "charset_normalizer-3.1.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1435ae15108b1cb6fffbcea2af3d468683b7afed0169ad718451f8db5d1aff6f"}, + {file = "charset_normalizer-3.1.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c84132a54c750fda57729d1e2599bb598f5fa0344085dbde5003ba429a4798c0"}, + {file = "charset_normalizer-3.1.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:75f2568b4189dda1c567339b48cba4ac7384accb9c2a7ed655cd86b04055c795"}, + {file = "charset_normalizer-3.1.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:11d3bcb7be35e7b1bba2c23beedac81ee893ac9871d0ba79effc7fc01167db6c"}, + {file = "charset_normalizer-3.1.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:891cf9b48776b5c61c700b55a598621fdb7b1e301a550365571e9624f270c203"}, + {file = "charset_normalizer-3.1.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:5f008525e02908b20e04707a4f704cd286d94718f48bb33edddc7d7b584dddc1"}, + {file = "charset_normalizer-3.1.0-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:b06f0d3bf045158d2fb8837c5785fe9ff9b8c93358be64461a1089f5da983137"}, + {file = "charset_normalizer-3.1.0-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:49919f8400b5e49e961f320c735388ee686a62327e773fa5b3ce6721f7e785ce"}, + {file = "charset_normalizer-3.1.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:22908891a380d50738e1f978667536f6c6b526a2064156203d418f4856d6e86a"}, + {file = "charset_normalizer-3.1.0-cp310-cp310-win32.whl", hash = "sha256:12d1a39aa6b8c6f6248bb54550efcc1c38ce0d8096a146638fd4738e42284448"}, + {file = "charset_normalizer-3.1.0-cp310-cp310-win_amd64.whl", hash = "sha256:65ed923f84a6844de5fd29726b888e58c62820e0769b76565480e1fdc3d062f8"}, + {file = "charset_normalizer-3.1.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:9a3267620866c9d17b959a84dd0bd2d45719b817245e49371ead79ed4f710d19"}, + {file = "charset_normalizer-3.1.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:6734e606355834f13445b6adc38b53c0fd45f1a56a9ba06c2058f86893ae8017"}, + {file = "charset_normalizer-3.1.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:f8303414c7b03f794347ad062c0516cee0e15f7a612abd0ce1e25caf6ceb47df"}, + {file = "charset_normalizer-3.1.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:aaf53a6cebad0eae578f062c7d462155eada9c172bd8c4d250b8c1d8eb7f916a"}, + {file = "charset_normalizer-3.1.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3dc5b6a8ecfdc5748a7e429782598e4f17ef378e3e272eeb1340ea57c9109f41"}, + {file = 
"charset_normalizer-3.1.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e1b25e3ad6c909f398df8921780d6a3d120d8c09466720226fc621605b6f92b1"}, + {file = "charset_normalizer-3.1.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0ca564606d2caafb0abe6d1b5311c2649e8071eb241b2d64e75a0d0065107e62"}, + {file = "charset_normalizer-3.1.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b82fab78e0b1329e183a65260581de4375f619167478dddab510c6c6fb04d9b6"}, + {file = "charset_normalizer-3.1.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:bd7163182133c0c7701b25e604cf1611c0d87712e56e88e7ee5d72deab3e76b5"}, + {file = "charset_normalizer-3.1.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:11d117e6c63e8f495412d37e7dc2e2fff09c34b2d09dbe2bee3c6229577818be"}, + {file = "charset_normalizer-3.1.0-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:cf6511efa4801b9b38dc5546d7547d5b5c6ef4b081c60b23e4d941d0eba9cbeb"}, + {file = "charset_normalizer-3.1.0-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:abc1185d79f47c0a7aaf7e2412a0eb2c03b724581139193d2d82b3ad8cbb00ac"}, + {file = "charset_normalizer-3.1.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:cb7b2ab0188829593b9de646545175547a70d9a6e2b63bf2cd87a0a391599324"}, + {file = "charset_normalizer-3.1.0-cp311-cp311-win32.whl", hash = "sha256:c36bcbc0d5174a80d6cccf43a0ecaca44e81d25be4b7f90f0ed7bcfbb5a00909"}, + {file = "charset_normalizer-3.1.0-cp311-cp311-win_amd64.whl", hash = "sha256:cca4def576f47a09a943666b8f829606bcb17e2bc2d5911a46c8f8da45f56755"}, + {file = "charset_normalizer-3.1.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:0c95f12b74681e9ae127728f7e5409cbbef9cd914d5896ef238cc779b8152373"}, + {file = "charset_normalizer-3.1.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fca62a8301b605b954ad2e9c3666f9d97f63872aa4efcae5492baca2056b74ab"}, + {file = "charset_normalizer-3.1.0-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ac0aa6cd53ab9a31d397f8303f92c42f534693528fafbdb997c82bae6e477ad9"}, + {file = "charset_normalizer-3.1.0-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c3af8e0f07399d3176b179f2e2634c3ce9c1301379a6b8c9c9aeecd481da494f"}, + {file = "charset_normalizer-3.1.0-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3a5fc78f9e3f501a1614a98f7c54d3969f3ad9bba8ba3d9b438c3bc5d047dd28"}, + {file = "charset_normalizer-3.1.0-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:628c985afb2c7d27a4800bfb609e03985aaecb42f955049957814e0491d4006d"}, + {file = "charset_normalizer-3.1.0-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:74db0052d985cf37fa111828d0dd230776ac99c740e1a758ad99094be4f1803d"}, + {file = "charset_normalizer-3.1.0-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:1e8fcdd8f672a1c4fc8d0bd3a2b576b152d2a349782d1eb0f6b8e52e9954731d"}, + {file = "charset_normalizer-3.1.0-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:04afa6387e2b282cf78ff3dbce20f0cc071c12dc8f685bd40960cc68644cfea6"}, + {file = "charset_normalizer-3.1.0-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:dd5653e67b149503c68c4018bf07e42eeed6b4e956b24c00ccdf93ac79cdff84"}, + {file = "charset_normalizer-3.1.0-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:d2686f91611f9e17f4548dbf050e75b079bbc2a82be565832bc8ea9047b61c8c"}, + {file = 
"charset_normalizer-3.1.0-cp37-cp37m-win32.whl", hash = "sha256:4155b51ae05ed47199dc5b2a4e62abccb274cee6b01da5b895099b61b1982974"}, + {file = "charset_normalizer-3.1.0-cp37-cp37m-win_amd64.whl", hash = "sha256:322102cdf1ab682ecc7d9b1c5eed4ec59657a65e1c146a0da342b78f4112db23"}, + {file = "charset_normalizer-3.1.0-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:e633940f28c1e913615fd624fcdd72fdba807bf53ea6925d6a588e84e1151531"}, + {file = "charset_normalizer-3.1.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:3a06f32c9634a8705f4ca9946d667609f52cf130d5548881401f1eb2c39b1e2c"}, + {file = "charset_normalizer-3.1.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:7381c66e0561c5757ffe616af869b916c8b4e42b367ab29fedc98481d1e74e14"}, + {file = "charset_normalizer-3.1.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3573d376454d956553c356df45bb824262c397c6e26ce43e8203c4c540ee0acb"}, + {file = "charset_normalizer-3.1.0-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e89df2958e5159b811af9ff0f92614dabf4ff617c03a4c1c6ff53bf1c399e0e1"}, + {file = "charset_normalizer-3.1.0-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:78cacd03e79d009d95635e7d6ff12c21eb89b894c354bd2b2ed0b4763373693b"}, + {file = "charset_normalizer-3.1.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:de5695a6f1d8340b12a5d6d4484290ee74d61e467c39ff03b39e30df62cf83a0"}, + {file = "charset_normalizer-3.1.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1c60b9c202d00052183c9be85e5eaf18a4ada0a47d188a83c8f5c5b23252f649"}, + {file = "charset_normalizer-3.1.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:f645caaf0008bacf349875a974220f1f1da349c5dbe7c4ec93048cdc785a3326"}, + {file = "charset_normalizer-3.1.0-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:ea9f9c6034ea2d93d9147818f17c2a0860d41b71c38b9ce4d55f21b6f9165a11"}, + {file = "charset_normalizer-3.1.0-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:80d1543d58bd3d6c271b66abf454d437a438dff01c3e62fdbcd68f2a11310d4b"}, + {file = "charset_normalizer-3.1.0-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:73dc03a6a7e30b7edc5b01b601e53e7fc924b04e1835e8e407c12c037e81adbd"}, + {file = "charset_normalizer-3.1.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:6f5c2e7bc8a4bf7c426599765b1bd33217ec84023033672c1e9a8b35eaeaaaf8"}, + {file = "charset_normalizer-3.1.0-cp38-cp38-win32.whl", hash = "sha256:12a2b561af122e3d94cdb97fe6fb2bb2b82cef0cdca131646fdb940a1eda04f0"}, + {file = "charset_normalizer-3.1.0-cp38-cp38-win_amd64.whl", hash = "sha256:3160a0fd9754aab7d47f95a6b63ab355388d890163eb03b2d2b87ab0a30cfa59"}, + {file = "charset_normalizer-3.1.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:38e812a197bf8e71a59fe55b757a84c1f946d0ac114acafaafaf21667a7e169e"}, + {file = "charset_normalizer-3.1.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:6baf0baf0d5d265fa7944feb9f7451cc316bfe30e8df1a61b1bb08577c554f31"}, + {file = "charset_normalizer-3.1.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:8f25e17ab3039b05f762b0a55ae0b3632b2e073d9c8fc88e89aca31a6198e88f"}, + {file = "charset_normalizer-3.1.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3747443b6a904001473370d7810aa19c3a180ccd52a7157aacc264a5ac79265e"}, + {file = "charset_normalizer-3.1.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = 
"sha256:b116502087ce8a6b7a5f1814568ccbd0e9f6cfd99948aa59b0e241dc57cf739f"}, + {file = "charset_normalizer-3.1.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d16fd5252f883eb074ca55cb622bc0bee49b979ae4e8639fff6ca3ff44f9f854"}, + {file = "charset_normalizer-3.1.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:21fa558996782fc226b529fdd2ed7866c2c6ec91cee82735c98a197fae39f706"}, + {file = "charset_normalizer-3.1.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6f6c7a8a57e9405cad7485f4c9d3172ae486cfef1344b5ddd8e5239582d7355e"}, + {file = "charset_normalizer-3.1.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:ac3775e3311661d4adace3697a52ac0bab17edd166087d493b52d4f4f553f9f0"}, + {file = "charset_normalizer-3.1.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:10c93628d7497c81686e8e5e557aafa78f230cd9e77dd0c40032ef90c18f2230"}, + {file = "charset_normalizer-3.1.0-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:6f4f4668e1831850ebcc2fd0b1cd11721947b6dc7c00bf1c6bd3c929ae14f2c7"}, + {file = "charset_normalizer-3.1.0-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:0be65ccf618c1e7ac9b849c315cc2e8a8751d9cfdaa43027d4f6624bd587ab7e"}, + {file = "charset_normalizer-3.1.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:53d0a3fa5f8af98a1e261de6a3943ca631c526635eb5817a87a59d9a57ebf48f"}, + {file = "charset_normalizer-3.1.0-cp39-cp39-win32.whl", hash = "sha256:a04f86f41a8916fe45ac5024ec477f41f886b3c435da2d4e3d2709b22ab02af1"}, + {file = "charset_normalizer-3.1.0-cp39-cp39-win_amd64.whl", hash = "sha256:830d2948a5ec37c386d3170c483063798d7879037492540f10a475e3fd6f244b"}, + {file = "charset_normalizer-3.1.0-py3-none-any.whl", hash = "sha256:3d9098b479e78c85080c98e1e35ff40b4a31d8953102bb0fd7d1b6f8a2111a3d"}, +] + +[[package]] +name = "click" +version = "8.1.3" +description = "Composable command line interface toolkit" +optional = false +python-versions = ">=3.7" +files = [ + {file = "click-8.1.3-py3-none-any.whl", hash = "sha256:bb4d8133cb15a609f44e8213d9b391b0809795062913b383c62be0ee95b1db48"}, + {file = "click-8.1.3.tar.gz", hash = "sha256:7682dc8afb30297001674575ea00d1814d808d6a36af415a82bd481d37ba7b8e"}, +] + +[package.dependencies] +colorama = {version = "*", markers = "platform_system == \"Windows\""} + +[[package]] +name = "colorama" +version = "0.4.6" +description = "Cross-platform colored terminal text." +optional = false +python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*,>=2.7" +files = [ + {file = "colorama-0.4.6-py2.py3-none-any.whl", hash = "sha256:4f1d9991f5acc0ca119f9d443620b77f9d6b33703e51011c16baf57afb285fc6"}, + {file = "colorama-0.4.6.tar.gz", hash = "sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44"}, +] + +[[package]] +name = "colorlog" +version = "6.7.0" +description = "Add colours to the output of Python's logging module." 
+optional = false +python-versions = ">=3.6" +files = [ + {file = "colorlog-6.7.0-py2.py3-none-any.whl", hash = "sha256:0d33ca236784a1ba3ff9c532d4964126d8a2c44f1f0cb1d2b0728196f512f662"}, + {file = "colorlog-6.7.0.tar.gz", hash = "sha256:bd94bd21c1e13fac7bd3153f4bc3a7dc0eb0974b8bc2fdf1a989e474f6e582e5"}, +] + +[package.dependencies] +colorama = {version = "*", markers = "sys_platform == \"win32\""} + +[package.extras] +development = ["black", "flake8", "mypy", "pytest", "types-colorama"] + +[[package]] +name = "emoji" +version = "1.7.0" +description = "Emoji for Python" +optional = false +python-versions = "*" +files = [ + {file = "emoji-1.7.0.tar.gz", hash = "sha256:65c54533ea3c78f30d0729288998715f418d7467de89ec258a31c0ce8660a1d1"}, +] + +[package.extras] +dev = ["coverage", "coveralls", "pytest"] + +[[package]] +name = "exceptiongroup" +version = "1.1.2" +description = "Backport of PEP 654 (exception groups)" +optional = false +python-versions = ">=3.7" +files = [ + {file = "exceptiongroup-1.1.2-py3-none-any.whl", hash = "sha256:e346e69d186172ca7cf029c8c1d16235aa0e04035e5750b4b95039e65204328f"}, + {file = "exceptiongroup-1.1.2.tar.gz", hash = "sha256:12c3e887d6485d16943a309616de20ae5582633e0a2eda17f4e10fd61c1e8af5"}, +] + +[package.extras] +test = ["pytest (>=6)"] + +[[package]] +name = "factory-boy" +version = "3.2.1" +description = "A versatile test fixtures replacement based on thoughtbot's factory_bot for Ruby." +optional = false +python-versions = ">=3.6" +files = [ + {file = "factory_boy-3.2.1-py2.py3-none-any.whl", hash = "sha256:eb02a7dd1b577ef606b75a253b9818e6f9eaf996d94449c9d5ebb124f90dc795"}, + {file = "factory_boy-3.2.1.tar.gz", hash = "sha256:a98d277b0c047c75eb6e4ab8508a7f81fb03d2cb21986f627913546ef7a2a55e"}, +] + +[package.dependencies] +Faker = ">=0.7.0" + +[package.extras] +dev = ["Django", "Pillow", "SQLAlchemy", "coverage", "flake8", "isort", "mongoengine", "tox", "wheel (>=0.32.0)", "zest.releaser[recommended]"] +doc = ["Sphinx", "sphinx-rtd-theme", "sphinxcontrib-spelling"] + +[[package]] +name = "faker" +version = "18.11.2" +description = "Faker is a Python package that generates fake data for you." 
+optional = false +python-versions = ">=3.7" +files = [ + {file = "Faker-18.11.2-py3-none-any.whl", hash = "sha256:21c2c29638e98502f3bba9ad6a4f07a4b09c5e2150bb491ff02411a5888f6955"}, + {file = "Faker-18.11.2.tar.gz", hash = "sha256:ec6e2824bb1d3546b36c156324b9df6bca5a3d6d03adf991e6a5586756dcab9d"}, +] + +[package.dependencies] +python-dateutil = ">=2.4" + +[[package]] +name = "fastapi" +version = "0.78.0" +description = "FastAPI framework, high performance, easy to learn, fast to code, ready for production" +optional = false +python-versions = ">=3.6.1" +files = [ + {file = "fastapi-0.78.0-py3-none-any.whl", hash = "sha256:15fcabd5c78c266fa7ae7d8de9b384bfc2375ee0503463a6febbe3bab69d6f65"}, + {file = "fastapi-0.78.0.tar.gz", hash = "sha256:3233d4a789ba018578658e2af1a4bb5e38bdd122ff722b313666a9b2c6786a83"}, +] + +[package.dependencies] +pydantic = ">=1.6.2,<1.7 || >1.7,<1.7.1 || >1.7.1,<1.7.2 || >1.7.2,<1.7.3 || >1.7.3,<1.8 || >1.8,<1.8.1 || >1.8.1,<2.0.0" +starlette = "0.19.1" + +[package.extras] +all = ["email_validator (>=1.1.1,<2.0.0)", "itsdangerous (>=1.1.0,<3.0.0)", "jinja2 (>=2.11.2,<4.0.0)", "orjson (>=3.2.1,<4.0.0)", "python-multipart (>=0.0.5,<0.0.6)", "pyyaml (>=5.3.1,<7.0.0)", "requests (>=2.24.0,<3.0.0)", "ujson (>=4.0.1,!=4.0.2,!=4.1.0,!=4.2.0,!=4.3.0,!=5.0.0,!=5.1.0,<6.0.0)", "uvicorn[standard] (>=0.12.0,<0.18.0)"] +dev = ["autoflake (>=1.4.0,<2.0.0)", "flake8 (>=3.8.3,<4.0.0)", "passlib[bcrypt] (>=1.7.2,<2.0.0)", "pre-commit (>=2.17.0,<3.0.0)", "python-jose[cryptography] (>=3.3.0,<4.0.0)", "uvicorn[standard] (>=0.12.0,<0.18.0)"] +doc = ["mdx-include (>=1.4.1,<2.0.0)", "mkdocs (>=1.1.2,<2.0.0)", "mkdocs-markdownextradata-plugin (>=0.1.7,<0.3.0)", "mkdocs-material (>=8.1.4,<9.0.0)", "pyyaml (>=5.3.1,<7.0.0)", "typer (>=0.4.1,<0.5.0)"] +test = ["anyio[trio] (>=3.2.1,<4.0.0)", "black (==22.3.0)", "databases[sqlite] (>=0.3.2,<0.6.0)", "email_validator (>=1.1.1,<2.0.0)", "flake8 (>=3.8.3,<4.0.0)", "flask (>=1.1.2,<3.0.0)", "httpx (>=0.14.0,<0.19.0)", "isort (>=5.0.6,<6.0.0)", "mypy (==0.910)", "orjson (>=3.2.1,<4.0.0)", "peewee (>=3.13.3,<4.0.0)", "pytest (>=6.2.4,<7.0.0)", "pytest-cov (>=2.12.0,<4.0.0)", "python-multipart (>=0.0.5,<0.0.6)", "requests (>=2.24.0,<3.0.0)", "sqlalchemy (>=1.3.18,<1.5.0)", "types-dataclasses (==0.6.5)", "types-orjson (==3.6.2)", "types-ujson (==4.2.1)", "ujson (>=4.0.1,!=4.0.2,!=4.1.0,!=4.2.0,!=4.3.0,!=5.0.0,!=5.1.0,<6.0.0)"] + +[[package]] +name = "feedgen" +version = "0.9.0" +description = "Feed Generator (ATOM, RSS, Podcasts)" +optional = false +python-versions = "*" +files = [ + {file = "feedgen-0.9.0.tar.gz", hash = "sha256:8e811bdbbed6570034950db23a4388453628a70e689a6e8303ccec430f5a804a"}, +] + +[package.dependencies] +lxml = "*" +python-dateutil = "*" + +[[package]] +name = "flake8" +version = "4.0.1" +description = "the modular source code checker: pep8 pyflakes and co" +optional = false +python-versions = ">=3.6" +files = [ + {file = "flake8-4.0.1-py2.py3-none-any.whl", hash = "sha256:479b1304f72536a55948cb40a32dce8bb0ffe3501e26eaf292c7e60eb5e0428d"}, + {file = "flake8-4.0.1.tar.gz", hash = "sha256:806e034dda44114815e23c16ef92f95c91e4c71100ff52813adf7132a6ad870d"}, +] + +[package.dependencies] +mccabe = ">=0.6.0,<0.7.0" +pycodestyle = ">=2.8.0,<2.9.0" +pyflakes = ">=2.4.0,<2.5.0" + +[[package]] +name = "frozendict" +version = "2.3.8" +description = "A simple immutable dictionary" +optional = false +python-versions = ">=3.6" +files = [ + {file = "frozendict-2.3.8-cp310-cp310-macosx_10_9_x86_64.whl", hash = 
"sha256:d188d062084fba0e4bf32719ff7380b26c050b932ff164043ce82ab90587c52b"}, + {file = "frozendict-2.3.8-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:f2a4e818ac457f6354401dcb631527af25e5a20fcfc81e6b5054b45fc245caca"}, + {file = "frozendict-2.3.8-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9a506d807858fa961aaa5b48dab6154fdc6bd045bbe9310788bbff141bb42d13"}, + {file = "frozendict-2.3.8-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:750632cc890d8ee9484fe6d31b261159144b6efacc08e1317fe46accd1410373"}, + {file = "frozendict-2.3.8-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:7ee5fe2658a8ac9a57f748acaf563f6a47f80b8308cbf0a04fac0ba057d41f75"}, + {file = "frozendict-2.3.8-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:23c4bb46e6b8246e1e7e49b5593c2bc09221db0d8f31f7c092be8dfb42b9e620"}, + {file = "frozendict-2.3.8-cp310-cp310-win_amd64.whl", hash = "sha256:c31abc8acea309b132dde441856829f6003a3d242da8b54bce4c0f2a3c8c63f0"}, + {file = "frozendict-2.3.8-cp310-cp310-win_arm64.whl", hash = "sha256:9ea5520e85447ff8d4681e181941e482662817ccba921b7cb3f87922056d892a"}, + {file = "frozendict-2.3.8-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:f83fed36497af9562ead5e9fb8443224ba2781786bd3b92b1087cb7d0ff20135"}, + {file = "frozendict-2.3.8-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e27c5c1d29d0eda7979253ec88abc239da1313b38f39f4b16984db3b3e482300"}, + {file = "frozendict-2.3.8-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e4c785de7f1a13f15963945f400656b18f057c2fc76c089dacf127a2bb188c03"}, + {file = "frozendict-2.3.8-cp36-cp36m-musllinux_1_1_aarch64.whl", hash = "sha256:8cf35ddd25513428ec152614def9696afb93ae5ec0eb54fa6aa6206eda77ac4c"}, + {file = "frozendict-2.3.8-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:ffc684773de7c88724788fa9787d0016fd75830412d58acbd9ed1a04762c675b"}, + {file = "frozendict-2.3.8-cp36-cp36m-win_amd64.whl", hash = "sha256:4c258aab9c8488338634f2ec670ef049dbf0ab0e7a2fa9bc2c7b5009cb614801"}, + {file = "frozendict-2.3.8-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:47fc26468407fdeb428cfc89495b7921419e670355c21b383765482fdf6c5c14"}, + {file = "frozendict-2.3.8-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6ea638228692db2bf94bce40ea4b25f4077588497b516bd16576575560094bd9"}, + {file = "frozendict-2.3.8-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7a75bf87e76c4386caecdbdd02a99e53ad43a6b5c38fb3d5a634a9fc9ce41462"}, + {file = "frozendict-2.3.8-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:ed5a6c5c7a0f57269577c2a338a6002949aea21a23b7b7d06da7e7dced8b605b"}, + {file = "frozendict-2.3.8-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:d086440328a465dea9bef2dbad7548d75d1a0a0d21f43a08c03e1ec79ac5240e"}, + {file = "frozendict-2.3.8-cp37-cp37m-win_amd64.whl", hash = "sha256:0bc4767e2f83db5b701c787e22380296977368b0c57e485ca71b2eedfa11c4a3"}, + {file = "frozendict-2.3.8-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:638cf363d3cbca31a341503cf2219eac52a5f5140449676fae3d9644cd3c5487"}, + {file = "frozendict-2.3.8-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:2b2fd8ce36277919b36e3c834d2389f3cd7ac068ae730c312671dd4439a5dd65"}, + {file = "frozendict-2.3.8-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3957d52f1906b0c85f641a1911d214255873f6408ab4e5ad657cc27a247fb145"}, + {file = "frozendict-2.3.8-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", 
hash = "sha256:72cfe08ab8ae524e54848fa90b22d02c1b1ecfb3064438696bcaa4b953f18772"}, + {file = "frozendict-2.3.8-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:4742e76c4111bd09198d3ab66cef94be8506212311338f9182d6ef5f5cb60493"}, + {file = "frozendict-2.3.8-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:313ed8d9ba6bac35d7635cd9580ee5721a0fb016f4d2d20f0efa05dbecbdb1be"}, + {file = "frozendict-2.3.8-cp38-cp38-win_amd64.whl", hash = "sha256:d3c6ce943946c2a61501c8cf116fff0892d11dd579877eb36e2aea2c27fddfef"}, + {file = "frozendict-2.3.8-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:f0f573dc4861dd7ec9e055c8cceaf45355e894e749f621f199aab7b311ac4bdb"}, + {file = "frozendict-2.3.8-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:2b3435e5f1ca5ae68a5e95e64b09d6d5c645cadd6b87569a0b3019dd248c8d00"}, + {file = "frozendict-2.3.8-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:145afd033ebfade28416093335261b8ec1af5cccc593482309e7add062ec8668"}, + {file = "frozendict-2.3.8-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:da98427de26b5a2865727947480cbb53860089c4d195baa29c539da811cea617"}, + {file = "frozendict-2.3.8-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:5e82befa7c385a668d569cebbebbdf49cee6fea4083f08e869a1b08cfb640a9f"}, + {file = "frozendict-2.3.8-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:80abe81d36e889ceec665e06ec764a7638000fa3e7be09786ac4d3ddc64b76db"}, + {file = "frozendict-2.3.8-cp39-cp39-win_amd64.whl", hash = "sha256:8ccc94ac781710db44e142e1a11ff9b31d02c032c01c6868d51fcbef73086225"}, + {file = "frozendict-2.3.8-cp39-cp39-win_arm64.whl", hash = "sha256:e72dbc1bcc2203cef38d205f692396f5505921a5680f66aa9a7e8bb71fd38f28"}, + {file = "frozendict-2.3.8-py311-none-any.whl", hash = "sha256:ba41a7ed019bd03b62d63ed3f8dea35b8243d1936f7c9ed4b5298ca45a01928e"}, + {file = "frozendict-2.3.8.tar.gz", hash = "sha256:5526559eca8f1780a4ee5146896f59afc31435313560208dd394a3a5e537d3ff"}, +] + +[[package]] +name = "greenlet" +version = "1.1.3.post0" +description = "Lightweight in-process concurrent programming" +optional = false +python-versions = ">=2.7,!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*" +files = [ + {file = "greenlet-1.1.3.post0-cp27-cp27m-macosx_10_14_x86_64.whl", hash = "sha256:949c9061b8c6d3e6e439466a9be1e787208dec6246f4ec5fffe9677b4c19fcc3"}, + {file = "greenlet-1.1.3.post0-cp27-cp27m-manylinux1_x86_64.whl", hash = "sha256:d7815e1519a8361c5ea2a7a5864945906f8e386fa1bc26797b4d443ab11a4589"}, + {file = "greenlet-1.1.3.post0-cp27-cp27m-manylinux2010_x86_64.whl", hash = "sha256:9649891ab4153f217f319914455ccf0b86986b55fc0573ce803eb998ad7d6854"}, + {file = "greenlet-1.1.3.post0-cp27-cp27m-win32.whl", hash = "sha256:11fc7692d95cc7a6a8447bb160d98671ab291e0a8ea90572d582d57361360f05"}, + {file = "greenlet-1.1.3.post0-cp27-cp27m-win_amd64.whl", hash = "sha256:05ae7383f968bba4211b1fbfc90158f8e3da86804878442b4fb6c16ccbcaa519"}, + {file = "greenlet-1.1.3.post0-cp27-cp27mu-manylinux1_x86_64.whl", hash = "sha256:ccbe7129a282ec5797df0451ca1802f11578be018a32979131065565da89b392"}, + {file = "greenlet-1.1.3.post0-cp27-cp27mu-manylinux2010_x86_64.whl", hash = "sha256:4a8b58232f5b72973350c2b917ea3df0bebd07c3c82a0a0e34775fc2c1f857e9"}, + {file = "greenlet-1.1.3.post0-cp310-cp310-macosx_10_15_x86_64.whl", hash = "sha256:f6661b58412879a2aa099abb26d3c93e91dedaba55a6394d1fb1512a77e85de9"}, + {file = "greenlet-1.1.3.post0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:2c6e942ca9835c0b97814d14f78da453241837419e0d26f7403058e8db3e38f8"}, + {file = "greenlet-1.1.3.post0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a812df7282a8fc717eafd487fccc5ba40ea83bb5b13eb3c90c446d88dbdfd2be"}, + {file = "greenlet-1.1.3.post0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:83a7a6560df073ec9de2b7cb685b199dfd12519bc0020c62db9d1bb522f989fa"}, + {file = "greenlet-1.1.3.post0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:17a69967561269b691747e7f436d75a4def47e5efcbc3c573180fc828e176d80"}, + {file = "greenlet-1.1.3.post0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:60839ab4ea7de6139a3be35b77e22e0398c270020050458b3d25db4c7c394df5"}, + {file = "greenlet-1.1.3.post0-cp310-cp310-win_amd64.whl", hash = "sha256:8926a78192b8b73c936f3e87929931455a6a6c6c385448a07b9f7d1072c19ff3"}, + {file = "greenlet-1.1.3.post0-cp311-cp311-macosx_10_15_x86_64.whl", hash = "sha256:c6f90234e4438062d6d09f7d667f79edcc7c5e354ba3a145ff98176f974b8132"}, + {file = "greenlet-1.1.3.post0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:814f26b864ed2230d3a7efe0336f5766ad012f94aad6ba43a7c54ca88dd77cba"}, + {file = "greenlet-1.1.3.post0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:8fda1139d87ce5f7bd80e80e54f9f2c6fe2f47983f1a6f128c47bf310197deb6"}, + {file = "greenlet-1.1.3.post0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c0643250dd0756f4960633f5359884f609a234d4066686754e834073d84e9b51"}, + {file = "greenlet-1.1.3.post0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:cb863057bed786f6622982fb8b2c122c68e6e9eddccaa9fa98fd937e45ee6c4f"}, + {file = "greenlet-1.1.3.post0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:8c0581077cf2734569f3e500fab09c0ff6a2ab99b1afcacbad09b3c2843ae743"}, + {file = "greenlet-1.1.3.post0-cp35-cp35m-macosx_10_14_x86_64.whl", hash = "sha256:695d0d8b5ae42c800f1763c9fce9d7b94ae3b878919379150ee5ba458a460d57"}, + {file = "greenlet-1.1.3.post0-cp35-cp35m-manylinux1_x86_64.whl", hash = "sha256:5662492df0588a51d5690f6578f3bbbd803e7f8d99a99f3bf6128a401be9c269"}, + {file = "greenlet-1.1.3.post0-cp35-cp35m-manylinux2010_x86_64.whl", hash = "sha256:bffba15cff4802ff493d6edcf20d7f94ab1c2aee7cfc1e1c7627c05f1102eee8"}, + {file = "greenlet-1.1.3.post0-cp35-cp35m-win32.whl", hash = "sha256:7afa706510ab079fd6d039cc6e369d4535a48e202d042c32e2097f030a16450f"}, + {file = "greenlet-1.1.3.post0-cp35-cp35m-win_amd64.whl", hash = "sha256:3a24f3213579dc8459e485e333330a921f579543a5214dbc935bc0763474ece3"}, + {file = "greenlet-1.1.3.post0-cp36-cp36m-macosx_10_14_x86_64.whl", hash = "sha256:64e10f303ea354500c927da5b59c3802196a07468332d292aef9ddaca08d03dd"}, + {file = "greenlet-1.1.3.post0-cp36-cp36m-manylinux1_x86_64.whl", hash = "sha256:eb6ac495dccb1520667cfea50d89e26f9ffb49fa28496dea2b95720d8b45eb54"}, + {file = "greenlet-1.1.3.post0-cp36-cp36m-manylinux2010_x86_64.whl", hash = "sha256:88720794390002b0c8fa29e9602b395093a9a766b229a847e8d88349e418b28a"}, + {file = "greenlet-1.1.3.post0-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:39464518a2abe9c505a727af7c0b4efff2cf242aa168be5f0daa47649f4d7ca8"}, + {file = "greenlet-1.1.3.post0-cp36-cp36m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:0914f02fcaa8f84f13b2df4a81645d9e82de21ed95633765dd5cc4d3af9d7403"}, + {file = "greenlet-1.1.3.post0-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:96656c5f7c95fc02c36d4f6ef32f4e94bb0b6b36e6a002c21c39785a4eec5f5d"}, + {file = "greenlet-1.1.3.post0-cp36-cp36m-musllinux_1_1_aarch64.whl", hash = "sha256:4f74aa0092602da2069df0bc6553919a15169d77bcdab52a21f8c5242898f519"}, + {file = "greenlet-1.1.3.post0-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:3aeac044c324c1a4027dca0cde550bd83a0c0fbff7ef2c98df9e718a5086c194"}, + {file = "greenlet-1.1.3.post0-cp36-cp36m-win32.whl", hash = "sha256:fe7c51f8a2ab616cb34bc33d810c887e89117771028e1e3d3b77ca25ddeace04"}, + {file = "greenlet-1.1.3.post0-cp36-cp36m-win_amd64.whl", hash = "sha256:70048d7b2c07c5eadf8393e6398595591df5f59a2f26abc2f81abca09610492f"}, + {file = "greenlet-1.1.3.post0-cp37-cp37m-macosx_10_15_x86_64.whl", hash = "sha256:66aa4e9a726b70bcbfcc446b7ba89c8cec40f405e51422c39f42dfa206a96a05"}, + {file = "greenlet-1.1.3.post0-cp37-cp37m-manylinux1_x86_64.whl", hash = "sha256:025b8de2273d2809f027d347aa2541651d2e15d593bbce0d5f502ca438c54136"}, + {file = "greenlet-1.1.3.post0-cp37-cp37m-manylinux2010_x86_64.whl", hash = "sha256:82a38d7d2077128a017094aff334e67e26194f46bd709f9dcdacbf3835d47ef5"}, + {file = "greenlet-1.1.3.post0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f7d20c3267385236b4ce54575cc8e9f43e7673fc761b069c820097092e318e3b"}, + {file = "greenlet-1.1.3.post0-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c8ece5d1a99a2adcb38f69af2f07d96fb615415d32820108cd340361f590d128"}, + {file = "greenlet-1.1.3.post0-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2794eef1b04b5ba8948c72cc606aab62ac4b0c538b14806d9c0d88afd0576d6b"}, + {file = "greenlet-1.1.3.post0-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:a8d24eb5cb67996fb84633fdc96dbc04f2d8b12bfcb20ab3222d6be271616b67"}, + {file = "greenlet-1.1.3.post0-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:0120a879aa2b1ac5118bce959ea2492ba18783f65ea15821680a256dfad04754"}, + {file = "greenlet-1.1.3.post0-cp37-cp37m-win32.whl", hash = "sha256:bef49c07fcb411c942da6ee7d7ea37430f830c482bf6e4b72d92fd506dd3a427"}, + {file = "greenlet-1.1.3.post0-cp37-cp37m-win_amd64.whl", hash = "sha256:62723e7eb85fa52e536e516ee2ac91433c7bb60d51099293671815ff49ed1c21"}, + {file = "greenlet-1.1.3.post0-cp38-cp38-macosx_10_15_x86_64.whl", hash = "sha256:d25cdedd72aa2271b984af54294e9527306966ec18963fd032cc851a725ddc1b"}, + {file = "greenlet-1.1.3.post0-cp38-cp38-manylinux1_x86_64.whl", hash = "sha256:924df1e7e5db27d19b1359dc7d052a917529c95ba5b8b62f4af611176da7c8ad"}, + {file = "greenlet-1.1.3.post0-cp38-cp38-manylinux2010_x86_64.whl", hash = "sha256:ec615d2912b9ad807afd3be80bf32711c0ff9c2b00aa004a45fd5d5dde7853d9"}, + {file = "greenlet-1.1.3.post0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0971d37ae0eaf42344e8610d340aa0ad3d06cd2eee381891a10fe771879791f9"}, + {file = "greenlet-1.1.3.post0-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:325f272eb997916b4a3fc1fea7313a8adb760934c2140ce13a2117e1b0a8095d"}, + {file = "greenlet-1.1.3.post0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d75afcbb214d429dacdf75e03a1d6d6c5bd1fa9c35e360df8ea5b6270fb2211c"}, + {file = "greenlet-1.1.3.post0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:5c2d21c2b768d8c86ad935e404cc78c30d53dea009609c3ef3a9d49970c864b5"}, + {file = "greenlet-1.1.3.post0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:467b73ce5dcd89e381292fb4314aede9b12906c18fab903f995b86034d96d5c8"}, + {file = 
"greenlet-1.1.3.post0-cp38-cp38-win32.whl", hash = "sha256:8149a6865b14c33be7ae760bcdb73548bb01e8e47ae15e013bf7ef9290ca309a"}, + {file = "greenlet-1.1.3.post0-cp38-cp38-win_amd64.whl", hash = "sha256:104f29dd822be678ef6b16bf0035dcd43206a8a48668a6cae4d2fe9c7a7abdeb"}, + {file = "greenlet-1.1.3.post0-cp39-cp39-macosx_10_15_x86_64.whl", hash = "sha256:c8c9301e3274276d3d20ab6335aa7c5d9e5da2009cccb01127bddb5c951f8870"}, + {file = "greenlet-1.1.3.post0-cp39-cp39-manylinux1_x86_64.whl", hash = "sha256:8415239c68b2ec9de10a5adf1130ee9cb0ebd3e19573c55ba160ff0ca809e012"}, + {file = "greenlet-1.1.3.post0-cp39-cp39-manylinux2010_x86_64.whl", hash = "sha256:3c22998bfef3fcc1b15694818fc9b1b87c6cc8398198b96b6d355a7bcb8c934e"}, + {file = "greenlet-1.1.3.post0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0aa1845944e62f358d63fcc911ad3b415f585612946b8edc824825929b40e59e"}, + {file = "greenlet-1.1.3.post0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:890f633dc8cb307761ec566bc0b4e350a93ddd77dc172839be122be12bae3e10"}, + {file = "greenlet-1.1.3.post0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7cf37343e43404699d58808e51f347f57efd3010cc7cee134cdb9141bd1ad9ea"}, + {file = "greenlet-1.1.3.post0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:5edf75e7fcfa9725064ae0d8407c849456553a181ebefedb7606bac19aa1478b"}, + {file = "greenlet-1.1.3.post0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:0a954002064ee919b444b19c1185e8cce307a1f20600f47d6f4b6d336972c809"}, + {file = "greenlet-1.1.3.post0-cp39-cp39-win32.whl", hash = "sha256:2ccdc818cc106cc238ff7eba0d71b9c77be868fdca31d6c3b1347a54c9b187b2"}, + {file = "greenlet-1.1.3.post0-cp39-cp39-win_amd64.whl", hash = "sha256:91a84faf718e6f8b888ca63d0b2d6d185c8e2a198d2a7322d75c303e7097c8b7"}, + {file = "greenlet-1.1.3.post0.tar.gz", hash = "sha256:f5e09dc5c6e1796969fd4b775ea1417d70e49a5df29aaa8e5d10675d9e11872c"}, +] + +[package.extras] +docs = ["Sphinx"] + +[[package]] +name = "h11" +version = "0.12.0" +description = "A pure-Python, bring-your-own-I/O implementation of HTTP/1.1" +optional = false +python-versions = ">=3.6" +files = [ + {file = "h11-0.12.0-py3-none-any.whl", hash = "sha256:36a3cb8c0a032f56e2da7084577878a035d3b61d104230d4bd49c0c6b555a9c6"}, + {file = "h11-0.12.0.tar.gz", hash = "sha256:47222cb6067e4a307d535814917cd98fd0a57b6788ce715755fa2b6c28b56042"}, +] + +[[package]] +name = "h2" +version = "4.1.0" +description = "HTTP/2 State-Machine based protocol implementation" +optional = false +python-versions = ">=3.6.1" +files = [ + {file = "h2-4.1.0-py3-none-any.whl", hash = "sha256:03a46bcf682256c95b5fd9e9a99c1323584c3eec6440d379b9903d709476bc6d"}, + {file = "h2-4.1.0.tar.gz", hash = "sha256:a83aca08fbe7aacb79fec788c9c0bac936343560ed9ec18b82a13a12c28d2abb"}, +] + +[package.dependencies] +hpack = ">=4.0,<5" +hyperframe = ">=6.0,<7" + +[[package]] +name = "hpack" +version = "4.0.0" +description = "Pure-Python HPACK header compression" +optional = false +python-versions = ">=3.6.1" +files = [ + {file = "hpack-4.0.0-py3-none-any.whl", hash = "sha256:84a076fad3dc9a9f8063ccb8041ef100867b1878b25ef0ee63847a5d53818a6c"}, + {file = "hpack-4.0.0.tar.gz", hash = "sha256:fc41de0c63e687ebffde81187a948221294896f6bdc0ae2312708df339430095"}, +] + +[[package]] +name = "html2text" +version = "2020.1.16" +description = "Turn HTML into equivalent Markdown-structured text." 
+optional = false +python-versions = ">=3.5" +files = [ + {file = "html2text-2020.1.16-py3-none-any.whl", hash = "sha256:c7c629882da0cf377d66f073329ccf34a12ed2adf0169b9285ae4e63ef54c82b"}, + {file = "html2text-2020.1.16.tar.gz", hash = "sha256:e296318e16b059ddb97f7a8a1d6a5c1d7af4544049a01e261731d2d5cc277bbb"}, +] + +[[package]] +name = "html5lib" +version = "1.1" +description = "HTML parser based on the WHATWG HTML specification" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" +files = [ + {file = "html5lib-1.1-py2.py3-none-any.whl", hash = "sha256:0d78f8fde1c230e99fe37986a60526d7049ed4bf8a9fadbad5f00e22e58e041d"}, + {file = "html5lib-1.1.tar.gz", hash = "sha256:b2e5b40261e20f354d198eae92afc10d750afb487ed5e50f9c4eaf07c184146f"}, +] + +[package.dependencies] +six = ">=1.9" +webencodings = "*" + +[package.extras] +all = ["chardet (>=2.2)", "genshi", "lxml"] +chardet = ["chardet (>=2.2)"] +genshi = ["genshi"] +lxml = ["lxml"] + +[[package]] +name = "httpcore" +version = "0.15.0" +description = "A minimal low-level HTTP client." +optional = false +python-versions = ">=3.7" +files = [ + {file = "httpcore-0.15.0-py3-none-any.whl", hash = "sha256:1105b8b73c025f23ff7c36468e4432226cbb959176eab66864b8e31c4ee27fa6"}, + {file = "httpcore-0.15.0.tar.gz", hash = "sha256:18b68ab86a3ccf3e7dc0f43598eaddcf472b602aba29f9aa6ab85fe2ada3980b"}, +] + +[package.dependencies] +anyio = "==3.*" +certifi = "*" +h11 = ">=0.11,<0.13" +sniffio = "==1.*" + +[package.extras] +http2 = ["h2 (>=3,<5)"] +socks = ["socksio (==1.*)"] + +[[package]] +name = "httptools" +version = "0.5.0" +description = "A collection of framework independent HTTP protocol utils." +optional = false +python-versions = ">=3.5.0" +files = [ + {file = "httptools-0.5.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:8f470c79061599a126d74385623ff4744c4e0f4a0997a353a44923c0b561ee51"}, + {file = "httptools-0.5.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:e90491a4d77d0cb82e0e7a9cb35d86284c677402e4ce7ba6b448ccc7325c5421"}, + {file = "httptools-0.5.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c1d2357f791b12d86faced7b5736dea9ef4f5ecdc6c3f253e445ee82da579449"}, + {file = "httptools-0.5.0-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1f90cd6fd97c9a1b7fe9215e60c3bd97336742a0857f00a4cb31547bc22560c2"}, + {file = "httptools-0.5.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:5230a99e724a1bdbbf236a1b58d6e8504b912b0552721c7c6b8570925ee0ccde"}, + {file = "httptools-0.5.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:3a47a34f6015dd52c9eb629c0f5a8a5193e47bf2a12d9a3194d231eaf1bc451a"}, + {file = "httptools-0.5.0-cp310-cp310-win_amd64.whl", hash = "sha256:24bb4bb8ac3882f90aa95403a1cb48465de877e2d5298ad6ddcfdebec060787d"}, + {file = "httptools-0.5.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:e67d4f8734f8054d2c4858570cc4b233bf753f56e85217de4dfb2495904cf02e"}, + {file = "httptools-0.5.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:7e5eefc58d20e4c2da82c78d91b2906f1a947ef42bd668db05f4ab4201a99f49"}, + {file = "httptools-0.5.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0297822cea9f90a38df29f48e40b42ac3d48a28637368f3ec6d15eebefd182f9"}, + {file = "httptools-0.5.0-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:557be7fbf2bfa4a2ec65192c254e151684545ebab45eca5d50477d562c40f986"}, + 
{file = "httptools-0.5.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:54465401dbbec9a6a42cf737627fb0f014d50dc7365a6b6cd57753f151a86ff0"}, + {file = "httptools-0.5.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:4d9ebac23d2de960726ce45f49d70eb5466725c0087a078866043dad115f850f"}, + {file = "httptools-0.5.0-cp311-cp311-win_amd64.whl", hash = "sha256:e8a34e4c0ab7b1ca17b8763613783e2458e77938092c18ac919420ab8655c8c1"}, + {file = "httptools-0.5.0-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:f659d7a48401158c59933904040085c200b4be631cb5f23a7d561fbae593ec1f"}, + {file = "httptools-0.5.0-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ef1616b3ba965cd68e6f759eeb5d34fbf596a79e84215eeceebf34ba3f61fdc7"}, + {file = "httptools-0.5.0-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3625a55886257755cb15194efbf209584754e31d336e09e2ffe0685a76cb4b60"}, + {file = "httptools-0.5.0-cp36-cp36m-musllinux_1_1_aarch64.whl", hash = "sha256:72ad589ba5e4a87e1d404cc1cb1b5780bfcb16e2aec957b88ce15fe879cc08ca"}, + {file = "httptools-0.5.0-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:850fec36c48df5a790aa735417dca8ce7d4b48d59b3ebd6f83e88a8125cde324"}, + {file = "httptools-0.5.0-cp36-cp36m-win_amd64.whl", hash = "sha256:f222e1e9d3f13b68ff8a835574eda02e67277d51631d69d7cf7f8e07df678c86"}, + {file = "httptools-0.5.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:3cb8acf8f951363b617a8420768a9f249099b92e703c052f9a51b66342eea89b"}, + {file = "httptools-0.5.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:550059885dc9c19a072ca6d6735739d879be3b5959ec218ba3e013fd2255a11b"}, + {file = "httptools-0.5.0-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a04fe458a4597aa559b79c7f48fe3dceabef0f69f562daf5c5e926b153817281"}, + {file = "httptools-0.5.0-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:7d0c1044bce274ec6711f0770fd2d5544fe392591d204c68328e60a46f88843b"}, + {file = "httptools-0.5.0-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:c6eeefd4435055a8ebb6c5cc36111b8591c192c56a95b45fe2af22d9881eee25"}, + {file = "httptools-0.5.0-cp37-cp37m-win_amd64.whl", hash = "sha256:5b65be160adcd9de7a7e6413a4966665756e263f0d5ddeffde277ffeee0576a5"}, + {file = "httptools-0.5.0-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:fe9c766a0c35b7e3d6b6939393c8dfdd5da3ac5dec7f971ec9134f284c6c36d6"}, + {file = "httptools-0.5.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:85b392aba273566c3d5596a0a490978c085b79700814fb22bfd537d381dd230c"}, + {file = "httptools-0.5.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f5e3088f4ed33947e16fd865b8200f9cfae1144f41b64a8cf19b599508e096bc"}, + {file = "httptools-0.5.0-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8c2a56b6aad7cc8f5551d8e04ff5a319d203f9d870398b94702300de50190f63"}, + {file = "httptools-0.5.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:9b571b281a19762adb3f48a7731f6842f920fa71108aff9be49888320ac3e24d"}, + {file = "httptools-0.5.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:aa47ffcf70ba6f7848349b8a6f9b481ee0f7637931d91a9860a1838bfc586901"}, + {file = "httptools-0.5.0-cp38-cp38-win_amd64.whl", hash = "sha256:bede7ee075e54b9a5bde695b4fc8f569f30185891796b2e4e09e2226801d09bd"}, + {file = "httptools-0.5.0-cp39-cp39-macosx_10_9_universal2.whl", hash = 
"sha256:64eba6f168803a7469866a9c9b5263a7463fa8b7a25b35e547492aa7322036b6"}, + {file = "httptools-0.5.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:4b098e4bb1174096a93f48f6193e7d9aa7071506a5877da09a783509ca5fff42"}, + {file = "httptools-0.5.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9423a2de923820c7e82e18980b937893f4aa8251c43684fa1772e341f6e06887"}, + {file = "httptools-0.5.0-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ca1b7becf7d9d3ccdbb2f038f665c0f4857e08e1d8481cbcc1a86a0afcfb62b2"}, + {file = "httptools-0.5.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:50d4613025f15f4b11f1c54bbed4761c0020f7f921b95143ad6d58c151198142"}, + {file = "httptools-0.5.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:8ffce9d81c825ac1deaa13bc9694c0562e2840a48ba21cfc9f3b4c922c16f372"}, + {file = "httptools-0.5.0-cp39-cp39-win_amd64.whl", hash = "sha256:1af91b3650ce518d226466f30bbba5b6376dbd3ddb1b2be8b0658c6799dd450b"}, + {file = "httptools-0.5.0.tar.gz", hash = "sha256:295874861c173f9101960bba332429bb77ed4dcd8cdf5cee9922eb00e4f6bc09"}, +] + +[package.extras] +test = ["Cython (>=0.29.24,<0.30.0)"] + +[[package]] +name = "httpx" +version = "0.23.0" +description = "The next generation HTTP client." +optional = false +python-versions = ">=3.7" +files = [ + {file = "httpx-0.23.0-py3-none-any.whl", hash = "sha256:42974f577483e1e932c3cdc3cd2303e883cbfba17fe228b0f63589764d7b9c4b"}, + {file = "httpx-0.23.0.tar.gz", hash = "sha256:f28eac771ec9eb4866d3fb4ab65abd42d38c424739e80c08d8d20570de60b0ef"}, +] + +[package.dependencies] +certifi = "*" +h2 = {version = ">=3,<5", optional = true, markers = "extra == \"http2\""} +httpcore = ">=0.15.0,<0.16.0" +rfc3986 = {version = ">=1.3,<2", extras = ["idna2008"]} +sniffio = "*" + +[package.extras] +brotli = ["brotli", "brotlicffi"] +cli = ["click (==8.*)", "pygments (==2.*)", "rich (>=10,<13)"] +http2 = ["h2 (>=3,<5)"] +socks = ["socksio (==1.*)"] + +[[package]] +name = "humanize" +version = "4.7.0" +description = "Python humanize utilities" +optional = false +python-versions = ">=3.8" +files = [ + {file = "humanize-4.7.0-py3-none-any.whl", hash = "sha256:df7c429c2d27372b249d3f26eb53b07b166b661326e0325793e0a988082e3889"}, + {file = "humanize-4.7.0.tar.gz", hash = "sha256:7ca0e43e870981fa684acb5b062deb307218193bca1a01f2b2676479df849b3a"}, +] + +[package.extras] +tests = ["freezegun", "pytest", "pytest-cov"] + +[[package]] +name = "hyperframe" +version = "6.0.1" +description = "HTTP/2 framing layer for Python" +optional = false +python-versions = ">=3.6.1" +files = [ + {file = "hyperframe-6.0.1-py3-none-any.whl", hash = "sha256:0ec6bafd80d8ad2195c4f03aacba3a8265e57bc4cff261e802bf39970ed02a15"}, + {file = "hyperframe-6.0.1.tar.gz", hash = "sha256:ae510046231dc8e9ecb1a6586f63d2347bf4c8905914aa84ba585ae85f28a914"}, +] + +[[package]] +name = "idna" +version = "3.4" +description = "Internationalized Domain Names in Applications (IDNA)" +optional = false +python-versions = ">=3.5" +files = [ + {file = "idna-3.4-py3-none-any.whl", hash = "sha256:90b77e79eaa3eba6de819a0c442c0b4ceefc341a7a2ab77d7562bf49f425c5c2"}, + {file = "idna-3.4.tar.gz", hash = "sha256:814f528e8dead7d329833b91c5faa87d60bf71824cd12a7530b5526063d02cb4"}, +] + +[[package]] +name = "iniconfig" +version = "2.0.0" +description = "brain-dead simple config-ini parsing" +optional = false +python-versions = ">=3.7" +files = [ + {file = "iniconfig-2.0.0-py3-none-any.whl", hash = 
"sha256:b6a85871a79d2e3b22d2d1b94ac2824226a63c6b741c88f7ae975f18b6778374"}, + {file = "iniconfig-2.0.0.tar.gz", hash = "sha256:2d91e135bf72d31a410b17c16da610a82cb55f6b0477d1a902134b24a455b8b3"}, +] + +[[package]] +name = "invoke" +version = "1.7.3" +description = "Pythonic task execution" +optional = false +python-versions = "*" +files = [ + {file = "invoke-1.7.3-py3-none-any.whl", hash = "sha256:d9694a865764dd3fd91f25f7e9a97fb41666e822bbb00e670091e3f43933574d"}, + {file = "invoke-1.7.3.tar.gz", hash = "sha256:41b428342d466a82135d5ab37119685a989713742be46e42a3a399d685579314"}, +] + +[[package]] +name = "isort" +version = "5.12.0" +description = "A Python utility / library to sort Python imports." +optional = false +python-versions = ">=3.8.0" +files = [ + {file = "isort-5.12.0-py3-none-any.whl", hash = "sha256:f84c2818376e66cf843d497486ea8fed8700b340f308f076c6fb1229dff318b6"}, + {file = "isort-5.12.0.tar.gz", hash = "sha256:8bef7dde241278824a6d83f44a544709b065191b95b6e50894bdc722fcba0504"}, +] + +[package.extras] +colors = ["colorama (>=0.4.3)"] +pipfile-deprecated-finder = ["pip-shims (>=0.5.2)", "pipreqs", "requirementslib"] +plugins = ["setuptools"] +requirements-deprecated-finder = ["pip-api", "pipreqs"] + +[[package]] +name = "itsdangerous" +version = "2.1.2" +description = "Safely pass data to untrusted environments and back." +optional = false +python-versions = ">=3.7" +files = [ + {file = "itsdangerous-2.1.2-py3-none-any.whl", hash = "sha256:2c2349112351b88699d8d4b6b075022c0808887cb7ad10069318a8b0bc88db44"}, + {file = "itsdangerous-2.1.2.tar.gz", hash = "sha256:5dbbc68b317e5e42f327f9021763545dc3fc3bfe22e6deb96aaf1fc38874156a"}, +] + +[[package]] +name = "jinja2" +version = "3.1.2" +description = "A very fast and expressive template engine." +optional = false +python-versions = ">=3.7" +files = [ + {file = "Jinja2-3.1.2-py3-none-any.whl", hash = "sha256:6088930bfe239f0e6710546ab9c19c9ef35e29792895fed6e6e31a023a182a61"}, + {file = "Jinja2-3.1.2.tar.gz", hash = "sha256:31351a702a408a9e7595a8fc6150fc3f43bb6bf7e319770cbc0db9df9437e852"}, +] + +[package.dependencies] +MarkupSafe = ">=2.0" + +[package.extras] +i18n = ["Babel (>=2.7)"] + +[[package]] +name = "libsass" +version = "0.21.0" +description = "Sass for Python: A straightforward binding of libsass for Python." 
+optional = false +python-versions = "*" +files = [ + {file = "libsass-0.21.0-cp27-cp27m-macosx_10_14_x86_64.whl", hash = "sha256:06c8776417fe930714bdc930a3d7e795ae3d72be6ac883ff72a1b8f7c49e5ffb"}, + {file = "libsass-0.21.0-cp27-cp27m-win32.whl", hash = "sha256:a005f298f64624f313a3ac618ab03f844c71d84ae4f4a4aec4b68d2a4ffe75eb"}, + {file = "libsass-0.21.0-cp27-cp27m-win_amd64.whl", hash = "sha256:6b984510ed94993708c0d697b4fef2d118929bbfffc3b90037be0f5ccadf55e7"}, + {file = "libsass-0.21.0-cp27-cp27mu-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:1e25dd9047a9392d3c59a0b869e0404f2b325a03871ee45285ee33b3664f5613"}, + {file = "libsass-0.21.0-cp36-abi3-macosx_10_14_x86_64.whl", hash = "sha256:12f39712de38689a8b785b7db41d3ba2ea1d46f9379d81ea4595802d91fa6529"}, + {file = "libsass-0.21.0-cp36-abi3-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:e2b1a7d093f2e76dc694c17c0c285e846d0b0deb0e8b21dc852ba1a3a4e2f1d6"}, + {file = "libsass-0.21.0-cp36-abi3-win32.whl", hash = "sha256:abc29357ee540849faf1383e1746d40d69ed5cb6d4c346df276b258f5aa8977a"}, + {file = "libsass-0.21.0-cp36-abi3-win_amd64.whl", hash = "sha256:659ae41af8708681fa3ec73f47b9735a6725e71c3b66ff570bfce78952f2314e"}, + {file = "libsass-0.21.0-cp38-abi3-macosx_12_0_arm64.whl", hash = "sha256:c9ec490609752c1d81ff6290da33485aa7cb6d7365ac665b74464c1b7d97f7da"}, + {file = "libsass-0.21.0.tar.gz", hash = "sha256:d5ba529d9ce668be9380563279f3ffe988f27bc5b299c5a28453df2e0b0fbaf2"}, +] + +[package.dependencies] +six = "*" + +[[package]] +name = "loguru" +version = "0.6.0" +description = "Python logging made (stupidly) simple" +optional = false +python-versions = ">=3.5" +files = [ + {file = "loguru-0.6.0-py3-none-any.whl", hash = "sha256:4e2414d534a2ab57573365b3e6d0234dfb1d84b68b7f3b948e6fb743860a77c3"}, + {file = "loguru-0.6.0.tar.gz", hash = "sha256:066bd06758d0a513e9836fd9c6b5a75bfb3fd36841f4b996bc60b547a309d41c"}, +] + +[package.dependencies] +colorama = {version = ">=0.3.4", markers = "sys_platform == \"win32\""} +win32-setctime = {version = ">=1.0.0", markers = "sys_platform == \"win32\""} + +[package.extras] +dev = ["Sphinx (>=4.1.1)", "black (>=19.10b0)", "colorama (>=0.3.4)", "docutils (==0.16)", "flake8 (>=3.7.7)", "isort (>=5.1.1)", "pytest (>=4.6.2)", "pytest-cov (>=2.7.1)", "sphinx-autobuild (>=0.7.1)", "sphinx-rtd-theme (>=0.4.3)", "tox (>=3.9.0)"] + +[[package]] +name = "lxml" +version = "4.9.2" +description = "Powerful and Pythonic XML processing library combining libxml2/libxslt with the ElementTree API." 
+optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, != 3.4.*" +files = [ + {file = "lxml-4.9.2-cp27-cp27m-macosx_10_15_x86_64.whl", hash = "sha256:76cf573e5a365e790396a5cc2b909812633409306c6531a6877c59061e42c4f2"}, + {file = "lxml-4.9.2-cp27-cp27m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:b1f42b6921d0e81b1bcb5e395bc091a70f41c4d4e55ba99c6da2b31626c44892"}, + {file = "lxml-4.9.2-cp27-cp27m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:9f102706d0ca011de571de32c3247c6476b55bb6bc65a20f682f000b07a4852a"}, + {file = "lxml-4.9.2-cp27-cp27m-win32.whl", hash = "sha256:8d0b4612b66ff5d62d03bcaa043bb018f74dfea51184e53f067e6fdcba4bd8de"}, + {file = "lxml-4.9.2-cp27-cp27m-win_amd64.whl", hash = "sha256:4c8f293f14abc8fd3e8e01c5bd86e6ed0b6ef71936ded5bf10fe7a5efefbaca3"}, + {file = "lxml-4.9.2-cp27-cp27mu-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:2899456259589aa38bfb018c364d6ae7b53c5c22d8e27d0ec7609c2a1ff78b50"}, + {file = "lxml-4.9.2-cp27-cp27mu-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:6749649eecd6a9871cae297bffa4ee76f90b4504a2a2ab528d9ebe912b101975"}, + {file = "lxml-4.9.2-cp310-cp310-macosx_10_15_x86_64.whl", hash = "sha256:a08cff61517ee26cb56f1e949cca38caabe9ea9fbb4b1e10a805dc39844b7d5c"}, + {file = "lxml-4.9.2-cp310-cp310-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_24_i686.whl", hash = "sha256:85cabf64adec449132e55616e7ca3e1000ab449d1d0f9d7f83146ed5bdcb6d8a"}, + {file = "lxml-4.9.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_24_aarch64.whl", hash = "sha256:8340225bd5e7a701c0fa98284c849c9b9fc9238abf53a0ebd90900f25d39a4e4"}, + {file = "lxml-4.9.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:1ab8f1f932e8f82355e75dda5413a57612c6ea448069d4fb2e217e9a4bed13d4"}, + {file = "lxml-4.9.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:699a9af7dffaf67deeae27b2112aa06b41c370d5e7633e0ee0aea2e0b6c211f7"}, + {file = "lxml-4.9.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:b9cc34af337a97d470040f99ba4282f6e6bac88407d021688a5d585e44a23184"}, + {file = "lxml-4.9.2-cp310-cp310-win32.whl", hash = "sha256:d02a5399126a53492415d4906ab0ad0375a5456cc05c3fc0fc4ca11771745cda"}, + {file = "lxml-4.9.2-cp310-cp310-win_amd64.whl", hash = "sha256:a38486985ca49cfa574a507e7a2215c0c780fd1778bb6290c21193b7211702ab"}, + {file = "lxml-4.9.2-cp311-cp311-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_24_i686.whl", hash = "sha256:c83203addf554215463b59f6399835201999b5e48019dc17f182ed5ad87205c9"}, + {file = "lxml-4.9.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_24_aarch64.whl", hash = "sha256:2a87fa548561d2f4643c99cd13131acb607ddabb70682dcf1dff5f71f781a4bf"}, + {file = "lxml-4.9.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:d6b430a9938a5a5d85fc107d852262ddcd48602c120e3dbb02137c83d212b380"}, + {file = "lxml-4.9.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:3efea981d956a6f7173b4659849f55081867cf897e719f57383698af6f618a92"}, + {file = "lxml-4.9.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:df0623dcf9668ad0445e0558a21211d4e9a149ea8f5666917c8eeec515f0a6d1"}, + {file = "lxml-4.9.2-cp311-cp311-win32.whl", hash = "sha256:da248f93f0418a9e9d94b0080d7ebc407a9a5e6d0b57bb30db9b5cc28de1ad33"}, + {file = "lxml-4.9.2-cp311-cp311-win_amd64.whl", hash = "sha256:3818b8e2c4b5148567e1b09ce739006acfaa44ce3156f8cbbc11062994b8e8dd"}, + {file = 
"lxml-4.9.2-cp35-cp35m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:ca989b91cf3a3ba28930a9fc1e9aeafc2a395448641df1f387a2d394638943b0"}, + {file = "lxml-4.9.2-cp35-cp35m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:822068f85e12a6e292803e112ab876bc03ed1f03dddb80154c395f891ca6b31e"}, + {file = "lxml-4.9.2-cp35-cp35m-win32.whl", hash = "sha256:be7292c55101e22f2a3d4d8913944cbea71eea90792bf914add27454a13905df"}, + {file = "lxml-4.9.2-cp35-cp35m-win_amd64.whl", hash = "sha256:998c7c41910666d2976928c38ea96a70d1aa43be6fe502f21a651e17483a43c5"}, + {file = "lxml-4.9.2-cp36-cp36m-macosx_10_15_x86_64.whl", hash = "sha256:b26a29f0b7fc6f0897f043ca366142d2b609dc60756ee6e4e90b5f762c6adc53"}, + {file = "lxml-4.9.2-cp36-cp36m-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_24_i686.whl", hash = "sha256:ab323679b8b3030000f2be63e22cdeea5b47ee0abd2d6a1dc0c8103ddaa56cd7"}, + {file = "lxml-4.9.2-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:689bb688a1db722485e4610a503e3e9210dcc20c520b45ac8f7533c837be76fe"}, + {file = "lxml-4.9.2-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:f49e52d174375a7def9915c9f06ec4e569d235ad428f70751765f48d5926678c"}, + {file = "lxml-4.9.2-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:36c3c175d34652a35475a73762b545f4527aec044910a651d2bf50de9c3352b1"}, + {file = "lxml-4.9.2-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:a35f8b7fa99f90dd2f5dc5a9fa12332642f087a7641289ca6c40d6e1a2637d8e"}, + {file = "lxml-4.9.2-cp36-cp36m-musllinux_1_1_aarch64.whl", hash = "sha256:58bfa3aa19ca4c0f28c5dde0ff56c520fbac6f0daf4fac66ed4c8d2fb7f22e74"}, + {file = "lxml-4.9.2-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:bc718cd47b765e790eecb74d044cc8d37d58562f6c314ee9484df26276d36a38"}, + {file = "lxml-4.9.2-cp36-cp36m-win32.whl", hash = "sha256:d5bf6545cd27aaa8a13033ce56354ed9e25ab0e4ac3b5392b763d8d04b08e0c5"}, + {file = "lxml-4.9.2-cp36-cp36m-win_amd64.whl", hash = "sha256:3ab9fa9d6dc2a7f29d7affdf3edebf6ece6fb28a6d80b14c3b2fb9d39b9322c3"}, + {file = "lxml-4.9.2-cp37-cp37m-macosx_10_15_x86_64.whl", hash = "sha256:05ca3f6abf5cf78fe053da9b1166e062ade3fa5d4f92b4ed688127ea7d7b1d03"}, + {file = "lxml-4.9.2-cp37-cp37m-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_24_i686.whl", hash = "sha256:a5da296eb617d18e497bcf0a5c528f5d3b18dadb3619fbdadf4ed2356ef8d941"}, + {file = "lxml-4.9.2-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_24_aarch64.whl", hash = "sha256:04876580c050a8c5341d706dd464ff04fd597095cc8c023252566a8826505726"}, + {file = "lxml-4.9.2-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:c9ec3eaf616d67db0764b3bb983962b4f385a1f08304fd30c7283954e6a7869b"}, + {file = "lxml-4.9.2-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:2a29ba94d065945944016b6b74e538bdb1751a1db6ffb80c9d3c2e40d6fa9894"}, + {file = "lxml-4.9.2-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:a82d05da00a58b8e4c0008edbc8a4b6ec5a4bc1e2ee0fb6ed157cf634ed7fa45"}, + {file = "lxml-4.9.2-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:223f4232855ade399bd409331e6ca70fb5578efef22cf4069a6090acc0f53c0e"}, + {file = "lxml-4.9.2-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:d17bc7c2ccf49c478c5bdd447594e82692c74222698cfc9b5daae7ae7e90743b"}, + {file = "lxml-4.9.2-cp37-cp37m-win32.whl", hash = "sha256:b64d891da92e232c36976c80ed7ebb383e3f148489796d8d31a5b6a677825efe"}, + {file = 
"lxml-4.9.2-cp37-cp37m-win_amd64.whl", hash = "sha256:a0a336d6d3e8b234a3aae3c674873d8f0e720b76bc1d9416866c41cd9500ffb9"}, + {file = "lxml-4.9.2-cp38-cp38-macosx_10_15_x86_64.whl", hash = "sha256:da4dd7c9c50c059aba52b3524f84d7de956f7fef88f0bafcf4ad7dde94a064e8"}, + {file = "lxml-4.9.2-cp38-cp38-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_24_i686.whl", hash = "sha256:821b7f59b99551c69c85a6039c65b75f5683bdc63270fec660f75da67469ca24"}, + {file = "lxml-4.9.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_24_aarch64.whl", hash = "sha256:e5168986b90a8d1f2f9dc1b841467c74221bd752537b99761a93d2d981e04889"}, + {file = "lxml-4.9.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:8e20cb5a47247e383cf4ff523205060991021233ebd6f924bca927fcf25cf86f"}, + {file = "lxml-4.9.2-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:13598ecfbd2e86ea7ae45ec28a2a54fb87ee9b9fdb0f6d343297d8e548392c03"}, + {file = "lxml-4.9.2-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:880bbbcbe2fca64e2f4d8e04db47bcdf504936fa2b33933efd945e1b429bea8c"}, + {file = "lxml-4.9.2-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:7d2278d59425777cfcb19735018d897ca8303abe67cc735f9f97177ceff8027f"}, + {file = "lxml-4.9.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:5344a43228767f53a9df6e5b253f8cdca7dfc7b7aeae52551958192f56d98457"}, + {file = "lxml-4.9.2-cp38-cp38-win32.whl", hash = "sha256:925073b2fe14ab9b87e73f9a5fde6ce6392da430f3004d8b72cc86f746f5163b"}, + {file = "lxml-4.9.2-cp38-cp38-win_amd64.whl", hash = "sha256:9b22c5c66f67ae00c0199f6055705bc3eb3fcb08d03d2ec4059a2b1b25ed48d7"}, + {file = "lxml-4.9.2-cp39-cp39-macosx_10_15_x86_64.whl", hash = "sha256:5f50a1c177e2fa3ee0667a5ab79fdc6b23086bc8b589d90b93b4bd17eb0e64d1"}, + {file = "lxml-4.9.2-cp39-cp39-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_24_i686.whl", hash = "sha256:090c6543d3696cbe15b4ac6e175e576bcc3f1ccfbba970061b7300b0c15a2140"}, + {file = "lxml-4.9.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_24_aarch64.whl", hash = "sha256:63da2ccc0857c311d764e7d3d90f429c252e83b52d1f8f1d1fe55be26827d1f4"}, + {file = "lxml-4.9.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:5b4545b8a40478183ac06c073e81a5ce4cf01bf1734962577cf2bb569a5b3bbf"}, + {file = "lxml-4.9.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:2e430cd2824f05f2d4f687701144556646bae8f249fd60aa1e4c768ba7018947"}, + {file = "lxml-4.9.2-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:6804daeb7ef69e7b36f76caddb85cccd63d0c56dedb47555d2fc969e2af6a1a5"}, + {file = "lxml-4.9.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:a6e441a86553c310258aca15d1c05903aaf4965b23f3bc2d55f200804e005ee5"}, + {file = "lxml-4.9.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:ca34efc80a29351897e18888c71c6aca4a359247c87e0b1c7ada14f0ab0c0fb2"}, + {file = "lxml-4.9.2-cp39-cp39-win32.whl", hash = "sha256:6b418afe5df18233fc6b6093deb82a32895b6bb0b1155c2cdb05203f583053f1"}, + {file = "lxml-4.9.2-cp39-cp39-win_amd64.whl", hash = "sha256:f1496ea22ca2c830cbcbd473de8f114a320da308438ae65abad6bab7867fe38f"}, + {file = "lxml-4.9.2-pp37-pypy37_pp73-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_24_i686.whl", hash = "sha256:b264171e3143d842ded311b7dccd46ff9ef34247129ff5bf5066123c55c2431c"}, + {file = "lxml-4.9.2-pp37-pypy37_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = 
"sha256:0dc313ef231edf866912e9d8f5a042ddab56c752619e92dfd3a2c277e6a7299a"}, + {file = "lxml-4.9.2-pp38-pypy38_pp73-macosx_10_15_x86_64.whl", hash = "sha256:16efd54337136e8cd72fb9485c368d91d77a47ee2d42b057564aae201257d419"}, + {file = "lxml-4.9.2-pp38-pypy38_pp73-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_24_i686.whl", hash = "sha256:0f2b1e0d79180f344ff9f321327b005ca043a50ece8713de61d1cb383fb8ac05"}, + {file = "lxml-4.9.2-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:7b770ed79542ed52c519119473898198761d78beb24b107acf3ad65deae61f1f"}, + {file = "lxml-4.9.2-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:efa29c2fe6b4fdd32e8ef81c1528506895eca86e1d8c4657fda04c9b3786ddf9"}, + {file = "lxml-4.9.2-pp39-pypy39_pp73-macosx_10_15_x86_64.whl", hash = "sha256:7e91ee82f4199af8c43d8158024cbdff3d931df350252288f0d4ce656df7f3b5"}, + {file = "lxml-4.9.2-pp39-pypy39_pp73-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_24_i686.whl", hash = "sha256:b23e19989c355ca854276178a0463951a653309fb8e57ce674497f2d9f208746"}, + {file = "lxml-4.9.2-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:01d36c05f4afb8f7c20fd9ed5badca32a2029b93b1750f571ccc0b142531caf7"}, + {file = "lxml-4.9.2-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:7b515674acfdcadb0eb5d00d8a709868173acece5cb0be3dd165950cbfdf5409"}, + {file = "lxml-4.9.2.tar.gz", hash = "sha256:2455cfaeb7ac70338b3257f41e21f0724f4b5b0c0e7702da67ee6c3640835b67"}, +] + +[package.extras] +cssselect = ["cssselect (>=0.7)"] +html5 = ["html5lib"] +htmlsoup = ["BeautifulSoup4"] +source = ["Cython (>=0.29.7)"] + +[[package]] +name = "mako" +version = "1.2.4" +description = "A super-fast templating language that borrows the best ideas from the existing templating languages." +optional = false +python-versions = ">=3.7" +files = [ + {file = "Mako-1.2.4-py3-none-any.whl", hash = "sha256:c97c79c018b9165ac9922ae4f32da095ffd3c4e6872b45eded42926deea46818"}, + {file = "Mako-1.2.4.tar.gz", hash = "sha256:d60a3903dc3bb01a18ad6a89cdbe2e4eadc69c0bc8ef1e3773ba53d44c3f7a34"}, +] + +[package.dependencies] +MarkupSafe = ">=0.9.2" + +[package.extras] +babel = ["Babel"] +lingua = ["lingua"] +testing = ["pytest"] + +[[package]] +name = "markupsafe" +version = "2.1.3" +description = "Safely add untrusted strings to HTML/XML markup." 
+optional = false +python-versions = ">=3.7" +files = [ + {file = "MarkupSafe-2.1.3-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:cd0f502fe016460680cd20aaa5a76d241d6f35a1c3350c474bac1273803893fa"}, + {file = "MarkupSafe-2.1.3-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:e09031c87a1e51556fdcb46e5bd4f59dfb743061cf93c4d6831bf894f125eb57"}, + {file = "MarkupSafe-2.1.3-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:68e78619a61ecf91e76aa3e6e8e33fc4894a2bebe93410754bd28fce0a8a4f9f"}, + {file = "MarkupSafe-2.1.3-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:65c1a9bcdadc6c28eecee2c119465aebff8f7a584dd719facdd9e825ec61ab52"}, + {file = "MarkupSafe-2.1.3-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:525808b8019e36eb524b8c68acdd63a37e75714eac50e988180b169d64480a00"}, + {file = "MarkupSafe-2.1.3-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:962f82a3086483f5e5f64dbad880d31038b698494799b097bc59c2edf392fce6"}, + {file = "MarkupSafe-2.1.3-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:aa7bd130efab1c280bed0f45501b7c8795f9fdbeb02e965371bbef3523627779"}, + {file = "MarkupSafe-2.1.3-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:c9c804664ebe8f83a211cace637506669e7890fec1b4195b505c214e50dd4eb7"}, + {file = "MarkupSafe-2.1.3-cp310-cp310-win32.whl", hash = "sha256:10bbfe99883db80bdbaff2dcf681dfc6533a614f700da1287707e8a5d78a8431"}, + {file = "MarkupSafe-2.1.3-cp310-cp310-win_amd64.whl", hash = "sha256:1577735524cdad32f9f694208aa75e422adba74f1baee7551620e43a3141f559"}, + {file = "MarkupSafe-2.1.3-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:ad9e82fb8f09ade1c3e1b996a6337afac2b8b9e365f926f5a61aacc71adc5b3c"}, + {file = "MarkupSafe-2.1.3-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:3c0fae6c3be832a0a0473ac912810b2877c8cb9d76ca48de1ed31e1c68386575"}, + {file = "MarkupSafe-2.1.3-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b076b6226fb84157e3f7c971a47ff3a679d837cf338547532ab866c57930dbee"}, + {file = "MarkupSafe-2.1.3-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bfce63a9e7834b12b87c64d6b155fdd9b3b96191b6bd334bf37db7ff1fe457f2"}, + {file = "MarkupSafe-2.1.3-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:338ae27d6b8745585f87218a3f23f1512dbf52c26c28e322dbe54bcede54ccb9"}, + {file = "MarkupSafe-2.1.3-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:e4dd52d80b8c83fdce44e12478ad2e85c64ea965e75d66dbeafb0a3e77308fcc"}, + {file = "MarkupSafe-2.1.3-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:df0be2b576a7abbf737b1575f048c23fb1d769f267ec4358296f31c2479db8f9"}, + {file = "MarkupSafe-2.1.3-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:5bbe06f8eeafd38e5d0a4894ffec89378b6c6a625ff57e3028921f8ff59318ac"}, + {file = "MarkupSafe-2.1.3-cp311-cp311-win32.whl", hash = "sha256:dd15ff04ffd7e05ffcb7fe79f1b98041b8ea30ae9234aed2a9168b5797c3effb"}, + {file = "MarkupSafe-2.1.3-cp311-cp311-win_amd64.whl", hash = "sha256:134da1eca9ec0ae528110ccc9e48041e0828d79f24121a1a146161103c76e686"}, + {file = "MarkupSafe-2.1.3-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:8e254ae696c88d98da6555f5ace2279cf7cd5b3f52be2b5cf97feafe883b58d2"}, + {file = "MarkupSafe-2.1.3-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cb0932dc158471523c9637e807d9bfb93e06a95cbf010f1a38b98623b929ef2b"}, + {file = 
"MarkupSafe-2.1.3-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9402b03f1a1b4dc4c19845e5c749e3ab82d5078d16a2a4c2cd2df62d57bb0707"}, + {file = "MarkupSafe-2.1.3-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ca379055a47383d02a5400cb0d110cef0a776fc644cda797db0c5696cfd7e18e"}, + {file = "MarkupSafe-2.1.3-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:b7ff0f54cb4ff66dd38bebd335a38e2c22c41a8ee45aa608efc890ac3e3931bc"}, + {file = "MarkupSafe-2.1.3-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:c011a4149cfbcf9f03994ec2edffcb8b1dc2d2aede7ca243746df97a5d41ce48"}, + {file = "MarkupSafe-2.1.3-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:56d9f2ecac662ca1611d183feb03a3fa4406469dafe241673d521dd5ae92a155"}, + {file = "MarkupSafe-2.1.3-cp37-cp37m-win32.whl", hash = "sha256:8758846a7e80910096950b67071243da3e5a20ed2546e6392603c096778d48e0"}, + {file = "MarkupSafe-2.1.3-cp37-cp37m-win_amd64.whl", hash = "sha256:787003c0ddb00500e49a10f2844fac87aa6ce977b90b0feaaf9de23c22508b24"}, + {file = "MarkupSafe-2.1.3-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:2ef12179d3a291be237280175b542c07a36e7f60718296278d8593d21ca937d4"}, + {file = "MarkupSafe-2.1.3-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:2c1b19b3aaacc6e57b7e25710ff571c24d6c3613a45e905b1fde04d691b98ee0"}, + {file = "MarkupSafe-2.1.3-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8afafd99945ead6e075b973fefa56379c5b5c53fd8937dad92c662da5d8fd5ee"}, + {file = "MarkupSafe-2.1.3-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8c41976a29d078bb235fea9b2ecd3da465df42a562910f9022f1a03107bd02be"}, + {file = "MarkupSafe-2.1.3-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d080e0a5eb2529460b30190fcfcc4199bd7f827663f858a226a81bc27beaa97e"}, + {file = "MarkupSafe-2.1.3-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:69c0f17e9f5a7afdf2cc9fb2d1ce6aabdb3bafb7f38017c0b77862bcec2bbad8"}, + {file = "MarkupSafe-2.1.3-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:504b320cd4b7eff6f968eddf81127112db685e81f7e36e75f9f84f0df46041c3"}, + {file = "MarkupSafe-2.1.3-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:42de32b22b6b804f42c5d98be4f7e5e977ecdd9ee9b660fda1a3edf03b11792d"}, + {file = "MarkupSafe-2.1.3-cp38-cp38-win32.whl", hash = "sha256:ceb01949af7121f9fc39f7d27f91be8546f3fb112c608bc4029aef0bab86a2a5"}, + {file = "MarkupSafe-2.1.3-cp38-cp38-win_amd64.whl", hash = "sha256:1b40069d487e7edb2676d3fbdb2b0829ffa2cd63a2ec26c4938b2d34391b4ecc"}, + {file = "MarkupSafe-2.1.3-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:8023faf4e01efadfa183e863fefde0046de576c6f14659e8782065bcece22198"}, + {file = "MarkupSafe-2.1.3-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:6b2b56950d93e41f33b4223ead100ea0fe11f8e6ee5f641eb753ce4b77a7042b"}, + {file = "MarkupSafe-2.1.3-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9dcdfd0eaf283af041973bff14a2e143b8bd64e069f4c383416ecd79a81aab58"}, + {file = "MarkupSafe-2.1.3-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:05fb21170423db021895e1ea1e1f3ab3adb85d1c2333cbc2310f2a26bc77272e"}, + {file = "MarkupSafe-2.1.3-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:282c2cb35b5b673bbcadb33a585408104df04f14b2d9b01d4c345a3b92861c2c"}, + {file = "MarkupSafe-2.1.3-cp39-cp39-musllinux_1_1_aarch64.whl", hash = 
"sha256:ab4a0df41e7c16a1392727727e7998a467472d0ad65f3ad5e6e765015df08636"}, + {file = "MarkupSafe-2.1.3-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:7ef3cb2ebbf91e330e3bb937efada0edd9003683db6b57bb108c4001f37a02ea"}, + {file = "MarkupSafe-2.1.3-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:0a4e4a1aff6c7ac4cd55792abf96c915634c2b97e3cc1c7129578aa68ebd754e"}, + {file = "MarkupSafe-2.1.3-cp39-cp39-win32.whl", hash = "sha256:fec21693218efe39aa7f8599346e90c705afa52c5b31ae019b2e57e8f6542bb2"}, + {file = "MarkupSafe-2.1.3-cp39-cp39-win_amd64.whl", hash = "sha256:3fd4abcb888d15a94f32b75d8fd18ee162ca0c064f35b11134be77050296d6ba"}, + {file = "MarkupSafe-2.1.3.tar.gz", hash = "sha256:af598ed32d6ae86f1b747b82783958b1a4ab8f617b06fe68795c7f026abbdcad"}, +] + +[[package]] +name = "mccabe" +version = "0.6.1" +description = "McCabe checker, plugin for flake8" +optional = false +python-versions = "*" +files = [ + {file = "mccabe-0.6.1-py2.py3-none-any.whl", hash = "sha256:ab8a6258860da4b6677da4bd2fe5dc2c659cff31b3ee4f7f5d64e79735b80d42"}, + {file = "mccabe-0.6.1.tar.gz", hash = "sha256:dd8d182285a0fe56bace7f45b5e7d1a6ebcbf524e8f3bd87eb0f125271b8831f"}, +] + +[[package]] +name = "mf2py" +version = "1.1.3" +description = "Python Microformats2 parser" +optional = false +python-versions = ">=2.7" +files = [ + {file = "mf2py-1.1.3-py3-none-any.whl", hash = "sha256:8f9e2c147beadd56f8839644124c7d141d96e879319b9f50d02826c88766bf4d"}, + {file = "mf2py-1.1.3.tar.gz", hash = "sha256:4241e91ed4b644dd666d9fbd2557ed86e5bb7399c196026f7b0a1f413b33f59f"}, +] + +[package.dependencies] +BeautifulSoup4 = ">=4.6.0" +html5lib = ">=1.0.1" +requests = ">=2.18.4" + +[[package]] +name = "mistletoe" +version = "0.9.0" +description = "A fast, extensible Markdown parser in pure Python." 
+optional = false +python-versions = "~=3.5" +files = [ + {file = "mistletoe-0.9.0-py3-none-any.whl", hash = "sha256:11316e2fe0be422a8248293ad0efbee9ad0c6f3683b2f45bc6b989ea17a68c74"}, + {file = "mistletoe-0.9.0.tar.gz", hash = "sha256:3cb96d78226d08f0d3bf09efcaf330d23902492006e18b2c06558e8b86bf7faf"}, +] + +[[package]] +name = "mypy" +version = "0.960" +description = "Optional static typing for Python" +optional = false +python-versions = ">=3.6" +files = [ + {file = "mypy-0.960-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:3a3e525cd76c2c4f90f1449fd034ba21fcca68050ff7c8397bb7dd25dd8b8248"}, + {file = "mypy-0.960-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:7a76dc4f91e92db119b1be293892df8379b08fd31795bb44e0ff84256d34c251"}, + {file = "mypy-0.960-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:ffdad80a92c100d1b0fe3d3cf1a4724136029a29afe8566404c0146747114382"}, + {file = "mypy-0.960-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:7d390248ec07fa344b9f365e6ed9d205bd0205e485c555bed37c4235c868e9d5"}, + {file = "mypy-0.960-cp310-cp310-win_amd64.whl", hash = "sha256:925aa84369a07846b7f3b8556ccade1f371aa554f2bd4fb31cb97a24b73b036e"}, + {file = "mypy-0.960-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:239d6b2242d6c7f5822163ee082ef7a28ee02e7ac86c35593ef923796826a385"}, + {file = "mypy-0.960-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:f1ba54d440d4feee49d8768ea952137316d454b15301c44403db3f2cb51af024"}, + {file = "mypy-0.960-cp36-cp36m-win_amd64.whl", hash = "sha256:cb7752b24528c118a7403ee955b6a578bfcf5879d5ee91790667c8ea511d2085"}, + {file = "mypy-0.960-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:826a2917c275e2ee05b7c7b736c1e6549a35b7ea5a198ca457f8c2ebea2cbecf"}, + {file = "mypy-0.960-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:3eabcbd2525f295da322dff8175258f3fc4c3eb53f6d1929644ef4d99b92e72d"}, + {file = "mypy-0.960-cp37-cp37m-win_amd64.whl", hash = "sha256:f47322796c412271f5aea48381a528a613f33e0a115452d03ae35d673e6064f8"}, + {file = "mypy-0.960-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:2c7f8bb9619290836a4e167e2ef1f2cf14d70e0bc36c04441e41487456561409"}, + {file = "mypy-0.960-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:fbfb873cf2b8d8c3c513367febde932e061a5f73f762896826ba06391d932b2a"}, + {file = "mypy-0.960-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:cc537885891382e08129d9862553b3d00d4be3eb15b8cae9e2466452f52b0117"}, + {file = "mypy-0.960-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:481f98c6b24383188c928f33dd2f0776690807e12e9989dd0419edd5c74aa53b"}, + {file = "mypy-0.960-cp38-cp38-win_amd64.whl", hash = "sha256:29dc94d9215c3eb80ac3c2ad29d0c22628accfb060348fd23d73abe3ace6c10d"}, + {file = "mypy-0.960-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:33d53a232bb79057f33332dbbb6393e68acbcb776d2f571ba4b1d50a2c8ba873"}, + {file = "mypy-0.960-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:8d645e9e7f7a5da3ec3bbcc314ebb9bb22c7ce39e70367830eb3c08d0140b9ce"}, + {file = "mypy-0.960-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:85cf2b14d32b61db24ade8ac9ae7691bdfc572a403e3cb8537da936e74713275"}, + {file = "mypy-0.960-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = 
"sha256:a85a20b43fa69efc0b955eba1db435e2ffecb1ca695fe359768e0503b91ea89f"}, + {file = "mypy-0.960-cp39-cp39-win_amd64.whl", hash = "sha256:0ebfb3f414204b98c06791af37a3a96772203da60636e2897408517fcfeee7a8"}, + {file = "mypy-0.960-py3-none-any.whl", hash = "sha256:bfd4f6536bd384c27c392a8b8f790fd0ed5c0cf2f63fc2fed7bce56751d53026"}, + {file = "mypy-0.960.tar.gz", hash = "sha256:d4fccf04c1acf750babd74252e0f2db6bd2ac3aa8fe960797d9f3ef41cf2bfd4"}, +] + +[package.dependencies] +mypy-extensions = ">=0.4.3" +tomli = {version = ">=1.1.0", markers = "python_version < \"3.11\""} +typing-extensions = ">=3.10" + +[package.extras] +dmypy = ["psutil (>=4.0)"] +python2 = ["typed-ast (>=1.4.0,<2)"] +reports = ["lxml"] + +[[package]] +name = "mypy-extensions" +version = "1.0.0" +description = "Type system extensions for programs checked with the mypy type checker." +optional = false +python-versions = ">=3.5" +files = [ + {file = "mypy_extensions-1.0.0-py3-none-any.whl", hash = "sha256:4392f6c0eb8a5668a69e23d168ffa70f0be9ccfd32b5cc2d26a34ae5b844552d"}, + {file = "mypy_extensions-1.0.0.tar.gz", hash = "sha256:75dbf8955dc00442a438fc4d0666508a9a97b6bd41aa2f0ffe9d2f2725af0782"}, +] + +[[package]] +name = "packaging" +version = "23.1" +description = "Core utilities for Python packages" +optional = false +python-versions = ">=3.7" +files = [ + {file = "packaging-23.1-py3-none-any.whl", hash = "sha256:994793af429502c4ea2ebf6bf664629d07c1a9fe974af92966e4b8d2df7edc61"}, + {file = "packaging-23.1.tar.gz", hash = "sha256:a392980d2b6cffa644431898be54b0045151319d1e7ec34f0cfed48767dd334f"}, +] + +[[package]] +name = "pathspec" +version = "0.11.1" +description = "Utility library for gitignore style pattern matching of file paths." +optional = false +python-versions = ">=3.7" +files = [ + {file = "pathspec-0.11.1-py3-none-any.whl", hash = "sha256:d8af70af76652554bd134c22b3e8a1cc46ed7d91edcdd721ef1a0c51a84a5293"}, + {file = "pathspec-0.11.1.tar.gz", hash = "sha256:2798de800fa92780e33acca925945e9a19a133b715067cf165b8866c15a31687"}, +] + +[[package]] +name = "pebble" +version = "5.0.3" +description = "Threading and multiprocessing eye-candy." 
+optional = false +python-versions = ">=3.6" +files = [ + {file = "Pebble-5.0.3-py3-none-any.whl", hash = "sha256:8274aa0959f387b368ede47666129cbe5d123f276a1bd9cafe77e020194b2141"}, + {file = "Pebble-5.0.3.tar.gz", hash = "sha256:bdcfd9ea7e0aedb895b204177c19e6d6543d9962f4e3402ebab2175004863da8"}, +] + +[[package]] +name = "pillow" +version = "9.5.0" +description = "Python Imaging Library (Fork)" +optional = false +python-versions = ">=3.7" +files = [ + {file = "Pillow-9.5.0-cp310-cp310-macosx_10_10_x86_64.whl", hash = "sha256:ace6ca218308447b9077c14ea4ef381ba0b67ee78d64046b3f19cf4e1139ad16"}, + {file = "Pillow-9.5.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:d3d403753c9d5adc04d4694d35cf0391f0f3d57c8e0030aac09d7678fa8030aa"}, + {file = "Pillow-9.5.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5ba1b81ee69573fe7124881762bb4cd2e4b6ed9dd28c9c60a632902fe8db8b38"}, + {file = "Pillow-9.5.0-cp310-cp310-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:fe7e1c262d3392afcf5071df9afa574544f28eac825284596ac6db56e6d11062"}, + {file = "Pillow-9.5.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8f36397bf3f7d7c6a3abdea815ecf6fd14e7fcd4418ab24bae01008d8d8ca15e"}, + {file = "Pillow-9.5.0-cp310-cp310-manylinux_2_28_aarch64.whl", hash = "sha256:252a03f1bdddce077eff2354c3861bf437c892fb1832f75ce813ee94347aa9b5"}, + {file = "Pillow-9.5.0-cp310-cp310-manylinux_2_28_x86_64.whl", hash = "sha256:85ec677246533e27770b0de5cf0f9d6e4ec0c212a1f89dfc941b64b21226009d"}, + {file = "Pillow-9.5.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:b416f03d37d27290cb93597335a2f85ed446731200705b22bb927405320de903"}, + {file = "Pillow-9.5.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:1781a624c229cb35a2ac31cc4a77e28cafc8900733a864870c49bfeedacd106a"}, + {file = "Pillow-9.5.0-cp310-cp310-win32.whl", hash = "sha256:8507eda3cd0608a1f94f58c64817e83ec12fa93a9436938b191b80d9e4c0fc44"}, + {file = "Pillow-9.5.0-cp310-cp310-win_amd64.whl", hash = "sha256:d3c6b54e304c60c4181da1c9dadf83e4a54fd266a99c70ba646a9baa626819eb"}, + {file = "Pillow-9.5.0-cp311-cp311-macosx_10_10_x86_64.whl", hash = "sha256:7ec6f6ce99dab90b52da21cf0dc519e21095e332ff3b399a357c187b1a5eee32"}, + {file = "Pillow-9.5.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:560737e70cb9c6255d6dcba3de6578a9e2ec4b573659943a5e7e4af13f298f5c"}, + {file = "Pillow-9.5.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:96e88745a55b88a7c64fa49bceff363a1a27d9a64e04019c2281049444a571e3"}, + {file = "Pillow-9.5.0-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d9c206c29b46cfd343ea7cdfe1232443072bbb270d6a46f59c259460db76779a"}, + {file = "Pillow-9.5.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cfcc2c53c06f2ccb8976fb5c71d448bdd0a07d26d8e07e321c103416444c7ad1"}, + {file = "Pillow-9.5.0-cp311-cp311-manylinux_2_28_aarch64.whl", hash = "sha256:a0f9bb6c80e6efcde93ffc51256d5cfb2155ff8f78292f074f60f9e70b942d99"}, + {file = "Pillow-9.5.0-cp311-cp311-manylinux_2_28_x86_64.whl", hash = "sha256:8d935f924bbab8f0a9a28404422da8af4904e36d5c33fc6f677e4c4485515625"}, + {file = "Pillow-9.5.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:fed1e1cf6a42577953abbe8e6cf2fe2f566daebde7c34724ec8803c4c0cda579"}, + {file = "Pillow-9.5.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:c1170d6b195555644f0616fd6ed929dfcf6333b8675fcca044ae5ab110ded296"}, + {file = "Pillow-9.5.0-cp311-cp311-win32.whl", hash = 
"sha256:54f7102ad31a3de5666827526e248c3530b3a33539dbda27c6843d19d72644ec"}, + {file = "Pillow-9.5.0-cp311-cp311-win_amd64.whl", hash = "sha256:cfa4561277f677ecf651e2b22dc43e8f5368b74a25a8f7d1d4a3a243e573f2d4"}, + {file = "Pillow-9.5.0-cp311-cp311-win_arm64.whl", hash = "sha256:965e4a05ef364e7b973dd17fc765f42233415974d773e82144c9bbaaaea5d089"}, + {file = "Pillow-9.5.0-cp312-cp312-win32.whl", hash = "sha256:22baf0c3cf0c7f26e82d6e1adf118027afb325e703922c8dfc1d5d0156bb2eeb"}, + {file = "Pillow-9.5.0-cp312-cp312-win_amd64.whl", hash = "sha256:432b975c009cf649420615388561c0ce7cc31ce9b2e374db659ee4f7d57a1f8b"}, + {file = "Pillow-9.5.0-cp37-cp37m-macosx_10_10_x86_64.whl", hash = "sha256:5d4ebf8e1db4441a55c509c4baa7a0587a0210f7cd25fcfe74dbbce7a4bd1906"}, + {file = "Pillow-9.5.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:375f6e5ee9620a271acb6820b3d1e94ffa8e741c0601db4c0c4d3cb0a9c224bf"}, + {file = "Pillow-9.5.0-cp37-cp37m-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:99eb6cafb6ba90e436684e08dad8be1637efb71c4f2180ee6b8f940739406e78"}, + {file = "Pillow-9.5.0-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2dfaaf10b6172697b9bceb9a3bd7b951819d1ca339a5ef294d1f1ac6d7f63270"}, + {file = "Pillow-9.5.0-cp37-cp37m-manylinux_2_28_aarch64.whl", hash = "sha256:763782b2e03e45e2c77d7779875f4432e25121ef002a41829d8868700d119392"}, + {file = "Pillow-9.5.0-cp37-cp37m-manylinux_2_28_x86_64.whl", hash = "sha256:35f6e77122a0c0762268216315bf239cf52b88865bba522999dc38f1c52b9b47"}, + {file = "Pillow-9.5.0-cp37-cp37m-win32.whl", hash = "sha256:aca1c196f407ec7cf04dcbb15d19a43c507a81f7ffc45b690899d6a76ac9fda7"}, + {file = "Pillow-9.5.0-cp37-cp37m-win_amd64.whl", hash = "sha256:322724c0032af6692456cd6ed554bb85f8149214d97398bb80613b04e33769f6"}, + {file = "Pillow-9.5.0-cp38-cp38-macosx_10_10_x86_64.whl", hash = "sha256:a0aa9417994d91301056f3d0038af1199eb7adc86e646a36b9e050b06f526597"}, + {file = "Pillow-9.5.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:f8286396b351785801a976b1e85ea88e937712ee2c3ac653710a4a57a8da5d9c"}, + {file = "Pillow-9.5.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c830a02caeb789633863b466b9de10c015bded434deb3ec87c768e53752ad22a"}, + {file = "Pillow-9.5.0-cp38-cp38-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:fbd359831c1657d69bb81f0db962905ee05e5e9451913b18b831febfe0519082"}, + {file = "Pillow-9.5.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f8fc330c3370a81bbf3f88557097d1ea26cd8b019d6433aa59f71195f5ddebbf"}, + {file = "Pillow-9.5.0-cp38-cp38-manylinux_2_28_aarch64.whl", hash = "sha256:7002d0797a3e4193c7cdee3198d7c14f92c0836d6b4a3f3046a64bd1ce8df2bf"}, + {file = "Pillow-9.5.0-cp38-cp38-manylinux_2_28_x86_64.whl", hash = "sha256:229e2c79c00e85989a34b5981a2b67aa079fd08c903f0aaead522a1d68d79e51"}, + {file = "Pillow-9.5.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:9adf58f5d64e474bed00d69bcd86ec4bcaa4123bfa70a65ce72e424bfb88ed96"}, + {file = "Pillow-9.5.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:662da1f3f89a302cc22faa9f14a262c2e3951f9dbc9617609a47521c69dd9f8f"}, + {file = "Pillow-9.5.0-cp38-cp38-win32.whl", hash = "sha256:6608ff3bf781eee0cd14d0901a2b9cc3d3834516532e3bd673a0a204dc8615fc"}, + {file = "Pillow-9.5.0-cp38-cp38-win_amd64.whl", hash = "sha256:e49eb4e95ff6fd7c0c402508894b1ef0e01b99a44320ba7d8ecbabefddcc5569"}, + {file = "Pillow-9.5.0-cp39-cp39-macosx_10_10_x86_64.whl", hash = 
"sha256:482877592e927fd263028c105b36272398e3e1be3269efda09f6ba21fd83ec66"}, + {file = "Pillow-9.5.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:3ded42b9ad70e5f1754fb7c2e2d6465a9c842e41d178f262e08b8c85ed8a1d8e"}, + {file = "Pillow-9.5.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c446d2245ba29820d405315083d55299a796695d747efceb5717a8b450324115"}, + {file = "Pillow-9.5.0-cp39-cp39-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:8aca1152d93dcc27dc55395604dcfc55bed5f25ef4c98716a928bacba90d33a3"}, + {file = "Pillow-9.5.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:608488bdcbdb4ba7837461442b90ea6f3079397ddc968c31265c1e056964f1ef"}, + {file = "Pillow-9.5.0-cp39-cp39-manylinux_2_28_aarch64.whl", hash = "sha256:60037a8db8750e474af7ffc9faa9b5859e6c6d0a50e55c45576bf28be7419705"}, + {file = "Pillow-9.5.0-cp39-cp39-manylinux_2_28_x86_64.whl", hash = "sha256:07999f5834bdc404c442146942a2ecadd1cb6292f5229f4ed3b31e0a108746b1"}, + {file = "Pillow-9.5.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:a127ae76092974abfbfa38ca2d12cbeddcdeac0fb71f9627cc1135bedaf9d51a"}, + {file = "Pillow-9.5.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:489f8389261e5ed43ac8ff7b453162af39c3e8abd730af8363587ba64bb2e865"}, + {file = "Pillow-9.5.0-cp39-cp39-win32.whl", hash = "sha256:9b1af95c3a967bf1da94f253e56b6286b50af23392a886720f563c547e48e964"}, + {file = "Pillow-9.5.0-cp39-cp39-win_amd64.whl", hash = "sha256:77165c4a5e7d5a284f10a6efaa39a0ae8ba839da344f20b111d62cc932fa4e5d"}, + {file = "Pillow-9.5.0-pp38-pypy38_pp73-macosx_10_10_x86_64.whl", hash = "sha256:833b86a98e0ede388fa29363159c9b1a294b0905b5128baf01db683672f230f5"}, + {file = "Pillow-9.5.0-pp38-pypy38_pp73-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:aaf305d6d40bd9632198c766fb64f0c1a83ca5b667f16c1e79e1661ab5060140"}, + {file = "Pillow-9.5.0-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0852ddb76d85f127c135b6dd1f0bb88dbb9ee990d2cd9aa9e28526c93e794fba"}, + {file = "Pillow-9.5.0-pp38-pypy38_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:91ec6fe47b5eb5a9968c79ad9ed78c342b1f97a091677ba0e012701add857829"}, + {file = "Pillow-9.5.0-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:cb841572862f629b99725ebaec3287fc6d275be9b14443ea746c1dd325053cbd"}, + {file = "Pillow-9.5.0-pp39-pypy39_pp73-macosx_10_10_x86_64.whl", hash = "sha256:c380b27d041209b849ed246b111b7c166ba36d7933ec6e41175fd15ab9eb1572"}, + {file = "Pillow-9.5.0-pp39-pypy39_pp73-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7c9af5a3b406a50e313467e3565fc99929717f780164fe6fbb7704edba0cebbe"}, + {file = "Pillow-9.5.0-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5671583eab84af046a397d6d0ba25343c00cd50bce03787948e0fff01d4fd9b1"}, + {file = "Pillow-9.5.0-pp39-pypy39_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:84a6f19ce086c1bf894644b43cd129702f781ba5751ca8572f08aa40ef0ab7b7"}, + {file = "Pillow-9.5.0-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:1e7723bd90ef94eda669a3c2c19d549874dd5badaeefabefd26053304abe5799"}, + {file = "Pillow-9.5.0.tar.gz", hash = "sha256:bf548479d336726d7a0eceb6e767e179fbde37833ae42794602631a070d630f1"}, +] + +[package.extras] +docs = ["furo", "olefile", "sphinx (>=2.4)", "sphinx-copybutton", "sphinx-inline-tabs", "sphinx-removed-in", "sphinxext-opengraph"] +tests = ["check-manifest", "coverage", "defusedxml", "markdown2", "olefile", "packaging", "pyroma", "pytest", "pytest-cov", "pytest-timeout"] + +[[package]] 
+name = "platformdirs" +version = "3.8.0" +description = "A small Python package for determining appropriate platform-specific dirs, e.g. a \"user data dir\"." +optional = false +python-versions = ">=3.7" +files = [ + {file = "platformdirs-3.8.0-py3-none-any.whl", hash = "sha256:ca9ed98ce73076ba72e092b23d3c93ea6c4e186b3f1c3dad6edd98ff6ffcca2e"}, + {file = "platformdirs-3.8.0.tar.gz", hash = "sha256:b0cabcb11063d21a0b261d557acb0a9d2126350e63b70cdf7db6347baea456dc"}, +] + +[package.extras] +docs = ["furo (>=2023.5.20)", "proselint (>=0.13)", "sphinx (>=7.0.1)", "sphinx-autodoc-typehints (>=1.23,!=1.23.4)"] +test = ["appdirs (==1.4.4)", "covdefaults (>=2.3)", "pytest (>=7.3.1)", "pytest-cov (>=4.1)", "pytest-mock (>=3.10)"] + +[[package]] +name = "pluggy" +version = "1.2.0" +description = "plugin and hook calling mechanisms for python" +optional = false +python-versions = ">=3.7" +files = [ + {file = "pluggy-1.2.0-py3-none-any.whl", hash = "sha256:c2fd55a7d7a3863cba1a013e4e2414658b1d07b6bc57b3919e0c63c9abb99849"}, + {file = "pluggy-1.2.0.tar.gz", hash = "sha256:d12f0c4b579b15f5e054301bb226ee85eeeba08ffec228092f8defbaa3a4c4b3"}, +] + +[package.extras] +dev = ["pre-commit", "tox"] +testing = ["pytest", "pytest-benchmark"] + +[[package]] +name = "prompt-toolkit" +version = "3.0.38" +description = "Library for building powerful interactive command lines in Python" +optional = false +python-versions = ">=3.7.0" +files = [ + {file = "prompt_toolkit-3.0.38-py3-none-any.whl", hash = "sha256:45ea77a2f7c60418850331366c81cf6b5b9cf4c7fd34616f733c5427e6abbb1f"}, + {file = "prompt_toolkit-3.0.38.tar.gz", hash = "sha256:23ac5d50538a9a38c8bde05fecb47d0b403ecd0662857a86f886f798563d5b9b"}, +] + +[package.dependencies] +wcwidth = "*" + +[[package]] +name = "pyaml" +version = "23.5.9" +description = "PyYAML-based module to produce a bit more pretty and readable YAML-serialized data" +optional = false +python-versions = ">=3.8" +files = [ + {file = "pyaml-23.5.9-py3-none-any.whl", hash = "sha256:b7fa20b43c5b6e5c8b7406a2408fe533efd65a6459feff828f918342f043ef4c"}, + {file = "pyaml-23.5.9.tar.gz", hash = "sha256:4c4b28b6fe89336000f08646f3cf1f6b68fb11e4c409626b77562e65a577273b"}, +] + +[package.dependencies] +PyYAML = "*" + +[package.extras] +anchors = ["unidecode"] + +[[package]] +name = "pycodestyle" +version = "2.8.0" +description = "Python style guide checker" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" +files = [ + {file = "pycodestyle-2.8.0-py2.py3-none-any.whl", hash = "sha256:720f8b39dde8b293825e7ff02c475f3077124006db4f440dcbc9a20b76548a20"}, + {file = "pycodestyle-2.8.0.tar.gz", hash = "sha256:eddd5847ef438ea1c7870ca7eb78a9d47ce0cdb4851a5523949f2601d0cbbe7f"}, +] + +[[package]] +name = "pycparser" +version = "2.21" +description = "C parser in Python" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" +files = [ + {file = "pycparser-2.21-py2.py3-none-any.whl", hash = "sha256:8ee45429555515e1f6b185e78100aea234072576aa43ab53aefcae078162fca9"}, + {file = "pycparser-2.21.tar.gz", hash = "sha256:e644fdec12f7872f86c58ff790da456218b10f863970249516d60a5eaca77206"}, +] + +[[package]] +name = "pycryptodome" +version = "3.18.0" +description = "Cryptographic library for Python" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" +files = [ + {file = "pycryptodome-3.18.0-cp27-cp27m-macosx_10_9_x86_64.whl", hash = "sha256:d1497a8cd4728db0e0da3c304856cb37c0c4e3d0b36fcbabcc1600f18504fc54"}, + {file = 
"pycryptodome-3.18.0-cp27-cp27m-manylinux2010_i686.whl", hash = "sha256:928078c530da78ff08e10eb6cada6e0dff386bf3d9fa9871b4bbc9fbc1efe024"}, + {file = "pycryptodome-3.18.0-cp27-cp27m-manylinux2010_x86_64.whl", hash = "sha256:157c9b5ba5e21b375f052ca78152dd309a09ed04703fd3721dce3ff8ecced148"}, + {file = "pycryptodome-3.18.0-cp27-cp27m-manylinux2014_aarch64.whl", hash = "sha256:d20082bdac9218649f6abe0b885927be25a917e29ae0502eaf2b53f1233ce0c2"}, + {file = "pycryptodome-3.18.0-cp27-cp27m-musllinux_1_1_aarch64.whl", hash = "sha256:e8ad74044e5f5d2456c11ed4cfd3e34b8d4898c0cb201c4038fe41458a82ea27"}, + {file = "pycryptodome-3.18.0-cp27-cp27m-win32.whl", hash = "sha256:62a1e8847fabb5213ccde38915563140a5b338f0d0a0d363f996b51e4a6165cf"}, + {file = "pycryptodome-3.18.0-cp27-cp27m-win_amd64.whl", hash = "sha256:16bfd98dbe472c263ed2821284118d899c76968db1a6665ade0c46805e6b29a4"}, + {file = "pycryptodome-3.18.0-cp27-cp27mu-manylinux2010_i686.whl", hash = "sha256:7a3d22c8ee63de22336679e021c7f2386f7fc465477d59675caa0e5706387944"}, + {file = "pycryptodome-3.18.0-cp27-cp27mu-manylinux2010_x86_64.whl", hash = "sha256:78d863476e6bad2a592645072cc489bb90320972115d8995bcfbee2f8b209918"}, + {file = "pycryptodome-3.18.0-cp27-cp27mu-manylinux2014_aarch64.whl", hash = "sha256:b6a610f8bfe67eab980d6236fdc73bfcdae23c9ed5548192bb2d530e8a92780e"}, + {file = "pycryptodome-3.18.0-cp27-cp27mu-musllinux_1_1_aarch64.whl", hash = "sha256:422c89fd8df8a3bee09fb8d52aaa1e996120eafa565437392b781abec2a56e14"}, + {file = "pycryptodome-3.18.0-cp35-abi3-macosx_10_9_universal2.whl", hash = "sha256:9ad6f09f670c466aac94a40798e0e8d1ef2aa04589c29faa5b9b97566611d1d1"}, + {file = "pycryptodome-3.18.0-cp35-abi3-macosx_10_9_x86_64.whl", hash = "sha256:53aee6be8b9b6da25ccd9028caf17dcdce3604f2c7862f5167777b707fbfb6cb"}, + {file = "pycryptodome-3.18.0-cp35-abi3-manylinux2014_aarch64.whl", hash = "sha256:10da29526a2a927c7d64b8f34592f461d92ae55fc97981aab5bbcde8cb465bb6"}, + {file = "pycryptodome-3.18.0-cp35-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f21efb8438971aa16924790e1c3dba3a33164eb4000106a55baaed522c261acf"}, + {file = "pycryptodome-3.18.0-cp35-abi3-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4944defabe2ace4803f99543445c27dd1edbe86d7d4edb87b256476a91e9ffa4"}, + {file = "pycryptodome-3.18.0-cp35-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:51eae079ddb9c5f10376b4131be9589a6554f6fd84f7f655180937f611cd99a2"}, + {file = "pycryptodome-3.18.0-cp35-abi3-musllinux_1_1_i686.whl", hash = "sha256:83c75952dcf4a4cebaa850fa257d7a860644c70a7cd54262c237c9f2be26f76e"}, + {file = "pycryptodome-3.18.0-cp35-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:957b221d062d5752716923d14e0926f47670e95fead9d240fa4d4862214b9b2f"}, + {file = "pycryptodome-3.18.0-cp35-abi3-win32.whl", hash = "sha256:795bd1e4258a2c689c0b1f13ce9684fa0dd4c0e08680dcf597cf9516ed6bc0f3"}, + {file = "pycryptodome-3.18.0-cp35-abi3-win_amd64.whl", hash = "sha256:b1d9701d10303eec8d0bd33fa54d44e67b8be74ab449052a8372f12a66f93fb9"}, + {file = "pycryptodome-3.18.0-pp27-pypy_73-manylinux2010_x86_64.whl", hash = "sha256:cb1be4d5af7f355e7d41d36d8eec156ef1382a88638e8032215c215b82a4b8ec"}, + {file = "pycryptodome-3.18.0-pp27-pypy_73-win32.whl", hash = "sha256:fc0a73f4db1e31d4a6d71b672a48f3af458f548059aa05e83022d5f61aac9c08"}, + {file = "pycryptodome-3.18.0-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = "sha256:f022a4fd2a5263a5c483a2bb165f9cb27f2be06f2f477113783efe3fe2ad887b"}, + {file = 
"pycryptodome-3.18.0-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:363dd6f21f848301c2dcdeb3c8ae5f0dee2286a5e952a0f04954b82076f23825"}, + {file = "pycryptodome-3.18.0-pp38-pypy38_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:12600268763e6fec3cefe4c2dcdf79bde08d0b6dc1813887e789e495cb9f3403"}, + {file = "pycryptodome-3.18.0-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:4604816adebd4faf8810782f137f8426bf45fee97d8427fa8e1e49ea78a52e2c"}, + {file = "pycryptodome-3.18.0-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:01489bbdf709d993f3058e2996f8f40fee3f0ea4d995002e5968965fa2fe89fb"}, + {file = "pycryptodome-3.18.0-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3811e31e1ac3069988f7a1c9ee7331b942e605dfc0f27330a9ea5997e965efb2"}, + {file = "pycryptodome-3.18.0-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6f4b967bb11baea9128ec88c3d02f55a3e338361f5e4934f5240afcb667fdaec"}, + {file = "pycryptodome-3.18.0-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:9c8eda4f260072f7dbe42f473906c659dcbadd5ae6159dfb49af4da1293ae380"}, + {file = "pycryptodome-3.18.0.tar.gz", hash = "sha256:c9adee653fc882d98956e33ca2c1fb582e23a8af7ac82fee75bd6113c55a0413"}, +] + +[[package]] +name = "pydantic" +version = "1.10.10" +description = "Data validation and settings management using python type hints" +optional = false +python-versions = ">=3.7" +files = [ + {file = "pydantic-1.10.10-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:adad1ee4ab9888f12dac2529276704e719efcf472e38df7813f5284db699b4ec"}, + {file = "pydantic-1.10.10-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:7a7db03339893feef2092ff7b1afc9497beed15ebd4af84c3042a74abce02d48"}, + {file = "pydantic-1.10.10-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:67b3714b97ff84b2689654851c2426389bcabfac9080617bcf4306c69db606f6"}, + {file = "pydantic-1.10.10-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:edfdf0a5abc5c9bf2052ebaec20e67abd52e92d257e4f2d30e02c354ed3e6030"}, + {file = "pydantic-1.10.10-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:20a3b30fd255eeeb63caa9483502ba96b7795ce5bf895c6a179b3d909d9f53a6"}, + {file = "pydantic-1.10.10-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:db4c7f7e60ca6f7d6c1785070f3e5771fcb9b2d88546e334d2f2c3934d949028"}, + {file = "pydantic-1.10.10-cp310-cp310-win_amd64.whl", hash = "sha256:a2d5be50ac4a0976817144c7d653e34df2f9436d15555189f5b6f61161d64183"}, + {file = "pydantic-1.10.10-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:566a04ba755e8f701b074ffb134ddb4d429f75d5dced3fbd829a527aafe74c71"}, + {file = "pydantic-1.10.10-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:f79db3652ed743309f116ba863dae0c974a41b688242482638b892246b7db21d"}, + {file = "pydantic-1.10.10-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c62376890b819bebe3c717a9ac841a532988372b7e600e76f75c9f7c128219d5"}, + {file = "pydantic-1.10.10-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4870f13a4fafd5bc3e93cff3169222534fad867918b188e83ee0496452978437"}, + {file = "pydantic-1.10.10-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:990027e77cda6072a566e433b6962ca3b96b4f3ae8bd54748e9d62a58284d9d7"}, + {file = "pydantic-1.10.10-cp311-cp311-musllinux_1_1_x86_64.whl", hash = 
"sha256:8c40964596809eb616d94f9c7944511f620a1103d63d5510440ed2908fc410af"}, + {file = "pydantic-1.10.10-cp311-cp311-win_amd64.whl", hash = "sha256:ea9eebc2ebcba3717e77cdeee3f6203ffc0e78db5f7482c68b1293e8cc156e5e"}, + {file = "pydantic-1.10.10-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:762aa598f79b4cac2f275d13336b2dd8662febee2a9c450a49a2ab3bec4b385f"}, + {file = "pydantic-1.10.10-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6dab5219659f95e357d98d70577b361383057fb4414cfdb587014a5f5c595f7b"}, + {file = "pydantic-1.10.10-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f3d4ee957a727ccb5a36f1b0a6dbd9fad5dedd2a41eada99a8df55c12896e18d"}, + {file = "pydantic-1.10.10-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:b69f9138dec566962ec65623c9d57bee44412d2fc71065a5f3ebb3820bdeee96"}, + {file = "pydantic-1.10.10-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:7aa75d1bd9cc275cf9782f50f60cddaf74cbaae19b6ada2a28e737edac420312"}, + {file = "pydantic-1.10.10-cp37-cp37m-win_amd64.whl", hash = "sha256:9f62a727f5c590c78c2d12fda302d1895141b767c6488fe623098f8792255fe5"}, + {file = "pydantic-1.10.10-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:aac218feb4af73db8417ca7518fb3bade4534fcca6e3fb00f84966811dd94450"}, + {file = "pydantic-1.10.10-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:88546dc10a40b5b52cae87d64666787aeb2878f9a9b37825aedc2f362e7ae1da"}, + {file = "pydantic-1.10.10-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c41bbaae89e32fc582448e71974de738c055aef5ab474fb25692981a08df808a"}, + {file = "pydantic-1.10.10-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2b71bd504d1573b0b722ae536e8ffb796bedeef978979d076bf206e77dcc55a5"}, + {file = "pydantic-1.10.10-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:e088e3865a2270ecbc369924cd7d9fbc565667d9158e7f304e4097ebb9cf98dd"}, + {file = "pydantic-1.10.10-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:3403a090db45d4027d2344859d86eb797484dfda0706cf87af79ace6a35274ef"}, + {file = "pydantic-1.10.10-cp38-cp38-win_amd64.whl", hash = "sha256:e0014e29637125f4997c174dd6167407162d7af0da73414a9340461ea8573252"}, + {file = "pydantic-1.10.10-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:9965e49c6905840e526e5429b09e4c154355b6ecc0a2f05492eda2928190311d"}, + {file = "pydantic-1.10.10-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:748d10ab6089c5d196e1c8be9de48274f71457b01e59736f7a09c9dc34f51887"}, + {file = "pydantic-1.10.10-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:86936c383f7c38fd26d35107eb669c85d8f46dfceae873264d9bab46fe1c7dde"}, + {file = "pydantic-1.10.10-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7a26841be620309a9697f5b1ffc47dce74909e350c5315ccdac7a853484d468a"}, + {file = "pydantic-1.10.10-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:409b810f387610cc7405ab2fa6f62bdf7ea485311845a242ebc0bd0496e7e5ac"}, + {file = "pydantic-1.10.10-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:ce937a2a2c020bcad1c9fde02892392a1123de6dda906ddba62bfe8f3e5989a2"}, + {file = "pydantic-1.10.10-cp39-cp39-win_amd64.whl", hash = "sha256:37ebddef68370e6f26243acc94de56d291e01227a67b2ace26ea3543cf53dd5f"}, + {file = "pydantic-1.10.10-py3-none-any.whl", hash = "sha256:a5939ec826f7faec434e2d406ff5e4eaf1716eb1f247d68cd3d0b3612f7b4c8a"}, + {file = "pydantic-1.10.10.tar.gz", hash = 
"sha256:3b8d5bd97886f9eb59260594207c9f57dce14a6f869c6ceea90188715d29921a"}, +] + +[package.dependencies] +typing-extensions = ">=4.2.0" + +[package.extras] +dotenv = ["python-dotenv (>=0.10.4)"] +email = ["email-validator (>=1.0.3)"] + +[[package]] +name = "pyflakes" +version = "2.4.0" +description = "passive checker of Python programs" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" +files = [ + {file = "pyflakes-2.4.0-py2.py3-none-any.whl", hash = "sha256:3bb3a3f256f4b7968c9c788781e4ff07dce46bdf12339dcda61053375426ee2e"}, + {file = "pyflakes-2.4.0.tar.gz", hash = "sha256:05a85c2872edf37a4ed30b0cce2f6093e1d0581f8c19d7393122da7e25b2b24c"}, +] + +[[package]] +name = "pygments" +version = "2.15.1" +description = "Pygments is a syntax highlighting package written in Python." +optional = false +python-versions = ">=3.7" +files = [ + {file = "Pygments-2.15.1-py3-none-any.whl", hash = "sha256:db2db3deb4b4179f399a09054b023b6a586b76499d36965813c71aa8ed7b5fd1"}, + {file = "Pygments-2.15.1.tar.gz", hash = "sha256:8ace4d3c1dd481894b2005f560ead0f9f19ee64fe983366be1a21e171d12775c"}, +] + +[package.extras] +plugins = ["importlib-metadata"] + +[[package]] +name = "pyld" +version = "2.0.3" +description = "Python implementation of the JSON-LD API" +optional = false +python-versions = "*" +files = [ + {file = "PyLD-2.0.3.tar.gz", hash = "sha256:287445f888c3a332ccbd20a14844c66c2fcbaeab3c99acd506a0788e2ebb2f82"}, +] + +[package.dependencies] +cachetools = "*" +frozendict = "*" +lxml = "*" + +[package.extras] +aiohttp = ["aiohttp"] +cachetools = ["cachetools"] +frozendict = ["frozendict"] +requests = ["requests"] + +[[package]] +name = "pytest" +version = "7.4.0" +description = "pytest: simple powerful testing with Python" +optional = false +python-versions = ">=3.7" +files = [ + {file = "pytest-7.4.0-py3-none-any.whl", hash = "sha256:78bf16451a2eb8c7a2ea98e32dc119fd2aa758f1d5d66dbf0a59d69a3969df32"}, + {file = "pytest-7.4.0.tar.gz", hash = "sha256:b4bf8c45bd59934ed84001ad51e11b4ee40d40a1229d2c79f9c592b0a3f6bd8a"}, +] + +[package.dependencies] +colorama = {version = "*", markers = "sys_platform == \"win32\""} +exceptiongroup = {version = ">=1.0.0rc8", markers = "python_version < \"3.11\""} +iniconfig = "*" +packaging = "*" +pluggy = ">=0.12,<2.0" +tomli = {version = ">=1.0.0", markers = "python_version < \"3.11\""} + +[package.extras] +testing = ["argcomplete", "attrs (>=19.2.0)", "hypothesis (>=3.56)", "mock", "nose", "pygments (>=2.7.2)", "requests", "setuptools", "xmlschema"] + +[[package]] +name = "pytest-asyncio" +version = "0.18.3" +description = "Pytest support for asyncio" +optional = false +python-versions = ">=3.7" +files = [ + {file = "pytest-asyncio-0.18.3.tar.gz", hash = "sha256:7659bdb0a9eb9c6e3ef992eef11a2b3e69697800ad02fb06374a210d85b29f91"}, + {file = "pytest_asyncio-0.18.3-1-py3-none-any.whl", hash = "sha256:16cf40bdf2b4fb7fc8e4b82bd05ce3fbcd454cbf7b92afc445fe299dabb88213"}, + {file = "pytest_asyncio-0.18.3-py3-none-any.whl", hash = "sha256:8fafa6c52161addfd41ee7ab35f11836c5a16ec208f93ee388f752bea3493a84"}, +] + +[package.dependencies] +pytest = ">=6.1.0" + +[package.extras] +testing = ["coverage (==6.2)", "flaky (>=3.5.0)", "hypothesis (>=5.7.1)", "mypy (==0.931)", "pytest-trio (>=0.7.0)"] + +[[package]] +name = "python-dateutil" +version = "2.8.2" +description = "Extensions to the standard Python datetime module" +optional = false +python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,>=2.7" +files = [ + {file = "python-dateutil-2.8.2.tar.gz", hash = 
"sha256:0123cacc1627ae19ddf3c27a5de5bd67ee4586fbdd6440d9748f8abb483d3e86"}, + {file = "python_dateutil-2.8.2-py2.py3-none-any.whl", hash = "sha256:961d03dc3453ebbc59dbdea9e4e11c5651520a876d0f4db161e8674aae935da9"}, +] + +[package.dependencies] +six = ">=1.5" + +[[package]] +name = "python-dotenv" +version = "1.0.0" +description = "Read key-value pairs from a .env file and set them as environment variables" +optional = false +python-versions = ">=3.8" +files = [ + {file = "python-dotenv-1.0.0.tar.gz", hash = "sha256:a8df96034aae6d2d50a4ebe8216326c61c3eb64836776504fcca410e5937a3ba"}, + {file = "python_dotenv-1.0.0-py3-none-any.whl", hash = "sha256:f5971a9226b701070a4bf2c38c89e5a3f0d64de8debda981d1db98583009122a"}, +] + +[package.extras] +cli = ["click (>=5.0)"] + +[[package]] +name = "python-multipart" +version = "0.0.5" +description = "A streaming multipart parser for Python" +optional = false +python-versions = "*" +files = [ + {file = "python-multipart-0.0.5.tar.gz", hash = "sha256:f7bb5f611fc600d15fa47b3974c8aa16e93724513b49b5f95c81e6624c83fa43"}, +] + +[package.dependencies] +six = ">=1.4.0" + +[[package]] +name = "pyyaml" +version = "6.0" +description = "YAML parser and emitter for Python" +optional = false +python-versions = ">=3.6" +files = [ + {file = "PyYAML-6.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:d4db7c7aef085872ef65a8fd7d6d09a14ae91f691dec3e87ee5ee0539d516f53"}, + {file = "PyYAML-6.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:9df7ed3b3d2e0ecfe09e14741b857df43adb5a3ddadc919a2d94fbdf78fea53c"}, + {file = "PyYAML-6.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:77f396e6ef4c73fdc33a9157446466f1cff553d979bd00ecb64385760c6babdc"}, + {file = "PyYAML-6.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a80a78046a72361de73f8f395f1f1e49f956c6be882eed58505a15f3e430962b"}, + {file = "PyYAML-6.0-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:f84fbc98b019fef2ee9a1cb3ce93e3187a6df0b2538a651bfb890254ba9f90b5"}, + {file = "PyYAML-6.0-cp310-cp310-win32.whl", hash = "sha256:2cd5df3de48857ed0544b34e2d40e9fac445930039f3cfe4bcc592a1f836d513"}, + {file = "PyYAML-6.0-cp310-cp310-win_amd64.whl", hash = "sha256:daf496c58a8c52083df09b80c860005194014c3698698d1a57cbcfa182142a3a"}, + {file = "PyYAML-6.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:d4b0ba9512519522b118090257be113b9468d804b19d63c71dbcf4a48fa32358"}, + {file = "PyYAML-6.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:81957921f441d50af23654aa6c5e5eaf9b06aba7f0a19c18a538dc7ef291c5a1"}, + {file = "PyYAML-6.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:afa17f5bc4d1b10afd4466fd3a44dc0e245382deca5b3c353d8b757f9e3ecb8d"}, + {file = "PyYAML-6.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:dbad0e9d368bb989f4515da330b88a057617d16b6a8245084f1b05400f24609f"}, + {file = "PyYAML-6.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:432557aa2c09802be39460360ddffd48156e30721f5e8d917f01d31694216782"}, + {file = "PyYAML-6.0-cp311-cp311-win32.whl", hash = "sha256:bfaef573a63ba8923503d27530362590ff4f576c626d86a9fed95822a8255fd7"}, + {file = "PyYAML-6.0-cp311-cp311-win_amd64.whl", hash = "sha256:01b45c0191e6d66c470b6cf1b9531a771a83c1c4208272ead47a3ae4f2f603bf"}, + {file = "PyYAML-6.0-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:897b80890765f037df3403d22bab41627ca8811ae55e9a722fd0392850ec4d86"}, + {file = 
"PyYAML-6.0-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:50602afada6d6cbfad699b0c7bb50d5ccffa7e46a3d738092afddc1f9758427f"}, + {file = "PyYAML-6.0-cp36-cp36m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:48c346915c114f5fdb3ead70312bd042a953a8ce5c7106d5bfb1a5254e47da92"}, + {file = "PyYAML-6.0-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:98c4d36e99714e55cfbaaee6dd5badbc9a1ec339ebfc3b1f52e293aee6bb71a4"}, + {file = "PyYAML-6.0-cp36-cp36m-win32.whl", hash = "sha256:0283c35a6a9fbf047493e3a0ce8d79ef5030852c51e9d911a27badfde0605293"}, + {file = "PyYAML-6.0-cp36-cp36m-win_amd64.whl", hash = "sha256:07751360502caac1c067a8132d150cf3d61339af5691fe9e87803040dbc5db57"}, + {file = "PyYAML-6.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:819b3830a1543db06c4d4b865e70ded25be52a2e0631ccd2f6a47a2822f2fd7c"}, + {file = "PyYAML-6.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:473f9edb243cb1935ab5a084eb238d842fb8f404ed2193a915d1784b5a6b5fc0"}, + {file = "PyYAML-6.0-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:0ce82d761c532fe4ec3f87fc45688bdd3a4c1dc5e0b4a19814b9009a29baefd4"}, + {file = "PyYAML-6.0-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:231710d57adfd809ef5d34183b8ed1eeae3f76459c18fb4a0b373ad56bedcdd9"}, + {file = "PyYAML-6.0-cp37-cp37m-win32.whl", hash = "sha256:c5687b8d43cf58545ade1fe3e055f70eac7a5a1a0bf42824308d868289a95737"}, + {file = "PyYAML-6.0-cp37-cp37m-win_amd64.whl", hash = "sha256:d15a181d1ecd0d4270dc32edb46f7cb7733c7c508857278d3d378d14d606db2d"}, + {file = "PyYAML-6.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:0b4624f379dab24d3725ffde76559cff63d9ec94e1736b556dacdfebe5ab6d4b"}, + {file = "PyYAML-6.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:213c60cd50106436cc818accf5baa1aba61c0189ff610f64f4a3e8c6726218ba"}, + {file = "PyYAML-6.0-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:9fa600030013c4de8165339db93d182b9431076eb98eb40ee068700c9c813e34"}, + {file = "PyYAML-6.0-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:277a0ef2981ca40581a47093e9e2d13b3f1fbbeffae064c1d21bfceba2030287"}, + {file = "PyYAML-6.0-cp38-cp38-win32.whl", hash = "sha256:d4eccecf9adf6fbcc6861a38015c2a64f38b9d94838ac1810a9023a0609e1b78"}, + {file = "PyYAML-6.0-cp38-cp38-win_amd64.whl", hash = "sha256:1e4747bc279b4f613a09eb64bba2ba602d8a6664c6ce6396a4d0cd413a50ce07"}, + {file = "PyYAML-6.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:055d937d65826939cb044fc8c9b08889e8c743fdc6a32b33e2390f66013e449b"}, + {file = "PyYAML-6.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:e61ceaab6f49fb8bdfaa0f92c4b57bcfbea54c09277b1b4f7ac376bfb7a7c174"}, + {file = "PyYAML-6.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d67d839ede4ed1b28a4e8909735fc992a923cdb84e618544973d7dfc71540803"}, + {file = "PyYAML-6.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:cba8c411ef271aa037d7357a2bc8f9ee8b58b9965831d9e51baf703280dc73d3"}, + {file = "PyYAML-6.0-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:40527857252b61eacd1d9af500c3337ba8deb8fc298940291486c465c8b46ec0"}, + {file = "PyYAML-6.0-cp39-cp39-win32.whl", hash = 
"sha256:b5b9eccad747aabaaffbc6064800670f0c297e52c12754eb1d976c57e4f74dcb"}, + {file = "PyYAML-6.0-cp39-cp39-win_amd64.whl", hash = "sha256:b3d267842bf12586ba6c734f89d1f5b871df0273157918b0ccefa29deb05c21c"}, + {file = "PyYAML-6.0.tar.gz", hash = "sha256:68fb519c14306fec9720a2a5b45bc9f0c8d1b9c72adf45c37baedfcd949c35a2"}, +] + +[[package]] +name = "requests" +version = "2.31.0" +description = "Python HTTP for Humans." +optional = false +python-versions = ">=3.7" +files = [ + {file = "requests-2.31.0-py3-none-any.whl", hash = "sha256:58cd2187c01e70e6e26505bca751777aa9f2ee0b7f4300988b709f44e013003f"}, + {file = "requests-2.31.0.tar.gz", hash = "sha256:942c5a758f98d790eaed1a29cb6eefc7ffb0d1cf7af05c3d2791656dbd6ad1e1"}, +] + +[package.dependencies] +certifi = ">=2017.4.17" +charset-normalizer = ">=2,<4" +idna = ">=2.5,<4" +urllib3 = ">=1.21.1,<3" + +[package.extras] +socks = ["PySocks (>=1.5.6,!=1.5.7)"] +use-chardet-on-py3 = ["chardet (>=3.0.2,<6)"] + +[[package]] +name = "respx" +version = "0.19.3" +description = "A utility for mocking out the Python HTTPX and HTTP Core libraries." +optional = false +python-versions = ">=3.6" +files = [ + {file = "respx-0.19.3-py2.py3-none-any.whl", hash = "sha256:ea3049468bfcf95c7827436230e05b85a037050dc569dbeaecdaa08fa0de5750"}, + {file = "respx-0.19.3.tar.gz", hash = "sha256:6dab3a19dfb0ea07cef996198c12bf8a34e6fc3573a24b184d714f765e8b5e57"}, +] + +[package.dependencies] +httpx = ">=0.21.0" + +[[package]] +name = "rfc3986" +version = "1.5.0" +description = "Validating URI References per RFC 3986" +optional = false +python-versions = "*" +files = [ + {file = "rfc3986-1.5.0-py2.py3-none-any.whl", hash = "sha256:a86d6e1f5b1dc238b218b012df0aa79409667bb209e58da56d0b94704e712a97"}, + {file = "rfc3986-1.5.0.tar.gz", hash = "sha256:270aaf10d87d0d4e095063c65bf3ddbc6ee3d0b226328ce21e036f946e421835"}, +] + +[package.dependencies] +idna = {version = "*", optional = true, markers = "extra == \"idna2008\""} + +[package.extras] +idna2008 = ["idna"] + +[[package]] +name = "setuptools" +version = "68.0.0" +description = "Easily download, build, install, upgrade, and uninstall Python packages" +optional = false +python-versions = ">=3.7" +files = [ + {file = "setuptools-68.0.0-py3-none-any.whl", hash = "sha256:11e52c67415a381d10d6b462ced9cfb97066179f0e871399e006c4ab101fc85f"}, + {file = "setuptools-68.0.0.tar.gz", hash = "sha256:baf1fdb41c6da4cd2eae722e135500da913332ab3f2f5c7d33af9b492acb5235"}, +] + +[package.extras] +docs = ["furo", "jaraco.packaging (>=9)", "jaraco.tidelift (>=1.4)", "pygments-github-lexers (==0.0.5)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-favicon", "sphinx-hoverxref (<2)", "sphinx-inline-tabs", "sphinx-lint", "sphinx-notfound-page (==0.8.3)", "sphinx-reredirects", "sphinxcontrib-towncrier"] +testing = ["build[virtualenv]", "filelock (>=3.4.0)", "flake8-2020", "ini2toml[lite] (>=0.9)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "pip (>=19.1)", "pip-run (>=8.8)", "pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=1.3)", "pytest-mypy (>=0.9.1)", "pytest-perf", "pytest-ruff", "pytest-timeout", "pytest-xdist", "tomli-w (>=1.0.0)", "virtualenv (>=13.0.0)", "wheel"] +testing-integration = ["build[virtualenv]", "filelock (>=3.4.0)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "pytest", "pytest-enabler", "pytest-xdist", "tomli", "virtualenv (>=13.0.0)", "wheel"] + +[[package]] +name = "six" +version = "1.16.0" +description = "Python 2 and 3 compatibility utilities" +optional = false 
+python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*" +files = [ + {file = "six-1.16.0-py2.py3-none-any.whl", hash = "sha256:8abb2f1d86890a2dfb989f9a77cfcfd3e47c2a354b01111771326f8aa26e0254"}, + {file = "six-1.16.0.tar.gz", hash = "sha256:1e61c37477a1626458e36f7b1d82aa5c9b094fa4802892072e49de9c60c4c926"}, +] + +[[package]] +name = "sniffio" +version = "1.3.0" +description = "Sniff out which async library your code is running under" +optional = false +python-versions = ">=3.7" +files = [ + {file = "sniffio-1.3.0-py3-none-any.whl", hash = "sha256:eecefdce1e5bbfb7ad2eeaabf7c1eeb404d7757c379bd1f7e5cce9d8bf425384"}, + {file = "sniffio-1.3.0.tar.gz", hash = "sha256:e60305c5e5d314f5389259b7f22aaa33d8f7dee49763119234af3755c55b9101"}, +] + +[[package]] +name = "soupsieve" +version = "2.4.1" +description = "A modern CSS selector implementation for Beautiful Soup." +optional = false +python-versions = ">=3.7" +files = [ + {file = "soupsieve-2.4.1-py3-none-any.whl", hash = "sha256:1c1bfee6819544a3447586c889157365a27e10d88cde3ad3da0cf0ddf646feb8"}, + {file = "soupsieve-2.4.1.tar.gz", hash = "sha256:89d12b2d5dfcd2c9e8c22326da9d9aa9cb3dfab0a83a024f05704076ee8d35ea"}, +] + +[[package]] +name = "sqlalchemy" +version = "1.4.48" +description = "Database Abstraction Library" +optional = false +python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,>=2.7" +files = [ + {file = "SQLAlchemy-1.4.48-cp27-cp27m-macosx_10_14_x86_64.whl", hash = "sha256:4bac3aa3c3d8bc7408097e6fe8bf983caa6e9491c5d2e2488cfcfd8106f13b6a"}, + {file = "SQLAlchemy-1.4.48-cp27-cp27m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:dbcae0e528d755f4522cad5842f0942e54b578d79f21a692c44d91352ea6d64e"}, + {file = "SQLAlchemy-1.4.48-cp27-cp27m-win32.whl", hash = "sha256:cbbe8b8bffb199b225d2fe3804421b7b43a0d49983f81dc654d0431d2f855543"}, + {file = "SQLAlchemy-1.4.48-cp27-cp27m-win_amd64.whl", hash = "sha256:627e04a5d54bd50628fc8734d5fc6df2a1aa5962f219c44aad50b00a6cdcf965"}, + {file = "SQLAlchemy-1.4.48-cp27-cp27mu-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:9af1db7a287ef86e0f5cd990b38da6bd9328de739d17e8864f1817710da2d217"}, + {file = "SQLAlchemy-1.4.48-cp310-cp310-macosx_11_0_x86_64.whl", hash = "sha256:ce7915eecc9c14a93b73f4e1c9d779ca43e955b43ddf1e21df154184f39748e5"}, + {file = "SQLAlchemy-1.4.48-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5381ddd09a99638f429f4cbe1b71b025bed318f6a7b23e11d65f3eed5e181c33"}, + {file = "SQLAlchemy-1.4.48-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:87609f6d4e81a941a17e61a4c19fee57f795e96f834c4f0a30cee725fc3f81d9"}, + {file = "SQLAlchemy-1.4.48-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fb0808ad34167f394fea21bd4587fc62f3bd81bba232a1e7fbdfa17e6cfa7cd7"}, + {file = "SQLAlchemy-1.4.48-cp310-cp310-win32.whl", hash = "sha256:d53cd8bc582da5c1c8c86b6acc4ef42e20985c57d0ebc906445989df566c5603"}, + {file = "SQLAlchemy-1.4.48-cp310-cp310-win_amd64.whl", hash = "sha256:4355e5915844afdc5cf22ec29fba1010166e35dd94a21305f49020022167556b"}, + {file = "SQLAlchemy-1.4.48-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:066c2b0413e8cb980e6d46bf9d35ca83be81c20af688fedaef01450b06e4aa5e"}, + {file = "SQLAlchemy-1.4.48-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c99bf13e07140601d111a7c6f1fc1519914dd4e5228315bbda255e08412f61a4"}, + {file = 
"SQLAlchemy-1.4.48-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2ee26276f12614d47cc07bc85490a70f559cba965fb178b1c45d46ffa8d73fda"}, + {file = "SQLAlchemy-1.4.48-cp311-cp311-win32.whl", hash = "sha256:49c312bcff4728bffc6fb5e5318b8020ed5c8b958a06800f91859fe9633ca20e"}, + {file = "SQLAlchemy-1.4.48-cp311-cp311-win_amd64.whl", hash = "sha256:cef2e2abc06eab187a533ec3e1067a71d7bbec69e582401afdf6d8cad4ba3515"}, + {file = "SQLAlchemy-1.4.48-cp36-cp36m-macosx_10_14_x86_64.whl", hash = "sha256:3509159e050bd6d24189ec7af373359f07aed690db91909c131e5068176c5a5d"}, + {file = "SQLAlchemy-1.4.48-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2fc2ab4d9f6d9218a5caa4121bdcf1125303482a1cdcfcdbd8567be8518969c0"}, + {file = "SQLAlchemy-1.4.48-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:e1ddbbcef9bcedaa370c03771ebec7e39e3944782bef49e69430383c376a250b"}, + {file = "SQLAlchemy-1.4.48-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6f82d8efea1ca92b24f51d3aea1a82897ed2409868a0af04247c8c1e4fef5890"}, + {file = "SQLAlchemy-1.4.48-cp36-cp36m-win32.whl", hash = "sha256:e3e98d4907805b07743b583a99ecc58bf8807ecb6985576d82d5e8ae103b5272"}, + {file = "SQLAlchemy-1.4.48-cp36-cp36m-win_amd64.whl", hash = "sha256:25887b4f716e085a1c5162f130b852f84e18d2633942c8ca40dfb8519367c14f"}, + {file = "SQLAlchemy-1.4.48-cp37-cp37m-macosx_10_15_x86_64.whl", hash = "sha256:0817c181271b0ce5df1aa20949f0a9e2426830fed5ecdcc8db449618f12c2730"}, + {file = "SQLAlchemy-1.4.48-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fe1dd2562313dd9fe1778ed56739ad5d9aae10f9f43d9f4cf81d65b0c85168bb"}, + {file = "SQLAlchemy-1.4.48-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:68413aead943883b341b2b77acd7a7fe2377c34d82e64d1840860247cec7ff7c"}, + {file = "SQLAlchemy-1.4.48-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fbde5642104ac6e95f96e8ad6d18d9382aa20672008cf26068fe36f3004491df"}, + {file = "SQLAlchemy-1.4.48-cp37-cp37m-win32.whl", hash = "sha256:11c6b1de720f816c22d6ad3bbfa2f026f89c7b78a5c4ffafb220e0183956a92a"}, + {file = "SQLAlchemy-1.4.48-cp37-cp37m-win_amd64.whl", hash = "sha256:eb5464ee8d4bb6549d368b578e9529d3c43265007193597ddca71c1bae6174e6"}, + {file = "SQLAlchemy-1.4.48-cp38-cp38-macosx_10_15_x86_64.whl", hash = "sha256:92e6133cf337c42bfee03ca08c62ba0f2d9695618c8abc14a564f47503157be9"}, + {file = "SQLAlchemy-1.4.48-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:44d29a3fc6d9c45962476b470a81983dd8add6ad26fdbfae6d463b509d5adcda"}, + {file = "SQLAlchemy-1.4.48-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:005e942b451cad5285015481ae4e557ff4154dde327840ba91b9ac379be3b6ce"}, + {file = "SQLAlchemy-1.4.48-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9c8cfe951ed074ba5e708ed29c45397a95c4143255b0d022c7c8331a75ae61f3"}, + {file = "SQLAlchemy-1.4.48-cp38-cp38-win32.whl", hash = "sha256:2b9af65cc58726129d8414fc1a1a650dcdd594ba12e9c97909f1f57d48e393d3"}, + {file = "SQLAlchemy-1.4.48-cp38-cp38-win_amd64.whl", hash = "sha256:2b562e9d1e59be7833edf28b0968f156683d57cabd2137d8121806f38a9d58f4"}, + {file = 
"SQLAlchemy-1.4.48-cp39-cp39-macosx_11_0_x86_64.whl", hash = "sha256:a1fc046756cf2a37d7277c93278566ddf8be135c6a58397b4c940abf837011f4"}, + {file = "SQLAlchemy-1.4.48-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9d9b55252d2ca42a09bcd10a697fa041e696def9dfab0b78c0aaea1485551a08"}, + {file = "SQLAlchemy-1.4.48-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:6dab89874e72a9ab5462997846d4c760cdb957958be27b03b49cf0de5e5c327c"}, + {file = "SQLAlchemy-1.4.48-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1fd8b5ee5a3acc4371f820934b36f8109ce604ee73cc668c724abb054cebcb6e"}, + {file = "SQLAlchemy-1.4.48-cp39-cp39-win32.whl", hash = "sha256:eee09350fd538e29cfe3a496ec6f148504d2da40dbf52adefb0d2f8e4d38ccc4"}, + {file = "SQLAlchemy-1.4.48-cp39-cp39-win_amd64.whl", hash = "sha256:7ad2b0f6520ed5038e795cc2852eb5c1f20fa6831d73301ced4aafbe3a10e1f6"}, + {file = "SQLAlchemy-1.4.48.tar.gz", hash = "sha256:b47bc287096d989a0838ce96f7d8e966914a24da877ed41a7531d44b55cdb8df"}, +] + +[package.dependencies] +greenlet = {version = "!=0.4.17", optional = true, markers = "python_version >= \"3\" and (platform_machine == \"win32\" or platform_machine == \"WIN32\" or platform_machine == \"AMD64\" or platform_machine == \"amd64\" or platform_machine == \"x86_64\" or platform_machine == \"ppc64le\" or platform_machine == \"aarch64\" or extra == \"asyncio\")"} + +[package.extras] +aiomysql = ["aiomysql", "greenlet (!=0.4.17)"] +aiosqlite = ["aiosqlite", "greenlet (!=0.4.17)", "typing-extensions (!=3.10.0.1)"] +asyncio = ["greenlet (!=0.4.17)"] +asyncmy = ["asyncmy (>=0.2.3,!=0.2.4)", "greenlet (!=0.4.17)"] +mariadb-connector = ["mariadb (>=1.0.1,!=1.1.2)"] +mssql = ["pyodbc"] +mssql-pymssql = ["pymssql"] +mssql-pyodbc = ["pyodbc"] +mypy = ["mypy (>=0.910)", "sqlalchemy2-stubs"] +mysql = ["mysqlclient (>=1.4.0)", "mysqlclient (>=1.4.0,<2)"] +mysql-connector = ["mysql-connector-python"] +oracle = ["cx-oracle (>=7)", "cx-oracle (>=7,<8)"] +postgresql = ["psycopg2 (>=2.7)"] +postgresql-asyncpg = ["asyncpg", "greenlet (!=0.4.17)"] +postgresql-pg8000 = ["pg8000 (>=1.16.6,!=1.29.0)"] +postgresql-psycopg2binary = ["psycopg2-binary"] +postgresql-psycopg2cffi = ["psycopg2cffi"] +pymysql = ["pymysql", "pymysql (<1)"] +sqlcipher = ["sqlcipher3-binary"] + +[[package]] +name = "sqlalchemy2-stubs" +version = "0.0.2a34" +description = "Typing Stubs for SQLAlchemy 1.4" +optional = false +python-versions = ">=3.6" +files = [ + {file = "sqlalchemy2-stubs-0.0.2a34.tar.gz", hash = "sha256:2432137ab2fde1a608df4544f6712427b0b7ff25990cfbbc5a9d1db6c8c6f489"}, + {file = "sqlalchemy2_stubs-0.0.2a34-py3-none-any.whl", hash = "sha256:a313220ac793404349899faf1272e821a62dbe1d3a029bd444faa8d3e966cd07"}, +] + +[package.dependencies] +typing-extensions = ">=3.7.4" + +[[package]] +name = "starlette" +version = "0.19.1" +description = "The little ASGI library that shines." 
+optional = false +python-versions = ">=3.6" +files = [ + {file = "starlette-0.19.1-py3-none-any.whl", hash = "sha256:5a60c5c2d051f3a8eb546136aa0c9399773a689595e099e0877704d5888279bf"}, + {file = "starlette-0.19.1.tar.gz", hash = "sha256:c6d21096774ecb9639acad41b86b7706e52ba3bf1dc13ea4ed9ad593d47e24c7"}, +] + +[package.dependencies] +anyio = ">=3.4.0,<5" + +[package.extras] +full = ["itsdangerous", "jinja2", "python-multipart", "pyyaml", "requests"] + +[[package]] +name = "supervisor" +version = "4.2.5" +description = "A system for controlling process state under UNIX" +optional = false +python-versions = "*" +files = [ + {file = "supervisor-4.2.5-py2.py3-none-any.whl", hash = "sha256:2ecaede32fc25af814696374b79e42644ecaba5c09494c51016ffda9602d0f08"}, + {file = "supervisor-4.2.5.tar.gz", hash = "sha256:34761bae1a23c58192281a5115fb07fbf22c9b0133c08166beffc70fed3ebc12"}, +] + +[package.dependencies] +setuptools = "*" + +[package.extras] +testing = ["pytest", "pytest-cov"] + +[[package]] +name = "tabulate" +version = "0.8.10" +description = "Pretty-print tabular data" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" +files = [ + {file = "tabulate-0.8.10-py3-none-any.whl", hash = "sha256:0ba055423dbaa164b9e456abe7920c5e8ed33fcc16f6d1b2f2d152c8e1e8b4fc"}, + {file = "tabulate-0.8.10.tar.gz", hash = "sha256:6c57f3f3dd7ac2782770155f3adb2db0b1a269637e42f27599925e64b114f519"}, +] + +[package.extras] +widechars = ["wcwidth"] + +[[package]] +name = "tomli" +version = "2.0.1" +description = "A lil' TOML parser" +optional = false +python-versions = ">=3.7" +files = [ + {file = "tomli-2.0.1-py3-none-any.whl", hash = "sha256:939de3e7a6161af0c887ef91b7d41a53e7c5a1ca976325f429cb46ea9bc30ecc"}, + {file = "tomli-2.0.1.tar.gz", hash = "sha256:de526c12914f0c550d15924c62d72abc48d6fe7364aa87328337a31007fe8a4f"}, +] + +[[package]] +name = "tomli-w" +version = "1.0.0" +description = "A lil' TOML writer" +optional = false +python-versions = ">=3.7" +files = [ + {file = "tomli_w-1.0.0-py3-none-any.whl", hash = "sha256:9f2a07e8be30a0729e533ec968016807069991ae2fd921a78d42f429ae5f4463"}, + {file = "tomli_w-1.0.0.tar.gz", hash = "sha256:f463434305e0336248cac9c2dc8076b707d8a12d019dd349f5c1e382dd1ae1b9"}, +] + +[[package]] +name = "types-bleach" +version = "5.0.3.1" +description = "Typing stubs for bleach" +optional = false +python-versions = "*" +files = [ + {file = "types-bleach-5.0.3.1.tar.gz", hash = "sha256:ce8772ea5126dab1883851b41e3aeff229aa5213ced36096990344e632e92373"}, + {file = "types_bleach-5.0.3.1-py3-none-any.whl", hash = "sha256:af5f1b3a54ff279f54c29eccb2e6988ebb6718bc4061469588a5fd4880a79287"}, +] + +[[package]] +name = "types-cachetools" +version = "5.3.0.5" +description = "Typing stubs for cachetools" +optional = false +python-versions = "*" +files = [ + {file = "types-cachetools-5.3.0.5.tar.gz", hash = "sha256:67fa46d51a650896770aee0ba80f0e61dc4a7d1373198eec1bc0622263eaa256"}, + {file = "types_cachetools-5.3.0.5-py3-none-any.whl", hash = "sha256:c0c5fa00199017d974c935bf043c467d5204e4f835141e489b48765b5ac1d960"}, +] + +[[package]] +name = "types-emoji" +version = "1.7.4" +description = "Typing stubs for emoji" +optional = false +python-versions = "*" +files = [ + {file = "types-emoji-1.7.4.tar.gz", hash = "sha256:1e1341399ed1dd10a54d348ec7bb0708a56cd04f79702941fef5db60d276ea46"}, + {file = "types_emoji-1.7.4-py3-none-any.whl", hash = "sha256:b7b7e468fb04b16514a2e9dfe67a352003ee5bf657230986b3a41fcb0fa69fa2"}, +] + +[[package]] +name = "types-markdown" +version = 
"3.4.2.9" +description = "Typing stubs for Markdown" +optional = false +python-versions = "*" +files = [ + {file = "types-Markdown-3.4.2.9.tar.gz", hash = "sha256:0930057bea0a534e06bbc021d57520720ad2a65b363612614ab0599cc7f606a9"}, + {file = "types_Markdown-3.4.2.9-py3-none-any.whl", hash = "sha256:c23a8a4dd9313539a446ba3dc673a6a920d79580c406de10a5c85a16733890a7"}, +] + +[[package]] +name = "types-pillow" +version = "9.5.0.5" +description = "Typing stubs for Pillow" +optional = false +python-versions = "*" +files = [ + {file = "types-Pillow-9.5.0.5.tar.gz", hash = "sha256:de9877aa1e6226b6479459ca84df02fd7e999b970c79cfee3b8298840336e77c"}, + {file = "types_Pillow-9.5.0.5-py3-none-any.whl", hash = "sha256:2b17f95c5c16e4962e4032bdb95494766a85569fa278ee21e5fcbbd318e9ccd2"}, +] + +[[package]] +name = "types-python-dateutil" +version = "2.8.19.13" +description = "Typing stubs for python-dateutil" +optional = false +python-versions = "*" +files = [ + {file = "types-python-dateutil-2.8.19.13.tar.gz", hash = "sha256:09a0275f95ee31ce68196710ed2c3d1b9dc42e0b61cc43acc369a42cb939134f"}, + {file = "types_python_dateutil-2.8.19.13-py3-none-any.whl", hash = "sha256:0b0e7c68e7043b0354b26a1e0225cb1baea7abb1b324d02b50e2d08f1221043f"}, +] + +[[package]] +name = "types-requests" +version = "2.31.0.1" +description = "Typing stubs for requests" +optional = false +python-versions = "*" +files = [ + {file = "types-requests-2.31.0.1.tar.gz", hash = "sha256:3de667cffa123ce698591de0ad7db034a5317457a596eb0b4944e5a9d9e8d1ac"}, + {file = "types_requests-2.31.0.1-py3-none-any.whl", hash = "sha256:afb06ef8f25ba83d59a1d424bd7a5a939082f94b94e90ab5e6116bd2559deaa3"}, +] + +[package.dependencies] +types-urllib3 = "*" + +[[package]] +name = "types-tabulate" +version = "0.8.11" +description = "Typing stubs for tabulate" +optional = false +python-versions = "*" +files = [ + {file = "types-tabulate-0.8.11.tar.gz", hash = "sha256:17a5fa3b5ca453815778fc9865e8ecd0118b07b2b9faff3e2b06fe448174dd5e"}, + {file = "types_tabulate-0.8.11-py3-none-any.whl", hash = "sha256:af811268241e8fb87b63c052c87d1e329898a93191309d5d42111372232b2e0e"}, +] + +[[package]] +name = "types-urllib3" +version = "1.26.25.13" +description = "Typing stubs for urllib3" +optional = false +python-versions = "*" +files = [ + {file = "types-urllib3-1.26.25.13.tar.gz", hash = "sha256:3300538c9dc11dad32eae4827ac313f5d986b8b21494801f1bf97a1ac6c03ae5"}, + {file = "types_urllib3-1.26.25.13-py3-none-any.whl", hash = "sha256:5dbd1d2bef14efee43f5318b5d36d805a489f6600252bb53626d4bfafd95e27c"}, +] + +[[package]] +name = "typing-extensions" +version = "4.7.1" +description = "Backported and Experimental Type Hints for Python 3.7+" +optional = false +python-versions = ">=3.7" +files = [ + {file = "typing_extensions-4.7.1-py3-none-any.whl", hash = "sha256:440d5dd3af93b060174bf433bccd69b0babc3b15b1a8dca43789fd7f61514b36"}, + {file = "typing_extensions-4.7.1.tar.gz", hash = "sha256:b75ddc264f0ba5615db7ba217daeb99701ad295353c45f9e95963337ceeeffb2"}, +] + +[[package]] +name = "urllib3" +version = "2.0.3" +description = "HTTP library with thread-safe connection pooling, file post, and more." 
+optional = false +python-versions = ">=3.7" +files = [ + {file = "urllib3-2.0.3-py3-none-any.whl", hash = "sha256:48e7fafa40319d358848e1bc6809b208340fafe2096f1725d05d67443d0483d1"}, + {file = "urllib3-2.0.3.tar.gz", hash = "sha256:bee28b5e56addb8226c96f7f13ac28cb4c301dd5ea8a6ca179c0b9835e032825"}, +] + +[package.extras] +brotli = ["brotli (>=1.0.9)", "brotlicffi (>=0.8.0)"] +secure = ["certifi", "cryptography (>=1.9)", "idna (>=2.0.0)", "pyopenssl (>=17.1.0)", "urllib3-secure-extra"] +socks = ["pysocks (>=1.5.6,!=1.5.7,<2.0)"] +zstd = ["zstandard (>=0.18.0)"] + +[[package]] +name = "uvicorn" +version = "0.18.3" +description = "The lightning-fast ASGI server." +optional = false +python-versions = ">=3.7" +files = [ + {file = "uvicorn-0.18.3-py3-none-any.whl", hash = "sha256:0abd429ebb41e604ed8d2be6c60530de3408f250e8d2d84967d85ba9e86fe3af"}, + {file = "uvicorn-0.18.3.tar.gz", hash = "sha256:9a66e7c42a2a95222f76ec24a4b754c158261c4696e683b9dadc72b590e0311b"}, +] + +[package.dependencies] +click = ">=7.0" +colorama = {version = ">=0.4", optional = true, markers = "sys_platform == \"win32\" and extra == \"standard\""} +h11 = ">=0.8" +httptools = {version = ">=0.4.0", optional = true, markers = "extra == \"standard\""} +python-dotenv = {version = ">=0.13", optional = true, markers = "extra == \"standard\""} +pyyaml = {version = ">=5.1", optional = true, markers = "extra == \"standard\""} +uvloop = {version = ">=0.14.0,<0.15.0 || >0.15.0,<0.15.1 || >0.15.1", optional = true, markers = "(sys_platform != \"win32\" and sys_platform != \"cygwin\") and platform_python_implementation != \"PyPy\" and extra == \"standard\""} +watchfiles = {version = ">=0.13", optional = true, markers = "extra == \"standard\""} +websockets = {version = ">=10.0", optional = true, markers = "extra == \"standard\""} + +[package.extras] +standard = ["colorama (>=0.4)", "httptools (>=0.4.0)", "python-dotenv (>=0.13)", "pyyaml (>=5.1)", "uvloop (>=0.14.0,!=0.15.0,!=0.15.1)", "watchfiles (>=0.13)", "websockets (>=10.0)"] + +[[package]] +name = "uvloop" +version = "0.17.0" +description = "Fast implementation of asyncio event loop on top of libuv" +optional = false +python-versions = ">=3.7" +files = [ + {file = "uvloop-0.17.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:ce9f61938d7155f79d3cb2ffa663147d4a76d16e08f65e2c66b77bd41b356718"}, + {file = "uvloop-0.17.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:68532f4349fd3900b839f588972b3392ee56042e440dd5873dfbbcd2cc67617c"}, + {file = "uvloop-0.17.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0949caf774b9fcefc7c5756bacbbbd3fc4c05a6b7eebc7c7ad6f825b23998d6d"}, + {file = "uvloop-0.17.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ff3d00b70ce95adce264462c930fbaecb29718ba6563db354608f37e49e09024"}, + {file = "uvloop-0.17.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:a5abddb3558d3f0a78949c750644a67be31e47936042d4f6c888dd6f3c95f4aa"}, + {file = "uvloop-0.17.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:8efcadc5a0003d3a6e887ccc1fb44dec25594f117a94e3127954c05cf144d811"}, + {file = "uvloop-0.17.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:3378eb62c63bf336ae2070599e49089005771cc651c8769aaad72d1bd9385a7c"}, + {file = "uvloop-0.17.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:6aafa5a78b9e62493539456f8b646f85abc7093dd997f4976bb105537cf2635e"}, + {file = "uvloop-0.17.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:c686a47d57ca910a2572fddfe9912819880b8765e2f01dc0dd12a9bf8573e539"}, + {file = "uvloop-0.17.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:864e1197139d651a76c81757db5eb199db8866e13acb0dfe96e6fc5d1cf45fc4"}, + {file = "uvloop-0.17.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:2a6149e1defac0faf505406259561bc14b034cdf1d4711a3ddcdfbaa8d825a05"}, + {file = "uvloop-0.17.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:6708f30db9117f115eadc4f125c2a10c1a50d711461699a0cbfaa45b9a78e376"}, + {file = "uvloop-0.17.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:23609ca361a7fc587031429fa25ad2ed7242941adec948f9d10c045bfecab06b"}, + {file = "uvloop-0.17.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2deae0b0fb00a6af41fe60a675cec079615b01d68beb4cc7b722424406b126a8"}, + {file = "uvloop-0.17.0-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:45cea33b208971e87a31c17622e4b440cac231766ec11e5d22c76fab3bf9df62"}, + {file = "uvloop-0.17.0-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:9b09e0f0ac29eee0451d71798878eae5a4e6a91aa275e114037b27f7db72702d"}, + {file = "uvloop-0.17.0-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:dbbaf9da2ee98ee2531e0c780455f2841e4675ff580ecf93fe5c48fe733b5667"}, + {file = "uvloop-0.17.0-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:a4aee22ece20958888eedbad20e4dbb03c37533e010fb824161b4f05e641f738"}, + {file = "uvloop-0.17.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:307958f9fc5c8bb01fad752d1345168c0abc5d62c1b72a4a8c6c06f042b45b20"}, + {file = "uvloop-0.17.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3ebeeec6a6641d0adb2ea71dcfb76017602ee2bfd8213e3fcc18d8f699c5104f"}, + {file = "uvloop-0.17.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1436c8673c1563422213ac6907789ecb2b070f5939b9cbff9ef7113f2b531595"}, + {file = "uvloop-0.17.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:8887d675a64cfc59f4ecd34382e5b4f0ef4ae1da37ed665adba0c2badf0d6578"}, + {file = "uvloop-0.17.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:3db8de10ed684995a7f34a001f15b374c230f7655ae840964d51496e2f8a8474"}, + {file = "uvloop-0.17.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:7d37dccc7ae63e61f7b96ee2e19c40f153ba6ce730d8ba4d3b4e9738c1dccc1b"}, + {file = "uvloop-0.17.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:cbbe908fda687e39afd6ea2a2f14c2c3e43f2ca88e3a11964b297822358d0e6c"}, + {file = "uvloop-0.17.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3d97672dc709fa4447ab83276f344a165075fd9f366a97b712bdd3fee05efae8"}, + {file = "uvloop-0.17.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f1e507c9ee39c61bfddd79714e4f85900656db1aec4d40c6de55648e85c2799c"}, + {file = "uvloop-0.17.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:c092a2c1e736086d59ac8e41f9c98f26bbf9b9222a76f21af9dfe949b99b2eb9"}, + {file = "uvloop-0.17.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:30babd84706115626ea78ea5dbc7dd8d0d01a2e9f9b306d24ca4ed5796c66ded"}, + {file = "uvloop-0.17.0.tar.gz", hash = "sha256:0ddf6baf9cf11a1a22c71487f39f15b2cf78eb5bde7e5b45fbb99e8a9d91b9e1"}, +] + +[package.extras] +dev = ["Cython (>=0.29.32,<0.30.0)", "Sphinx (>=4.1.2,<4.2.0)", "aiohttp", "flake8 (>=3.9.2,<3.10.0)", "mypy (>=0.800)", "psutil", "pyOpenSSL (>=22.0.0,<22.1.0)", "pycodestyle (>=2.7.0,<2.8.0)", "pytest (>=3.6.0)", "sphinx-rtd-theme (>=0.5.2,<0.6.0)", 
"sphinxcontrib-asyncio (>=0.3.0,<0.4.0)"] +docs = ["Sphinx (>=4.1.2,<4.2.0)", "sphinx-rtd-theme (>=0.5.2,<0.6.0)", "sphinxcontrib-asyncio (>=0.3.0,<0.4.0)"] +test = ["Cython (>=0.29.32,<0.30.0)", "aiohttp", "flake8 (>=3.9.2,<3.10.0)", "mypy (>=0.800)", "psutil", "pyOpenSSL (>=22.0.0,<22.1.0)", "pycodestyle (>=2.7.0,<2.8.0)"] + +[[package]] +name = "watchdog" +version = "3.0.0" +description = "Filesystem events monitoring" +optional = false +python-versions = ">=3.7" +files = [ + {file = "watchdog-3.0.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:336adfc6f5cc4e037d52db31194f7581ff744b67382eb6021c868322e32eef41"}, + {file = "watchdog-3.0.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:a70a8dcde91be523c35b2bf96196edc5730edb347e374c7de7cd20c43ed95397"}, + {file = "watchdog-3.0.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:adfdeab2da79ea2f76f87eb42a3ab1966a5313e5a69a0213a3cc06ef692b0e96"}, + {file = "watchdog-3.0.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:2b57a1e730af3156d13b7fdddfc23dea6487fceca29fc75c5a868beed29177ae"}, + {file = "watchdog-3.0.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:7ade88d0d778b1b222adebcc0927428f883db07017618a5e684fd03b83342bd9"}, + {file = "watchdog-3.0.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:7e447d172af52ad204d19982739aa2346245cc5ba6f579d16dac4bfec226d2e7"}, + {file = "watchdog-3.0.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:9fac43a7466eb73e64a9940ac9ed6369baa39b3bf221ae23493a9ec4d0022674"}, + {file = "watchdog-3.0.0-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:8ae9cda41fa114e28faf86cb137d751a17ffd0316d1c34ccf2235e8a84365c7f"}, + {file = "watchdog-3.0.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:25f70b4aa53bd743729c7475d7ec41093a580528b100e9a8c5b5efe8899592fc"}, + {file = "watchdog-3.0.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:4f94069eb16657d2c6faada4624c39464f65c05606af50bb7902e036e3219be3"}, + {file = "watchdog-3.0.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:7c5f84b5194c24dd573fa6472685b2a27cc5a17fe5f7b6fd40345378ca6812e3"}, + {file = "watchdog-3.0.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:3aa7f6a12e831ddfe78cdd4f8996af9cf334fd6346531b16cec61c3b3c0d8da0"}, + {file = "watchdog-3.0.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:233b5817932685d39a7896b1090353fc8efc1ef99c9c054e46c8002561252fb8"}, + {file = "watchdog-3.0.0-pp37-pypy37_pp73-macosx_10_9_x86_64.whl", hash = "sha256:13bbbb462ee42ec3c5723e1205be8ced776f05b100e4737518c67c8325cf6100"}, + {file = "watchdog-3.0.0-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = "sha256:8f3ceecd20d71067c7fd4c9e832d4e22584318983cabc013dbf3f70ea95de346"}, + {file = "watchdog-3.0.0-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:c9d8c8ec7efb887333cf71e328e39cffbf771d8f8f95d308ea4125bf5f90ba64"}, + {file = "watchdog-3.0.0-py3-none-manylinux2014_aarch64.whl", hash = "sha256:0e06ab8858a76e1219e68c7573dfeba9dd1c0219476c5a44d5333b01d7e1743a"}, + {file = "watchdog-3.0.0-py3-none-manylinux2014_armv7l.whl", hash = "sha256:d00e6be486affb5781468457b21a6cbe848c33ef43f9ea4a73b4882e5f188a44"}, + {file = "watchdog-3.0.0-py3-none-manylinux2014_i686.whl", hash = "sha256:c07253088265c363d1ddf4b3cdb808d59a0468ecd017770ed716991620b8f77a"}, + {file = "watchdog-3.0.0-py3-none-manylinux2014_ppc64.whl", hash = "sha256:5113334cf8cf0ac8cd45e1f8309a603291b614191c9add34d33075727a967709"}, + {file = "watchdog-3.0.0-py3-none-manylinux2014_ppc64le.whl", hash = 
"sha256:51f90f73b4697bac9c9a78394c3acbbd331ccd3655c11be1a15ae6fe289a8c83"}, + {file = "watchdog-3.0.0-py3-none-manylinux2014_s390x.whl", hash = "sha256:ba07e92756c97e3aca0912b5cbc4e5ad802f4557212788e72a72a47ff376950d"}, + {file = "watchdog-3.0.0-py3-none-manylinux2014_x86_64.whl", hash = "sha256:d429c2430c93b7903914e4db9a966c7f2b068dd2ebdd2fa9b9ce094c7d459f33"}, + {file = "watchdog-3.0.0-py3-none-win32.whl", hash = "sha256:3ed7c71a9dccfe838c2f0b6314ed0d9b22e77d268c67e015450a29036a81f60f"}, + {file = "watchdog-3.0.0-py3-none-win_amd64.whl", hash = "sha256:4c9956d27be0bb08fc5f30d9d0179a855436e655f046d288e2bcc11adfae893c"}, + {file = "watchdog-3.0.0-py3-none-win_ia64.whl", hash = "sha256:5d9f3a10e02d7371cd929b5d8f11e87d4bad890212ed3901f9b4d68767bee759"}, + {file = "watchdog-3.0.0.tar.gz", hash = "sha256:4d98a320595da7a7c5a18fc48cb633c2e73cda78f93cac2ef42d42bf609a33f9"}, +] + +[package.extras] +watchmedo = ["PyYAML (>=3.10)"] + +[[package]] +name = "watchfiles" +version = "0.19.0" +description = "Simple, modern and high performance file watching and code reload in python." +optional = false +python-versions = ">=3.7" +files = [ + {file = "watchfiles-0.19.0-cp37-abi3-macosx_10_7_x86_64.whl", hash = "sha256:91633e64712df3051ca454ca7d1b976baf842d7a3640b87622b323c55f3345e7"}, + {file = "watchfiles-0.19.0-cp37-abi3-macosx_11_0_arm64.whl", hash = "sha256:b6577b8c6c8701ba8642ea9335a129836347894b666dd1ec2226830e263909d3"}, + {file = "watchfiles-0.19.0-cp37-abi3-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:18b28f6ad871b82df9542ff958d0c86bb0d8310bb09eb8e87d97318a3b5273af"}, + {file = "watchfiles-0.19.0-cp37-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fac19dc9cbc34052394dbe81e149411a62e71999c0a19e1e09ce537867f95ae0"}, + {file = "watchfiles-0.19.0-cp37-abi3-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:09ea3397aecbc81c19ed7f025e051a7387feefdb789cf768ff994c1228182fda"}, + {file = "watchfiles-0.19.0-cp37-abi3-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c0376deac92377817e4fb8f347bf559b7d44ff556d9bc6f6208dd3f79f104aaf"}, + {file = "watchfiles-0.19.0-cp37-abi3-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:9c75eff897786ee262c9f17a48886f4e98e6cfd335e011c591c305e5d083c056"}, + {file = "watchfiles-0.19.0-cp37-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cb5d45c4143c1dd60f98a16187fd123eda7248f84ef22244818c18d531a249d1"}, + {file = "watchfiles-0.19.0-cp37-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:79c533ff593db861ae23436541f481ec896ee3da4e5db8962429b441bbaae16e"}, + {file = "watchfiles-0.19.0-cp37-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:3d7d267d27aceeeaa3de0dd161a0d64f0a282264d592e335fff7958cc0cbae7c"}, + {file = "watchfiles-0.19.0-cp37-abi3-win32.whl", hash = "sha256:176a9a7641ec2c97b24455135d58012a5be5c6217fc4d5fef0b2b9f75dbf5154"}, + {file = "watchfiles-0.19.0-cp37-abi3-win_amd64.whl", hash = "sha256:945be0baa3e2440151eb3718fd8846751e8b51d8de7b884c90b17d271d34cae8"}, + {file = "watchfiles-0.19.0-cp37-abi3-win_arm64.whl", hash = "sha256:0089c6dc24d436b373c3c57657bf4f9a453b13767150d17284fc6162b2791911"}, + {file = "watchfiles-0.19.0-pp38-pypy38_pp73-macosx_10_7_x86_64.whl", hash = "sha256:cae3dde0b4b2078f31527acff6f486e23abed307ba4d3932466ba7cdd5ecec79"}, + {file = "watchfiles-0.19.0-pp38-pypy38_pp73-macosx_11_0_arm64.whl", hash = "sha256:7f3920b1285a7d3ce898e303d84791b7bf40d57b7695ad549dc04e6a44c9f120"}, + {file = 
"watchfiles-0.19.0-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9afd0d69429172c796164fd7fe8e821ade9be983f51c659a38da3faaaaac44dc"}, + {file = "watchfiles-0.19.0-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:68dce92b29575dda0f8d30c11742a8e2b9b8ec768ae414b54f7453f27bdf9545"}, + {file = "watchfiles-0.19.0-pp39-pypy39_pp73-macosx_10_7_x86_64.whl", hash = "sha256:5569fc7f967429d4bc87e355cdfdcee6aabe4b620801e2cf5805ea245c06097c"}, + {file = "watchfiles-0.19.0-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:5471582658ea56fca122c0f0d0116a36807c63fefd6fdc92c71ca9a4491b6b48"}, + {file = "watchfiles-0.19.0-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b538014a87f94d92f98f34d3e6d2635478e6be6423a9ea53e4dd96210065e193"}, + {file = "watchfiles-0.19.0-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:20b44221764955b1e703f012c74015306fb7e79a00c15370785f309b1ed9aa8d"}, + {file = "watchfiles-0.19.0.tar.gz", hash = "sha256:d9b073073e048081e502b6c6b0b88714c026a1a4c890569238d04aca5f9ca74b"}, +] + +[package.dependencies] +anyio = ">=3.0.0" + +[[package]] +name = "wcwidth" +version = "0.2.6" +description = "Measures the displayed width of unicode strings in a terminal" +optional = false +python-versions = "*" +files = [ + {file = "wcwidth-0.2.6-py2.py3-none-any.whl", hash = "sha256:795b138f6875577cd91bba52baf9e445cd5118fd32723b460e30a0af30ea230e"}, + {file = "wcwidth-0.2.6.tar.gz", hash = "sha256:a5220780a404dbe3353789870978e472cfe477761f06ee55077256e509b156d0"}, +] + +[[package]] +name = "webencodings" +version = "0.5.1" +description = "Character encoding aliases for legacy web content" +optional = false +python-versions = "*" +files = [ + {file = "webencodings-0.5.1-py2.py3-none-any.whl", hash = "sha256:a0af1213f3c2226497a97e2b3aa01a7e4bee4f403f95be16fc9acd2947514a78"}, + {file = "webencodings-0.5.1.tar.gz", hash = "sha256:b36a1c245f2d304965eb4e0a82848379241dc04b865afcc4aab16748587e1923"}, +] + +[[package]] +name = "websockets" +version = "11.0.3" +description = "An implementation of the WebSocket Protocol (RFC 6455 & 7692)" +optional = false +python-versions = ">=3.7" +files = [ + {file = "websockets-11.0.3-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:3ccc8a0c387629aec40f2fc9fdcb4b9d5431954f934da3eaf16cdc94f67dbfac"}, + {file = "websockets-11.0.3-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:d67ac60a307f760c6e65dad586f556dde58e683fab03323221a4e530ead6f74d"}, + {file = "websockets-11.0.3-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:84d27a4832cc1a0ee07cdcf2b0629a8a72db73f4cf6de6f0904f6661227f256f"}, + {file = "websockets-11.0.3-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ffd7dcaf744f25f82190856bc26ed81721508fc5cbf2a330751e135ff1283564"}, + {file = "websockets-11.0.3-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7622a89d696fc87af8e8d280d9b421db5133ef5b29d3f7a1ce9f1a7bf7fcfa11"}, + {file = "websockets-11.0.3-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bceab846bac555aff6427d060f2fcfff71042dba6f5fca7dc4f75cac815e57ca"}, + {file = "websockets-11.0.3-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:54c6e5b3d3a8936a4ab6870d46bdd6ec500ad62bde9e44462c32d18f1e9a8e54"}, + {file = "websockets-11.0.3-cp310-cp310-musllinux_1_1_i686.whl", hash = 
"sha256:41f696ba95cd92dc047e46b41b26dd24518384749ed0d99bea0a941ca87404c4"}, + {file = "websockets-11.0.3-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:86d2a77fd490ae3ff6fae1c6ceaecad063d3cc2320b44377efdde79880e11526"}, + {file = "websockets-11.0.3-cp310-cp310-win32.whl", hash = "sha256:2d903ad4419f5b472de90cd2d40384573b25da71e33519a67797de17ef849b69"}, + {file = "websockets-11.0.3-cp310-cp310-win_amd64.whl", hash = "sha256:1d2256283fa4b7f4c7d7d3e84dc2ece74d341bce57d5b9bf385df109c2a1a82f"}, + {file = "websockets-11.0.3-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:e848f46a58b9fcf3d06061d17be388caf70ea5b8cc3466251963c8345e13f7eb"}, + {file = "websockets-11.0.3-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:aa5003845cdd21ac0dc6c9bf661c5beddd01116f6eb9eb3c8e272353d45b3288"}, + {file = "websockets-11.0.3-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:b58cbf0697721120866820b89f93659abc31c1e876bf20d0b3d03cef14faf84d"}, + {file = "websockets-11.0.3-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:660e2d9068d2bedc0912af508f30bbeb505bbbf9774d98def45f68278cea20d3"}, + {file = "websockets-11.0.3-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c1f0524f203e3bd35149f12157438f406eff2e4fb30f71221c8a5eceb3617b6b"}, + {file = "websockets-11.0.3-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:def07915168ac8f7853812cc593c71185a16216e9e4fa886358a17ed0fd9fcf6"}, + {file = "websockets-11.0.3-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:b30c6590146e53149f04e85a6e4fcae068df4289e31e4aee1fdf56a0dead8f97"}, + {file = "websockets-11.0.3-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:619d9f06372b3a42bc29d0cd0354c9bb9fb39c2cbc1a9c5025b4538738dbffaf"}, + {file = "websockets-11.0.3-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:01f5567d9cf6f502d655151645d4e8b72b453413d3819d2b6f1185abc23e82dd"}, + {file = "websockets-11.0.3-cp311-cp311-win32.whl", hash = "sha256:e1459677e5d12be8bbc7584c35b992eea142911a6236a3278b9b5ce3326f282c"}, + {file = "websockets-11.0.3-cp311-cp311-win_amd64.whl", hash = "sha256:e7837cb169eca3b3ae94cc5787c4fed99eef74c0ab9506756eea335e0d6f3ed8"}, + {file = "websockets-11.0.3-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:9f59a3c656fef341a99e3d63189852be7084c0e54b75734cde571182c087b152"}, + {file = "websockets-11.0.3-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2529338a6ff0eb0b50c7be33dc3d0e456381157a31eefc561771ee431134a97f"}, + {file = "websockets-11.0.3-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:34fd59a4ac42dff6d4681d8843217137f6bc85ed29722f2f7222bd619d15e95b"}, + {file = "websockets-11.0.3-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:332d126167ddddec94597c2365537baf9ff62dfcc9db4266f263d455f2f031cb"}, + {file = "websockets-11.0.3-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:6505c1b31274723ccaf5f515c1824a4ad2f0d191cec942666b3d0f3aa4cb4007"}, + {file = "websockets-11.0.3-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:f467ba0050b7de85016b43f5a22b46383ef004c4f672148a8abf32bc999a87f0"}, + {file = "websockets-11.0.3-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:9d9acd80072abcc98bd2c86c3c9cd4ac2347b5a5a0cae7ed5c0ee5675f86d9af"}, + {file = "websockets-11.0.3-cp37-cp37m-win32.whl", hash = 
"sha256:e590228200fcfc7e9109509e4d9125eace2042fd52b595dd22bbc34bb282307f"}, + {file = "websockets-11.0.3-cp37-cp37m-win_amd64.whl", hash = "sha256:b16fff62b45eccb9c7abb18e60e7e446998093cdcb50fed33134b9b6878836de"}, + {file = "websockets-11.0.3-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:fb06eea71a00a7af0ae6aefbb932fb8a7df3cb390cc217d51a9ad7343de1b8d0"}, + {file = "websockets-11.0.3-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:8a34e13a62a59c871064dfd8ffb150867e54291e46d4a7cf11d02c94a5275bae"}, + {file = "websockets-11.0.3-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:4841ed00f1026dfbced6fca7d963c4e7043aa832648671b5138008dc5a8f6d99"}, + {file = "websockets-11.0.3-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1a073fc9ab1c8aff37c99f11f1641e16da517770e31a37265d2755282a5d28aa"}, + {file = "websockets-11.0.3-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:68b977f21ce443d6d378dbd5ca38621755f2063d6fdb3335bda981d552cfff86"}, + {file = "websockets-11.0.3-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e1a99a7a71631f0efe727c10edfba09ea6bee4166a6f9c19aafb6c0b5917d09c"}, + {file = "websockets-11.0.3-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:bee9fcb41db2a23bed96c6b6ead6489702c12334ea20a297aa095ce6d31370d0"}, + {file = "websockets-11.0.3-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:4b253869ea05a5a073ebfdcb5cb3b0266a57c3764cf6fe114e4cd90f4bfa5f5e"}, + {file = "websockets-11.0.3-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:1553cb82942b2a74dd9b15a018dce645d4e68674de2ca31ff13ebc2d9f283788"}, + {file = "websockets-11.0.3-cp38-cp38-win32.whl", hash = "sha256:f61bdb1df43dc9c131791fbc2355535f9024b9a04398d3bd0684fc16ab07df74"}, + {file = "websockets-11.0.3-cp38-cp38-win_amd64.whl", hash = "sha256:03aae4edc0b1c68498f41a6772d80ac7c1e33c06c6ffa2ac1c27a07653e79d6f"}, + {file = "websockets-11.0.3-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:777354ee16f02f643a4c7f2b3eff8027a33c9861edc691a2003531f5da4f6bc8"}, + {file = "websockets-11.0.3-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:8c82f11964f010053e13daafdc7154ce7385ecc538989a354ccc7067fd7028fd"}, + {file = "websockets-11.0.3-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:3580dd9c1ad0701169e4d6fc41e878ffe05e6bdcaf3c412f9d559389d0c9e016"}, + {file = "websockets-11.0.3-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6f1a3f10f836fab6ca6efa97bb952300b20ae56b409414ca85bff2ad241d2a61"}, + {file = "websockets-11.0.3-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:df41b9bc27c2c25b486bae7cf42fccdc52ff181c8c387bfd026624a491c2671b"}, + {file = "websockets-11.0.3-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:279e5de4671e79a9ac877427f4ac4ce93751b8823f276b681d04b2156713b9dd"}, + {file = "websockets-11.0.3-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:1fdf26fa8a6a592f8f9235285b8affa72748dc12e964a5518c6c5e8f916716f7"}, + {file = "websockets-11.0.3-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:69269f3a0b472e91125b503d3c0b3566bda26da0a3261c49f0027eb6075086d1"}, + {file = "websockets-11.0.3-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:97b52894d948d2f6ea480171a27122d77af14ced35f62e5c892ca2fae9344311"}, + {file = "websockets-11.0.3-cp39-cp39-win32.whl", hash = 
"sha256:c7f3cb904cce8e1be667c7e6fef4516b98d1a6a0635a58a57528d577ac18a128"}, + {file = "websockets-11.0.3-cp39-cp39-win_amd64.whl", hash = "sha256:c792ea4eabc0159535608fc5658a74d1a81020eb35195dd63214dcf07556f67e"}, + {file = "websockets-11.0.3-pp37-pypy37_pp73-macosx_10_9_x86_64.whl", hash = "sha256:f2e58f2c36cc52d41f2659e4c0cbf7353e28c8c9e63e30d8c6d3494dc9fdedcf"}, + {file = "websockets-11.0.3-pp37-pypy37_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:de36fe9c02995c7e6ae6efe2e205816f5f00c22fd1fbf343d4d18c3d5ceac2f5"}, + {file = "websockets-11.0.3-pp37-pypy37_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0ac56b661e60edd453585f4bd68eb6a29ae25b5184fd5ba51e97652580458998"}, + {file = "websockets-11.0.3-pp37-pypy37_pp73-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e052b8467dd07d4943936009f46ae5ce7b908ddcac3fda581656b1b19c083d9b"}, + {file = "websockets-11.0.3-pp37-pypy37_pp73-win_amd64.whl", hash = "sha256:42cc5452a54a8e46a032521d7365da775823e21bfba2895fb7b77633cce031bb"}, + {file = "websockets-11.0.3-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = "sha256:e6316827e3e79b7b8e7d8e3b08f4e331af91a48e794d5d8b099928b6f0b85f20"}, + {file = "websockets-11.0.3-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8531fdcad636d82c517b26a448dcfe62f720e1922b33c81ce695d0edb91eb931"}, + {file = "websockets-11.0.3-pp38-pypy38_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c114e8da9b475739dde229fd3bc6b05a6537a88a578358bc8eb29b4030fac9c9"}, + {file = "websockets-11.0.3-pp38-pypy38_pp73-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e063b1865974611313a3849d43f2c3f5368093691349cf3c7c8f8f75ad7cb280"}, + {file = "websockets-11.0.3-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:92b2065d642bf8c0a82d59e59053dd2fdde64d4ed44efe4870fa816c1232647b"}, + {file = "websockets-11.0.3-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:0ee68fe502f9031f19d495dae2c268830df2760c0524cbac5d759921ba8c8e82"}, + {file = "websockets-11.0.3-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:dcacf2c7a6c3a84e720d1bb2b543c675bf6c40e460300b628bab1b1efc7c034c"}, + {file = "websockets-11.0.3-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b67c6f5e5a401fc56394f191f00f9b3811fe843ee93f4a70df3c389d1adf857d"}, + {file = "websockets-11.0.3-pp39-pypy39_pp73-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1d5023a4b6a5b183dc838808087033ec5df77580485fc533e7dab2567851b0a4"}, + {file = "websockets-11.0.3-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:ed058398f55163a79bb9f06a90ef9ccc063b204bb346c4de78efc5d15abfe602"}, + {file = "websockets-11.0.3-py3-none-any.whl", hash = "sha256:6681ba9e7f8f3b19440921e99efbb40fc89f26cd71bf539e45d8c8a25c976dc6"}, + {file = "websockets-11.0.3.tar.gz", hash = "sha256:88fc51d9a26b10fc331be344f1781224a375b78488fc343620184e95a4b27016"}, +] + +[[package]] +name = "win32-setctime" +version = "1.1.0" +description = "A small Python utility to set file creation time on Windows" +optional = false +python-versions = ">=3.5" +files = [ + {file = "win32_setctime-1.1.0-py3-none-any.whl", hash = "sha256:231db239e959c2fe7eb1d7dc129f11172354f98361c4fa2d6d2d7e278baa8aad"}, + {file = "win32_setctime-1.1.0.tar.gz", 
hash = "sha256:15cf5750465118d6929ae4de4eb46e8edae9a5634350c01ba582df868e932cb2"}, +] + +[package.extras] +dev = ["black (>=19.3b0)", "pytest (>=4.6.2)"] + +[metadata] +lock-version = "2.0" +python-versions = "^3.10" +content-hash = "68f010f9874331fb1cffa13ff0f059ccb61b55ef98fffaa70ccfb0fcb7987689" diff --git a/pyproject.toml b/pyproject.toml new file mode 100644 index 0000000..90af4d1 --- /dev/null +++ b/pyproject.toml @@ -0,0 +1,90 @@ +[tool.poetry] +name = "microblogpub" +version = "2.0.0" +description = "" +authors = ["Thomas Sileo "] +license = "AGPL-3.0" + +[tool.poetry.dependencies] +python = "^3.10" +Jinja2 = "^3.1.2" +fastapi = "^0.78.0" +pycryptodome = "^3.14.1" +bcrypt = "^3.2.2" +itsdangerous = "^2.1.2" +python-multipart = "^0.0.5" +tomli = "^2.0.1" +httpx = {version = "0.23.0", extras = ["http2"]} +SQLAlchemy = {extras = ["asyncio"], version = "^1.4.39"} +alembic = "^1.8.0" +bleach = "^5.0.0" +prompt-toolkit = "^3.0.29" +tomli-w = "^1.0.0" +python-dateutil = "^2.8.2" +bs4 = "^0.0.1" +html5lib = "^1.1" +mf2py = "^1.1.2" +Pygments = "^2.12.0" +loguru = "^0.6.0" +Pillow = "^9.1.1" +blurhash-python = "^1.1.3" +html2text = "^2020.1.16" +feedgen = "^0.9.0" +emoji = "^1.7.0" +PyLD = "^2.0.3" +aiosqlite = "^0.17.0" +cachetools = "^5.2.0" +humanize = "^4.2.3" +tabulate = "^0.8.10" +asgiref = "^3.5.2" +supervisor = "^4.2.4" +invoke = "^1.7.1" +boussole = "^2.0.0" +uvicorn = {extras = ["standard"], version = "^0.18.3"} +Brotli = "^1.0.9" +greenlet = "^1.1.3" +mistletoe = "^0.9.0" +Pebble = "^5.0.2" + +[tool.poetry.dev-dependencies] +black = "^22.3.0" +flake8 = "^4.0.1" +mypy = "^0.960" +isort = "^5.10.1" +invoke = "^1.7.1" +libsass = "^0.21.0" +pytest = "^7.1.2" +respx = "^0.19.2" +boussole = "^2.0.0" +types-bleach = "^5.0.2" +types-Markdown = "^3.3.28" +factory-boy = "^3.2.1" +pytest-asyncio = "^0.18.3" +types-Pillow = "^9.0.20" +types-emoji = "^1.7.2" +types-cachetools = "^5.2.1" +sqlalchemy2-stubs = "^0.0.2-alpha.24" +types-python-dateutil = "^2.8.18" +types-tabulate = "^0.8.11" +types-requests = "^2.28.1" + +[build-system] +requires = ["poetry-core>=1.0.0"] +build-backend = "poetry.core.masonry.api" + +[tool.isort] +profile = "black" + +[tool.mypy] +exclude = ["alembic/versions/"] +plugins = ["sqlalchemy.ext.mypy.plugin", "pydantic.mypy"] + +[tool.black] +extend-exclude = ''' +/( + | alembic/versions +)/ +''' + +[tool.pytest.ini_options] +asyncio_mode = "strict" diff --git a/requirements.txt b/requirements.txt deleted file mode 100644 index b586bf6..0000000 --- a/requirements.txt +++ /dev/null @@ -1,27 +0,0 @@ -cachetools -poussetaches -python-dateutil -libsass -tornado<6.0.0 -gunicorn -piexif -requests -python-u2flib-server -Flask -Flask-WTF -pymongo -timeago -bleach -html2text -feedgen -itsdangerous -bcrypt -mf2py -passlib -git+https://github.com/erikriver/opengraph.git#egg=opengraph -git+https://github.com/tsileo/little-boxes.git@litepub#egg=little_boxes -pyyaml -pillow -emoji-unicode -html5lib -Pygments diff --git a/run.sh b/run.sh deleted file mode 100755 index 9cb4fda..0000000 --- a/run.sh +++ /dev/null @@ -1,6 +0,0 @@ -#!/bin/bash -python -c "import logging; logging.basicConfig(level=logging.DEBUG); from core import migrations; migrations.perform()" -python -c "from core import indexes; indexes.create_indexes()" -python startup.py -(sleep 5 && curl -X POST -u :$POUSETACHES_AUTH_KEY $MICROBLOGPUB_POUSSETACHES_HOST/resume)& -gunicorn -t 600 -w 5 -b 0.0.0.0:5005 --log-level debug app:app diff --git a/run_dev.sh b/run_dev.sh deleted file mode 100755 index 33c6009..0000000 --- 
a/run_dev.sh +++ /dev/null @@ -1,8 +0,0 @@ -#!/bin/bash -DEV_POUSSETACHES_AUTH_KEY="1234567890" -MICROBLOGPUB_INTERNAL_HOST="http://host.docker.internal:5005" - - -env POUSSETACHES_AUTH_KEY=${DEV_POUSSETACHES_AUTH_KEY} docker-compose -f docker-compose-dev.yml up -d -FLASK_DEBUG=1 MICROBLOGPUB_DEBUG=1 FLASK_APP=app.py POUSSETACHES_AUTH_KEY=${DEV_POUSSETACHES_AUTH_KEY} MICROBLOGPUB_INTERNAL_HOST=${MICROBLOGPUB_INTERNAL_HOST} flask run -p 5005 --with-threads -docker-compose down diff --git a/sass/base_theme.scss b/sass/base_theme.scss deleted file mode 100644 index d3104ee..0000000 --- a/sass/base_theme.scss +++ /dev/null @@ -1,415 +0,0 @@ -.note-container p:first-child { - margin-top: 0; -} -html, body { - height: 100%; -} - -@media only screen and (max-width: 480px) { - #menu-item-following { - display: none; - } -} -body { - background-color: $background-color; - color: $color; - display: flex; - flex-direction: column; -} -.base-container { - flex: 1 0 auto; -} -.footer { - flex-shrink: 0; -} -a, h1, h2, h3, h4, h5, h6 { - color: $color-title-link; -} -a { - text-decoration: none; -} -a:hover { - text-decoration: underline; -} -.gold { - color: $primary-color; -} -.pcolor { - color: $primary-color; -} -.lcolor { - color: $color-light; -} -.older-link, .newer-link, .older-link:hover, .newer-link:hover { - text-decoration: none; - padding: 3px; -} -.older-link { float: left } -.newer-link { float: right } -.clear { clear: both; } -.remote-follow-button { - background: $color-menu-background; - color: $color-light; - text-decoration: none; - padding: 5px 8px; - margin-top: 5px; - border-radius: 2px; -} -.remote-follow-button:hover { - text-decoration: none; - background: $primary-color; - color: $background-color; -} -#admin-menu-wrapper { - padding: 10px; - margin:0 auto; - width: 100%; - background: $color-menu-background; - max-width: 720px; - -#admin-menu { - list-style-type: none; - display: inline; - padding: 10px; - color: $color-light; - border-radius-bottom-left: 2px; - border-radius-bottom-right: 2px; - .left { float: left; } - .right { float: right; } - li { - a { text-decoration: none; } - .admin-title { - text-transform: uppercase; - font-weight: bold; - } - padding-right:10px; - .selected, a:hover { - color: $primary-color; - } - } -} -} -header#header { - margin-bottom: 70px; - - .title { - font-size: 1.2em; - padding-right: 15px; - color: $color-title-link; - } - .title:hover { - text-decoration: none; - } - .subtitle-username { - color: $color; - } - .menu { - clear: both; - padding: 0 0 10px 0; - ul { - display: inline; - list-style-type: none; - padding: 0; - li { - float: left; - margin-bottom: 10px; - margin-right: 10px; - } - } - a { - padding: 5px 10px; - small.badge { - background-color: $color-menu-background; - color: $color-light; - border-radius: 2px; - margin-left: 5px; - padding: 3px 5px 0px 5px; - font-weight: bold; - } - } - a.selected { - background: $primary-color; - color: $background-color; - border-radius:2px; - .badge { - color: $primary-color; - background: $background-color; - } - - } - a:hover { - background: $primary-color; - color: $background-color; - text-decoration: none; - border-radius: 2px; - .badge { - color: $primary-color; - background: $background-color; - } - } - } -} -#container { - width: 90%; - max-width: 720px; - margin: 30px auto; -} -#container #notes { - margin-top: 20px; -} -.actor-box { - display: block; - text-decoration: none; - margin-bottom: 40px; - - .actor-icon { - width: 120px; - border-radius:2px; - } - - h3 { margin: 0; 
} - - .actor-inline { - text-overflow: ellipsis; - white-space: nowrap; - overflow: hidden; - } -} -.actor-box-big { - display: block; - text-decoration: none; - - .actor-box-wrapper { - margin-bottom:40px; - - .actor-icon { - width:120px; - border-radius:2px; - } - - h3 { margin: 0; } - } -} -.note-box { - margin-bottom: 70px; -} -.note { - display: flex; - .l { - color: $color-note-link; - } - - .h-card { - flex: initial; - width: 50px; - } - - .u-photo { - width: 50px; - border-radius: 2px; - } - .note-wrapper { - flex: 1; - padding-left: 15px; - overflow: hidden; - } - - .bottom-bar { margin-top:10px;display:inline-block; } - - .img-attachment { - max-width:100%; - border-radius:2px; - } - - h3 { - font-size: 1.1em; - color: $color-light; - } - - strong { font-weight:600; } - - .note-container { - clear: right; - padding:10px 0; - word-break: normal; - } -} -.color-menu-background { - background: $color-menu-background; -} -.og-link { text-decoration: none; } -.og-link:hover { text-decoration: none; } -.bar-item-no-hover { - cursor: default; - background: $color-menu-background; - padding: 5px; - color: $color-light; - margin-right:10px; - border-radius:2px; - float: left; -} -.bar-item-no-hover:hover { - cursor: default; -} -.bar-item-no-bg { - cursor: default; - padding: 5px; - color: $color-light; - margin-right:10px; - border-radius:2px; - float: left; -} -.bar-item-no-bg:hover { - cursor: default; -} -.bar-item-pinned { - cursor: default; - background: $color-menu-background; - color: $color-light; - padding: 5px; - margin-right:5px; - border-radius:2px; -} -.bar-item-pinned:hover { - cursor: default; -} -.bar-item { - background: $color-menu-background; - padding: 5px; - color: $color-light; - margin-right: 10px; - float: left; - border-radius:2px; -} -.bar-item:hover { - background: $primary-color; - color: $background-color; - text-decoration: none; -} -.bar-item-no-border { - color: $color-light; - background: inherit; - cursor: default; -} -.bar-item-no-border:hover { - color: $color-light; - background: inherit; - cursor: default; -} -.bar-item-reverse { - background: $primary-color; - color: $background-color; - padding: 5px; - margin-right: 10px; - float: left; - border-radius:2px; - border: 0; -} -.bar-item-reverse:hover { - background: $color-menu-background; - color: $color-light; -} - -button.bar-item { - border: 0 -} -form.action-form { - display: inline; -} -.perma { - font-size: 1.25em; -} -.bottom-bar .perma-item { - margin-right: 5px; -} -.bottom-bar a.bar-item:hover { - text-decoration: none; -} -.footer > div { - width: 90%; - max-width: 720px; - margin: 40px auto; -} -.footer a, .footer a:hover, .footer a:visited { - text-decoration: underline; - color: $color; -} -.summary { - color: $color-summary; - font-size: 1.1em; - margin-top: 10px; - margin-bottom: 30px; -} -.summary a, .summay a:hover { - color: $color-summary; - text-decoration: underline; -} -#followers, #following, #new { - margin-top: 50px; -} -#admin { - margin-top: 50px; -} -.tabbar { - margin-bottom:50px; -} -.tab { - padding: 10px; - text-decoration: none; -} -.tab.selected { - background: $color-menu-background; - color: $primary-color; - border-top: 1px solid $primary-color; - border-right: 1px solid $primary-color; - border-left: 1px solid $primary-color; - padding: 9px; - -} -.tab:hover { - text-decoration: none; - background: $color-menu-background; - color: $color-light; -} -textarea, input, select { - background: $color-menu-background; - padding: 10px; - color: $color-light; - border: 
0px;
-  border-radius: 2px;
-}
-select {
-  padding: 4px 10px;
-}
-input {
-  padding: 10px;
-}
-input[type=submit] {
-  color: $primary-color;
-  text-transform: uppercase;
-}
-.note-video {
-  margin: 30px 0 10px 0;
-}
-li.answer {
-  height:30px;
-  margin-bottom:10px;
-  position:relative;
-}
-.answer .answer-bar {
-  position:absolute;
-  height:30px;
-  border-radius:2px;
-}
-.answer .answer-text {
-  position:relative;
-  top:6px;
-  padding-left:10px;
-  white-space: nowrap;
-  overflow: hidden;
-  text-overflow: ellipsis;
-}
-.answer .answer-text > span {
-  width:70px;
-  display:inline-block;
-}
diff --git a/sass/dark.scss b/sass/dark.scss
deleted file mode 100644
index 68a9f40..0000000
--- a/sass/dark.scss
+++ /dev/null
@@ -1,8 +0,0 @@
-$background-color: #060606;
-$background-light: #222;
-$color: #808080;
-$color-title-link: #fefefe;
-$color-summary: #ddd;
-$color-light: #bbb;
-$color-menu-background: #222;
-$color-note-link: #666;
diff --git a/sass/light.scss b/sass/light.scss
deleted file mode 100644
index 9c4c251..0000000
--- a/sass/light.scss
+++ /dev/null
@@ -1,9 +0,0 @@
-$background-color: #eee;
-$background-light: #ccc;
-$color: #111;
-$color-title-link: #333;
-$color-light: #555;
-$color-summary: #111;
-$color-note-link: #333;
-$color-menu-background: #ddd;
-// $primary-color: #1d781d;
diff --git a/sass/theme.scss b/sass/theme.scss
deleted file mode 100644
index 6277839..0000000
--- a/sass/theme.scss
+++ /dev/null
@@ -1 +0,0 @@
-@import 'base_theme.scss'
diff --git a/scripts/build_docs.py b/scripts/build_docs.py
new file mode 100644
index 0000000..075077e
--- /dev/null
+++ b/scripts/build_docs.py
@@ -0,0 +1,166 @@
+import re
+import shutil
+import typing
+from pathlib import Path
+from typing import Any
+
+from jinja2 import Environment
+from jinja2 import FileSystemLoader
+from jinja2 import select_autoescape
+from mistletoe import Document  # type: ignore
+from mistletoe import HTMLRenderer  # type: ignore
+from mistletoe import block_token  # type: ignore
+from pygments import highlight  # type: ignore
+from pygments.formatters import HtmlFormatter  # type: ignore
+from pygments.lexers import get_lexer_by_name as get_lexer  # type: ignore
+from pygments.lexers import guess_lexer  # type: ignore
+
+from app.config import VERSION
+from app.source import CustomRenderer
+from app.utils.datetime import now
+
+_FORMATTER = HtmlFormatter()
+_FORMATTER.noclasses = True
+
+
+class DocRenderer(CustomRenderer):
+    def __init__(
+        self,
+        depth=5,
+        omit_title=True,
+        filter_conds=[],
+    ) -> None:
+        super().__init__(
+            enable_mentionify=False,
+            enable_hashtagify=False,
+        )
+        self._headings: list[tuple[int, str, str]] = []
+        self._ids: set[str] = set()
+        self.depth = depth
+        self.omit_title = omit_title
+        self.filter_conds = filter_conds
+
+    @property
+    def toc(self):
+        """
+        Returns table of contents as a block_token.List instance.
+        """
+
+        def get_indent(level):
+            if self.omit_title:
+                level -= 1
+            return " " * 4 * (level - 1)
+
+        def build_list_item(heading):
+            level, content, title_id = heading
+            template = '{indent}- <a href="#{id}">{content}</a>\n'
+            return template.format(
+                indent=get_indent(level), content=content, id=title_id
+            )
+
+        lines = [build_list_item(heading) for heading in self._headings]
+        items = block_token.tokenize(lines)
+        return items[0]
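(Editor's note, not part of the patch: a standalone sketch of how `DocRenderer.toc` above turns the collected `(level, content, id)` heading tuples into markdown list lines, here with `omit_title=True` so `h1` is skipped and `h2` becomes the top level; the sample headings are illustrative.)

```python
# Sketch of DocRenderer.toc's list-building step, outside the class.
headings = [(2, "Installation", "installation"), (3, "Docker", "docker")]


def build_list_item(heading: tuple[int, str, str]) -> str:
    level, content, title_id = heading
    indent = " " * 4 * (level - 2)  # omit_title shifts every level up by one
    return f'{indent}- <a href="#{title_id}">{content}</a>\n'


print("".join(build_list_item(h) for h in headings))
# - <a href="#installation">Installation</a>
#     - <a href="#docker">Docker</a>
```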
+ """ + template = '{inner}' + inner = self.render_inner(token) + title_id = inner.lower().replace(" ", "-") + if title_id in self._ids: + i = 1 + while 1: + title_id = f"{title_id}_{i}" + if title_id not in self._ids: + break + self._ids.add(title_id) + rendered = template.format(level=token.level, inner=inner, id=title_id) + content = self.parse_rendered_heading(rendered) + + if not ( + self.omit_title + and token.level == 1 + or token.level > self.depth + or any(cond(content) for cond in self.filter_conds) + ): + self._headings.append((token.level, content, title_id)) + return rendered + + @staticmethod + def parse_rendered_heading(rendered): + """ + Helper method; converts rendered heading to plain text. + """ + return re.sub(r"<.+?>", "", rendered) + + def render_block_code(self, token: typing.Any) -> str: + code = token.children[0].content + lexer = get_lexer(token.language) if token.language else guess_lexer(code) + return highlight(code, lexer, _FORMATTER) + + +def markdownify(content: str) -> tuple[str, Any]: + with DocRenderer() as renderer: + rendered_content = renderer.render(Document(content)) + + with HTMLRenderer() as html_renderer: + toc = html_renderer.render(renderer.toc) + + return rendered_content, toc + + +def main() -> None: + # Setup Jinja + loader = FileSystemLoader("docs/templates") + env = Environment(loader=loader, autoescape=select_autoescape()) + template = env.get_template("layout.html") + + shutil.rmtree("docs/dist", ignore_errors=True) + Path("docs/dist").mkdir(exist_ok=True) + shutil.rmtree("docs/dist/static", ignore_errors=True) + shutil.copytree("docs/static", "docs/dist/static") + + last_updated = now().replace(second=0, microsecond=0).isoformat() + + readme = Path("README.md") + content, toc = markdownify(readme.read_text().removeprefix("# microblog.pub")) + template.stream( + content=content, + version=VERSION, + path="/", + last_updated=last_updated, + ).dump("docs/dist/index.html") + + install = Path("docs/install.md") + content, toc = markdownify(install.read_text()) + template.stream( + content=content.replace("[TOC]", toc), + version=VERSION, + path="/installing.html", + last_updated=last_updated, + ).dump("docs/dist/installing.html") + + user_guide = Path("docs/user_guide.md") + content, toc = markdownify(user_guide.read_text()) + template.stream( + content=content.replace("[TOC]", toc), + version=VERSION, + path="/user_guide.html", + last_updated=last_updated, + ).dump("docs/dist/user_guide.html") + + developer_guide = Path("docs/developer_guide.md") + content, toc = markdownify(developer_guide.read_text()) + template.stream( + content=content.replace("[TOC]", toc), + version=VERSION, + path="/developer_guide.html", + last_updated=last_updated, + ).dump("docs/dist/developer_guide.html") + + +if __name__ == "__main__": + main() diff --git a/scripts/config_wizard.py b/scripts/config_wizard.py new file mode 100644 index 0000000..912d726 --- /dev/null +++ b/scripts/config_wizard.py @@ -0,0 +1,96 @@ +"""Basic wizard for setting up microblog.pub configuration files.""" +import os +import sys +from pathlib import Path +from typing import Any + +import bcrypt +import tomli_w +from prompt_toolkit import prompt +from prompt_toolkit.key_binding import KeyBindings + +from app.key import generate_key + +_ROOT_DIR = Path().parent.resolve() +_KEY_PATH = _ROOT_DIR / "data" / "key.pem" + + +_kb = KeyBindings() + + +@_kb.add("c-@") +def _(event): + """Save multi-line buffer on CTRL + space""" + event.current_buffer.validate_and_handle() + + +def main() -> None: + 
print("Welcome to microblog.pub setup wizard\n") + print("Generating key...") + if _KEY_PATH.exists(): + yn = "" + while yn not in ["y", "n"]: + yn = prompt( + "WARNING, a key already exists, overwrite it? (y/n): ", default="n" + ).lower() + if yn == "y": + generate_key(_KEY_PATH) + else: + generate_key(_KEY_PATH) + + config_file = Path("data/profile.toml") + + if config_file.exists(): + # Spit out the relative path for the "config artifacts" + rconfig_file = "data/profile.toml" + print( + f"Existing setup detected, please delete {rconfig_file} " + "before restarting the wizard" + ) + sys.exit(2) + + dat: dict[str, Any] = {} + print("Your identity will be @{username}@{domain}") + dat["domain"] = prompt("domain: ") + dat["username"] = prompt("username: ") + dat["admin_password"] = bcrypt.hashpw( + prompt("admin password: ", is_password=True).encode(), bcrypt.gensalt() + ).decode() + dat["name"] = prompt("name (e.g. John Doe): ", default=dat["username"]) + dat["summary"] = prompt( + ( + "summary (short description, in markdown, " + "press [CTRL] + [SPACE] to submit):\n" + ), + key_bindings=_kb, + multiline=True, + ) + dat["https"] = True + proto = "https" + yn = "" + while yn not in ["y", "n"]: + yn = prompt("will the site be served via https? (y/n): ", default="y").lower() + if yn == "n": + dat["https"] = False + proto = "http" + + print("Note that you can put your icon/avatar in the static/ directory") + if icon_url := prompt( + "icon URL: ", default=f'{proto}://{dat["domain"]}/static/nopic.png' + ): + dat["icon_url"] = icon_url + dat["secret"] = os.urandom(16).hex() + + with config_file.open("w") as f: + f.write(tomli_w.dumps(dat)) + + print("Done") + sys.exit(0) + + +if __name__ == "__main__": + try: + main() + except KeyboardInterrupt: + print("Aborted") + sys.exit(1) diff --git a/setup.cfg b/setup.cfg deleted file mode 100644 index 6deafc2..0000000 --- a/setup.cfg +++ /dev/null @@ -1,2 +0,0 @@ -[flake8] -max-line-length = 120 diff --git a/setup_wizard/Dockerfile b/setup_wizard/Dockerfile deleted file mode 100644 index 83b54b3..0000000 --- a/setup_wizard/Dockerfile +++ /dev/null @@ -1,7 +0,0 @@ -FROM python:3.7 -WORKDIR /app -ADD . 
diff --git a/setup.cfg b/setup.cfg
deleted file mode 100644
index 6deafc2..0000000
--- a/setup.cfg
+++ /dev/null
@@ -1,2 +0,0 @@
-[flake8]
-max-line-length = 120
diff --git a/setup_wizard/Dockerfile b/setup_wizard/Dockerfile
deleted file mode 100644
index 83b54b3..0000000
--- a/setup_wizard/Dockerfile
+++ /dev/null
@@ -1,7 +0,0 @@
-FROM python:3.7
-WORKDIR /app
-ADD . /app
-RUN pip install -r requirements.txt
-LABEL maintainer="t@a4.io"
-LABEL pub.microblog.oneshot=true
-CMD ["python", "wizard.py"]
diff --git a/setup_wizard/requirements.txt b/setup_wizard/requirements.txt
deleted file mode 100644
index 9482032..0000000
--- a/setup_wizard/requirements.txt
+++ /dev/null
@@ -1,3 +0,0 @@
-prompt_toolkit
-bcrypt
-markdown
diff --git a/setup_wizard/wizard.py b/setup_wizard/wizard.py
deleted file mode 100644
index 98b9676..0000000
--- a/setup_wizard/wizard.py
+++ /dev/null
@@ -1,86 +0,0 @@
-"""Basic wizard for setting up microblog.pub configuration files."""
-import binascii
-import os
-import sys
-from pathlib import Path
-
-import bcrypt
-from markdown import markdown
-from prompt_toolkit import prompt
-
-
-def main() -> None:
-    print("Welcome to microblog.pub setup wizard\n")
-
-    config_file = Path("/app/out/config/me.yml")
-    env_file = Path("/app/out/.env")
-
-    if config_file.exists():
-        # Spit out the relative path for the "config artifacts"
-        rconfig_file = "config/me.yml"
-        print(
-            f"Existing setup detected, please delete {rconfig_file} before restarting the wizard"
-        )
-        sys.exit(2)
-
-    dat = {}
-    print("Your identity will be @{username}@{domain}")
-    dat["domain"] = prompt("domain: ")
-    dat["username"] = prompt("username: ")
-    dat["pass"] = bcrypt.hashpw(
-        prompt("password: ", is_password=True).encode(), bcrypt.gensalt()
-    ).decode()
-    dat["name"] = prompt("name (e.g. John Doe): ")
-    dat["summary"] = markdown(
-        prompt(
-            "summary (short description, in markdown, press [ESC] then [ENTER] to submit):\n",
-            multiline=True,
-        )
-    )
-    dat["https"] = True
-    proto = "https"
-    yn = ""
-    while yn not in ["y", "n"]:
-        yn = prompt("will the site be served via https? (y/n): ", default="y").lower()
-    if yn == "n":
-        dat["https"] = False
-        proto = "http"
-
-    dat["icon_url"] = prompt(
-        "icon URL: ", default=f'{proto}://{dat["domain"]}/static/nopic.png'
-    )
-
-    out = ""
-    for k, v in dat.items():
-        out += f"{k}: {v!r}\n"
-
-    with config_file.open("w") as f:
-        f.write(out)
-
-    proj_name = os.getenv("MICROBLOGPUB_WIZARD_PROJECT_NAME", "microblogpub")
-
-    env = {
-        "WEB_PORT": 5005,
-        "CONFIG_DIR": "./config",
-        "DATA_DIR": "./data",
-        "POUSSETACHES_AUTH_KEY": binascii.hexlify(os.urandom(32)).decode(),
-        "COMPOSE_PROJECT_NAME": proj_name,
-    }
-
-    out2 = ""
-    for k, v in env.items():
-        out2 += f"{k}={v}\n"
-
-    with env_file.open("w") as f:
-        f.write(out2)
-
-    print("Done")
-    sys.exit(0)
-
-
-if __name__ == "__main__":
-    try:
-        main()
-    except KeyboardInterrupt:
-        print("Aborted")
-        sys.exit(1)
diff --git a/startup.py b/startup.py
deleted file mode 100644
index 7ee7db0..0000000
--- a/startup.py
+++ /dev/null
@@ -1,12 +0,0 @@
-import app  # noqa: F401  # here to init the backend
-from core.activitypub import _actor_hash
-from core.shared import MY_PERSON
-from core.shared import p
-from core.tasks import Tasks
-from utils.local_actor_cache import is_actor_updated
-
-h = _actor_hash(MY_PERSON, local=True)
-if is_actor_updated(h):
-    Tasks.send_actor_update()
-
-p.push({}, "/task/cleanup", schedule="@every 1h")
diff --git a/static/favicon.png b/static/favicon.png
deleted file mode 100644
index 644d6e8..0000000
Binary files a/static/favicon.png and /dev/null differ
diff --git a/tasks.py b/tasks.py
new file mode 100644
index 0000000..c968c85
--- /dev/null
+++ b/tasks.py
@@ -0,0 +1,424 @@
+import asyncio
+import io
+import shutil
+import tarfile
+from collections import namedtuple
+from contextlib import contextmanager
+from inspect import getfullargspec
+from pathlib import
Path
+from typing import Generator
+from typing import Optional
+from unittest.mock import patch
+
+import httpx
+import invoke  # type: ignore
+from invoke import Context  # type: ignore
+from invoke import run  # type: ignore
+from invoke import task  # type: ignore
+
+
+def fix_annotations():
+    """
+    Pyinvoke doesn't accept annotations by default; this fixes that.
+    Based on: @zelo's fix in https://github.com/pyinvoke/invoke/pull/606
+    Context in: https://github.com/pyinvoke/invoke/issues/357
+    Python 3.11 https://github.com/pyinvoke/invoke/issues/833
+    """
+
+    ArgSpec = namedtuple("ArgSpec", ["args", "defaults"])
+
+    def patched_inspect_getargspec(func):
+        spec = getfullargspec(func)
+        return ArgSpec(spec.args, spec.defaults)
+
+    org_task_argspec = invoke.tasks.Task.argspec
+
+    def patched_task_argspec(*args, **kwargs):
+        with patch(
+            target="inspect.getargspec", new=patched_inspect_getargspec, create=True
+        ):
+            return org_task_argspec(*args, **kwargs)
+
+    invoke.tasks.Task.argspec = patched_task_argspec
+
+
+fix_annotations()
+
+
+@task
+def generate_db_migration(ctx, message):
+    # type: (Context, str) -> None
+    run(f'alembic revision --autogenerate -m "{message}"', echo=True)
+
+
+@task
+def migrate_db(ctx):
+    # type: (Context) -> None
+    run("alembic upgrade head", echo=True)
+
+
+@task
+def autoformat(ctx):
+    # type: (Context) -> None
+    run("black .", echo=True)
+    run("isort -sl .", echo=True)
+
+
+@task
+def lint(ctx):
+    # type: (Context) -> None
+    run("black --check .", echo=True)
+    run("isort -sl --check-only .", echo=True)
+    run("flake8 .", echo=True)
+    run("mypy .", echo=True)
+
+
+@task
+def compile_scss(ctx, watch=False):
+    # type: (Context, bool) -> None
+    from app.utils.favicon import build_favicon
+
+    favicon_file = Path("data/favicon.ico")
+    if not favicon_file.exists():
+        build_favicon()
+    else:
+        shutil.copy2(favicon_file, "app/static/favicon.ico")
+
+    theme_file = Path("data/_theme.scss")
+    if not theme_file.exists():
+        theme_file.write_text("// override vars for theming here")
+
+    if watch:
+        run("boussole watch", echo=True)
+    else:
+        run("boussole compile", echo=True)
+
+
+@task
def uvicorn(ctx):
+    # type: (Context) -> None
+    run("uvicorn app.main:app --no-server-header", pty=True, echo=True)
+
+
+@task
+def process_outgoing_activities(ctx):
+    # type: (Context) -> None
+    from app.outgoing_activities import loop
+
+    asyncio.run(loop())
+
+
+@task
+def process_incoming_activities(ctx):
+    # type: (Context) -> None
+    from app.incoming_activities import loop
+
+    asyncio.run(loop())
+
+
+@task
+def tests(ctx, k=None):
+    # type: (Context, Optional[str]) -> None
+    pytest_args = " -vvv"
+    if k:
+        pytest_args += f" -k {k}"
+    run(
+        f"MICROBLOGPUB_CONFIG_FILE=tests.toml pytest tests{pytest_args}",
+        pty=True,
+        echo=True,
+    )
+
+
+@task
+def generate_requirements_txt(ctx, where="requirements.txt"):
+    # type: (Context, str) -> None
+    run(
+        f"poetry export -f requirements.txt --without-hashes > {where}",
+        pty=True,
+        echo=True,
+    )
+
+
+@task
+def build_docs(ctx):
+    # type: (Context) -> None
+    with embed_version():
+        run("PYTHONPATH=.
python scripts/build_docs.py", pty=True, echo=True) + + +@task +def download_twemoji(ctx): + # type: (Context) -> None + resp = httpx.get( + "https://github.com/twitter/twemoji/archive/refs/tags/v14.0.2.tar.gz", + follow_redirects=True, + ) + resp.raise_for_status() + tf = tarfile.open(fileobj=io.BytesIO(resp.content)) + members = [ + member + for member in tf.getmembers() + if member.name.startswith("twemoji-14.0.2/assets/svg/") + ] + for member in members: + emoji_name = Path(member.name).name + with open(f"app/static/twemoji/{emoji_name}", "wb") as f: + f.write(tf.extractfile(member).read()) # type: ignore + + +@task(download_twemoji, compile_scss) +def configuration_wizard(ctx): + # type: (Context) -> None + run("MICROBLOGPUB_CONFIG_FILE=tests.toml alembic upgrade head", echo=True) + run( + "MICROBLOGPUB_CONFIG_FILE=tests.toml PYTHONPATH=. python scripts/config_wizard.py", # noqa: E501 + pty=True, + echo=True, + ) + + +@task +def install_deps(ctx): + # type: (Context) -> None + run("poetry install", pty=True, echo=True) + + +@task(pre=[compile_scss], post=[migrate_db]) +def update(ctx, update_deps=True): + # type: (Context, bool) -> None + if update_deps: + run("poetry install", pty=True, echo=True) + print("Done") + + +@task +def stats(ctx): + # type: (Context) -> None + from app.utils.stats import print_stats + + print_stats() + + +@contextmanager +def embed_version() -> Generator[None, None, None]: + from app.utils.version import get_version_commit + + version_file = Path("app/_version.py") + version_file.unlink(missing_ok=True) + version_commit = get_version_commit() + version_file.write_text(f'VERSION_COMMIT = "{version_commit}"') + try: + yield + finally: + version_file.unlink() + + +@task +def build_docker_image(ctx): + # type: (Context) -> None + with embed_version(): + run("docker build -t microblogpub/microblogpub .") + + +@task +def prune_old_data(ctx): + # type: (Context) -> None + from app.prune import run_prune_old_data + + asyncio.run(run_prune_old_data()) + + +@task +def webfinger(ctx, account): + # type: (Context, str) -> None + import traceback + + from loguru import logger + + from app.source import _MENTION_REGEX + from app.webfinger import get_actor_url + + logger.disable("app") + if not account.startswith("@"): + account = f"@{account}" + if not _MENTION_REGEX.match(account): + print(f"Invalid acccount {account}") + return + + print(f"Resolving {account}") + try: + maybe_actor_url = asyncio.run(get_actor_url(account)) + if maybe_actor_url: + print(f"SUCCESS: {maybe_actor_url}") + else: + print(f"ERROR: Failed to resolve {account}") + except Exception as exc: + print(f"ERROR: Failed to resolve {account}") + print("".join(traceback.format_exception(exc))) + + +@task +def move_to(ctx, moved_to): + # type: (Context, str) -> None + import traceback + + from loguru import logger + + from app.actor import LOCAL_ACTOR + from app.actor import fetch_actor + from app.boxes import send_move + from app.database import async_session + from app.source import _MENTION_REGEX + from app.webfinger import get_actor_url + + logger.disable("app") + + if not moved_to.startswith("@"): + moved_to = f"@{moved_to}" + if not _MENTION_REGEX.match(moved_to): + print(f"Invalid acccount {moved_to}") + return + + async def _send_move(): + print(f"Initiating move to {moved_to}") + async with async_session() as db_session: + try: + moved_to_actor_id = await get_actor_url(moved_to) + except Exception as exc: + print(f"ERROR: Failed to resolve {moved_to}") + 
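(Editor's note, not part of the patch: both `webfinger` and `move_to` above resolve a `@user@domain` handle to an actor URL via the project's `app.webfinger.get_actor_url`. A rough standalone sketch of the underlying WebFinger lookup, an illustration of the protocol rather than the project's implementation; `resolve_actor_url` is a hypothetical name.)

```python
# Rough WebFinger sketch: resolve @user@domain to an ActivityPub actor URL
# via the domain's /.well-known/webfinger endpoint.
import httpx


def resolve_actor_url(handle: str) -> str | None:
    username, domain = handle.lstrip("@").split("@", 1)
    resp = httpx.get(
        f"https://{domain}/.well-known/webfinger",
        params={"resource": f"acct:{username}@{domain}"},
        follow_redirects=True,
    )
    resp.raise_for_status()
    # The actor URL is the "self" link typed as ActivityPub JSON.
    for link in resp.json().get("links", []):
        if link.get("rel") == "self" and link.get("type", "").startswith(
            "application/activity+json"
        ):
            return link.get("href")
    return None
```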
print("".join(traceback.format_exception(exc))) + return + + if not moved_to_actor_id: + print("ERROR: Failed to resolve {moved_to}") + return + + new_actor = await fetch_actor(db_session, moved_to_actor_id) + + if LOCAL_ACTOR.ap_id not in new_actor.ap_actor.get("alsoKnownAs", []): + print( + f"{new_actor.handle}/{moved_to_actor_id} is missing " + f"{LOCAL_ACTOR.ap_id} in alsoKnownAs" + ) + return + + await send_move(db_session, new_actor.ap_id) + + print("Done") + + asyncio.run(_send_move()) + + +@task +def self_destruct(ctx): + # type: (Context) -> None + from loguru import logger + + from app.boxes import send_self_destruct + from app.database import async_session + + logger.disable("app") + + async def _send_self_destruct(): + if input("Initiating self destruct, type yes to confirm: ") != "yes": + print("Aborting") + + async with async_session() as db_session: + await send_self_destruct(db_session) + + print("Done") + + asyncio.run(_send_self_destruct()) + + +@task +def yunohost_config( + ctx, + domain, + username, + name, + summary, + password, +): + # type: (Context, str, str, str, str, str) -> None + from app.utils import yunohost + + yunohost.setup_config_file( + domain=domain, + username=username, + name=name, + summary=summary, + password=password, + ) + + +@task +def reset_password(ctx): + # type: (Context) -> None + import bcrypt + from prompt_toolkit import prompt + + new_password = bcrypt.hashpw( + prompt("New admin password: ", is_password=True).encode(), bcrypt.gensalt() + ).decode() + + print() + print("Update data/profile.toml with:") + print(f'admin_password = "{new_password}"') + + +@task +def check_config(ctx): + # type: (Context) -> None + import sys + import traceback + + from loguru import logger + + logger.disable("app") + + try: + from app import config # noqa: F401 + except Exception as exc: + print("Config error, please fix data/profile.toml:\n") + print("".join(traceback.format_exception(exc))) + sys.exit(1) + else: + print("Config is OK") + + +@task +def import_mastodon_following_accounts(ctx, path): + # type: (Context, str) -> None + from loguru import logger + + from app.boxes import _get_following + from app.boxes import _send_follow + from app.database import async_session + from app.utils.mastodon import get_actor_urls_from_following_accounts_csv_file + + async def _import_following() -> int: + count = 0 + async with async_session() as db_session: + followings = { + following.ap_actor_id for following in await _get_following(db_session) + } + for ( + handle, + actor_url, + ) in await get_actor_urls_from_following_accounts_csv_file(path): + if actor_url in followings: + logger.info(f"Already following {handle}") + continue + + logger.info(f"Importing {actor_url=}") + + await _send_follow(db_session, actor_url) + count += 1 + + await db_session.commit() + + return count + + count = asyncio.run(_import_following()) + logger.info(f"Import done, {count} follow requests sent") diff --git a/templates/about.html b/templates/about.html deleted file mode 100644 index 7625840..0000000 --- a/templates/about.html +++ /dev/null @@ -1,22 +0,0 @@ -{% extends "layout.html" %} -{% import 'utils.html' as utils %} -{% block header %} -{% endblock %} -{% block content %} -
- -{% include "header.html" %} - -
-
- -
- -
-{{ text | safe }} -
{{ me.summary }}
-
-
- -
-{% endblock %} diff --git a/templates/admin.html b/templates/admin.html deleted file mode 100644 index e2c6261..0000000 --- a/templates/admin.html +++ /dev/null @@ -1,28 +0,0 @@ -{% extends "layout.html" %} -{% import 'utils.html' as utils %} -{% block title %}Admin - {{ config.NAME }}{% endblock %} -{% block content %} -
-{% include "header.html" %} -
-

Admin

-

Links

- -

DB

-
    -
  • Inbox size: {{ inbox_size }}
  • -
  • Outbox size: {{ outbox_size }}
  • -
-

Collections

-
    -
  • followers: {{ col_followers }}
  • -
  • following: {{ col_following }}
  • -
  • liked: {{col_liked }}
  • -
-
- -
-{% endblock %} diff --git a/templates/admin_indieauth.html b/templates/admin_indieauth.html deleted file mode 100644 index 510ec9c..0000000 --- a/templates/admin_indieauth.html +++ /dev/null @@ -1,29 +0,0 @@ -{% extends "layout.html" %} -{% import 'utils.html' as utils %} -{% block title %}IndieAuth logs - {{ config.NAME }}{% endblock %} -{% block content %} -
-{% include "header.html" %} -
-

IndieAuth logs

-
    -{% for action in indieauth_actions %} -
  • {{action.ts|format_ts}} -{% if action.verified_by == "id" %}Authentication {% else %}Authorization {% endif %} -request by {{ action.client_id }} - / {{action.ip_address}} {% if action.geoip %}({{action.geoip}}){% endif %} -as {{action.me}} -({% if action.scope %}scope={{action.scope}},{% endif %}redirect_uri={{action.redirect_uri}}). -{% if action.token_expires %} -
    -The token {{action.token[:20]}}... -{% if action.token_expires|gt_ts%}has expired on{% else %}expires{% endif %} {{ action.token_expires|format_ts }} -{% endif %} - -
  • -{% endfor %} -
-
- -
-{% endblock %} diff --git a/templates/admin_tasks.html b/templates/admin_tasks.html deleted file mode 100644 index e336f86..0000000 --- a/templates/admin_tasks.html +++ /dev/null @@ -1,116 +0,0 @@ -{% extends "layout.html" %} -{% import 'utils.html' as utils %} -{% block title %}Tasks - {{ config.NAME }}{% endblock %} -{% block content %} -
-{% include "header.html" %} -
- -

Cron

- - - - - - - - - - - - - - {% for task in cron %} - - - - - - - - - {% endfor %} - -
#URLPayloadScheduleNext runResponse
{{ task.task_id }}{{ task.url }} ({{ task.expected }}){{ task.payload }}{{ task.schedule }}{{ task.next_run }}Tries #{{ task.tries }}: {{ task.last_error_body }} ({{ task.last_error_status_code }})
- -

Dead

- - - - - - - - - - - - - {% for task in dead %} - - - - - - - - {% endfor %} - -
#URLPayloadNext runResponse
{{ task.task_id }}{{ task.url }} ({{ task.expected }}){{ task.payload }}{{ task.next_run }}Tries #{{ task.tries }}: {{ task.last_error_body }} ({{ task.last_error_status_code }})
- - -

Waiting

- - - - - - - - - - - - - {% for task in waiting %} - - - - - - - - {% endfor %} - -
#URLPayloadNext runResponse
{{ task.task_id }}{{ task.url }} ({{ task.expected }}){{ task.payload }}{{ task.next_run }}Tries #{{ task.tries }}: {{ task.last_error_body }} ({{ task.last_error_status_code }})
- -

Success

- - - - - - - - - - - - - {% for task in success %} - - - - - - - - {% endfor %} - -
#URLPayloadNext runResponse
{{ task.task_id }}{{ task.url }} ({{ task.expected }}){{ task.payload }}{{ task.next_run }}Tries #{{ task.tries }}: {{ task.last_error_body }} ({{ task.last_error_status_code }})
- - - -
-
-{% endblock %} diff --git a/templates/authorize_remote_follow.html b/templates/authorize_remote_follow.html deleted file mode 100644 index e0fdc8d..0000000 --- a/templates/authorize_remote_follow.html +++ /dev/null @@ -1,16 +0,0 @@ -{% extends "layout.html" %} -{% import 'utils.html' as utils %} -{% block header %} -{% endblock %} -{% block content %} -
-

You're about to follow {{ profile}}

- -
- - - -
- -
-{% endblock %} diff --git a/templates/direct_messages.html b/templates/direct_messages.html deleted file mode 100644 index 9669c0a..0000000 --- a/templates/direct_messages.html +++ /dev/null @@ -1,20 +0,0 @@ -{% extends "layout.html" %} -{% import 'utils.html' as utils %} -{% block title %}DMs - {{ config.NAME }}{% endblock %} -{% block header %} - -{% endblock %} -{% block content %} -
-
- - {% for thread in threads %} -

With {{ ", ".join(thread.participants) }} - {{ thread.len }} message{% if thread.len > 1 %}s{% endif %} -

- {{ utils.display_note(thread.last_reply.activity.object, meta=thread.last_reply.meta) }} - {% endfor %} - -
-
-{% endblock %} diff --git a/templates/error.html b/templates/error.html deleted file mode 100644 index 1f77766..0000000 --- a/templates/error.html +++ /dev/null @@ -1,24 +0,0 @@ -{% extends "layout.html" %} -{% import 'utils.html' as utils %} -{% block title %}{{code}} {{status_text}} - {{ config.NAME }}{% endblock %} -{% block header %} -{% endblock %} -{% block content %} -
-{% if not request.path.startswith('/admin') %} -{% include "header.html" %} -{% endif %} -
-

{{code}} {{status_text}}

-

Something went wrong :(

-{% if tb %} -
-

Please consider opening an issue on GitHub.
Here is the traceback:

-
-{{ tb }}
-
-
-{% endif %} -
-
-{% endblock %} diff --git a/templates/followers.html b/templates/followers.html deleted file mode 100644 index 6d31da2..0000000 --- a/templates/followers.html +++ /dev/null @@ -1,36 +0,0 @@ -{% extends "layout.html" %} -{% import 'utils.html' as utils %} -{% block title %}Followers - {{ config.NAME }}{% endblock %} -{% block header %} -{% endblock %} -{% block content %} -
-{% include "header.html" %} - -
-
- {% for meta in followers_data %} - {% set follower = meta.actor %} - {% if session.logged_in %} -
-profile - -{% if meta.notification_follows_back %} -following -{% endif %} - -
- {% endif %} -
- {{ utils.display_actor_inline(follower, size=80) }} -
- {% endfor %} - {{ utils.display_pagination(older_than, newer_than) }} -
- -
-
-{% endblock %} -{% block links %} - -{{ utils.display_pagination_links(older_than, newer_than) }}{% endblock %} diff --git a/templates/following.html b/templates/following.html deleted file mode 100644 index bdb5361..0000000 --- a/templates/following.html +++ /dev/null @@ -1,71 +0,0 @@ -{% extends "layout.html" %} -{% import 'utils.html' as utils %} -{% block title %}Following - {{ config.NAME }}{% endblock %} -{% block header %} -{% endblock %} -{% block content %} -
-{% include "header.html" %} - -
- {% for (follow_id, meta) in following_data %} - {% set follow = meta.object %} - {% if session.logged_in %} -
-profile -
- - - - -
-{% if meta.notification_follows_back %} -follows you back -{% endif %} - -{% if lists %} -
- - - - - -
-{% endif %} - -{% for l in lists %} -{% if follow.id in l.members %} -
- - - - -
- - -{% endif %} -{% endfor %} - - -
- - {% endif %} -
- {{ utils.display_actor_inline(follow, size=80) }} -
- {% endfor %} - {{ utils.display_pagination(older_than, newer_than) }} -
- -
-{% endblock %} -{% block links %} - -{{ utils.display_pagination_links(older_than, newer_than) }}{% endblock %} diff --git a/templates/header.html b/templates/header.html deleted file mode 100644 index 766aa96..0000000 --- a/templates/header.html +++ /dev/null @@ -1,40 +0,0 @@ - diff --git a/templates/index.html b/templates/index.html deleted file mode 100644 index 15a7d14..0000000 --- a/templates/index.html +++ /dev/null @@ -1,80 +0,0 @@ -{% extends "layout.html" %} -{% import 'utils.html' as utils %} -{% block header %} - - - - - - - - - - - - -{% endblock %} -{% block content %} -
- - -{% include "header.html" %} - -
- {% for item in pinned %} - {% if item.meta.pinned %} -

- pinned -

- {% endif %} - - {{ utils.display_note(item.activity.object, meta=item.meta, no_color=True) }} - {% endfor %} - - {% for item in outbox_data %} - - {% if item | has_type('Announce') %} - {% if "actor" in item.meta %} - {% set boost_actor = item.meta.actor %} - {% if session.logged_in %} -
-
- - - - -
- {{ utils.display_actor_box(boost_actor, after="boosted") }} - {{ utils.display_in_reply_to(item.meta, item.activity.object) }} -
- {% else %} -

- {{ utils.display_actor_box(boost_actor, after="boosted") }} - {{ utils.display_in_reply_to(item.meta, item.activity.object) }} -

- {% endif %} - {% endif %} - {% if item.meta.object %} - {{ utils.display_note(item.meta.object, meta=item.meta) }} - {% endif %} - {% elif item | has_type('Create') %} - {% if item.activity.object.inReplyTo %} -

- {{ utils.display_in_reply_to(item.meta, item.activity.object) }} -

- {% endif %} - {{ utils.display_note(item.activity.object, meta=item.meta, no_color=True) }} - {% endif %} - - {% endfor %} - - {{ utils.display_pagination(older_than, newer_than) }} -
- -
-{% endblock %} -{% block links %} - - - -{{ utils.display_pagination_links(older_than, newer_than) }}{% endblock %} diff --git a/templates/indieauth_flow.html b/templates/indieauth_flow.html deleted file mode 100644 index 9a6769e..0000000 --- a/templates/indieauth_flow.html +++ /dev/null @@ -1,41 +0,0 @@ -{% extends "layout.html" %} -{% import 'utils.html' as utils %} -{% block header %} -{% endblock %} -{% block content %} -
- -
-{% if client.logo %} -
- -
-{% endif %} -
-
-{{ client.name }} -

wants you to login as {{ me }}

-
-
-
- -
- {% if scopes %} -

Scopes

-
    - {% for scope in scopes %} -
  • -
  • - {% endfor %} -
- {% endif %} - - - - - - -
- -
-{% endblock %} diff --git a/templates/layout.html b/templates/layout.html deleted file mode 100644 index 3b084ee..0000000 --- a/templates/layout.html +++ /dev/null @@ -1,63 +0,0 @@ - - - - - - -{% block title %}{{ config.NAME }}'s microblog{% endblock %} - - - - - -{% if not request.args.get("older_than") and not request.args.get("previous_than") %}{% endif %} -{% block links %}{% endblock %} -{% if config.THEME_COLOR %}{% endif %} - -{% block header %}{% endblock %} - - -{% if logged_in %} - -{% endif %} - - -
-{% block content %}{% endblock %} -
- - - diff --git a/templates/liked.html b/templates/liked.html deleted file mode 100644 index 039efac..0000000 --- a/templates/liked.html +++ /dev/null @@ -1,34 +0,0 @@ -{% extends "layout.html" %} -{% import 'utils.html' as utils %} -{% block content %} -
- - -{% include "header.html" %} - -
- {% for item in liked %} - {% if session.logged_in %} -
-
- - - - -
-
- - {% endif %} - {% if item.meta.object %} - {{ utils.display_note(item.meta.object, meta=item.meta) }} - {% endif %} - {% endfor %} - - {{ utils.display_pagination(older_than, newer_than) }} -
- -
-{% endblock %} -{% block links %} - -{{ utils.display_pagination_links(older_than, newer_than) }}{% endblock %} diff --git a/templates/lists.html b/templates/lists.html deleted file mode 100644 index e2d0059..0000000 --- a/templates/lists.html +++ /dev/null @@ -1,61 +0,0 @@ -{% extends "layout.html" %} -{% import 'utils.html' as utils %} -{% block title %}Lists - {{ config.NAME }}{% endblock %} -{% block header %} - -{% endblock %} -{% block content %} -
- -
-

Lists and its members are private.

-

New List

-
- - - - -
- -

Lists

-

Manage list members in the Following section

- - - -

Manage lists

-{% for l in lists %} -

{{ l.name }} {{ l.members | length }} members

-
- - - - -
- -
- -{% for member in l.members %} -
-
- - - - - -
-
- -
- {{ utils.display_actor_inline(member | get_actor, size=80) }} -
-{% endfor %} - -{% endfor %} -
-
-
-{% endblock %} diff --git a/templates/login.html b/templates/login.html deleted file mode 100644 index 717dfdc..0000000 --- a/templates/login.html +++ /dev/null @@ -1,46 +0,0 @@ -{% extends "layout.html" %} -{% import 'utils.html' as utils %} -{% block title %}Login - {{ config.NAME }}{% endblock %} -{% block header %} - -{% endblock %} -{% block content %} -
- -
- - - {% if u2f_enabled %} - - {% else %} - {% endif %} - -
- -
-{% if u2f_enabled %} - -{% endif %} -{% endblock %} diff --git a/templates/lookup.html b/templates/lookup.html deleted file mode 100644 index 0067273..0000000 --- a/templates/lookup.html +++ /dev/null @@ -1,65 +0,0 @@ -{% extends "layout.html" %} -{% import 'utils.html' as utils %} -{% block title %}Lookup - {{ config.NAME }}{% endblock %} -{% block content %} -
-
- -

Interact with an ActivityPub object via its URL or look for a user using @user@domain.tld

- -
- - -
- -{% if data %} -{% set data = data.to_dict() %} -
- {% if data | has_actor_type %} -
-{% if following %} -profile -
- - - - -
-
- - - - -
-{% else %} -
- - - - -
-{% endif %} -{% if follower %}follows you!{% endif %} -
- - - {{ utils.display_actor_inline(data, size=80) }} - - {% elif data | has_type('Create') %} - {{ utils.display_note(data.object, meta=meta) }} - {% elif data | has_type(['Note', 'Article', 'Video', 'Audio', 'Page', 'Question']) %} - {{ utils.display_note(data, meta=meta) }} - {% elif data | has_type('Announce') %} - {% set boost_actor = meta.actor %} -

- {{ boost_actor.name }} boosted -

- {{ utils.display_note(meta.object, meta=meta) }} - - {% endif %} -
-{% endif %} - -
-
-{% endblock %} diff --git a/templates/new.html b/templates/new.html deleted file mode 100644 index 44e4d63..0000000 --- a/templates/new.html +++ /dev/null @@ -1,138 +0,0 @@ -{% extends "layout.html" %} -{% import 'utils.html' as utils %} -{% block title %}New - {{ config.NAME }}{% endblock %} -{% block header %} - -{% endblock %} -{% block content %} -
-
-{% if thread %} -

Replying to {{ content }}

-{{ utils.display_thread(thread) }} -{% else %} -
-{% if request.args.get("question") == "1" %} -NoteQuestion -{% else %} -NoteQuestion -{% endif %} -
-{% endif %} -
- - - -{% if reply %}{% endif %} - -

- -

-{% for emoji in emojis %} -{{ emoji | emojify | safe }} -{% endfor %} -{% for emoji in custom_emojis %} -{{emoji.name}} -{% endfor %} -

- - - -

- -

-

- -

- -

- - -ask browser for location -

-

- -

- -{% if request.args.get("question") == "1" %} -
-

Open for:

- -

- - {% for i in range(4) %} -

- {% endfor %} - -
-{% endif %} - - -
- - -
- -{% endblock %} diff --git a/templates/note.html b/templates/note.html deleted file mode 100644 index a4b9c73..0000000 --- a/templates/note.html +++ /dev/null @@ -1,27 +0,0 @@ -{% extends "layout.html" %} -{% import 'utils.html' as utils %} -{% block title %}{{ config.NAME }}{{ note.activity.object | get_text | html2plaintext | trim | truncate(50) }}"{% endblock %} -{% block header %} - - - - - - - - - - - -{% endblock %} -{% block content %} -
-{% if request.path != "/admin/thread" %} -{% include "header.html" %} -{% endif %} -
-{{ utils.display_thread(thread, likes=likes, shares=shares) }} -
-
-{% endblock %} -{% block links %}{% endblock %} diff --git a/templates/note_debug.html b/templates/note_debug.html deleted file mode 100644 index f34d61c..0000000 --- a/templates/note_debug.html +++ /dev/null @@ -1,22 +0,0 @@ -{% extends "layout.html" %} -{% import 'utils.html' as utils %} -{% block title %}{{ config.NAME }}: "{{ note.activity.object.content | html2plaintext | truncate(50) }}"{% endblock %} -{% block header %} - - - - - - - - - - -{% endblock %} -{% block content %} -
-{% include "header.html" %} -
{{ thread | remove_mongo_id | tojson(indent=4) }}
-
-{% endblock %} -{% block links %}{% endblock %} diff --git a/templates/remote_follow.html b/templates/remote_follow.html deleted file mode 100644 index 065dff6..0000000 --- a/templates/remote_follow.html +++ /dev/null @@ -1,18 +0,0 @@ -{% extends "layout.html" %} -{% import 'utils.html' as utils %} -{% block header %} -{% endblock %} -{% block content %} -
-{% include "header.html" %} -
-

Remote follow @{{ config.USERNAME }}@{{ config.DOMAIN }}

- -
- - - -
-
-
-{% endblock %} diff --git a/templates/stream.html b/templates/stream.html deleted file mode 100644 index 4c10761..0000000 --- a/templates/stream.html +++ /dev/null @@ -1,266 +0,0 @@ -{% extends "layout.html" %} -{% import 'utils.html' as utils %} -{% block title %}{% if request.path == url_for('admin.admin_stream') %}Stream{% elif actor_id %}Profile {{ actor.name }}{% elif list_name %}List {{ list_name }}{% else %}Notifications{% endif %} - {{ config.NAME }}{% endblock %} -{% block content %} -
-
- -{% if list_name %} -

{{ list_name }}

-{% endif %} - -{% if request.path == url_for('admin.admin_notifications') and unread_notifications_count %} -
-
- - - - -
-
-{% endif %} - -{% if actor %} -{% set actor_redir = request.path + "?actor_id=" + request.args.get('actor_id') %} - -
-
- {% if follower %}follows you!{% endif %} - -{% if following %} -
- - - - -
-
- - - - -
- - -{% if lists %} -
- - - - - -
-{% endif %} - -{% for l in lists %} -{% if actor.id in l.members %} -
- - - - -
- - -{% endif %} -{% endfor %} - - - - -{% else %} -
- - - - -
-
- - - - -
- - -{% endif %} -
- - - -{% if not actor.icon %} - -{% else %} -{% endif %} - -
-
{{ (actor.name or actor.preferredUsername) | clean | replace_custom_emojis(actor) | safe }}
-@{{ actor.preferredUsername }}@{{ actor | url_or_id | get_url | domain }}
-{% if actor.manuallyApprovesFollowers %}
-Manually approves followers
-{% endif %}
-{% if actor.summary %}
-{{ actor.summary | clean | replace_custom_emojis(actor) | safe }}
-{% endif %}
-{% if actor.attachment %}
-{% for item in actor.attachment %}
-{% if item.type == "PropertyValue" %}
-{{ item.name }}: {{ item.value | clean | replace_custom_emojis(actor) | safe }}
-{% endif %}
-{% endfor %}
-{% endif %}
-{% endif %}
-{% for item in inbox_data %}
-{% if 'actor' in item.meta %}
-{% if item | has_type('Create') %}
-{% if request.path.startswith("/admin/notifications") and not item.meta.reply_acked and item.meta.object_visibility | visibility_is_public %}
-{% endif %}
-{{ utils.display_in_reply_to(item.meta, item.activity.object) }}
-{{ utils.display_note(item.activity.object, meta=item.meta) }}
-{% else %}
-{% if item | has_type('Announce') %}
-{% set boost_actor = item.meta.actor %}
-{% if boost_actor %}
-{{ utils.display_actor_box(boost_actor, after="boosted") }}
-{{ utils.display_in_reply_to(item.meta, item.activity.object) }}
-{% if request.path == url_for('admin.admin_notifications') %}
-{% if item.meta.notification_unread %}new{% endif %}
-{{ (item.activity.published or item.meta.published) | format_timeago }}
-{% endif %}
-{% endif %}
-{% if item.meta.object %}
-{{ utils.display_note(item.meta.object, meta=item.meta) }}
-{% endif %}
-{% endif %}
-{% if item | has_type('Like') %}
-{% set boost_actor = item.meta.actor %}
-{{ utils.display_actor_box(boost_actor, after="liked") }}
-{{ utils.display_in_reply_to(item.meta, item.activity.object) }}
-{% if item.meta.notification_unread %}new{% endif %}
-{{ (item.activity.published or item.meta.published) | format_timeago }}
-{% if item.meta.object %}
-{{ utils.display_note(item.meta.object, meta=item.meta) }}
-{% endif %}
-{% endif %}
-{% if item | has_type('Follow') %}
-new follower
-{{ item.meta.follow_status }}
-{% if config.MANUALLY_APPROVES_FOLLOWERS and item.meta.follow_status != "accepted" %}
-{% endif %}
-{% if item.meta.notification_unread %}new{% endif %}
-{{ (item.activity.published or item.meta.published) | format_timeago }}
-profile
-{% if item.meta.notification_follows_back %}already following
-{% else %}
-{% endif %}
-{{ utils.display_actor_inline(item.meta.actor, size=50) }}
-{% elif item | has_type('Accept') %}
-you started following
-{% if item.meta.notification_unread %}new{% endif %}
-{{ (item.activity.published or item.meta.published) | format_timeago }}
-profile
-{% if item.meta.notification_follows_back %}follows you back{% endif %}
-{{ utils.display_actor_inline(item.meta.actor, size=50) }}
-{% elif item | has_type('Reject') %}
-rejected your follow request
-{% if item.meta.notification_unread %}new{% endif %}
-{{ (item.activity.published or item.meta.published) | format_timeago }}
-profile
-{% if item.meta.notification_follows_back %}follows you{% endif %}
-{{ utils.display_actor_inline(item.meta.actor, size=50) }}
-{% elif item | has_type('Undo') %}
-unfollowed you
-{% if item.meta.notification_unread %}new{% endif %}
-{{ (item.activity.published or item.meta.published) | format_timeago }}
-{{ utils.display_actor_inline(item.meta.actor, size=50) }}
-{% else %}
-{% endif %}
-{% endif %}
-{% else %}
-{% if item | has_type('question_ended') %}
-poll ended
-{{ utils.display_note(item.activity, meta={"object_visibility": "PUBLIC"}) }}
-{% endif %}
-{% endif %}
-{% endfor %}
-{{ utils.display_pagination(older_than, newer_than) }}
-{% endblock %}
diff --git a/templates/stream_debug.html b/templates/stream_debug.html
deleted file mode 100644
index 6b4a96e..0000000
--- a/templates/stream_debug.html
+++ /dev/null
@@ -1,19 +0,0 @@
-{% extends "layout.html" %}
-{% import 'utils.html' as utils %}
-{% block title %}{% if request.path == url_for('admin_stream') %}Stream{% else %}Notifications{% endif %} - {{ config.NAME }}{% endblock %}
-{% block content %}
-{% include "header.html" %} -
- -
- {% for item in inbox_data %} -
{{ item |remove_mongo_id|tojson(indent=4) }}
- {% endfor %} - - {{ utils.display_pagination(older_than, newer_than) }} -
-
- -
-{% endblock %}
diff --git a/templates/tags.html b/templates/tags.html
deleted file mode 100644
index ea983b2..0000000
--- a/templates/tags.html
+++ /dev/null
@@ -1,35 +0,0 @@
-{% extends "layout.html" %}
-{% import 'utils.html' as utils %}
-{% block title %}#{{ tag }} - {{ config.NAME }}{% endblock %}
-{% block header %}
-{% endblock %}
-{% block content %}
-{% include "header.html" %}
-#{{ tag }}
-{% for item in outbox_data %}
-{{ utils.display_note(item.activity.object, meta=item.meta) }}
-{% endfor %}
-{% endblock %}
-{% block links %}
-{% endblock %}
diff --git a/templates/u2f.html b/templates/u2f.html
deleted file mode 100644
index 0d94691..0000000
--- a/templates/u2f.html
+++ /dev/null
@@ -1,29 +0,0 @@
-{% extends "layout.html" %}
-{% import 'utils.html' as utils %}
-{% block header %}
-{% endblock %}
-{% block content %}
-{% if session.logged_in %}logged{% else %}not logged{% endif %}
-{% endblock %}
diff --git a/templates/utils.html b/templates/utils.html
deleted file mode 100644
index 842422a..0000000
--- a/templates/utils.html
+++ /dev/null
@@ -1,456 +0,0 @@
-{% macro display_actor_inline(follower, size=50) -%}
-{% if follower and follower.id %}
-{% if not follower.icon %}
-{% else %}
-{% endif %}
-{{ (follower.name or follower.preferredUsername) | clean | replace_custom_emojis(follower) | safe }}
-@{{ follower.preferredUsername }}@{{ follower | url_or_id | get_url | domain }}
-{% endif %}
-{%- endmacro %}
-
-{% macro display_actor_icon(follower, size=50) -%}
-{% if follower and follower.id %}
-{% if not follower.icon %}
-{% else %}
-{% endif %}
-{% endif %}
-{%- endmacro %}
-
-{% macro display_actor_oneline(follower) -%}
-{% if follower and follower.id %}
-{{ (follower.name or follower.preferredUsername) | clean | replace_custom_emojis(follower) | safe }}
-@{{ follower.preferredUsername }}@{{ follower | url_or_id | get_url | domain }}
-{% endif %}
-{%- endmacro %}
-
-{% macro display_actor_box(actor, after=None, before=None, before_url=None) -%}
-{% if not actor.icon %}
-{% set icon_url = "/static/nopic.png" %}
-{% else %}
-{% set icon_url = actor.icon.url | get_actor_icon_url(50) %}
-{% endif %}
-{% if before and before_url %}
-{{ before }}
-{{ (actor.name or actor.preferredUsername) | clean | replace_custom_emojis(actor) | safe }}
-{% endif %}
-{% if after %}
-{{ (actor.name or actor.preferredUsername) | clean | replace_custom_emojis(actor) | safe }}
-{{ after }}
-{% endif %}
-{%- endmacro %}
-
-{% macro display_in_reply_to(meta, obj) -%}
-{% if meta.in_reply_to_actor %}
-{{ display_actor_box(meta.in_reply_to_actor, before="in reply to", before_url=obj.inReplyTo) }}
-{% elif meta.in_reply_to_self %}
-self reply
-{% endif %}
-{%- endmacro %}
-
-{% macro display_note(obj, perma=False, likes=[], shares=[], meta={}, no_color=False) -%}
-{% if meta.object_actor %}
-{% set actor = meta.object_actor %}
-{% elif meta.actor %}
-{% set actor = meta.actor %}
-{% else %}
-{% set actor = obj.attributedTo | get_actor %}
-{% endif %}
-{% if session.logged_in %}
-{% set perma_id = obj.id | permalink_id %}
-{% set redir = request.path + "?" + request.query_string.decode() + "#activity-" + perma_id %}
-{% set aid = obj.id | quote_plus %}
-{% endif %}
-{% set real_end_time = obj.closed or obj.endTime %}
-{% if obj.summary %}
-{{ obj.summary | clean | replace_custom_emojis(obj) | safe }}
-{% endif %}
-{% if obj | has_type('Video') %}
-{% endif %}
-{% if obj | has_type(['Article', 'Page']) %}
-{{ obj.name }} {{ obj | url_or_id | get_url }}
-{% elif obj | has_type('Question') %}
-{{ obj.content | update_inline_imgs | clean | replace_custom_emojis(obj) | code_highlight | safe }}
-{% set total_votes = obj | get_total_answers_count(meta) %}
-{% for oneOf in obj.oneOf %}
-{% set pct = 0 %}
-{% if total_votes > 0 %}
-{% set cnt = oneOf.name | get_answer_count(obj, meta) %}
-{% set pct = cnt * 100.0 / total_votes %}
-{% endif %}
-{% if session.logged_in and not meta.poll_answers_sent and not (real_end_time | gtnow) and not (obj.id | is_from_outbox) %}
-{% endif %}
-{{ '%0.0f' | format(pct) }}%
-{{ oneOf.name }} {% if oneOf.name | poll_answer_key in meta.poll_answers_sent %}(your vote){% endif %}
-{% endfor %}
-{% if obj.anyOf %}
-{% for anyOf in obj.anyOf %}
-{% set pct = 0 %}
-{% if total_votes > 0 %}
-{% set cnt = anyOf.name | get_answer_count(obj, meta) %}
-{% set pct = cnt * 100.0 / total_votes %}
-{% endif %}
-{% set already_voted = anyOf.name | poll_answer_key in meta.poll_answers_sent %}
-{% if session.logged_in and not already_voted and not (real_end_time | gtnow) and not (obj.id | is_from_outbox) %}
-{% elif session.logged_in and already_voted and not (real_end_time | gtnow) %}
-{% endif %}
-{{ '%0.0f' | format(pct) }}%
-{{ anyOf.name }} {% if anyOf.name | poll_answer_key in meta.poll_answers_sent %}(your vote){% endif %}
-{% endfor %}
-{% endif %}
-{% if real_end_time | gtnow %}
-Ended {{ real_end_time | format_timeago }} with {{ total_votes }} vote{% if total_votes | gtone %}s{% endif %}.
-{% else %}
-Ends {{ real_end_time | format_timeago }} ({{ total_votes }} vote{% if total_votes | gtone %}s{% endif %} as of now).
-{% endif %}
-{% else %}
-{{ obj | get_text | update_inline_imgs | clean | replace_custom_emojis(obj) | code_highlight | safe }}
-{% endif %}
-{% if obj | has_place %}
-Location: {{ obj | get_place | safe }}
-{% endif %}
-{% if obj.attachment and obj | has_type('Note') %}
-{% if obj.sensitive and not request.args.get("show_sensitive") == perma_id %}
-{% for k, v in request.args.items() %}
-{% endfor %}
-{% else %}
-{% if obj.sensitive %}
-{% for k, v in request.args.items() %}
-{% if k != "show_sensitive" %}
-{% endif %}
-{% endfor %}
-{% endif %}
-{% if obj.attachment | not_only_imgs %}
-Attachments
-{% endif %}
-{% for a in (obj | iter_note_attachments) %}
-{% if (a.mediaType and a.mediaType.startswith("image/")) or (a.type and a.type == 'Image') %}
-{{ a.name }}
-{% elif (a.mediaType and a.mediaType.startswith("video/")) %}
-{% elif (a.mediaType and a.mediaType.startswith("audio/")) %}
-{% else %}
-{% if a.filename %}{{ a.filename }}{% else %}{{ a.url }}{% endif %}
-{% endif %}
-{% endfor %}
-{% if obj.attachment | not_only_imgs %}
-{% endif %}
-{% endif %}
-{% endif %}
-{% if meta and meta.og_metadata and obj | has_type('Note') %}
-{% for og in meta.og_metadata %}
-{% if og.url %}
-{% if og.image | get_og_image_url %}
-{% endif %}
-{{ og.title }}
-{{ og.description | truncate(80) }}
-{{ og.site_name }}
-{% endif %}
-{% endfor %}
-{% endif %}
-{% if likes or shares %}
-{% endif %}
-{%- endmacro %}
-
-{% macro display_thread(thread, likes=[], shares=[]) -%}
-{% for reply in thread %}
-{% if reply._requested %}
-{{ display_in_reply_to(reply.meta, reply.activity.object) }}
-{{ display_note(reply.activity.object, perma=True, likes=likes, shares=shares, meta=reply.meta) }}
-{% else %}
-{{ display_in_reply_to(reply.meta, reply.activity.object) }}
-{{ display_note(reply.activity.object, perma=False, meta=reply.meta) }}
-{% endif %}
-{% endfor %}
-{% endmacro -%}
-
-{% macro display_pagination(older_than, newer_than) -%}
-{% if older_than %}
-{% endif %}
-{% if newer_than %}
-{% endif %}
-{% endmacro -%} - -{% macro display_pagination_links(older_than, newer_than) -%} -{% if older_than %}{% endif %} -{% if newer_than %}{% endif %} -{% endmacro -%} diff --git a/tests/__init__.py b/tests/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/tests/conftest.py b/tests/conftest.py new file mode 100644 index 0000000..b78cb2c --- /dev/null +++ b/tests/conftest.py @@ -0,0 +1,39 @@ +from typing import Generator + +import pytest +import pytest_asyncio +from fastapi.testclient import TestClient + +from app.database import Base +from app.database import async_engine +from app.database import async_session +from app.database import engine +from app.main import app +from tests.factories import _Session + + +@pytest_asyncio.fixture +async def async_db_session(): + async with async_session() as session: + async with async_engine.begin() as conn: + await conn.run_sync(Base.metadata.create_all) + yield session + async with async_engine.begin() as conn: + await conn.run_sync(Base.metadata.drop_all) + + +@pytest.fixture +def db() -> Generator: + Base.metadata.create_all(bind=engine) + with _Session() as db_session: + try: + yield db_session + finally: + db_session.close() + Base.metadata.drop_all(bind=engine) + + +@pytest.fixture +def client(db) -> Generator: + with TestClient(app) as c: + yield c diff --git a/tests/factories.py b/tests/factories.py new file mode 100644 index 0000000..2c397e8 --- /dev/null +++ b/tests/factories.py @@ -0,0 +1,280 @@ +from urllib.parse import urlparse +from uuid import uuid4 + +import factory # type: ignore +from Crypto.PublicKey import RSA +from dateutil.parser import isoparse +from sqlalchemy import orm + +from app import activitypub as ap +from app import actor +from app import models +from app.actor import RemoteActor +from app.ap_object import RemoteObject +from app.database import SessionLocal +from app.utils.datetime import now + +_Session = orm.scoped_session(SessionLocal) + + +def generate_key() -> tuple[str, str]: + k = RSA.generate(1024) + return k.exportKey("PEM").decode(), k.publickey().exportKey("PEM").decode() + + +def build_follow_activity( + from_remote_actor: actor.RemoteActor, + for_remote_actor: actor.RemoteActor, + outbox_public_id: str | None = None, +) -> ap.RawObject: + return { + "@context": ap.AS_CTX, + "type": "Follow", + "id": from_remote_actor.ap_id + "/follow/" + (outbox_public_id or uuid4().hex), + "actor": from_remote_actor.ap_id, + "object": for_remote_actor.ap_id, + } + + +def build_delete_activity( + from_remote_actor: actor.RemoteActor | models.Actor, + deleted_object_ap_id: str, + outbox_public_id: str | None = None, +) -> ap.RawObject: + return { + "@context": ap.AS_CTX, + "type": "Delete", + "id": ( + from_remote_actor.ap_id # type: ignore + + "/follow/" + + (outbox_public_id or uuid4().hex) + ), + "actor": from_remote_actor.ap_id, + "object": deleted_object_ap_id, + } + + +def build_accept_activity( + from_remote_actor: actor.RemoteActor, + for_remote_object: RemoteObject, + outbox_public_id: str | None = None, +) -> ap.RawObject: + return { + "@context": ap.AS_CTX, + "type": "Accept", + "id": from_remote_actor.ap_id + "/accept/" + (outbox_public_id or uuid4().hex), + "actor": from_remote_actor.ap_id, + "object": for_remote_object.ap_id, + } + + +def build_block_activity( + from_remote_actor: actor.RemoteActor, + for_remote_actor: actor.RemoteActor, + outbox_public_id: str | None = None, +) -> ap.RawObject: + return { + "@context": ap.AS_CTX, + "type": "Block", + "id": from_remote_actor.ap_id + "/block/" + 
(outbox_public_id or uuid4().hex), + "actor": from_remote_actor.ap_id, + "object": for_remote_actor.ap_id, + } + + +def build_move_activity( + from_remote_actor: actor.RemoteActor, + for_remote_object: actor.RemoteActor, + outbox_public_id: str | None = None, +) -> ap.RawObject: + return { + "@context": ap.AS_CTX, + "type": "Move", + "id": from_remote_actor.ap_id + "/move/" + (outbox_public_id or uuid4().hex), + "actor": from_remote_actor.ap_id, + "object": from_remote_actor.ap_id, + "target": for_remote_object.ap_id, + } + + +def build_note_object( + from_remote_actor: actor.RemoteActor | models.Actor, + outbox_public_id: str | None = None, + content: str = "Hello", + to: list[str] = None, + cc: list[str] = None, + tags: list[ap.RawObject] = None, + in_reply_to: str | None = None, +) -> ap.RawObject: + published = now().replace(microsecond=0).isoformat().replace("+00:00", "Z") + context = from_remote_actor.ap_id + "/ctx/" + uuid4().hex + note_id = outbox_public_id or uuid4().hex + return { + "@context": ap.AS_CTX, + "type": "Note", + "id": from_remote_actor.ap_id + "/note/" + note_id, + "attributedTo": from_remote_actor.ap_id, + "content": content, + "to": to or [ap.AS_PUBLIC], + "cc": cc or [], + "published": published, + "context": context, + "conversation": context, + "url": from_remote_actor.ap_id + "/note/" + note_id, + "tag": tags or [], + "summary": None, + "sensitive": False, + "inReplyTo": in_reply_to, + } + + +def build_create_activity(obj: ap.RawObject) -> ap.RawObject: + return { + "@context": ap.AS_EXTENDED_CTX, + "actor": obj["attributedTo"], + "to": obj.get("to", []), + "cc": obj.get("cc", []), + "id": obj["id"] + "/activity", + "object": ap.remove_context(obj), + "published": obj["published"], + "type": "Create", + } + + +class BaseModelMeta: + sqlalchemy_session = _Session + sqlalchemy_session_persistence = "commit" + + +class RemoteActorFactory(factory.Factory): + class Meta: + model = RemoteActor + exclude = ( + "base_url", + "username", + "public_key", + "also_known_as", + ) + + class Params: + icon_url = None + summary = "I like unit tests" + also_known_as: list[str] = [] + + ap_actor = factory.LazyAttribute( + lambda o: { + "@context": ap.AS_CTX, + "type": "Person", + "id": o.base_url, + "following": o.base_url + "/following", + "followers": o.base_url + "/followers", + # "featured": ID + "/featured", + "inbox": o.base_url + "/inbox", + "outbox": o.base_url + "/outbox", + "preferredUsername": o.username, + "name": o.username, + "summary": o.summary, + "endpoints": {}, + "url": o.base_url, + "manuallyApprovesFollowers": False, + "attachment": [], + "icon": {}, + "publicKey": { + "id": f"{o.base_url}#main-key", + "owner": o.base_url, + "publicKeyPem": o.public_key, + }, + "alsoKnownAs": o.also_known_as, + } + ) + + +class ActorFactory(factory.alchemy.SQLAlchemyModelFactory): + class Meta(BaseModelMeta): + model = models.Actor + + # ap_actor + # ap_id + ap_type = "Person" + + @classmethod + def from_remote_actor(cls, ra): + return cls( + ap_type=ra.ap_type, + ap_actor=ra.ap_actor, + ap_id=ra.ap_id, + ) + + +class OutboxObjectFactory(factory.alchemy.SQLAlchemyModelFactory): + class Meta(BaseModelMeta): + model = models.OutboxObject + + # public_id + # relates_to_inbox_object_id + # relates_to_outbox_object_id + + @classmethod + def from_remote_object(cls, public_id, ro): + return cls( + public_id=public_id, + ap_type=ro.ap_type, + ap_id=ro.ap_id, + ap_context=ro.ap_context, + ap_object=ro.ap_object, + visibility=ro.visibility, + og_meta=ro.og_meta, + 
activity_object_ap_id=ro.activity_object_ap_id, + is_hidden_from_homepage=True if ro.in_reply_to else False, + ) + + +class OutgoingActivityFactory(factory.alchemy.SQLAlchemyModelFactory): + class Meta(BaseModelMeta): + model = models.OutgoingActivity + + # recipient + # outbox_object_id + + +class InboxObjectFactory(factory.alchemy.SQLAlchemyModelFactory): + class Meta(BaseModelMeta): + model = models.InboxObject + + @classmethod + def from_remote_object( + cls, + ro: RemoteObject, + actor: models.Actor, + relates_to_inbox_object_id: int | None = None, + relates_to_outbox_object_id: int | None = None, + ): + ap_published_at = now() + if "published" in ro.ap_object: + ap_published_at = isoparse(ro.ap_object["published"]) + return cls( + server=urlparse(ro.ap_id).hostname, + actor_id=actor.id, + ap_actor_id=actor.ap_id, + ap_type=ro.ap_type, + ap_id=ro.ap_id, + ap_context=ro.ap_context, + ap_published_at=ap_published_at, + ap_object=ro.ap_object, + visibility=ro.visibility, + relates_to_inbox_object_id=relates_to_inbox_object_id, + relates_to_outbox_object_id=relates_to_outbox_object_id, + activity_object_ap_id=ro.activity_object_ap_id, + # Hide replies from the stream + is_hidden_from_stream=True if ro.in_reply_to else False, + ) + + +class FollowerFactory(factory.alchemy.SQLAlchemyModelFactory): + class Meta(BaseModelMeta): + model = models.Follower + + +class FollowingFactory(factory.alchemy.SQLAlchemyModelFactory): + class Meta(BaseModelMeta): + model = models.Following diff --git a/tests/federation_test.py b/tests/federation_test.py deleted file mode 100644 index df4499e..0000000 --- a/tests/federation_test.py +++ /dev/null @@ -1,603 +0,0 @@ -import os -import time -from typing import List -from typing import Tuple - -import requests -from html2text import html2text -from little_boxes.collection import parse_collection - - -def resp2plaintext(resp): - """Convert the body of a requests reponse to plain text in order to make basic assertions.""" - return html2text(resp.text) - - -class Instance(object): - """Test instance wrapper.""" - - def __init__(self, name, host_url, docker_url=None): - self.host_url = host_url - self.docker_url = docker_url or host_url - self._create_delay = 10 - with open( - os.path.join( - os.path.dirname(os.path.abspath(__file__)), - f"fixtures/{name}/config/admin_api_key.key", - ) - ) as f: - api_key = f.read() - self._auth_headers = {"Authorization": f"Bearer {api_key}"} - - def _do_req(self, url): - """Used to parse collection.""" - url = url.replace(self.docker_url, self.host_url) - resp = requests.get(url, headers={"Accept": "application/activity+json"}) - resp.raise_for_status() - return resp.json() - - def _parse_collection(self, payload=None, url=None): - """Parses a collection (go through all the pages).""" - return parse_collection(url=url, payload=payload, fetcher=self._do_req) - - def ping(self): - """Ensures the homepage is reachable.""" - resp = requests.get(f"{self.host_url}/") - resp.raise_for_status() - assert resp.status_code == 200 - - def debug(self): - """Returns the debug infos (number of items in the inbox/outbox.""" - resp = requests.get( - f"{self.host_url}/api/debug", - headers={**self._auth_headers, "Accept": "application/json"}, - ) - resp.raise_for_status() - - return resp.json() - - def drop_db(self): - """Drops the MongoDB DB.""" - resp = requests.delete( - f"{self.host_url}/api/debug", - headers={**self._auth_headers, "Accept": "application/json"}, - ) - resp.raise_for_status() - - return resp.json() - - def block(self, actor_url) 
-> None: - """Blocks an actor.""" - # Instance1 follows instance2 - resp = requests.post( - f"{self.host_url}/api/block", - params={"actor": actor_url}, - headers=self._auth_headers, - ) - assert resp.status_code == 201 - - # We need to wait for the Follow/Accept dance - time.sleep(self._create_delay / 2) - return resp.json().get("activity") - - def follow(self, instance: "Instance") -> str: - """Follows another instance.""" - # Instance1 follows instance2 - resp = requests.post( - f"{self.host_url}/api/follow", - json={"actor": instance.docker_url}, - headers=self._auth_headers, - ) - assert resp.status_code == 201 - - # We need to wait for the Follow/Accept dance - time.sleep(self._create_delay) - return resp.json().get("activity") - - def new_note(self, content, reply=None) -> str: - """Creates a new note.""" - params = {"content": content} - if reply: - params["reply"] = reply - - resp = requests.post( - f"{self.host_url}/api/new_note", json=params, headers=self._auth_headers - ) - assert resp.status_code == 201 - - time.sleep(self._create_delay) - return resp.json().get("activity") - - def boost(self, oid: str) -> str: - """Creates an Announce activity.""" - resp = requests.post( - f"{self.host_url}/api/boost", json={"id": oid}, headers=self._auth_headers - ) - assert resp.status_code == 201 - - time.sleep(self._create_delay) - return resp.json().get("activity") - - def like(self, oid: str) -> str: - """Creates a Like activity.""" - resp = requests.post( - f"{self.host_url}/api/like", json={"id": oid}, headers=self._auth_headers - ) - assert resp.status_code == 201 - - time.sleep(self._create_delay) - return resp.json().get("activity") - - def delete(self, oid: str) -> str: - """Creates a Delete activity.""" - resp = requests.post( - f"{self.host_url}/api/note/delete", - json={"id": oid}, - headers=self._auth_headers, - ) - assert resp.status_code == 201 - - time.sleep(self._create_delay) - return resp.json().get("activity") - - def undo(self, oid: str) -> str: - """Creates a Undo activity.""" - resp = requests.post( - f"{self.host_url}/api/undo", json={"id": oid}, headers=self._auth_headers - ) - assert resp.status_code == 201 - - # We need to wait for the Follow/Accept dance - time.sleep(self._create_delay) - return resp.json().get("activity") - - def followers(self) -> List[str]: - """Parses the followers collection.""" - resp = requests.get( - f"{self.host_url}/followers", - headers={"Accept": "application/activity+json"}, - ) - resp.raise_for_status() - - data = resp.json() - - return self._parse_collection(payload=data) - - def following(self): - """Parses the following collection.""" - resp = requests.get( - f"{self.host_url}/following", - headers={"Accept": "application/activity+json"}, - ) - resp.raise_for_status() - - data = resp.json() - - return self._parse_collection(payload=data) - - def outbox(self): - """Returns the instance outbox.""" - resp = requests.get( - f"{self.host_url}/following", - headers={"Accept": "application/activity+json"}, - ) - resp.raise_for_status() - return resp.json() - - def outbox_get(self, aid): - """Fetches a specific item from the instance outbox.""" - resp = requests.get( - aid.replace(self.docker_url, self.host_url), - headers={"Accept": "application/activity+json"}, - ) - resp.raise_for_status() - return resp.json() - - def stream_jsonfeed(self): - """Returns the "stream"'s JSON feed.""" - resp = requests.get( - f"{self.host_url}/api/stream", - headers={**self._auth_headers, "Accept": "application/json"}, - ) - resp.raise_for_status() - 
return resp.json() - - -def _instances() -> Tuple[Instance, Instance]: - """Initializes the client for the two test instances.""" - instance1 = Instance("instance1", "http://docker:5006", "http://instance1_web:5005") - instance1.ping() - - instance2 = Instance("instance2", "http://docker:5007", "http://instance2_web:5005") - instance2.ping() - - # Return the DB - instance1.drop_db() - instance2.drop_db() - - return instance1, instance2 - - -def test_follow() -> None: - """instance1 follows instance2.""" - instance1, instance2 = _instances() - # Instance1 follows instance2 - instance1.follow(instance2) - instance1_debug = instance1.debug() - assert instance1_debug["inbox"] == 1 # An Accept activity should be there - assert instance1_debug["outbox"] == 1 # We've sent a Follow activity - - instance2_debug = instance2.debug() - assert instance2_debug["inbox"] == 1 # An Follow activity should be there - assert instance2_debug["outbox"] == 1 # We've sent a Accept activity - - assert instance2.followers() == [instance1.docker_url] - assert instance1.following() == [instance2.docker_url] - - -def test_follow_unfollow(): - """instance1 follows instance2, then unfollows it.""" - instance1, instance2 = _instances() - # Instance1 follows instance2 - follow_id = instance1.follow(instance2) - instance1_debug = instance1.debug() - assert instance1_debug["inbox"] == 1 # An Accept activity should be there - assert instance1_debug["outbox"] == 1 # We've sent a Follow activity - - instance2_debug = instance2.debug() - assert instance2_debug["inbox"] == 1 # An Follow activity should be there - assert instance2_debug["outbox"] == 1 # We've sent a Accept activity - - assert instance2.followers() == [instance1.docker_url] - assert instance1.following() == [instance2.docker_url] - - instance1.undo(follow_id) - - assert instance2.followers() == [] - assert instance1.following() == [] - - instance1_debug = instance1.debug() - assert instance1_debug["inbox"] == 1 # An Accept activity should be there - assert instance1_debug["outbox"] == 2 # We've sent a Follow and a Undo activity - - instance2_debug = instance2.debug() - assert instance2_debug["inbox"] == 2 # An Follow and Undo activity should be there - assert instance2_debug["outbox"] == 1 # We've sent a Accept activity - - -def test_post_content(): - """Instances follow each other, and instance1 creates a note.""" - instance1, instance2 = _instances() - # Instance1 follows instance2 - instance1.follow(instance2) - instance2.follow(instance1) - - inbox_stream = instance2.stream_jsonfeed() - assert len(inbox_stream["items"]) == 0 - - create_id = instance1.new_note("hello") - instance2_debug = instance2.debug() - assert ( - instance2_debug["inbox"] == 3 - ) # An Follow, Accept and Create activity should be there - assert instance2_debug["outbox"] == 2 # We've sent a Accept and a Follow activity - - # Ensure the post is visible in instance2's stream - inbox_stream = instance2.stream_jsonfeed() - assert len(inbox_stream["items"]) == 1 - assert inbox_stream["items"][0]["id"] == create_id - - -def test_block_and_post_content(): - """Instances follow each other, instance2 blocks instance1, instance1 creates a new note.""" - instance1, instance2 = _instances() - # Instance1 follows instance2 - instance1.follow(instance2) - instance2.follow(instance1) - - inbox_stream = instance2.stream_jsonfeed() - assert len(inbox_stream["items"]) == 0 - - instance2.block(instance1.docker_url) - - instance1.new_note("hello") - instance2_debug = instance2.debug() - assert ( - 
instance2_debug["inbox"] == 2 - ) # An Follow, Accept activity should be there, Create should have been dropped - assert ( - instance2_debug["outbox"] == 3 - ) # We've sent a Accept and a Follow activity + the Block activity - - # Ensure the post is not visible in instance2's stream - inbox_stream = instance2.stream_jsonfeed() - assert len(inbox_stream["items"]) == 0 - - -def test_post_content_and_delete(): - """Instances follow each other, instance1 creates a new note, then deletes it.""" - instance1, instance2 = _instances() - # Instance1 follows instance2 - instance1.follow(instance2) - instance2.follow(instance1) - - inbox_stream = instance2.stream_jsonfeed() - assert len(inbox_stream["items"]) == 0 - - create_id = instance1.new_note("hello") - instance2_debug = instance2.debug() - assert ( - instance2_debug["inbox"] == 3 - ) # An Follow, Accept and Create activity should be there - assert instance2_debug["outbox"] == 2 # We've sent a Accept and a Follow activity - - # Ensure the post is visible in instance2's stream - inbox_stream = instance2.stream_jsonfeed() - assert len(inbox_stream["items"]) == 1 - assert inbox_stream["items"][0]["id"] == create_id - - instance1.delete(f"{create_id}/activity") - instance2_debug = instance2.debug() - assert ( - instance2_debug["inbox"] == 4 - ) # An Follow, Accept and Create and Delete activity should be there - assert instance2_debug["outbox"] == 2 # We've sent a Accept and a Follow activity - - # Ensure the post has been delete from instance2's stream - inbox_stream = instance2.stream_jsonfeed() - assert len(inbox_stream["items"]) == 0 - - -def test_post_content_and_like(): - """Instances follow each other, instance1 creates a new note, instance2 likes it.""" - instance1, instance2 = _instances() - # Instance1 follows instance2 - instance1.follow(instance2) - instance2.follow(instance1) - - create_id = instance1.new_note("hello") - - # Ensure the post is visible in instance2's stream - inbox_stream = instance2.stream_jsonfeed() - assert len(inbox_stream["items"]) == 1 - assert inbox_stream["items"][0]["id"] == create_id - - # Now, instance2 like the note - like_id = instance2.like(f"{create_id}/activity") - - instance1_debug = instance1.debug() - assert instance1_debug["inbox"] == 3 # Follow, Accept and Like - assert instance1_debug["outbox"] == 3 # Folllow, Accept, and Create - - note = instance1.outbox_get(f"{create_id}/activity") - assert "likes" in note - assert note["likes"]["totalItems"] == 1 - likes = instance1._parse_collection(url=note["likes"]["first"]) - assert len(likes) == 1 - assert likes[0]["id"] == like_id - - -def test_post_content_and_like_unlike() -> None: - """Instances follow each other, instance1 creates a new note, instance2 likes it, then unlikes it.""" - instance1, instance2 = _instances() - # Instance1 follows instance2 - instance1.follow(instance2) - instance2.follow(instance1) - - create_id = instance1.new_note("hello") - - # Ensure the post is visible in instance2's stream - inbox_stream = instance2.stream_jsonfeed() - assert len(inbox_stream["items"]) == 1 - assert inbox_stream["items"][0]["id"] == create_id - - # Now, instance2 like the note - like_id = instance2.like(f"{create_id}/activity") - - instance1_debug = instance1.debug() - assert instance1_debug["inbox"] == 3 # Follow, Accept and Like - assert instance1_debug["outbox"] == 3 # Folllow, Accept, and Create - - note = instance1.outbox_get(f"{create_id}/activity") - assert "likes" in note - assert note["likes"]["totalItems"] == 1 - likes = 
instance1._parse_collection(url=note["likes"]["first"]) - assert len(likes) == 1 - assert likes[0]["id"] == like_id - - instance2.undo(like_id) - - instance1_debug = instance1.debug() - assert instance1_debug["inbox"] == 4 # Follow, Accept and Like and Undo - assert instance1_debug["outbox"] == 3 # Folllow, Accept, and Create - - note = instance1.outbox_get(f"{create_id}/activity") - assert "likes" in note - assert note["likes"]["totalItems"] == 0 - - -def test_post_content_and_boost() -> None: - """Instances follow each other, instance1 creates a new note, instance2 "boost" it.""" - instance1, instance2 = _instances() - # Instance1 follows instance2 - instance1.follow(instance2) - instance2.follow(instance1) - - create_id = instance1.new_note("hello") - - # Ensure the post is visible in instance2's stream - inbox_stream = instance2.stream_jsonfeed() - assert len(inbox_stream["items"]) == 1 - assert inbox_stream["items"][0]["id"] == create_id - - # Now, instance2 like the note - boost_id = instance2.boost(f"{create_id}/activity") - - instance1_debug = instance1.debug() - assert instance1_debug["inbox"] == 3 # Follow, Accept and Announce - assert instance1_debug["outbox"] == 3 # Folllow, Accept, and Create - - note = instance1.outbox_get(f"{create_id}/activity") - assert "shares" in note - assert note["shares"]["totalItems"] == 1 - shares = instance1._parse_collection(url=note["shares"]["first"]) - assert len(shares) == 1 - assert shares[0]["id"] == boost_id - - -def test_post_content_and_boost_unboost() -> None: - """Instances follow each other, instance1 creates a new note, instance2 "boost" it, then "unboost" it.""" - instance1, instance2 = _instances() - # Instance1 follows instance2 - instance1.follow(instance2) - instance2.follow(instance1) - - create_id = instance1.new_note("hello") - - # Ensure the post is visible in instance2's stream - inbox_stream = instance2.stream_jsonfeed() - assert len(inbox_stream["items"]) == 1 - assert inbox_stream["items"][0]["id"] == create_id - - # Now, instance2 like the note - boost_id = instance2.boost(f"{create_id}/activity") - - instance1_debug = instance1.debug() - assert instance1_debug["inbox"] == 3 # Follow, Accept and Announce - assert instance1_debug["outbox"] == 3 # Folllow, Accept, and Create - - note = instance1.outbox_get(f"{create_id}/activity") - assert "shares" in note - assert note["shares"]["totalItems"] == 1 - shares = instance1._parse_collection(url=note["shares"]["first"]) - assert len(shares) == 1 - assert shares[0]["id"] == boost_id - - instance2.undo(boost_id) - - instance1_debug = instance1.debug() - assert instance1_debug["inbox"] == 4 # Follow, Accept and Announce and Undo - assert instance1_debug["outbox"] == 3 # Folllow, Accept, and Create - - note = instance1.outbox_get(f"{create_id}/activity") - assert "shares" in note - assert note["shares"]["totalItems"] == 0 - - -def test_post_content_and_post_reply() -> None: - """Instances follow each other, instance1 creates a new note, instance2 replies to it.""" - instance1, instance2 = _instances() - # Instance1 follows instance2 - instance1.follow(instance2) - instance2.follow(instance1) - - inbox_stream = instance2.stream_jsonfeed() - assert len(inbox_stream["items"]) == 0 - - instance1_create_id = instance1.new_note("hello") - instance2_debug = instance2.debug() - assert ( - instance2_debug["inbox"] == 3 - ) # An Follow, Accept and Create activity should be there - assert instance2_debug["outbox"] == 2 # We've sent a Accept and a Follow activity - - # Ensure the post is 
visible in instance2's stream - instance2_inbox_stream = instance2.stream_jsonfeed() - assert len(instance2_inbox_stream["items"]) == 1 - assert instance2_inbox_stream["items"][0]["id"] == instance1_create_id - - instance2_create_id = instance2.new_note( - f"hey @instance1@{instance1.docker_url}", - reply=f"{instance1_create_id}/activity", - ) - instance2_debug = instance2.debug() - assert ( - instance2_debug["inbox"] == 3 - ) # An Follow, Accept and Create activity should be there - assert ( - instance2_debug["outbox"] == 3 - ) # We've sent a Accept and a Follow and a Create activity - - instance1_debug = instance1.debug() - assert ( - instance1_debug["inbox"] == 3 - ) # An Follow, Accept and Create activity should be there - assert ( - instance1_debug["outbox"] == 3 - ) # We've sent a Accept and a Follow and a Create activity - - instance1_inbox_stream = instance1.stream_jsonfeed() - assert len(instance1_inbox_stream["items"]) == 1 - assert instance1_inbox_stream["items"][0]["id"] == instance2_create_id - - instance1_note = instance1.outbox_get(f"{instance1_create_id}/activity") - assert "replies" in instance1_note - assert instance1_note["replies"]["totalItems"] == 1 - replies = instance1._parse_collection(url=instance1_note["replies"]["first"]) - assert len(replies) == 1 - assert replies[0]["id"] == f"{instance2_create_id}/activity" - - -def test_post_content_and_post_reply_and_delete() -> None: - """Instances follow each other, instance1 creates a new note, instance2 replies to it, then deletes its reply.""" - instance1, instance2 = _instances() - # Instance1 follows instance2 - instance1.follow(instance2) - instance2.follow(instance1) - - inbox_stream = instance2.stream_jsonfeed() - assert len(inbox_stream["items"]) == 0 - - instance1_create_id = instance1.new_note("hello") - instance2_debug = instance2.debug() - assert ( - instance2_debug["inbox"] == 3 - ) # An Follow, Accept and Create activity should be there - assert instance2_debug["outbox"] == 2 # We've sent a Accept and a Follow activity - - # Ensure the post is visible in instance2's stream - instance2_inbox_stream = instance2.stream_jsonfeed() - assert len(instance2_inbox_stream["items"]) == 1 - assert instance2_inbox_stream["items"][0]["id"] == instance1_create_id - - instance2_create_id = instance2.new_note( - f"hey @instance1@{instance1.docker_url}", - reply=f"{instance1_create_id}/activity", - ) - instance2_debug = instance2.debug() - assert ( - instance2_debug["inbox"] == 3 - ) # An Follow, Accept and Create activity should be there - assert ( - instance2_debug["outbox"] == 3 - ) # We've sent a Accept and a Follow and a Create activity - - instance1_debug = instance1.debug() - assert ( - instance1_debug["inbox"] == 3 - ) # An Follow, Accept and Create activity should be there - assert ( - instance1_debug["outbox"] == 3 - ) # We've sent a Accept and a Follow and a Create activity - - instance1_inbox_stream = instance1.stream_jsonfeed() - assert len(instance1_inbox_stream["items"]) == 1 - assert instance1_inbox_stream["items"][0]["id"] == instance2_create_id - - instance1_note = instance1.outbox_get(f"{instance1_create_id}/activity") - assert "replies" in instance1_note - assert instance1_note["replies"]["totalItems"] == 1 - - instance2.delete(f"{instance2_create_id}/activity") - - instance1_debug = instance1.debug() - assert ( - instance1_debug["inbox"] == 4 - ) # An Follow, Accept and Create and Delete activity should be there - assert ( - instance1_debug["outbox"] == 3 - ) # We've sent a Accept and a Follow and a 
Create activity - - instance1_note = instance1.outbox_get(f"{instance1_create_id}/activity") - assert "replies" in instance1_note - assert instance1_note["replies"]["totalItems"] == 0 diff --git a/tests/fixtures/instance1/config/.gitignore b/tests/fixtures/instance1/config/.gitignore deleted file mode 100644 index d6b7ef3..0000000 --- a/tests/fixtures/instance1/config/.gitignore +++ /dev/null @@ -1,2 +0,0 @@ -* -!.gitignore diff --git a/tests/fixtures/instance1/config/me.yml b/tests/fixtures/instance1/config/me.yml deleted file mode 100644 index dad9a91..0000000 --- a/tests/fixtures/instance1/config/me.yml +++ /dev/null @@ -1,8 +0,0 @@ -username: 'instance1' -name: 'Instance 1' -icon_url: 'https://sos-ch-dk-2.exo.io/microblogpub/microblobpub.png' -domain: 'instance1_web:5005' -summary: 'instance1 summary' -pass: '$2b$12$nEgJMgaYbXSPOvgnqM4jSeYnleKhXqsFgv/o3hg12x79uEdsR4cUy' # hello -https: false -hide_following: false diff --git a/tests/fixtures/instance2/config/.gitignore b/tests/fixtures/instance2/config/.gitignore deleted file mode 100644 index d6b7ef3..0000000 --- a/tests/fixtures/instance2/config/.gitignore +++ /dev/null @@ -1,2 +0,0 @@ -* -!.gitignore diff --git a/tests/fixtures/instance2/config/me.yml b/tests/fixtures/instance2/config/me.yml deleted file mode 100644 index e22d971..0000000 --- a/tests/fixtures/instance2/config/me.yml +++ /dev/null @@ -1,8 +0,0 @@ -username: 'instance2' -name: 'Instance 2' -icon_url: 'https://sos-ch-dk-2.exo.io/microblogpub/microblobpub.png' -domain: 'instance2_web:5005' -summary: 'instance2 summary' -pass: '$2b$12$nEgJMgaYbXSPOvgnqM4jSeYnleKhXqsFgv/o3hg12x79uEdsR4cUy' # hello -https: false -hide_following: false diff --git a/tests/fixtures/me.yml b/tests/fixtures/me.yml deleted file mode 100644 index 9aabaa3..0000000 --- a/tests/fixtures/me.yml +++ /dev/null @@ -1,7 +0,0 @@ -username: 'ci' -name: 'CI tests' -icon_url: 'https://sos-ch-dk-2.exo.io/microblogpub/microblobpub.png' -domain: 'localhost:5005' -summary: 'test instance summary' -pass: '$2b$12$nEgJMgaYbXSPOvgnqM4jSeYnleKhXqsFgv/o3hg12x79uEdsR4cUy' # hello -https: false diff --git a/tests/integration_test.py b/tests/integration_test.py deleted file mode 100644 index dbfe19b..0000000 --- a/tests/integration_test.py +++ /dev/null @@ -1,31 +0,0 @@ -import os - -import pytest -import requests -from html2text import html2text - - -@pytest.fixture -def config(): - """Return the current config as a dict.""" - import yaml - - with open( - os.path.join(os.path.dirname(__file__), "..", "config/me.yml"), "rb" - ) as f: - yield yaml.load(f) - - -def resp2plaintext(resp): - """Convert the body of a requests reponse to plain text in order to make basic assertions.""" - return html2text(resp.text) - - -def test_ping_homepage(config): - """Ensure the homepage is accessible.""" - resp = requests.get("http://localhost:5005") - resp.raise_for_status() - assert resp.status_code == 200 - body = resp2plaintext(resp) - assert config["name"] in body - assert f"@{config['username']}@{config['domain']}" in body diff --git a/tests/test.key b/tests/test.key new file mode 100644 index 0000000..375428a --- /dev/null +++ b/tests/test.key @@ -0,0 +1,27 @@ +-----BEGIN RSA PRIVATE KEY----- +MIIEowIBAAKCAQEAvYhynEC0l2WVpXoPutfhhZHEeQyyoHiMszOfl1EHM50V0xOC +XCoXd/i5Hsa6dWswyjftOtSmdknY5Whr6LatwNu+i/tlsjmHSGgdhUxLhbj4Xc5T +LQWxDbS1cg49IwSZFYSIrBw2yfPI3dpMNzYvBt8CKAk0zodypHzdfSKPbSRIyBAy +SuG+mJsxsg9tx9CgWNrizauj/zVSWa/cRvNTvIwlxs1J516QJ0px3NygKqPMP2I4 +zNkhKFzaNDLzuv4zMsW8UNoM+Mlpf6+NbHQycUC9gIqywrP21E7YFmdljyr5cAfr 
+qn+KgDsQTpDSINFE1oUanY0iadKvFXjD9uQLfwIDAQABAoIBAAtqK1TjxLyVfqS/ +rDDZjZiIxedwb1WgzQCB7GulkqR2Inla5G/+jPlJvoRu/Y3SzdZv9dakNf5LxkdS +uaUDU4WY9mnh0ycftdkThCuiA65jDHpB0dqVTCuCJadf2ijAvyN/nueWr2oMR52s +5wgwODbWuX+Fxmtl1u63InPF4BN3kEQcGP4pgXMiQ2QEwjxMubG7fZTuHFChsZMZ +0QyHy0atmauK8+1FeseoZv7LefgjE+UhAKnIz5z/Ij4erGRaWJUKe5YS7i8nTT6M +W+SJ/gs/l6vOUmrqHZaXsp29pvseY23akgGnZciHJfuj/vxMJjGfZVM2ls+MUkh4 +tdEZ0NECgYEAxRGcRxhQyOdiohcsH4efG03mB7u+JBuvt33oFXWOCpW7lenAr9qg +3hm30lZq95ST3XilqGldgIW2zpHCkSLXk/lsJteNC9EEk8HuTDJ7Gd4SBiXisELd +IY147SJu5KXN/kaGoDMgMCGcR7Qkr6hzsRT3308A6nMNZG0viyUMzicCgYEA9jXx +WaLe0PC8pT/yAyPJnYerSOofv+vz+3KNlopBTSRsREsCpdbyOnGCXa4bechj29Lv +0QCbQMkga2pXUPNszdUz7L0LnAi8DZhKumPxyz82kcZSxSCGsvwp9kZju/LPCIHo +j1wKW92/w47QXdzCVjgkKbDAGsSwzphEJOuMhukCgYBUKl9KZfIqu9f+TlND7BJi +APUbnG1q0oBLp/R1Jc3Sa3zAXCM1d/R4pxdBODNbJhO45QwrT0Tl3TXkJ5Cnl+/m +fQJZ3Hma8Fw6FvuFg5HbzGJ6Sbf1e7kh2WAqNyiRctb1oH1i8jLvG4u5fBCnDRTM +Lp5mu0Ey4Ix5tcA2d05uxQKBgQDDBiePIPvt9UL4gpZo9kgViAmdUBamJ3izjCGr +RQhE2r0Hu4L1ajWlJZRmMCuDY7/1uDhODXTs9GPBshJIBQoCYQcoVvaDOkf7XM6U +peY5YHERN08I5qLL1AJJGaiWj9Z+nqhgJj/uVNA5Tz6tmtg1A3Nhsqf4jCShAOu5 +cvt1QQKBgH2Lg/o9KpFLeZLVXQzW3GFB7RzDetSDbpdhBBE3o/HAtrX0foEqYfKx +JuPrlGR2L6Q8jSw7AvFErkx5g5kCgdN8mOYjCe/EsL3ctIatqaoGDrjfvgWAeanW +XxMcVRlcMFzp5XB0VQhG0nP9uvHm/eIw/izN2JN7gz3ZZp84lq3S +-----END RSA PRIVATE KEY----- \ No newline at end of file diff --git a/tests/test_actor.py b/tests/test_actor.py new file mode 100644 index 0000000..487f28f --- /dev/null +++ b/tests/test_actor.py @@ -0,0 +1,59 @@ +import httpx +import pytest +import respx +from sqlalchemy import func +from sqlalchemy import select +from sqlalchemy.orm import Session + +from app import models +from app.actor import fetch_actor +from app.database import AsyncSession +from tests import factories + + +@pytest.mark.asyncio +async def test_fetch_actor(async_db_session: AsyncSession, respx_mock) -> None: + # Given a remote actor + ra = factories.RemoteActorFactory( + base_url="https://example.com", + username="toto", + public_key="pk", + ) + respx_mock.get(ra.ap_id).mock(return_value=httpx.Response(200, json=ra.ap_actor)) + respx_mock.get( + "https://example.com/.well-known/webfinger", + params={"resource": "acct%3Atoto%40example.com"}, + ).mock(return_value=httpx.Response(200, json={"subject": "acct:toto@example.com"})) + + # When fetching this actor for the first time + saved_actor = await fetch_actor(async_db_session, ra.ap_id) + + # Then it has been fetched and saved in DB + assert respx.calls.call_count == 2 + assert ( + await async_db_session.execute(select(models.Actor)) + ).scalar_one().ap_id == saved_actor.ap_id + + # When fetching it a second time + actor_from_db = await fetch_actor(async_db_session, ra.ap_id) + + # Then it's read from the DB + assert actor_from_db.ap_id == ra.ap_id + assert ( + await async_db_session.execute(select(func.count(models.Actor.id))) + ).scalar_one() == 1 + assert respx.calls.call_count == 2 + + +def test_sqlalchemy_factory(db: Session) -> None: + ra = factories.RemoteActorFactory( + base_url="https://example.com", + username="toto", + public_key="pk", + ) + actor_in_db = factories.ActorFactory( + ap_type=ra.ap_type, + ap_actor=ra.ap_actor, + ap_id=ra.ap_id, + ) + assert actor_in_db.id == db.execute(select(models.Actor)).scalar_one().id diff --git a/tests/test_admin.py b/tests/test_admin.py new file mode 100644 index 0000000..f40767b --- /dev/null +++ b/tests/test_admin.py @@ -0,0 +1,27 @@ +import typing + +import starlette +from fastapi.testclient import TestClient + +from app.main import 
app
+
+
+def test_admin_endpoints_are_authenticated(client: TestClient) -> None:
+    routes_tested = []
+
+    for route in app.routes:
+        route = typing.cast(starlette.routing.Route, route)
+        if not route.path.startswith("/admin") or route.path == "/admin/login":
+            continue
+
+        for method in route.methods:  # type: ignore
+            resp = client.request(method, route.path)
+
+            # Admin routes should redirect to the login page
+            assert resp.status_code == 302, f"{method} {route.path} is unauthenticated"
+            assert resp.headers.get("Location", "").startswith(
+                "http://testserver/admin/login"
+            )
+            routes_tested.append((method, route.path))
+
+    assert len(routes_tested) > 0
diff --git a/tests/test_emoji.py b/tests/test_emoji.py
new file mode 100644
index 0000000..b459ff3
--- /dev/null
+++ b/tests/test_emoji.py
@@ -0,0 +1,61 @@
+from fastapi.testclient import TestClient
+from sqlalchemy.orm import Session
+
+from app import activitypub as ap
+from app import models
+from app.config import generate_csrf_token
+from app.utils.emoji import EMOJIS_BY_NAME
+from tests.utils import generate_admin_session_cookies
+
+
+def test_emoji_are_loaded() -> None:
+    assert len(EMOJIS_BY_NAME) >= 1
+
+
+def test_emoji_ap_endpoint(db: Session, client: TestClient) -> None:
+    response = client.get("/e/goose_honk", headers={"Accept": ap.AP_CONTENT_TYPE})
+    assert response.status_code == 200
+    assert response.headers["content-type"] == ap.AP_CONTENT_TYPE
+    emoji_resp = response.json()
+    assert emoji_resp["type"] == "Emoji"
+
+
+def test_emoji_ap_endpoint__not_found(db: Session, client: TestClient) -> None:
+    response = client.get("/e/goose_honk2", headers={"Accept": ap.AP_CONTENT_TYPE})
+    assert response.status_code == 404
+
+
+def test_emoji_note_with_emoji(db: Session, client: TestClient) -> None:
+    # Call the admin endpoint to create a note with a custom emoji
+    note_content = "😺 :goose_honk:"
+
+    response = client.post(
+        "/admin/actions/new",
+        data={
+            "redirect_url": "http://testserver/",
+            "content": note_content,
+            "visibility": ap.VisibilityEnum.PUBLIC.name,
+            "csrf_token": generate_csrf_token(),
+        },
+        cookies=generate_admin_session_cookies(),
+    )
+
+    # Then the server returns a 302
+    assert response.status_code == 302
+
+    # And the Note was created in the outbox
+    outbox_object = db.query(models.OutboxObject).one()
+    assert outbox_object.ap_type == "Note"
+    assert len(outbox_object.tags) == 1
+    emoji_tag = outbox_object.tags[0]
+    assert emoji_tag["type"] == "Emoji"
+    assert emoji_tag["name"] == ":goose_honk:"
+    url = emoji_tag["icon"]["url"]
+
+    # And the custom emoji is rendered in the HTML version
+    html_resp = client.get("/o/" + outbox_object.public_id)
+    html_resp.raise_for_status()
+    assert html_resp.status_code == 200
+    assert url in html_resp.text
+    # And the unicode emoji is rendered with twemoji
+    assert f'/static/twemoji/{hex(ord("😺"))[2:]}.svg' in html_resp.text
diff --git a/tests/test_httpsig.py b/tests/test_httpsig.py
new file mode 100644
index 0000000..a36775a
--- /dev/null
+++ b/tests/test_httpsig.py
@@ -0,0 +1,179 @@
+from typing import Any
+
+import fastapi
+import httpx
+import pytest
+import respx
+from fastapi.testclient import TestClient
+
+from app import activitypub as ap
+from app import httpsig
+from app.database import AsyncSession
+from app.httpsig import _KEY_CACHE
+from app.httpsig import HTTPSigInfo
+from app.key import Key
+from tests import factories
+
+_test_app = fastapi.FastAPI()
+
+
+def _httpsig_info_to_dict(httpsig_info: HTTPSigInfo) -> dict[str, Any]:
+    return {
+        "has_valid_signature":
httpsig_info.has_valid_signature,
+        "signed_by_ap_actor_id": httpsig_info.signed_by_ap_actor_id,
+    }
+
+
+@_test_app.get("/httpsig_checker")
+def get_httpsig_checker(
+    httpsig_info: httpsig.HTTPSigInfo = fastapi.Depends(httpsig.httpsig_checker),
+):
+    return _httpsig_info_to_dict(httpsig_info)
+
+
+@_test_app.post("/enforce_httpsig")
+async def post_enforce_httpsig(
+    request: fastapi.Request,
+    httpsig_info: httpsig.HTTPSigInfo = fastapi.Depends(httpsig.enforce_httpsig),
+):
+    await request.json()
+    return _httpsig_info_to_dict(httpsig_info)
+
+
+def test_enforce_httpsig__no_signature(
+    client: TestClient,
+    respx_mock: respx.MockRouter,
+) -> None:
+    with TestClient(_test_app) as client:
+        response = client.post(
+            "/enforce_httpsig",
+            headers={"Content-Type": ap.AS_CTX},
+            json={"enforce_httpsig": True},
+        )
+
+    assert response.status_code == 401
+    assert response.json()["detail"] == "Invalid HTTP sig"
+
+
+@pytest.mark.asyncio
+async def test_enforce_httpsig__with_valid_signature(
+    respx_mock: respx.MockRouter,
+    async_db_session: AsyncSession,
+) -> None:
+    # Given a remote actor
+    privkey, pubkey = factories.generate_key()
+    ra = factories.RemoteActorFactory(
+        base_url="https://example.com",
+        username="toto",
+        public_key=pubkey,
+    )
+    k = Key(ra.ap_id, f"{ra.ap_id}#main-key")
+    k.load(privkey)
+    auth = httpsig.HTTPXSigAuth(k)
+    respx_mock.get(ra.ap_id).mock(return_value=httpx.Response(200, json=ra.ap_actor))
+
+    _KEY_CACHE.clear()
+
+    async with httpx.AsyncClient(app=_test_app, base_url="http://test") as client:
+        response = await client.post(
+            "/enforce_httpsig",
+            headers={"Content-Type": ap.AS_CTX},
+            json={"enforce_httpsig": True},
+            auth=auth,  # type: ignore
+        )
+    assert response.status_code == 200
+
+    json_response = response.json()
+
+    assert json_response["has_valid_signature"] is True
+    assert json_response["signed_by_ap_actor_id"] == ra.ap_id
+
+
+def test_httpsig_checker__no_signature(
+    client: TestClient,
+    respx_mock: respx.MockRouter,
+) -> None:
+    with TestClient(_test_app) as client:
+        response = client.get(
+            "/httpsig_checker",
+            headers={"Accept": ap.AS_CTX},
+        )
+
+    assert response.status_code == 200
+    json_response = response.json()
+    assert json_response["has_valid_signature"] is False
+    assert json_response["signed_by_ap_actor_id"] is None
+
+
+@pytest.mark.asyncio
+async def test_httpsig_checker__with_valid_signature(
+    respx_mock: respx.MockRouter,
+    async_db_session: AsyncSession,
+) -> None:
+    # Given a remote actor
+    privkey, pubkey = factories.generate_key()
+    ra = factories.RemoteActorFactory(
+        base_url="https://example.com",
+        username="toto",
+        public_key=pubkey,
+    )
+    respx_mock.get(ra.ap_id).mock(return_value=httpx.Response(200, json=ra.ap_actor))
+    k = Key(ra.ap_id, f"{ra.ap_id}#main-key")
+    k.load(privkey)
+    auth = httpsig.HTTPXSigAuth(k)
+
+    _KEY_CACHE.clear()
+
+    async with httpx.AsyncClient(app=_test_app, base_url="http://test") as client:
+        response = await client.get(
+            "/httpsig_checker",
+            headers={"Accept": ap.AS_CTX},
+            auth=auth,  # type: ignore
+        )
+
+    assert response.status_code == 200
+    json_response = response.json()
+
+    assert json_response["has_valid_signature"] is True
+    assert json_response["signed_by_ap_actor_id"] == ra.ap_id
+
+
+@pytest.mark.asyncio
+async def test_httpsig_checker__with_invalid_signature(
+    respx_mock: respx.MockRouter,
+    async_db_session: AsyncSession,
+) -> None:
+    # Given a remote actor
+    privkey, pubkey = factories.generate_key()
+    ra = factories.RemoteActorFactory(
base_url="https://example.com", + username="toto", + public_key=pubkey, + ) + k = Key(ra.ap_id, f"{ra.ap_id}#main-key") + k.load(privkey) + auth = httpsig.HTTPXSigAuth(k) + + ra2_privkey, ra2_pubkey = factories.generate_key() + ra2 = factories.RemoteActorFactory( + base_url="https://example.com", + username="toto", + public_key=ra2_pubkey, + ) + assert ra.ap_id == ra2.ap_id + respx_mock.get(ra.ap_id).mock(return_value=httpx.Response(200, json=ra2.ap_actor)) + + _KEY_CACHE.clear() + + async with httpx.AsyncClient(app=_test_app, base_url="http://test") as client: + response = await client.get( + "/httpsig_checker", + headers={"Accept": ap.AS_CTX}, + auth=auth, # type: ignore + ) + + assert response.status_code == 200 + json_response = response.json() + + assert json_response["has_valid_signature"] is False + assert json_response["signed_by_ap_actor_id"] == ra.ap_id diff --git a/tests/test_inbox.py b/tests/test_inbox.py new file mode 100644 index 0000000..325ab7b --- /dev/null +++ b/tests/test_inbox.py @@ -0,0 +1,475 @@ +from unittest import mock +from uuid import uuid4 + +import httpx +import respx +from fastapi.testclient import TestClient +from sqlalchemy import func +from sqlalchemy import select +from sqlalchemy.orm import Session + +from app import activitypub as ap +from app import models +from app.actor import LOCAL_ACTOR +from app.ap_object import RemoteObject +from tests import factories +from tests.utils import mock_httpsig_checker +from tests.utils import run_process_next_incoming_activity +from tests.utils import setup_inbox_delete +from tests.utils import setup_remote_actor +from tests.utils import setup_remote_actor_as_follower +from tests.utils import setup_remote_actor_as_following + + +def test_inbox_requires_httpsig( + client: TestClient, +): + response = client.post( + "/inbox", + headers={"Content-Type": ap.AS_CTX}, + json={}, + ) + assert response.status_code == 401 + assert response.json()["detail"] == "Invalid HTTP sig" + + +def test_inbox_incoming_follow_request( + db: Session, + client: TestClient, + respx_mock: respx.MockRouter, +) -> None: + # Given a remote actor + ra = factories.RemoteActorFactory( + base_url="https://example.com", + username="toto", + public_key="pk", + ) + respx_mock.get(ra.ap_id).mock(return_value=httpx.Response(200, json=ra.ap_actor)) + + # When receiving a Follow activity + follow_activity = RemoteObject( + factories.build_follow_activity( + from_remote_actor=ra, + for_remote_actor=LOCAL_ACTOR, + ), + ra, + ) + with mock_httpsig_checker(ra): + response = client.post( + "/inbox", + headers={"Content-Type": ap.AS_CTX}, + json=follow_activity.ap_object, + ) + + # Then the server returns a 202 + assert response.status_code == 202 + + run_process_next_incoming_activity() + + # And the actor was saved in DB + saved_actor = db.execute(select(models.Actor)).scalar_one() + assert saved_actor.ap_id == ra.ap_id + + # And the Follow activity was saved in the inbox + inbox_object = db.execute(select(models.InboxObject)).scalar_one() + assert inbox_object.ap_object == follow_activity.ap_object + + # And a follower was internally created + follower = db.execute(select(models.Follower)).scalar_one() + assert follower.ap_actor_id == ra.ap_id + assert follower.actor_id == saved_actor.id + assert follower.inbox_object_id == inbox_object.id + + # And an Accept activity was created in the outbox + outbox_object = db.execute(select(models.OutboxObject)).scalar_one() + assert outbox_object.ap_type == "Accept" + assert outbox_object.activity_object_ap_id == 
follow_activity.ap_id + + # And an outgoing activity was created to track the Accept activity delivery + outgoing_activity = db.execute(select(models.OutgoingActivity)).scalar_one() + assert outgoing_activity.outbox_object_id == outbox_object.id + + +def test_inbox_incoming_follow_request__manually_approves_followers( + db: Session, + client: TestClient, + respx_mock: respx.MockRouter, +) -> None: + # Given a remote actor + ra = factories.RemoteActorFactory( + base_url="https://example.com", + username="toto", + public_key="pk", + ) + respx_mock.get(ra.ap_id).mock(return_value=httpx.Response(200, json=ra.ap_actor)) + + # When receiving a Follow activity + follow_activity = RemoteObject( + factories.build_follow_activity( + from_remote_actor=ra, + for_remote_actor=LOCAL_ACTOR, + ), + ra, + ) + with mock_httpsig_checker(ra): + response = client.post( + "/inbox", + headers={"Content-Type": ap.AS_CTX}, + json=follow_activity.ap_object, + ) + + # Then the server returns a 202 + assert response.status_code == 202 + + with mock.patch("app.boxes.MANUALLY_APPROVES_FOLLOWERS", True): + run_process_next_incoming_activity() + + # And the actor was saved in DB + saved_actor = db.execute(select(models.Actor)).scalar_one() + assert saved_actor.ap_id == ra.ap_id + + # And the Follow activity was saved in the inbox + inbox_object = db.execute(select(models.InboxObject)).scalar_one() + assert inbox_object.ap_object == follow_activity.ap_object + + # And no follower was internally created + assert db.scalar(select(func.count(models.Follower.id))) == 0 + + +def test_inbox_accept_follow_request( + db: Session, + client: TestClient, + respx_mock: respx.MockRouter, +) -> None: + # Given a remote actor + ra = setup_remote_actor(respx_mock) + actor_in_db = factories.ActorFactory.from_remote_actor(ra) + + # And a Follow activity in the outbox + follow_id = uuid4().hex + follow_from_outbox = RemoteObject( + factories.build_follow_activity( + from_remote_actor=LOCAL_ACTOR, + for_remote_actor=ra, + outbox_public_id=follow_id, + ), + LOCAL_ACTOR, + ) + outbox_object = factories.OutboxObjectFactory.from_remote_object( + follow_id, follow_from_outbox + ) + + # When receiving a Accept activity + accept_activity = RemoteObject( + factories.build_accept_activity( + from_remote_actor=ra, + for_remote_object=follow_from_outbox, + ), + ra, + ) + with mock_httpsig_checker(ra): + response = client.post( + "/inbox", + headers={"Content-Type": ap.AS_CTX}, + json=accept_activity.ap_object, + ) + + # Then the server returns a 202 + assert response.status_code == 202 + + run_process_next_incoming_activity() + + # And the Accept activity was saved in the inbox + inbox_activity = db.execute(select(models.InboxObject)).scalar_one() + assert inbox_activity.ap_type == "Accept" + assert inbox_activity.relates_to_outbox_object_id == outbox_object.id + assert inbox_activity.actor_id == actor_in_db.id + + # And a following entry was created internally + following = db.execute(select(models.Following)).scalar_one() + assert following.ap_actor_id == actor_in_db.ap_id + + +def test_inbox__create_from_follower( + db: Session, + client: TestClient, + respx_mock: respx.MockRouter, +) -> None: + # Given a remote actor + ra = setup_remote_actor(respx_mock) + + # Who is also a follower + setup_remote_actor_as_follower(ra) + + create_activity = factories.build_create_activity( + factories.build_note_object( + from_remote_actor=ra, + outbox_public_id=str(uuid4()), + content="Hello", + to=[LOCAL_ACTOR.ap_id], + ) + ) + + # When receiving a Create 
activity + ro = RemoteObject(create_activity, ra) + + with mock_httpsig_checker(ra): + response = client.post( + "/inbox", + headers={"Content-Type": ap.AS_CTX}, + json=ro.ap_object, + ) + + # Then the server returns a 202 + assert response.status_code == 202 + + # And when processing the incoming activity + run_process_next_incoming_activity() + + # Then the Create activity was saved + create_activity_from_inbox: models.InboxObject | None = db.execute( + select(models.InboxObject).where(models.InboxObject.ap_type == "Create") + ).scalar_one_or_none() + assert create_activity_from_inbox + assert create_activity_from_inbox.ap_id == ro.ap_id + + # And the Note object was created + note_activity_from_inbox: models.InboxObject | None = db.execute( + select(models.InboxObject).where(models.InboxObject.ap_type == "Note") + ).scalar_one_or_none() + assert note_activity_from_inbox + assert note_activity_from_inbox.ap_id == ro.activity_object_ap_id + + +def test_inbox__create_already_deleted_object( + db: Session, + client: TestClient, + respx_mock: respx.MockRouter, +) -> None: + # Given a remote actor + ra = setup_remote_actor(respx_mock) + + # Who is also a follower + follower = setup_remote_actor_as_follower(ra) + + # And a Create activity for a Note object + create_activity = factories.build_create_activity( + factories.build_note_object( + from_remote_actor=ra, + outbox_public_id=str(uuid4()), + content="Hello", + to=[LOCAL_ACTOR.ap_id], + ) + ) + ro = RemoteObject(create_activity, ra) + + # And a Delete activity received for the create object + setup_inbox_delete(follower.actor, ro.activity_object_ap_id) # type: ignore + + # When receiving a Create activity + with mock_httpsig_checker(ra): + response = client.post( + "/inbox", + headers={"Content-Type": ap.AS_CTX}, + json=ro.ap_object, + ) + + # Then the server returns a 202 + assert response.status_code == 202 + + # And when processing the incoming activity + run_process_next_incoming_activity() + + # Then the Create activity was saved + create_activity_from_inbox: models.InboxObject | None = db.execute( + select(models.InboxObject).where(models.InboxObject.ap_type == "Create") + ).scalar_one_or_none() + assert create_activity_from_inbox + assert create_activity_from_inbox.ap_id == ro.ap_id + # But it has the deleted flag + assert create_activity_from_inbox.is_deleted is True + + # And the Note wasn't created + assert ( + db.execute( + select(models.InboxObject).where(models.InboxObject.ap_type == "Note") + ).scalar_one_or_none() + is None + ) + + +def test_inbox__actor_is_blocked( + db: Session, + client: TestClient, + respx_mock: respx.MockRouter, +) -> None: + # Given a remote actor + ra = setup_remote_actor(respx_mock) + + # Who is also a follower + follower = setup_remote_actor_as_follower(ra) + follower.actor.is_blocked = True + db.commit() + + create_activity = factories.build_create_activity( + factories.build_note_object( + from_remote_actor=ra, + outbox_public_id=str(uuid4()), + content="Hello", + to=[LOCAL_ACTOR.ap_id], + ) + ) + + # When receiving a Create activity + ro = RemoteObject(create_activity, ra) + + with mock_httpsig_checker(ra): + response = client.post( + "/inbox", + headers={"Content-Type": ap.AS_CTX}, + json=ro.ap_object, + ) + + # Then the server returns a 202 + assert response.status_code == 202 + + # And when processing the incoming activity from a blocked actor + run_process_next_incoming_activity() + + # Then the Create activity was discarded + assert ( + db.scalar( + 
select(func.count(models.InboxObject.id)).where( + models.InboxObject.ap_type != "Follow" + ) + ) + == 0 + ) + + +def test_inbox__move_activity( + db: Session, + client: TestClient, + respx_mock: respx.MockRouter, +) -> None: + # Given a remote actor + ra = setup_remote_actor(respx_mock) + + # Which is followed by the local actor + following = setup_remote_actor_as_following(ra) + old_actor = following.actor + assert old_actor + assert following.outbox_object + follow_id = following.outbox_object.ap_id + + # When receiving a Move activity + new_ra = setup_remote_actor( + respx_mock, + base_url="https://new-account.com", + also_known_as=[ra.ap_id], + ) + move_activity = RemoteObject( + factories.build_move_activity(ra, new_ra), + ra, + ) + + with mock_httpsig_checker(ra): + response = client.post( + "/inbox", + headers={"Content-Type": ap.AS_CTX}, + json=move_activity.ap_object, + ) + + # Then the server returns a 202 + assert response.status_code == 202 + + run_process_next_incoming_activity() + + # And the Move activity was saved in the inbox + inbox_activity = db.execute(select(models.InboxObject)).scalar_one() + assert inbox_activity.ap_type == "Move" + assert inbox_activity.actor_id == old_actor.id + + # And the following actor was deleted + assert db.scalar(select(func.count(models.Following.id))) == 0 + + # And the follow was undone + assert ( + db.scalar( + select(func.count(models.OutboxObject.id)).where( + models.OutboxObject.ap_type == "Undo", + models.OutboxObject.activity_object_ap_id == follow_id, + ) + ) + == 1 + ) + + # And the new account was followed + assert ( + db.scalar( + select(func.count(models.OutboxObject.id)).where( + models.OutboxObject.ap_type == "Follow", + models.OutboxObject.activity_object_ap_id == new_ra.ap_id, + ) + ) + == 1 + ) + + # And a notification was created + notif = db.execute( + select(models.Notification).where( + models.Notification.notification_type == models.NotificationType.MOVE + ) + ).scalar_one() + assert notif.actor.ap_id == new_ra.ap_id + assert notif.inbox_object_id == inbox_activity.id + + +def test_inbox__block_activity( + db: Session, + client: TestClient, + respx_mock: respx.MockRouter, +) -> None: + # Given a remote actor + ra = setup_remote_actor(respx_mock) + + # Which is followed by the local actor + setup_remote_actor_as_following(ra) + + # When receiving a Block activity + follow_activity = RemoteObject( + factories.build_block_activity( + from_remote_actor=ra, + for_remote_actor=LOCAL_ACTOR, + ), + ra, + ) + with mock_httpsig_checker(ra): + response = client.post( + "/inbox", + headers={"Content-Type": ap.AS_CTX}, + json=follow_activity.ap_object, + ) + + # Then the server returns a 202 + assert response.status_code == 202 + + run_process_next_incoming_activity() + + # And the actor was saved in DB + saved_actor = db.execute(select(models.Actor)).scalar_one() + assert saved_actor.ap_id == ra.ap_id + + # And the Block activity was saved in the inbox + inbox_activity = db.execute( + select(models.InboxObject).where(models.InboxObject.ap_type == "Block") + ).scalar_one() + + # And a notification was created + notif = db.execute( + select(models.Notification).where( + models.Notification.notification_type == models.NotificationType.BLOCKED + ) + ).scalar_one() + assert notif.actor.ap_id == ra.ap_id + assert notif.inbox_object_id == inbox_activity.id diff --git a/tests/test_ldsig.py b/tests/test_ldsig.py new file mode 100644 index 0000000..62ea525 --- /dev/null +++ b/tests/test_ldsig.py @@ -0,0 +1,54 @@ +from copy import deepcopy 
+ +import httpx +import pytest +from respx import MockRouter + +from app import activitypub as ap +from app import ldsig +from app.database import AsyncSession +from app.key import Key +from tests import factories + +_SAMPLE_CREATE = { + "type": "Create", + "actor": "https://microblog.pub", + "object": { + "type": "Note", + "sensitive": False, + "cc": ["https://microblog.pub/followers"], + "to": ["https://www.w3.org/ns/activitystreams#Public"], + "content": "
<p>Hello world!</p>
", + "tag": [], + "attributedTo": "https://microblog.pub", + "published": "2018-05-21T15:51:59Z", + "id": "https://microblog.pub/outbox/988179f13c78b3a7/activity", + "url": "https://microblog.pub/note/988179f13c78b3a7", + }, + "@context": ap.AS_EXTENDED_CTX, + "published": "2018-05-21T15:51:59Z", + "to": ["https://www.w3.org/ns/activitystreams#Public"], + "cc": ["https://microblog.pub/followers"], + "id": "https://microblog.pub/outbox/988179f13c78b3a7", +} + + +@pytest.mark.asyncio +async def test_linked_data_sig( + async_db_session: AsyncSession, + respx_mock: MockRouter, +) -> None: + privkey, pubkey = factories.generate_key() + ra = factories.RemoteActorFactory( + base_url="https://microblog.pub", + username="dev", + public_key=pubkey, + ) + k = Key(ra.ap_id, f"{ra.ap_id}#main-key") + k.load(privkey) + respx_mock.get(ra.ap_id).mock(return_value=httpx.Response(200, json=ra.ap_actor)) + + doc = deepcopy(_SAMPLE_CREATE) + + ldsig.generate_signature(doc, k) + assert (await ldsig.verify_signature(async_db_session, doc)) is True diff --git a/tests/test_outbox.py b/tests/test_outbox.py new file mode 100644 index 0000000..09e1269 --- /dev/null +++ b/tests/test_outbox.py @@ -0,0 +1,428 @@ +from unittest import mock + +import respx +from fastapi.testclient import TestClient +from sqlalchemy import select +from sqlalchemy.orm import Session + +from app import activitypub as ap +from app import models +from app import webfinger +from app.actor import LOCAL_ACTOR +from app.config import generate_csrf_token +from tests.utils import generate_admin_session_cookies +from tests.utils import setup_inbox_note +from tests.utils import setup_outbox_note +from tests.utils import setup_remote_actor +from tests.utils import setup_remote_actor_as_follower + + +def test_outbox__no_activities( + db: Session, + client: TestClient, +) -> None: + response = client.get("/outbox", headers={"Accept": ap.AP_CONTENT_TYPE}) + + assert response.status_code == 200 + + json_response = response.json() + assert json_response["totalItems"] == 0 + assert json_response["orderedItems"] == [] + + +def test_send_follow_request( + db: Session, + client: TestClient, + respx_mock: respx.MockRouter, +) -> None: + # given a remote actor + ra = setup_remote_actor(respx_mock) + + response = client.post( + "/admin/actions/follow", + data={ + "redirect_url": "http://testserver/", + "ap_actor_id": ra.ap_id, + "csrf_token": generate_csrf_token(), + }, + cookies=generate_admin_session_cookies(), + ) + + # Then the server returns a 302 + assert response.status_code == 302 + assert response.headers.get("Location") == "http://testserver/" + + # And the Follow activity was created in the outbox + outbox_object = db.execute(select(models.OutboxObject)).scalar_one() + assert outbox_object.ap_type == "Follow" + assert outbox_object.activity_object_ap_id == ra.ap_id + + # And an outgoing activity was queued + outgoing_activity = db.execute(select(models.OutgoingActivity)).scalar_one() + assert outgoing_activity.outbox_object_id == outbox_object.id + assert outgoing_activity.recipient == ra.inbox_url + + +def test_send_delete__reverts_side_effects( + db: Session, + client: TestClient, + respx_mock: respx.MockRouter, +) -> None: + # given a remote actor + ra = setup_remote_actor(respx_mock) + + # who is a follower + follower = setup_remote_actor_as_follower(ra) + actor = follower.actor + + # with a note that has existing replies + inbox_note = setup_inbox_note(actor) + # with a bogus counter + inbox_note.replies_count = 5 + db.commit() + + # and 2 local 
replies + setup_outbox_note( + to=[ap.AS_PUBLIC], + cc=[LOCAL_ACTOR.followers_collection_id], # type: ignore + in_reply_to=inbox_note.ap_id, + ) + outbox_note2 = setup_outbox_note( + to=[ap.AS_PUBLIC], + cc=[LOCAL_ACTOR.followers_collection_id], # type: ignore + in_reply_to=inbox_note.ap_id, + ) + db.commit() + + # When deleting one of the replies + response = client.post( + "/admin/actions/delete", + data={ + "redirect_url": "http://testserver/", + "ap_object_id": outbox_note2.ap_id, + "csrf_token": generate_csrf_token(), + }, + cookies=generate_admin_session_cookies(), + ) + + # Then the server returns a 302 + assert response.status_code == 302 + assert response.headers.get("Location") == "http://testserver/" + + # And the Delete activity was created in the outbox + outbox_object = db.execute( + select(models.OutboxObject).where(models.OutboxObject.ap_type == "Delete") + ).scalar_one() + assert outbox_object.ap_type == "Delete" + assert outbox_object.activity_object_ap_id == outbox_note2.ap_id + + # And an outgoing activity was queued + outgoing_activity = db.execute(select(models.OutgoingActivity)).scalar_one() + assert outgoing_activity.outbox_object_id == outbox_object.id + assert outgoing_activity.recipient == ra.inbox_url + + # And the replies count of the replied object was refreshed correctly + db.refresh(inbox_note) + assert inbox_note.replies_count == 1 + + +def test_send_create_activity__no_content( + db: Session, + client: TestClient, + respx_mock: respx.MockRouter, +) -> None: + # given a remote actor + ra = setup_remote_actor(respx_mock) + + with mock.patch.object(webfinger, "get_actor_url", return_value=ra.ap_id): + response = client.post( + "/admin/actions/new", + data={ + "redirect_url": "http://testserver/", + "visibility": ap.VisibilityEnum.PUBLIC.name, + "csrf_token": generate_csrf_token(), + }, + cookies=generate_admin_session_cookies(), + ) + + # Then the server returns a 422 + assert response.status_code == 422 + + +def test_send_create_activity__with_attachment( + db: Session, + client: TestClient, + respx_mock: respx.MockRouter, +) -> None: + # given a remote actor + ra = setup_remote_actor(respx_mock) + + with mock.patch.object(webfinger, "get_actor_url", return_value=ra.ap_id): + response = client.post( + "/admin/actions/new", + data={ + "content": "hello", + "redirect_url": "http://testserver/", + "visibility": ap.VisibilityEnum.PUBLIC.name, + "csrf_token": generate_csrf_token(), + }, + files=[ + ("files", ("attachment.txt", "hello")), + ], + cookies=generate_admin_session_cookies(), + ) + + # Then the server returns a 302 + assert response.status_code == 302 + + # And the Follow activity was created in the outbox + outbox_object = db.execute(select(models.OutboxObject)).scalar_one() + assert outbox_object.ap_type == "Note" + assert outbox_object.summary is None + assert outbox_object.content == "
<p>hello</p>
\n" + assert len(outbox_object.attachments) == 1 + attachment = outbox_object.attachments[0] + assert attachment.type == "Document" + + attachment_response = client.get(attachment.url) + assert attachment_response.status_code == 200 + assert attachment_response.content == b"hello" + + upload = db.execute(select(models.Upload)).scalar_one() + assert upload.content_hash == ( + "324dcf027dd4a30a932c441f365a25e86b173defa4b8e58948253471b81b72cf" + ) + + outbox_attachment = db.execute(select(models.OutboxObjectAttachment)).scalar_one() + assert outbox_attachment.upload_id == upload.id + assert outbox_attachment.outbox_object_id == outbox_object.id + assert outbox_attachment.filename == "attachment.txt" + + +def test_send_create_activity__no_content_with_cw_and_attachments( + db: Session, + client: TestClient, + respx_mock: respx.MockRouter, +) -> None: + # given a remote actor + ra = setup_remote_actor(respx_mock) + + with mock.patch.object(webfinger, "get_actor_url", return_value=ra.ap_id): + response = client.post( + "/admin/actions/new", + data={ + "content_warning": "cw", + "redirect_url": "http://testserver/", + "visibility": ap.VisibilityEnum.PUBLIC.name, + "csrf_token": generate_csrf_token(), + }, + files={"files": ("attachment.txt", "hello")}, + cookies=generate_admin_session_cookies(), + ) + + # Then the server returns a 302 + assert response.status_code == 302 + + # And the Follow activity was created in the outbox + outbox_object = db.execute(select(models.OutboxObject)).scalar_one() + assert outbox_object.ap_type == "Note" + assert outbox_object.summary is None + assert outbox_object.content == "
<p>cw</p>
\n" + assert len(outbox_object.attachments) == 1 + + +def test_send_create_activity__no_followers_and_with_mention( + db: Session, + client: TestClient, + respx_mock: respx.MockRouter, +) -> None: + # given a remote actor + ra = setup_remote_actor(respx_mock) + + with mock.patch.object(webfinger, "get_actor_url", return_value=ra.ap_id): + response = client.post( + "/admin/actions/new", + data={ + "redirect_url": "http://testserver/", + "content": "hi @toto@example.com", + "visibility": ap.VisibilityEnum.PUBLIC.name, + "csrf_token": generate_csrf_token(), + }, + cookies=generate_admin_session_cookies(), + ) + + # Then the server returns a 302 + assert response.status_code == 302 + + # And the Follow activity was created in the outbox + outbox_object = db.execute(select(models.OutboxObject)).scalar_one() + assert outbox_object.ap_type == "Note" + + # And an outgoing activity was queued + outgoing_activity = db.execute(select(models.OutgoingActivity)).scalar_one() + assert outgoing_activity.outbox_object_id == outbox_object.id + assert outgoing_activity.recipient == ra.inbox_url + + +def test_send_create_activity__with_followers( + db: Session, + client: TestClient, + respx_mock: respx.MockRouter, +) -> None: + # given a remote actor + ra = setup_remote_actor(respx_mock) + + # who is a follower + follower = setup_remote_actor_as_follower(ra) + + with mock.patch.object(webfinger, "get_actor_url", return_value=ra.ap_id): + response = client.post( + "/admin/actions/new", + data={ + "redirect_url": "http://testserver/", + "content": "hi followers", + "visibility": ap.VisibilityEnum.PUBLIC.name, + "csrf_token": generate_csrf_token(), + }, + cookies=generate_admin_session_cookies(), + ) + + # Then the server returns a 302 + assert response.status_code == 302 + + # And the Follow activity was created in the outbox + outbox_object = db.execute(select(models.OutboxObject)).scalar_one() + assert outbox_object.ap_type == "Note" + + # And an outgoing activity was queued + outgoing_activity = db.execute(select(models.OutgoingActivity)).scalar_one() + assert outgoing_activity.outbox_object_id == outbox_object.id + assert outgoing_activity.recipient == follower.actor.inbox_url + + +def test_send_create_activity__question__one_of( + db: Session, + client: TestClient, + respx_mock: respx.MockRouter, +) -> None: + # given a remote actor + ra = setup_remote_actor(respx_mock) + + # who is a follower + follower = setup_remote_actor_as_follower(ra) + + with mock.patch.object(webfinger, "get_actor_url", return_value=ra.ap_id): + response = client.post( + "/admin/actions/new", + data={ + "redirect_url": "http://testserver/", + "content": "hi followers", + "visibility": ap.VisibilityEnum.PUBLIC.name, + "csrf_token": generate_csrf_token(), + "poll_type": "oneOf", + "poll_duration": 5, + "poll_answer_1": "A", + "poll_answer_2": "B", + }, + cookies=generate_admin_session_cookies(), + ) + + # Then the server returns a 302 + assert response.status_code == 302 + + # And the Follow activity was created in the outbox + outbox_object = db.execute(select(models.OutboxObject)).scalar_one() + assert outbox_object.ap_type == "Question" + assert outbox_object.is_one_of_poll is True + assert len(outbox_object.poll_items) == 2 + assert {pi["name"] for pi in outbox_object.poll_items} == {"A", "B"} + assert outbox_object.is_poll_ended is False + + # And an outgoing activity was queued + outgoing_activity = db.execute(select(models.OutgoingActivity)).scalar_one() + assert outgoing_activity.outbox_object_id == outbox_object.id + assert 
outgoing_activity.recipient == follower.actor.inbox_url + + +def test_send_create_activity__question__any_of( + db: Session, + client: TestClient, + respx_mock: respx.MockRouter, +) -> None: + # given a remote actor + ra = setup_remote_actor(respx_mock) + + # who is a follower + follower = setup_remote_actor_as_follower(ra) + + with mock.patch.object(webfinger, "get_actor_url", return_value=ra.ap_id): + response = client.post( + "/admin/actions/new", + data={ + "redirect_url": "http://testserver/", + "content": "hi followers", + "visibility": ap.VisibilityEnum.PUBLIC.name, + "csrf_token": generate_csrf_token(), + "poll_type": "anyOf", + "poll_duration": 10, + "poll_answer_1": "A", + "poll_answer_2": "B", + "poll_answer_3": "C", + "poll_answer_4": "D", + }, + cookies=generate_admin_session_cookies(), + ) + + # Then the server returns a 302 + assert response.status_code == 302 + + # And the Follow activity was created in the outbox + outbox_object = db.execute(select(models.OutboxObject)).scalar_one() + assert outbox_object.ap_type == "Question" + assert outbox_object.is_one_of_poll is False + assert len(outbox_object.poll_items) == 4 + assert {pi["name"] for pi in outbox_object.poll_items} == {"A", "B", "C", "D"} + assert outbox_object.is_poll_ended is False + + # And an outgoing activity was queued + outgoing_activity = db.execute(select(models.OutgoingActivity)).scalar_one() + assert outgoing_activity.outbox_object_id == outbox_object.id + assert outgoing_activity.recipient == follower.actor.inbox_url + + +def test_send_create_activity__article( + db: Session, + client: TestClient, + respx_mock: respx.MockRouter, +) -> None: + # given a remote actor + ra = setup_remote_actor(respx_mock) + + # who is a follower + follower = setup_remote_actor_as_follower(ra) + + with mock.patch.object(webfinger, "get_actor_url", return_value=ra.ap_id): + response = client.post( + "/admin/actions/new", + data={ + "redirect_url": "http://testserver/", + "content": "hi followers", + "visibility": ap.VisibilityEnum.PUBLIC.name, + "csrf_token": generate_csrf_token(), + "name": "Article", + }, + cookies=generate_admin_session_cookies(), + ) + + # Then the server returns a 302 + assert response.status_code == 302 + + # And the Follow activity was created in the outbox + outbox_object = db.execute(select(models.OutboxObject)).scalar_one() + assert outbox_object.ap_type == "Article" + assert outbox_object.ap_object["name"] == "Article" + + # And an outgoing activity was queued + outgoing_activity = db.execute(select(models.OutgoingActivity)).scalar_one() + assert outgoing_activity.outbox_object_id == outbox_object.id + assert outgoing_activity.recipient == follower.actor.inbox_url diff --git a/tests/test_process_outgoing_activities.py b/tests/test_process_outgoing_activities.py new file mode 100644 index 0000000..7da510e --- /dev/null +++ b/tests/test_process_outgoing_activities.py @@ -0,0 +1,259 @@ +from uuid import uuid4 + +import httpx +import pytest +import respx +from fastapi.testclient import TestClient +from sqlalchemy import select + +from app import models +from app.actor import LOCAL_ACTOR +from app.ap_object import RemoteObject +from app.database import AsyncSession +from app.outgoing_activities import _MAX_RETRIES +from app.outgoing_activities import fetch_next_outgoing_activity +from app.outgoing_activities import new_outgoing_activity +from app.outgoing_activities import process_next_outgoing_activity +from tests import factories + + +def _setup_outbox_object() -> models.OutboxObject: + ra = 
factories.RemoteActorFactory( + base_url="https://example.com", + username="toto", + public_key="pk", + ) + + # And a Follow activity in the outbox + follow_id = uuid4().hex + follow_from_outbox = RemoteObject( + factories.build_follow_activity( + from_remote_actor=LOCAL_ACTOR, + for_remote_actor=ra, + outbox_public_id=follow_id, + ), + LOCAL_ACTOR, + ) + outbox_object = factories.OutboxObjectFactory.from_remote_object( + follow_id, follow_from_outbox + ) + return outbox_object + + +@pytest.mark.asyncio +async def test_new_outgoing_activity( + async_db_session: AsyncSession, + client: TestClient, + respx_mock: respx.MockRouter, +) -> None: + outbox_object = _setup_outbox_object() + inbox_url = "https://example.com/inbox" + + if not outbox_object.id: + raise ValueError("Should never happen") + + # When queuing the activity + outgoing_activity = await new_outgoing_activity( + async_db_session, inbox_url, outbox_object.id + ) + await async_db_session.commit() + + assert ( + await async_db_session.execute(select(models.OutgoingActivity)) + ).scalar_one() == outgoing_activity + assert outgoing_activity.outbox_object_id == outbox_object.id + assert outgoing_activity.recipient == inbox_url + + +@pytest.mark.asyncio +async def test_process_next_outgoing_activity__no_next_activity( + respx_mock: respx.MockRouter, + async_db_session: AsyncSession, +) -> None: + next_activity = await fetch_next_outgoing_activity(async_db_session) + assert next_activity is None + + +@pytest.mark.asyncio +async def test_process_next_outgoing_activity__server_200( + async_db_session: AsyncSession, + respx_mock: respx.MockRouter, +) -> None: + # And an outgoing activity + outbox_object = _setup_outbox_object() + + recipient_inbox_url = "https://example.com/users/toto/inbox" + respx_mock.post(recipient_inbox_url).mock(return_value=httpx.Response(204)) + + outgoing_activity = factories.OutgoingActivityFactory( + recipient=recipient_inbox_url, + outbox_object_id=outbox_object.id, + inbox_object_id=None, + webmention_target=None, + ) + + # When processing the next outgoing activity + # Then it is processed + next_activity = await fetch_next_outgoing_activity(async_db_session) + assert next_activity + await process_next_outgoing_activity(async_db_session, next_activity) + + assert respx_mock.calls.call_count == 1 + + outgoing_activity = ( + await async_db_session.execute(select(models.OutgoingActivity)) + ).scalar_one() + assert outgoing_activity.is_sent is True + assert outgoing_activity.last_status_code == 204 + assert outgoing_activity.error is None + assert outgoing_activity.is_errored is False + + +@pytest.mark.asyncio +async def test_process_next_outgoing_activity__webmention( + async_db_session: AsyncSession, + respx_mock: respx.MockRouter, +) -> None: + # And an outgoing activity + outbox_object = _setup_outbox_object() + + recipient_url = "https://example.com/webmention" + respx_mock.post(recipient_url).mock(return_value=httpx.Response(204)) + + outgoing_activity = factories.OutgoingActivityFactory( + recipient=recipient_url, + outbox_object_id=outbox_object.id, + inbox_object_id=None, + webmention_target="http://example.com", + ) + + # When processing the next outgoing activity + # Then it is processed + next_activity = await fetch_next_outgoing_activity(async_db_session) + assert next_activity + await process_next_outgoing_activity(async_db_session, next_activity) + + assert respx_mock.calls.call_count == 1 + + outgoing_activity = ( + await async_db_session.execute(select(models.OutgoingActivity)) + ).scalar_one() 
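+ # Webmention deliveries reuse the same OutgoingActivity bookkeeping as
+ # ActivityPub inbox deliveries, so the assertions below mirror the
+ # server_200 case above: the activity is flagged as sent, the 204
+ # response code is recorded, and no error is stored.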
+ assert outgoing_activity.is_sent is True + assert outgoing_activity.last_status_code == 204 + assert outgoing_activity.error is None + assert outgoing_activity.is_errored is False + + +@pytest.mark.asyncio +async def test_process_next_outgoing_activity__error_500( + async_db_session: AsyncSession, + respx_mock: respx.MockRouter, +) -> None: + outbox_object = _setup_outbox_object() + recipient_inbox_url = "https://example.com/inbox" + respx_mock.post(recipient_inbox_url).mock( + return_value=httpx.Response(500, text="oops") + ) + + # And an outgoing activity + outgoing_activity = factories.OutgoingActivityFactory( + recipient=recipient_inbox_url, + outbox_object_id=outbox_object.id, + inbox_object_id=None, + webmention_target=None, + ) + + # When processing the next outgoing activity + # Then it is processed + next_activity = await fetch_next_outgoing_activity(async_db_session) + assert next_activity + await process_next_outgoing_activity(async_db_session, next_activity) + + assert respx_mock.calls.call_count == 1 + + outgoing_activity = ( + await async_db_session.execute(select(models.OutgoingActivity)) + ).scalar_one() + assert outgoing_activity.is_sent is False + assert outgoing_activity.last_status_code == 500 + assert outgoing_activity.last_response == "oops" + assert outgoing_activity.is_errored is False + assert outgoing_activity.tries == 1 + + +@pytest.mark.asyncio +async def test_process_next_outgoing_activity__errored( + async_db_session: AsyncSession, + respx_mock: respx.MockRouter, +) -> None: + outbox_object = _setup_outbox_object() + recipient_inbox_url = "https://example.com/inbox" + respx_mock.post(recipient_inbox_url).mock( + return_value=httpx.Response(500, text="oops") + ) + + # And an outgoing activity + outgoing_activity = factories.OutgoingActivityFactory.create( + recipient=recipient_inbox_url, + outbox_object_id=outbox_object.id, + inbox_object_id=None, + webmention_target=None, + tries=_MAX_RETRIES - 1, + ) + + # When processing the next outgoing activity + # Then it is processed + next_activity = await fetch_next_outgoing_activity(async_db_session) + assert next_activity + await process_next_outgoing_activity(async_db_session, next_activity) + + assert respx_mock.calls.call_count == 1 + + outgoing_activity = ( + await async_db_session.execute(select(models.OutgoingActivity)) + ).scalar_one() + assert outgoing_activity.is_sent is False + assert outgoing_activity.last_status_code == 500 + assert outgoing_activity.last_response == "oops" + assert outgoing_activity.is_errored is True + + # And it is skipped from processing + next_activity = await fetch_next_outgoing_activity(async_db_session) + assert next_activity is None + + +@pytest.mark.asyncio +async def test_process_next_outgoing_activity__connect_error( + async_db_session: AsyncSession, + respx_mock: respx.MockRouter, +) -> None: + outbox_object = _setup_outbox_object() + recipient_inbox_url = "https://example.com/inbox" + respx_mock.post(recipient_inbox_url).mock(side_effect=httpx.ConnectError) + + # And an outgoing activity + outgoing_activity = factories.OutgoingActivityFactory( + recipient=recipient_inbox_url, + outbox_object_id=outbox_object.id, + inbox_object_id=None, + webmention_target=None, + ) + + # When processing the next outgoing activity + # Then it is processed + next_activity = await fetch_next_outgoing_activity(async_db_session) + assert next_activity + await process_next_outgoing_activity(async_db_session, next_activity) + + assert respx_mock.calls.call_count == 1 + + outgoing_activity = ( + 
await async_db_session.execute(select(models.OutgoingActivity)) + ).scalar_one() + assert outgoing_activity.is_sent is False + assert outgoing_activity.error is not None + assert outgoing_activity.tries == 1 + + +# TODO(ts): +# - parse retry after diff --git a/tests/test_public.py b/tests/test_public.py new file mode 100644 index 0000000..1c94f3d --- /dev/null +++ b/tests/test_public.py @@ -0,0 +1,93 @@ +from unittest import mock + +import pytest +from fastapi.testclient import TestClient +from sqlalchemy.orm import Session + +from app import activitypub as ap +from app.actor import LOCAL_ACTOR + +_ACCEPTED_AP_HEADERS = [ + "application/activity+json", + "application/activity+json; charset=utf-8", + "application/ld+json", + 'application/ld+json; profile="https://www.w3.org/ns/activitystreams"', +] + + +def test_index__html(db: Session, client: TestClient): + response = client.get("/") + assert response.status_code == 200 + assert response.headers["content-type"].startswith("text/html") + + +@pytest.mark.parametrize("accept", _ACCEPTED_AP_HEADERS) +def test_index__ap(db: Session, client: TestClient, accept: str): + response = client.get("/", headers={"Accept": accept}) + assert response.status_code == 200 + assert response.headers["content-type"] == ap.AP_CONTENT_TYPE + assert response.json() == LOCAL_ACTOR.ap_actor + + +def test_followers__ap(client, db) -> None: + response = client.get("/followers", headers={"Accept": ap.AP_CONTENT_TYPE}) + assert response.status_code == 200 + assert response.headers["content-type"] == ap.AP_CONTENT_TYPE + json_resp = response.json() + assert json_resp["id"].endswith("/followers") + assert "first" in json_resp + + +def test_followers__ap_hides_followers(client, db) -> None: + with mock.patch("app.main.config.HIDES_FOLLOWERS", True): + response = client.get("/followers", headers={"Accept": ap.AP_CONTENT_TYPE}) + assert response.status_code == 200 + assert response.headers["content-type"] == ap.AP_CONTENT_TYPE + json_resp = response.json() + assert json_resp["id"].endswith("/followers") + assert "first" not in json_resp + + +def test_followers__html(client, db) -> None: + response = client.get("/followers") + assert response.status_code == 200 + assert response.headers["content-type"].startswith("text/html") + + +def test_followers__html_hides_followers(client, db) -> None: + with mock.patch("app.main.config.HIDES_FOLLOWERS", True): + response = client.get("/followers", headers={"Accept": "text/html"}) + assert response.status_code == 404 + assert response.headers["content-type"].startswith("text/html") + + +def test_following__ap(client, db) -> None: + response = client.get("/following", headers={"Accept": ap.AP_CONTENT_TYPE}) + assert response.status_code == 200 + assert response.headers["content-type"] == ap.AP_CONTENT_TYPE + json_resp = response.json() + assert json_resp["id"].endswith("/following") + assert "first" in json_resp + + +def test_following__ap_hides_following(client, db) -> None: + with mock.patch("app.main.config.HIDES_FOLLOWING", True): + response = client.get("/following", headers={"Accept": ap.AP_CONTENT_TYPE}) + assert response.status_code == 200 + assert response.headers["content-type"] == ap.AP_CONTENT_TYPE + json_resp = response.json() + assert json_resp["id"].endswith("/following") + assert "first" not in json_resp + + +def test_following__html(client, db) -> None: + response = client.get("/following") + assert response.status_code == 200 + assert response.headers["content-type"].startswith("text/html") + + +def 
test_following__html_hides_following(client, db) -> None: + with mock.patch("app.main.config.HIDES_FOLLOWING", True): + response = client.get("/following", headers={"Accept": "text/html"}) + assert response.status_code == 404 + assert response.headers["content-type"].startswith("text/html") diff --git a/tests/test_remote_actor_deletion.py b/tests/test_remote_actor_deletion.py new file mode 100644 index 0000000..34ca73f --- /dev/null +++ b/tests/test_remote_actor_deletion.py @@ -0,0 +1,109 @@ +import httpx +import respx +from fastapi.testclient import TestClient +from sqlalchemy import func +from sqlalchemy import select +from sqlalchemy.orm import Session + +from app import activitypub as ap +from app import models +from app.ap_object import RemoteObject +from tests import factories +from tests.utils import mock_httpsig_checker +from tests.utils import run_process_next_incoming_activity +from tests.utils import setup_remote_actor +from tests.utils import setup_remote_actor_as_following_and_follower + + +def test_inbox__incoming_delete_for_unknown_actor( + db: Session, + client: TestClient, + respx_mock: respx.MockRouter, +) -> None: + # Given a remote actor who is already deleted + ra = factories.RemoteActorFactory( + base_url="https://deleted.com", + username="toto", + public_key="pk", + ) + respx_mock.get(ra.ap_id).mock(return_value=httpx.Response(404, json=ra.ap_actor)) + + # When receiving a Delete activity for an unknown actor + delete_activity = RemoteObject( + factories.build_delete_activity( + from_remote_actor=ra, + deleted_object_ap_id=ra.ap_id, + ), + ra, + ) + with mock_httpsig_checker(ra, has_valid_signature=False, is_ap_actor_gone=True): + response = client.post( + "/inbox", + headers={"Content-Type": ap.AS_CTX}, + json=delete_activity.ap_object, + ) + + # Then the server returns a 202 + assert response.status_code == 202 + + # And no incoming activity was created + assert db.scalar(select(func.count(models.IncomingActivity.id))) == 0 + + +def test_inbox__incoming_delete_for_known_actor( + db: Session, + client: TestClient, + respx_mock: respx.MockRouter, +) -> None: + # Given a remote actor + ra = setup_remote_actor(respx_mock) + + # Which is both followed and a follower + following, _ = setup_remote_actor_as_following_and_follower(ra) + actor = following.actor + assert actor + assert following.outbox_object + + # TODO: setup few more activities (like announce and create) + + # When receiving a Delete activity for an unknown actor + delete_activity = RemoteObject( + factories.build_delete_activity( + from_remote_actor=ra, + deleted_object_ap_id=ra.ap_id, + ), + ra, + ) + + with mock_httpsig_checker(ra): + response = client.post( + "/inbox", + headers={"Content-Type": ap.AS_CTX}, + json=delete_activity.ap_object, + ) + + # Then the server returns a 202 + assert response.status_code == 202 + + run_process_next_incoming_activity() + + # Then every inbox object from the actor was deleted + assert ( + db.scalar( + select(func.count(models.InboxObject.id)).where( + models.InboxObject.actor_id == actor.id, + models.InboxObject.is_deleted.is_(False), + ) + ) + == 0 + ) + + # And the following actor was deleted + assert db.scalar(select(func.count(models.Following.id))) == 0 + + # And the follower actor was deleted too + assert db.scalar(select(func.count(models.Follower.id))) == 0 + + # And the actor was marked in deleted + db.refresh(actor) + assert actor.is_deleted is True diff --git a/tests/test_tags.py b/tests/test_tags.py new file mode 100644 index 0000000..d3b4ce5 --- 
/dev/null +++ b/tests/test_tags.py @@ -0,0 +1,57 @@ +from fastapi.testclient import TestClient +from sqlalchemy import select +from sqlalchemy.orm import Session + +from app import activitypub as ap +from app import models +from app.config import generate_csrf_token +from tests.utils import generate_admin_session_cookies + + +def test_tags__no_tags( + db: Session, + client: TestClient, +) -> None: + response = client.get("/t/nope", headers={"Accept": ap.AP_CONTENT_TYPE}) + + assert response.status_code == 404 + + +def test_tags__note_with_tag(db: Session, client: TestClient) -> None: + # Call admin endpoint to create a note with + note_content = "Hello #testing" + + response = client.post( + "/admin/actions/new", + data={ + "redirect_url": "http://testserver/", + "content": note_content, + "visibility": ap.VisibilityEnum.PUBLIC.name, + "csrf_token": generate_csrf_token(), + }, + cookies=generate_admin_session_cookies(), + ) + + # Then the server returns a 302 + assert response.status_code == 302 + + # And the Follow activity was created in the outbox + outbox_object = db.execute(select(models.OutboxObject)).scalar_one() + assert outbox_object.ap_type == "Note" + assert len(outbox_object.tags) == 1 + emoji_tag = outbox_object.tags[0] + assert emoji_tag["type"] == "Hashtag" + assert emoji_tag["name"] == "#testing" + + # And the tag page returns this note + html_resp = client.get("/t/testing") + html_resp.raise_for_status() + assert html_resp.status_code == 200 + assert "Hello" in html_resp.text + + # And the AP version of the page turns the note too + ap_resp = client.get("/t/testing", headers={"Accept": ap.AP_CONTENT_TYPE}) + ap_resp.raise_for_status() + ap_json_resp = ap_resp.json() + assert ap_json_resp["totalItems"] == 1 + assert ap_json_resp["orderedItems"] == [outbox_object.ap_id] diff --git a/tests/test_utils.py b/tests/test_utils.py new file mode 100644 index 0000000..ec478d6 --- /dev/null +++ b/tests/test_utils.py @@ -0,0 +1,19 @@ +from unittest import mock + +import pytest + +from app.utils.url import is_hostname_blocked + + +@pytest.mark.parametrize( + "hostname,should_be_blocked", + [ + ("example.com", True), + ("subdomain.example.com", True), + ("example.xyz", False), + ], +) +def test_is_hostname_blocked(hostname: str, should_be_blocked: bool) -> None: + with mock.patch("app.utils.url.BLOCKED_SERVERS", ["example.com"]): + is_hostname_blocked.cache_clear() + assert is_hostname_blocked(hostname) is should_be_blocked diff --git a/tests/utils.py b/tests/utils.py new file mode 100644 index 0000000..4e9cb05 --- /dev/null +++ b/tests/utils.py @@ -0,0 +1,250 @@ +import asyncio +from contextlib import contextmanager +from typing import Any +from uuid import uuid4 + +import fastapi +import httpx +import respx + +from app import activitypub as ap +from app import actor +from app import httpsig +from app import models +from app.actor import LOCAL_ACTOR +from app.ap_object import RemoteObject +from app.config import session_serializer +from app.database import AsyncSession +from app.database import async_session +from app.incoming_activities import fetch_next_incoming_activity +from app.incoming_activities import process_next_incoming_activity +from app.main import app +from tests import factories + + +@contextmanager +def mock_httpsig_checker( + ra: actor.RemoteActor, + has_valid_signature: bool = True, + is_ap_actor_gone: bool = False, +): + async def httpsig_checker( + request: fastapi.Request, + ) -> httpsig.HTTPSigInfo: + return httpsig.HTTPSigInfo( + 
has_valid_signature=has_valid_signature, + signed_by_ap_actor_id=ra.ap_id, + is_ap_actor_gone=is_ap_actor_gone, + ) + + app.dependency_overrides[httpsig.httpsig_checker] = httpsig_checker + try: + yield + finally: + del app.dependency_overrides[httpsig.httpsig_checker] + + +def generate_admin_session_cookies() -> dict[str, Any]: + return {"session": session_serializer.dumps({"is_logged_in": True})} + + +def setup_remote_actor( + respx_mock: respx.MockRouter, + base_url="https://example.com", + also_known_as=None, +) -> actor.RemoteActor: + ra = factories.RemoteActorFactory( + base_url=base_url, + username="toto", + public_key="pk", + also_known_as=also_known_as if also_known_as else [], + ) + respx_mock.get(ra.ap_id + "/outbox").mock( + return_value=httpx.Response( + 200, + json={ + "@context": ap.AS_EXTENDED_CTX, + "id": f"{ra.ap_id}/outbox", + "type": "OrderedCollection", + "totalItems": 0, + "orderedItems": [], + }, + ) + ) + respx_mock.get(ra.ap_id).mock(return_value=httpx.Response(200, json=ra.ap_actor)) + return ra + + +def setup_remote_actor_as_follower(ra: actor.RemoteActor) -> models.Follower: + actor = factories.ActorFactory.from_remote_actor(ra) + + follow_id = uuid4().hex + follow_from_inbox = RemoteObject( + factories.build_follow_activity( + from_remote_actor=ra, + for_remote_actor=LOCAL_ACTOR, + outbox_public_id=follow_id, + ), + ra, + ) + inbox_object = factories.InboxObjectFactory.from_remote_object( + follow_from_inbox, actor + ) + + follower = factories.FollowerFactory( + inbox_object_id=inbox_object.id, + actor_id=actor.id, + ap_actor_id=actor.ap_id, + ) + return follower + + +def setup_remote_actor_as_following(ra: actor.RemoteActor) -> models.Following: + actor = factories.ActorFactory.from_remote_actor(ra) + + follow_id = uuid4().hex + follow_from_outbox = RemoteObject( + factories.build_follow_activity( + from_remote_actor=LOCAL_ACTOR, + for_remote_actor=ra, + outbox_public_id=follow_id, + ), + LOCAL_ACTOR, + ) + outbox_object = factories.OutboxObjectFactory.from_remote_object( + follow_id, follow_from_outbox + ) + + following = factories.FollowingFactory( + outbox_object_id=outbox_object.id, + actor_id=actor.id, + ap_actor_id=actor.ap_id, + ) + return following + + +def setup_remote_actor_as_following_and_follower( + ra: actor.RemoteActor, +) -> tuple[models.Following, models.Follower]: + actor = factories.ActorFactory.from_remote_actor(ra) + + follow_id = uuid4().hex + follow_from_outbox = RemoteObject( + factories.build_follow_activity( + from_remote_actor=LOCAL_ACTOR, + for_remote_actor=ra, + outbox_public_id=follow_id, + ), + LOCAL_ACTOR, + ) + outbox_object = factories.OutboxObjectFactory.from_remote_object( + follow_id, follow_from_outbox + ) + + following = factories.FollowingFactory( + outbox_object_id=outbox_object.id, + actor_id=actor.id, + ap_actor_id=actor.ap_id, + ) + + follow_id = uuid4().hex + follow_from_inbox = RemoteObject( + factories.build_follow_activity( + from_remote_actor=ra, + for_remote_actor=LOCAL_ACTOR, + outbox_public_id=follow_id, + ), + ra, + ) + inbox_object = factories.InboxObjectFactory.from_remote_object( + follow_from_inbox, actor + ) + + follower = factories.FollowerFactory( + inbox_object_id=inbox_object.id, + actor_id=actor.id, + ap_actor_id=actor.ap_id, + ) + + return following, follower + + +def setup_outbox_note( + content: str = "Hello", + to: list[str] = None, + cc: list[str] = None, + tags: list[ap.RawObject] = None, + in_reply_to: str | None = None, +) -> models.OutboxObject: + note_id = uuid4().hex + note_from_outbox = 
RemoteObject( + factories.build_note_object( + from_remote_actor=LOCAL_ACTOR, + outbox_public_id=note_id, + content=content, + to=to, + cc=cc, + tags=tags, + in_reply_to=in_reply_to, + ), + LOCAL_ACTOR, + ) + return factories.OutboxObjectFactory.from_remote_object(note_id, note_from_outbox) + + +def setup_inbox_note( + actor: models.Actor, + content: str = "Hello", + to: list[str] = None, + cc: list[str] = None, + tags: list[ap.RawObject] = None, + in_reply_to: str | None = None, +) -> models.OutboxObject: + note_id = uuid4().hex + note_from_outbox = RemoteObject( + factories.build_note_object( + from_remote_actor=actor, + outbox_public_id=note_id, + content=content, + to=to, + cc=cc, + tags=tags, + in_reply_to=in_reply_to, + ), + actor, + ) + return factories.InboxObjectFactory.from_remote_object(note_from_outbox, actor) + + +def setup_inbox_delete( + actor: models.Actor, deleted_object_ap_id: str +) -> models.InboxObject: + follow_from_inbox = RemoteObject( + factories.build_delete_activity( + from_remote_actor=actor, + deleted_object_ap_id=deleted_object_ap_id, + ), + actor, + ) + inbox_object = factories.InboxObjectFactory.from_remote_object( + follow_from_inbox, actor + ) + return inbox_object + + +def run_async(func, *args, **kwargs): + async def _func(): + async with async_session() as db: + return await func(db, *args, **kwargs) + + asyncio.run(_func()) + + +async def _process_next_incoming_activity(db_session: AsyncSession) -> None: + next_activity = await fetch_next_incoming_activity(db_session) + assert next_activity + await process_next_incoming_activity(db_session, next_activity) + + +def run_process_next_incoming_activity() -> None: + run_async(_process_next_incoming_activity) diff --git a/utils/__init__.py b/utils/__init__.py deleted file mode 100644 index 09517e3..0000000 --- a/utils/__init__.py +++ /dev/null @@ -1,32 +0,0 @@ -import logging -from datetime import datetime -from datetime import timezone - -from dateutil import parser -from little_boxes import activitypub as ap - -logger = logging.getLogger(__name__) - - -def strtobool(s: str) -> bool: - if s in ["y", "yes", "true", "on", "1"]: - return True - if s in ["n", "no", "false", "off", "0"]: - return False - - raise ValueError(f"cannot convert {s} to bool") - - -def parse_datetime(s: str) -> datetime: - # Parses the datetime with dateutil - dt = parser.parse(s) - - # If no TZ is set, assumes it's UTC - if not dt.tzinfo: - dt = dt.replace(tzinfo=timezone.utc) - - return dt - - -def now() -> str: - return ap.format_datetime(datetime.now(timezone.utc)) diff --git a/utils/blacklist.py b/utils/blacklist.py deleted file mode 100644 index abd3d7a..0000000 --- a/utils/blacklist.py +++ /dev/null @@ -1,38 +0,0 @@ -import logging -from typing import Any -from typing import Dict -from urllib.parse import urlparse - -import config - -logger = logging.getLogger(__name__) - - -def is_url_blacklisted(url: str) -> bool: - try: - return urlparse(url).netloc in config.BLACKLIST - except Exception: - logger.exception(f"failed to blacklist for {url}") - return False - - -def is_blacklisted(data: Dict[str, Any]) -> bool: - """Returns True if the activity is coming/or referencing a blacklisted host.""" - if ( - "id" in data - and is_url_blacklisted(data["id"]) - or ( - "object" in data - and isinstance(data["object"], dict) - and "id" in data["object"] - and is_url_blacklisted(data["object"]["id"]) - ) - or ( - "object" in data - and isinstance(data["object"], str) - and is_url_blacklisted(data["object"]) - ) - ): - return True - - return 
False diff --git a/utils/emojis.py b/utils/emojis.py deleted file mode 100644 index 5d04071..0000000 --- a/utils/emojis.py +++ /dev/null @@ -1,48 +0,0 @@ -import mimetypes -import re -from datetime import datetime -from pathlib import Path -from typing import Any -from typing import Dict -from typing import List -from typing import Set - -from little_boxes import activitypub as ap - -EMOJI_REGEX = re.compile(r"(:[\d\w]+:)") - -EMOJIS: Dict[str, Dict[str, Any]] = {} -EMOJIS_BY_NAME: Dict[str, Dict[str, Any]] = {} - - -def _load_emojis(root_dir: Path, base_url: str) -> None: - if EMOJIS: - return - for emoji in (root_dir / "static" / "emojis").iterdir(): - mt = mimetypes.guess_type(emoji.name)[0] - if mt and mt.startswith("image/"): - name = emoji.name.split(".")[0] - ap_emoji = dict( - type=ap.ActivityType.EMOJI.value, - name=f":{name}:", - updated=ap.format_datetime(datetime.fromtimestamp(0.0).astimezone()), - id=f"{base_url}/emoji/{name}", - icon={ - "mediaType": mt, - "type": ap.ActivityType.IMAGE.value, - "url": f"{base_url}/static/emojis/{emoji.name}", - }, - ) - EMOJIS[emoji.name] = ap_emoji - EMOJIS_BY_NAME[ap_emoji["name"]] = ap_emoji - - -def tags(content: str) -> List[Dict[str, Any]]: - tags: List[Dict[str, Any]] = [] - added: Set[str] = set() - for e in re.findall(EMOJI_REGEX, content): - if e not in added and e in EMOJIS_BY_NAME: - tags.append(EMOJIS_BY_NAME[e]) - added.add(e) - - return tags diff --git a/utils/highlight.py b/utils/highlight.py deleted file mode 100644 index 074108d..0000000 --- a/utils/highlight.py +++ /dev/null @@ -1,32 +0,0 @@ -from functools import lru_cache - -from bs4 import BeautifulSoup -from pygments import highlight as phighlight -from pygments.formatters import HtmlFormatter -from pygments.lexers import guess_lexer - -from config import THEME_STYLE -from config import ThemeStyle - -_FORMATTER = HtmlFormatter( - style="default" if THEME_STYLE == ThemeStyle.LIGHT else "vim" -) - -HIGHLIGHT_CSS = _FORMATTER.get_style_defs() - - -@lru_cache(512) -def highlight(html: str) -> str: - soup = BeautifulSoup(html, "html5lib") - for code in soup.find_all("code"): - if not code.parent.name == "pre": - continue - lexer = guess_lexer(code.text) - tag = BeautifulSoup( - phighlight(code.text, lexer, _FORMATTER), "html5lib" - ).body.next - pre = code.parent - pre.replaceWith(tag) - out = soup.body - out.name = "div" - return str(out) diff --git a/utils/key.py b/utils/key.py deleted file mode 100644 index 4e035f0..0000000 --- a/utils/key.py +++ /dev/null @@ -1,42 +0,0 @@ -import binascii -import os -from typing import Callable - -from little_boxes.key import Key - -KEY_DIR = os.path.join(os.path.dirname(os.path.abspath(__file__)), "..", "config") - - -def _new_key() -> str: - return binascii.hexlify(os.urandom(32)).decode("utf-8") - - -def get_secret_key(name: str, new_key: Callable[[], str] = _new_key) -> str: - """Loads or generates a cryptographic key.""" - key_path = os.path.join(KEY_DIR, f"{name}.key") - if not os.path.exists(key_path): - k = new_key() - with open(key_path, "w+") as f: - f.write(k) - return k - - with open(key_path) as f: - return f.read() - - -def get_key(owner: str, _id: str, user: str, domain: str) -> Key: - """"Loads or generates an RSA key.""" - k = Key(owner, _id) - user = user.replace(".", "_") - domain = domain.replace(".", "_") - key_path = os.path.join(KEY_DIR, f"key_{user}_{domain}.pem") - if os.path.isfile(key_path): - with open(key_path) as f: - privkey_pem = f.read() - k.load(privkey_pem) - else: - k.new() - with open(key_path, "w") 
as f: - f.write(k.privkey_pem) - - return k diff --git a/utils/local_actor_cache.py b/utils/local_actor_cache.py deleted file mode 100644 index 3b10735..0000000 --- a/utils/local_actor_cache.py +++ /dev/null @@ -1,18 +0,0 @@ -from pathlib import Path - -_CACHE_FILE = Path(__file__).parent.absolute() / ".." / "config" / "local_actor_hash" - - -def is_actor_updated(actor_hash: str) -> bool: - actor_updated = False - cache_exists = _CACHE_FILE.exists() - if cache_exists: - current_hash = _CACHE_FILE.read_text() - if actor_hash != current_hash: - actor_updated = True - - if actor_updated or not cache_exists: - with _CACHE_FILE.open("w") as f: - f.write(actor_hash) - - return actor_updated diff --git a/utils/lookup.py b/utils/lookup.py deleted file mode 100644 index 8739746..0000000 --- a/utils/lookup.py +++ /dev/null @@ -1,54 +0,0 @@ -import little_boxes.activitypub as ap -import mf2py -import requests -from little_boxes.errors import NotAnActivityError -from little_boxes.errors import RemoteServerUnavailableError -from little_boxes.webfinger import get_actor_url - - -def lookup(url: str) -> ap.BaseActivity: - """Try to find an AP object related to the given URL.""" - try: - if url.startswith("@"): - actor_url = get_actor_url(url) - if actor_url: - return ap.fetch_remote_activity(actor_url) - except NotAnActivityError: - pass - except requests.HTTPError: - # Some websites may returns 404, 503 or others when they don't support webfinger, and we're just taking a guess - # when performing the lookup. - pass - except requests.RequestException as err: - raise RemoteServerUnavailableError(f"failed to fetch {url}: {err!r}") - - backend = ap.get_backend() - try: - resp = requests.head( - url, - timeout=10, - allow_redirects=True, - headers={"User-Agent": backend.user_agent()}, - ) - except requests.RequestException as err: - raise RemoteServerUnavailableError(f"failed to GET {url}: {err!r}") - - try: - resp.raise_for_status() - except Exception: - return ap.fetch_remote_activity(url) - - # If the page is HTML, maybe it contains an alternate link pointing to an AP object - for alternate in mf2py.parse(resp.text).get("alternates", []): - if alternate.get("type") == "application/activity+json": - return ap.fetch_remote_activity(alternate["url"]) - - try: - # Maybe the page was JSON-LD? 
- data = resp.json() - return ap.parse_activity(data) - except Exception: - pass - - # Try content negotiation (retry with the AP Accept header) - return ap.fetch_remote_activity(url) diff --git a/utils/media.py b/utils/media.py deleted file mode 100644 index 19288df..0000000 --- a/utils/media.py +++ /dev/null @@ -1,246 +0,0 @@ -import base64 -import mimetypes -from enum import Enum -from enum import unique -from functools import lru_cache -from gzip import GzipFile -from io import BytesIO -from typing import Any -from typing import Dict -from typing import Optional -from typing import Tuple - -import gridfs -import piexif -import requests -from little_boxes import activitypub as ap -from PIL import Image - - -@lru_cache(2048) -def _is_img(filename): - mimetype, _ = mimetypes.guess_type(filename.lower()) - if mimetype and mimetype.split("/")[0] in ["image"]: - return True - return False - - -@lru_cache(2048) -def is_video(filename): - mimetype, _ = mimetypes.guess_type(filename.lower()) - if mimetype and mimetype.split("/")[0] in ["video"]: - return True - return False - - -def _load(url: str, user_agent: str) -> Tuple[BytesIO, Optional[str]]: - """Initializes a `PIL.Image` from the URL.""" - out = BytesIO() - with requests.get(url, stream=True, headers={"User-Agent": user_agent}) as resp: - resp.raise_for_status() - - resp.raw.decode_content = True - while 1: - buf = resp.raw.read() - if not buf: - break - out.write(buf) - out.seek(0) - return out, resp.headers.get("content-type") - - -def load(url: str, user_agent: str) -> Image: - """Initializes a `PIL.Image` from the URL.""" - out, _ = _load(url, user_agent) - return Image.open(out) - - -def to_data_uri(img: Image) -> str: - out = BytesIO() - img.save(out, format=img.format) - out.seek(0) - data = base64.b64encode(out.read()).decode("utf-8") - return f"data:{img.get_format_mimetype()};base64,{data}" - - -@unique -class Kind(Enum): - ATTACHMENT = "attachment" - ACTOR_ICON = "actor_icon" - UPLOAD = "upload" - OG_IMAGE = "og" - EMOJI = "emoji" - - -class MediaCache(object): - def __init__(self, gridfs_db: str, user_agent: str) -> None: - self.fs = gridfs.GridFS(gridfs_db) - self.user_agent = user_agent - - def cache_og_image(self, url: str, remote_id: str) -> None: - if self.fs.find_one({"url": url, "kind": Kind.OG_IMAGE.value}): - return - i = load(url, self.user_agent) - # Save the original attachment (gzipped) - i.thumbnail((100, 100)) - with BytesIO() as buf: - with GzipFile(mode="wb", fileobj=buf) as f1: - i.save(f1, format=i.format) - buf.seek(0) - self.fs.put( - buf, - url=url, - size=100, - content_type=i.get_format_mimetype(), - kind=Kind.OG_IMAGE.value, - remote_id=remote_id, - ) - - def cache_attachment(self, attachment: Dict[str, Any], remote_id: str) -> None: - url = attachment["url"] - - # Ensure it's not already there - if self.fs.find_one( - {"url": url, "kind": Kind.ATTACHMENT.value, "remote_id": remote_id} - ): - return - - # If it's an image, make some thumbnails - if ( - _is_img(url) - or attachment.get("mediaType", "").startswith("image/") - or ap._has_type(attachment.get("type"), ap.ActivityType.IMAGE) - ): - try: - i = load(url, self.user_agent) - # Save the original attachment (gzipped) - with BytesIO() as buf: - f1 = GzipFile(mode="wb", fileobj=buf) - i.save(f1, format=i.format) - f1.close() - buf.seek(0) - self.fs.put( - buf, - url=url, - size=None, - content_type=i.get_format_mimetype(), - kind=Kind.ATTACHMENT.value, - remote_id=remote_id, - ) - # Save a thumbnail (gzipped) - i.thumbnail((720, 720)) - with 
BytesIO() as buf: - with GzipFile(mode="wb", fileobj=buf) as f1: - i.save(f1, format=i.format) - buf.seek(0) - self.fs.put( - buf, - url=url, - size=720, - content_type=i.get_format_mimetype(), - kind=Kind.ATTACHMENT.value, - remote_id=remote_id, - ) - return - except Exception: - # FIXME(tsileo): logging - pass - - # The attachment is not an image, download and save it anyway - with requests.get( - url, stream=True, headers={"User-Agent": self.user_agent} - ) as resp: - resp.raise_for_status() - with BytesIO() as buf: - with GzipFile(mode="wb", fileobj=buf) as f1: - for chunk in resp.iter_content(chunk_size=2 << 20): - if chunk: - f1.write(chunk) - buf.seek(0) - self.fs.put( - buf, - url=url, - size=None, - content_type=mimetypes.guess_type(url)[0], - kind=Kind.ATTACHMENT.value, - remote_id=remote_id, - ) - - def is_actor_icon_cached(self, url: str) -> bool: - return bool(self.fs.find_one({"url": url, "kind": Kind.ACTOR_ICON.value})) - - def cache_actor_icon(self, url: str) -> None: - if self.is_actor_icon_cached(url): - return - i = load(url, self.user_agent) - for size in [50, 80]: - t1 = i.copy() - t1.thumbnail((size, size)) - with BytesIO() as buf: - with GzipFile(mode="wb", fileobj=buf) as f1: - t1.save(f1, format=i.format) - buf.seek(0) - self.fs.put( - buf, - url=url, - size=size, - content_type=i.get_format_mimetype(), - kind=Kind.ACTOR_ICON.value, - ) - - def is_emoji_cached(self, url: str) -> bool: - return bool(self.fs.find_one({"url": url, "kind": Kind.EMOJI.value})) - - def cache_emoji(self, url: str, iri: str) -> None: - if self.is_emoji_cached(url): - return - i = load(url, self.user_agent) - for size in [25]: - t1 = i.copy() - t1.thumbnail((size, size)) - with BytesIO() as buf: - with GzipFile(mode="wb", fileobj=buf) as f1: - t1.save(f1, format=i.format) - buf.seek(0) - self.fs.put( - buf, - url=url, - size=size, - remote_id=iri, - content_type=i.get_format_mimetype(), - kind=Kind.EMOJI.value, - ) - - def save_upload(self, obuf: BytesIO, filename: str) -> str: - # Remove EXIF metadata - if filename.lower().endswith(".jpg") or filename.lower().endswith(".jpeg"): - obuf.seek(0) - with BytesIO() as buf2: - piexif.remove(obuf.getvalue(), buf2) - obuf.truncate(0) - obuf.write(buf2.getvalue()) - - obuf.seek(0) - mtype = mimetypes.guess_type(filename)[0] - with BytesIO() as gbuf: - with GzipFile(mode="wb", fileobj=gbuf) as gzipfile: - gzipfile.write(obuf.getvalue()) - - gbuf.seek(0) - oid = self.fs.put( - gbuf, - content_type=mtype, - upload_filename=filename, - kind=Kind.UPLOAD.value, - ) - return str(oid) - - def get_actor_icon(self, url: str, size: int) -> Any: - return self.get_file(url, size, Kind.ACTOR_ICON) - - def get_attachment(self, url: str, size: int) -> Any: - return self.get_file(url, size, Kind.ATTACHMENT) - - def get_file(self, url: str, size: int, kind: Kind) -> Any: - return self.fs.find_one({"url": url, "size": size, "kind": kind.value}) diff --git a/utils/migrations.py b/utils/migrations.py deleted file mode 100644 index cbf6eb2..0000000 --- a/utils/migrations.py +++ /dev/null @@ -1,56 +0,0 @@ -"""Automatic migration tools for the data stored in MongoDB.""" -import logging -from abc import ABC -from abc import abstractmethod -from typing import List -from typing import Type - -from config import DB - -logger = logging.getLogger(__name__) - -# Used to keep track of all the defined migrations -_MIGRATIONS: List[Type["Migration"]] = [] - - -def perform() -> None: - """Perform all the defined migrations.""" - for migration in _MIGRATIONS: - 
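- # Each Migration subclass auto-registers itself in _MIGRATIONS (via __init_subclass__ below) in definition order, so declaring a subclass is all it takes for it to run here.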
migration().perform() - - -class Migration(ABC): - """Abstract class for migrations.""" - - def __init__(self) -> None: - self.name = self.__class__.__qualname__ - self._col = DB.migrations - - def __init_subclass__(cls, **kwargs): - super().__init_subclass__(**kwargs) - _MIGRATIONS.append(cls) - - def _apply(self) -> None: - self._col.insert_one({"name": self.name}) - - def _reset(self) -> None: - self._col.delete_one({"name": self.name}) - - def _is_applied(self) -> bool: - return bool(self._col.find_one({"name": self.name})) - - @abstractmethod - def migrate(self) -> None: - """Expected to be implemented by actual migrations.""" - pass - - def perform(self) -> None: - if self._is_applied(): - logger.info(f"Skipping migration {self.name} (already applied)") - return - - logger.info(f"Performing migration {self.name}...") - self.migrate() - - self._apply() - logger.info("Done") diff --git a/utils/nodeinfo.py b/utils/nodeinfo.py deleted file mode 100644 index 40295cc..0000000 --- a/utils/nodeinfo.py +++ /dev/null @@ -1,77 +0,0 @@ -from enum import Enum -from enum import unique -from functools import lru_cache -from typing import Optional - -import little_boxes.activitypub as ap -import requests - - -@unique -class SoftwareName(Enum): - UNKNOWN = "unknown" - MASTODON = "mastodon" - MICROBLOGPUB = "microblogpub" - - -def _get_nodeinfo_url(server: str) -> Optional[str]: - backend = ap.get_backend() - for scheme in {"https", "http"}: - try: - resp = requests.get( - f"{scheme}://{server}/.well-known/nodeinfo", - timeout=10, - allow_redirects=True, - headers={"User-Agent": backend.user_agent()}, - ) - resp.raise_for_status() - data = resp.json() - for link in data.get("links", []): - return link["href"] - except requests.HTTPError: - return None - except requests.RequestException: - continue - - return None - - -def _try_mastodon_api(server: str) -> bool: - for scheme in {"https", "http"}: - try: - resp = requests.get(f"{scheme}://{server}/api/v1/instance", timeout=10) - resp.raise_for_status() - if resp.json(): - return True - except requests.HTTPError: - return False - except requests.RequestException: - continue - - return False - - -@lru_cache(2048) -def get_software_name(server: str) -> str: - backend = ap.get_backend() - nodeinfo_endpoint = _get_nodeinfo_url(server) - if nodeinfo_endpoint: - try: - resp = requests.get( - nodeinfo_endpoint, - timeout=10, - headers={"User-Agent": backend.user_agent()}, - ) - resp.raise_for_status() - software_name = resp.json().get("software", {}).get("name") - if software_name: - return software_name - - return SoftwareName.UNKNOWN.value - except requests.RequestException: - return SoftwareName.UNKNOWN.value - - if _try_mastodon_api(server): - return SoftwareName.MASTODON.value - - return SoftwareName.UNKNOWN.value diff --git a/utils/opengraph.py b/utils/opengraph.py deleted file mode 100644 index 8d487b3..0000000 --- a/utils/opengraph.py +++ /dev/null @@ -1,129 +0,0 @@ -import logging -import mimetypes -from typing import Any -from typing import Dict -from typing import Set -from urllib.parse import urlparse - -import opengraph -import requests -from bs4 import BeautifulSoup -from little_boxes import activitypub as ap -from little_boxes.errors import NotAnActivityError -from little_boxes.urlutils import check_url -from little_boxes.urlutils import is_url_valid - -from .lookup import lookup - -logger = logging.getLogger(__name__) - - -def links_from_note(note: Dict[str, Any]) -> Set[str]: - note_host = urlparse(ap._get_id(note["id"]) or "").netloc - - links = set() - 
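- # Only external http(s) links are kept; links back to the note's own host are self-references and get skipped.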
if "content" in note: - soup = BeautifulSoup(note["content"], "html5lib") - for link in soup.find_all("a"): - h = link.get("href") - ph = urlparse(h) - if ( - ph.scheme in {"http", "https"} - and ph.netloc != note_host - and is_url_valid(h) - ): - links.add(h) - - # FIXME(tsileo): support summary and name fields - - return links - - -def fetch_og_metadata(user_agent, links): - res = [] - for l in links: - # Try to skip media early - mimetype, _ = mimetypes.guess_type(l) - if mimetype and mimetype.split("/")[0] in ["image", "video", "audio"]: - logger.info(f"skipping media link {l}") - continue - - check_url(l) - - # Skip any links that resolve to AP objects - try: - lookup(l) - continue - except NotAnActivityError: - pass - except Exception: - logger.exception(f"skipping {l} because of issues during AP lookup") - continue - - try: - h = requests.head( - l, headers={"User-Agent": user_agent}, timeout=3, allow_redirects=True - ) - h.raise_for_status() - except requests.HTTPError as http_err: - logger.debug( - f"failed to HEAD {l}, got a {http_err.response.status_code}: {http_err.response.text}" - ) - continue - except requests.RequestException as err: - logger.debug(f"failed to HEAD {l}: {err!r}") - continue - - if h.headers.get("content-type") and not h.headers.get( - "content-type" - ).startswith("text/html"): - logger.debug(f"skipping {l} for bad content type") - continue - - try: - r = requests.get( - l, headers={"User-Agent": user_agent}, timeout=5, allow_redirects=True - ) - r.raise_for_status() - except requests.HTTPError as http_err: - logger.debug( - f"failed to GET {l}, got a {http_err.response.status_code}: {http_err.response.text}" - ) - continue - except requests.RequestException as err: - logger.debug(f"failed to GET {l}: {err!r}") - continue - - # FIXME(tsileo): check mimetype via the URL too (like we do for images) - if not r.headers.get("content-type") or not r.headers.get( - "content-type" - ).startswith("text/html"): - continue - - r.encoding = "UTF-8" - html = r.text - try: - data = dict(opengraph.OpenGraph(html=html)) - except Exception: - logger.exception(f"failed to parse {l}") - continue - - # Keep track of the fetched URL as some crappy websites use relative URLs everywhere - data["_input_url"] = l - u = urlparse(l) - - # If it's a relative URL, build the absolute version - if "image" in data and data["image"].startswith("/"): - data["image"] = u._replace( - path=data["image"], params="", query="", fragment="" - ).geturl() - - if "url" in data and data["url"].startswith("/"): - data["url"] = u._replace( - path=data["url"], params="", query="", fragment="" - ).geturl() - - if data.get("url"): - res.append(data) - - return res diff --git a/utils/template_filters.py b/utils/template_filters.py deleted file mode 100644 index f5e4364..0000000 --- a/utils/template_filters.py +++ /dev/null @@ -1,468 +0,0 @@ -import logging -import urllib -from datetime import datetime -from datetime import timezone -from functools import lru_cache -from urllib.parse import urlparse - -import bleach -import emoji_unicode -import flask -import html2text -import timeago -from bs4 import BeautifulSoup -from cachetools import LRUCache -from little_boxes import activitypub as ap -from little_boxes.activitypub import _to_list -from little_boxes.errors import ActivityGoneError -from little_boxes.errors import ActivityNotFoundError - -from config import BASE_URL -from config import EMOJI_TPL -from config import ID -from config import MEDIA_CACHE -from core.activitypub import _answer_key -from utils import 
parse_datetime -from utils.highlight import highlight -from utils.media import Kind -from utils.media import _is_img - -_logger = logging.getLogger(__name__) - -H2T = html2text.HTML2Text() -H2T.ignore_links = True -H2T.ignore_images = True - - -filters = flask.Blueprint("filters", __name__) - - -@filters.app_template_filter() -def get_visibility(meta): - if "object_visibility" in meta and meta["object_visibility"]: - return meta["object_visibility"] - return meta.get("visibility") - - -@filters.app_template_filter() -def visibility(v: str) -> str: - try: - return ap.Visibility[v].value.lower() - except Exception: - return v - - -@filters.app_template_filter() -def visibility_is_public(v: str) -> bool: - return v in [ap.Visibility.PUBLIC.name, ap.Visibility.UNLISTED.name] - - -@filters.app_template_filter() -def code_highlight(content): - return highlight(content) - - -@filters.app_template_filter() -def emojify(text): - return emoji_unicode.replace( - text, lambda e: EMOJI_TPL.format(filename=e.code_points, raw=e.unicode) - ) - - -# HTML/templates helper -ALLOWED_TAGS = [ - "a", - "abbr", - "acronym", - "b", - "br", - "blockquote", - "code", - "pre", - "em", - "i", - "li", - "ol", - "strong", - "sup", - "sub", - "del", - "ul", - "span", - "div", - "p", - "h1", - "h2", - "h3", - "h4", - "h5", - "h6", - "table", - "th", - "tr", - "td", - "thead", - "tbody", - "tfoot", - "colgroup", - "caption", - "img", -] - -ALLOWED_ATTRIBUTES = { - "a": ["href", "title"], - "abbr": ["title"], - "acronym": ["title"], - "img": ["src", "alt", "title"], -} - - -@filters.app_template_filter() -def replace_custom_emojis(content, note): - idx = {} - for tag in note.get("tag", []): - if tag.get("type") == "Emoji": - idx[tag["name"]] = _get_file_url(tag["icon"]["url"], 25, Kind.EMOJI) - - for emoji_name, emoji_url in idx.items(): - content = content.replace( - emoji_name, - f'<img class="custom-emoji" src="{emoji_url}" title="{emoji_name}" alt="{emoji_name}">', - ) - - return content - - -def clean_html(html): - try: - return bleach.clean( - html, tags=ALLOWED_TAGS, attributes=ALLOWED_ATTRIBUTES, strip=True - ) - except Exception: - return "failed to clean HTML" - - -@filters.app_template_filter() -def gtone(n): - return n > 1 - - -@filters.app_template_filter() -def gtnow(dtstr): - return ap.format_datetime(datetime.now(timezone.utc)) > dtstr - - -@filters.app_template_filter() -def clean(html): - out = clean_html(html) - return emoji_unicode.replace( - out, lambda e: EMOJI_TPL.format(filename=e.code_points, raw=e.unicode) - ) - - -@filters.app_template_filter() -def permalink_id(val): - return str(hash(val)) - - -@filters.app_template_filter() -def quote_plus(t): - return urllib.parse.quote_plus(t) - - -@filters.app_template_filter() -def is_from_outbox(t): - return t.startswith(ID) - - -@filters.app_template_filter() -def html2plaintext(body): - return H2T.handle(body) - - -@filters.app_template_filter() -def domain(url): - return urlparse(url).netloc - - -@filters.app_template_filter() -def format_time(val): - if val: - dt = parse_datetime(val) - return datetime.strftime(dt, "%B %d, %Y, %I:%M %p") - return val - - -@filters.app_template_filter() -def format_ts(val): - return datetime.fromtimestamp(val).strftime("%B %d, %Y, %I:%M %p") - - -@filters.app_template_filter() -def gt_ts(val): - return datetime.now() > datetime.fromtimestamp(val) - - -@filters.app_template_filter() -def format_timeago(val): - if val: - dt = parse_datetime(val) - return timeago.format(dt.astimezone(timezone.utc), datetime.now(timezone.utc)) - return val - - -@filters.app_template_filter() -def 
url_or_id(d): - if isinstance(d, dict): - if "url" in d and isinstance(d["url"], str): - return d["url"] - else: - return d["id"] - return "" - - -@filters.app_template_filter() -def get_url(u): - if isinstance(u, list): - for l in u: - if l.get("mimeType") == "text/html": - u = l - if isinstance(u, dict): - return u["href"] - elif isinstance(u, str): - return u - else: - return u - - -@filters.app_template_filter() -def get_actor(url): - if not url: - return None - if isinstance(url, list): - url = url[0] - if isinstance(url, dict): - url = url.get("id") - try: - return ap.get_backend().fetch_iri(url) - except (ActivityNotFoundError, ActivityGoneError): - return f"Deleted<{url}>" - except Exception as exc: - return f"Error<{url}/{exc!r}>" - - -@filters.app_template_filter() -def has_place(note): - if note.get("location") and note["location"].get("type") == "Place": - return True - return False - - -@filters.app_template_filter() -def get_place(note): - if note.get("location") and note["location"].get("type") == "Place": - tag = note["location"] - if tag.get("latitude") and tag.get("longitude"): - lat = tag["latitude"] - lng = tag["longitude"] - out = "" - if tag.get("name"): - out += f"{tag['name']} " - - out += ( - '<span class="h-geo">' - f'<data class="p-latitude" value="{lat}"></data>' - f'<data class="p-longitude" value="{lng}"></data>' - f'<a href="https://www.openstreetmap.org/?mlat={lat}&mlon={lng}">{lat},{lng}</a>' - "</span>" - ) - - return out - - return "" - - return "" - - -@filters.app_template_filter() -def poll_answer_key(choice: str) -> str: - return _answer_key(choice) - - -@filters.app_template_filter() -def get_answer_count(choice, obj, meta): - count_from_meta = meta.get("question_answers", {}).get(_answer_key(choice), 0) - if count_from_meta: - return count_from_meta - for option in obj.get("oneOf", obj.get("anyOf", [])): - if option.get("name") == choice: - return option.get("replies", {}).get("totalItems", 0) - - _logger.warning(f"invalid poll data {choice} {obj} {meta}") - return 0 - - -@filters.app_template_filter() -def get_total_answers_count(obj, meta): - cached = meta.get("question_replies", 0) - if cached: - return cached - cnt = 0 - for choice in obj.get("anyOf", obj.get("oneOf", [])): - cnt += choice.get("replies", {}).get("totalItems", 0) - return cnt - - -_FILE_URL_CACHE = LRUCache(4096) - - -def _get_file_url(url, size, kind) -> str: - k = (url, size, kind) - cached = _FILE_URL_CACHE.get(k) - if cached: - return cached - - doc = MEDIA_CACHE.get_file(*k) - if doc: - out = f"/media/{str(doc._id)}" - _FILE_URL_CACHE[k] = out - return out - - _logger.error(f"cache not available for {url}/{size}/{kind}") - if url.startswith(BASE_URL): - return url - - p = urlparse(url) - return f"/p/{p.scheme}" + p._replace(scheme="").geturl()[1:] - - -@filters.app_template_filter() -def get_actor_icon_url(url, size): - return _get_file_url(url, size, Kind.ACTOR_ICON) - - -@filters.app_template_filter() -def get_attachment_url(url, size): - return _get_file_url(url, size, Kind.ATTACHMENT) - - -@filters.app_template_filter() -@lru_cache(maxsize=256) -def update_inline_imgs(content): - soup = BeautifulSoup(content, "html5lib") - imgs = soup.find_all("img") - if not imgs: - return content - for img in imgs: - if not img.attrs.get("src"): - continue - - img.attrs["src"] = _get_file_url(img.attrs["src"], 720, Kind.ATTACHMENT) - - return soup.find("body").decode_contents() - - -@filters.app_template_filter() -def get_video_url(url): - if isinstance(url, list): - for link in url: - if link.get("mediaType", "").startswith("video/"): - return _get_file_url(link.get("href"), None, Kind.ATTACHMENT) - else: - return _get_file_url(url, None, Kind.ATTACHMENT) - 
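- # Hypothetical Jinja usage of the media filters above (illustrative names, not lifted from this repo's templates): - # {{ actor.icon.url | get_actor_icon_url(50) }} serves the cached 50px copy from GridFS, and - # {{ attachment.url | get_attachment_url(720) }} the 720px thumbnail; on a cache miss, _get_file_url falls back to the /p/ proxy route.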
-@filters.app_template_filter() -def get_og_image_url(url, size=100): - try: - return _get_file_url(url, size, Kind.OG_IMAGE) - except Exception: - return "" - - -@filters.app_template_filter() -def remove_mongo_id(dat): - if isinstance(dat, list): - return [remove_mongo_id(item) for item in dat] - if "_id" in dat: - dat["_id"] = str(dat["_id"]) - for k, v in dat.items(): - if isinstance(v, dict): - dat[k] = remove_mongo_id(dat[k]) - return dat - - -@filters.app_template_filter() -def get_video_link(data): - if isinstance(data, list): - for link in data: - if link.get("mimeType", "").startswith("video/"): - return link.get("href") - elif isinstance(data, str): - return data - return None - - -@filters.app_template_filter() -def get_text(data): - """Return the first of 'content' or 'name', or an empty string.""" - for _t in ("content", "name"): - if _t in data: - return data[_t] - return "" - - -@filters.app_template_filter() -def has_type(doc, _types): - for _type in _to_list(_types): - if _type in _to_list(doc["type"]): - return True - return False - - -@filters.app_template_filter() -def has_actor_type(doc): - # FIXME(tsileo): skipping the last one "Question", because Mastodon sends question results as an update coming from - # the question... Does Pleroma do that too? - for t in ap.ACTOR_TYPES[:-1]: - if has_type(doc, t.value): - return True - return False - - -@lru_cache(maxsize=256) -def _get_inlined_imgs(content): - imgs = [] - if not content: - return imgs - - soup = BeautifulSoup(content, "html5lib") - for img in soup.find_all("img"): - src = img.attrs.get("src") - if src: - imgs.append(src) - - return imgs - - -@filters.app_template_filter() -def iter_note_attachments(note): - attachments = note.get("attachment", []) - imgs = _get_inlined_imgs(note.get("content")) - return [a for a in attachments if a.get("url") not in imgs] - - -@filters.app_template_filter() -def not_only_imgs(attachment): - for a in attachment: - if isinstance(a, dict) and not _is_img(a["url"]): - return True - if isinstance(a, str) and not _is_img(a): - return True - return False - - -@filters.app_template_filter() -def is_img(filename): - return _is_img(filename) diff --git a/utils/webmentions.py b/utils/webmentions.py deleted file mode 100644 index 72fae7d..0000000 --- a/utils/webmentions.py +++ /dev/null @@ -1,54 +0,0 @@ -import logging -from typing import Optional -from urllib.parse import urlparse - -import requests -from bs4 import BeautifulSoup -from little_boxes.urlutils import is_url_valid - -logger = logging.getLogger(__name__) - - -def _make_abs(url: Optional[str], parent: str) -> Optional[str]: - if url is None: - return None - - if url.startswith("http"): - return url - - return ( - urlparse(parent)._replace(path=url, params="", query="", fragment="").geturl() - ) - - -def _discover_webmention_endpoint(url: str) -> Optional[str]: - try: - resp = requests.get(url, timeout=3) - except Exception: - return None - - for k, v in resp.links.items(): - if "webmention" in k: - return _make_abs(resp.links[k].get("url"), url) - - soup = BeautifulSoup(resp.text, "html5lib") - wlinks = soup.find_all(["link", "a"], attrs={"rel": "webmention"}) - for wlink in wlinks: - if "href" in wlink.attrs: - return _make_abs(wlink.attrs["href"], url) - - return None - - -def discover_webmention_endpoint(url: str) -> Optional[str]: - """Discover the Webmention endpoint of a given URL, if any. - - Passes all the tests at https://webmention.rocks!
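- - Relative endpoint URLs (from the Link header or <link>/<a> elements) are resolved against the page URL before being returned.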
- - """ - wurl = _discover_webmention_endpoint(url) - if wurl is None: - return None - if not is_url_valid(wurl): - return None - return wurl
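A minimal sketch of how the deleted webmention helper was typically driven (the URLs are hypothetical, and the POST step follows the Webmention spec rather than anything in this module):

import requests

from utils.webmentions import discover_webmention_endpoint

source = "https://myblog.example/note/1"  # hypothetical local permalink
target = "https://example.com/some-post"  # hypothetical page being mentioned

endpoint = discover_webmention_endpoint(target)
if endpoint:
    # A Webmention is a form-encoded POST of source + target to the discovered endpoint
    requests.post(endpoint, data={"source": source, "target": target}, timeout=3)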