Merge branch 'develop' of git.pleroma.social:pleroma/pleroma into pleroma-feature/akkoma-prune-old-posts

commit cc42b50c5b
Lain Soykaf, 2024-05-28 16:51:19 +04:00
173 changed files with 4361 additions and 745 deletions


@ -1,8 +1,8 @@
image: git.pleroma.social:5050/pleroma/pleroma/ci-base
image: git.pleroma.social:5050/pleroma/pleroma/ci-base:elixir-1.13.4-otp-24
variables: &global_variables
# Only used for the release
ELIXIR_VER: 1.12.3
ELIXIR_VER: 1.13.4
POSTGRES_DB: pleroma_test
POSTGRES_USER: postgres
POSTGRES_PASSWORD: postgres
@ -72,7 +72,7 @@ check-changelog:
tags:
- amd64
build-1.12.3:
build-1.13.4:
extends:
- .build_changes_policy
- .using-ci-base
@ -85,7 +85,7 @@ build-1.15.7-otp-25:
- .build_changes_policy
- .using-ci-base
stage: build
image: git.pleroma.social:5050/pleroma/pleroma/ci-base:elixir-1.15
image: git.pleroma.social:5050/pleroma/pleroma/ci-base:elixir-1.15-otp25
allow_failure: true
script:
- mix compile --force


@ -1,7 +1,7 @@
ARG ELIXIR_IMG=hexpm/elixir
ARG ELIXIR_VER=1.12.3
ARG ERLANG_VER=24.2.1
ARG ALPINE_VER=3.17.0
ARG ELIXIR_VER=1.13.4
ARG ERLANG_VER=24.3.4.15
ARG ALPINE_VER=3.17.5
FROM ${ELIXIR_IMG}:${ELIXIR_VER}-erlang-${ERLANG_VER}-alpine-${ALPINE_VER} as build


@ -0,0 +1 @@
HTTP Security: By default, don't allow unsafe-eval. The setting needs to be changed to allow Flash emulation.

changelog.d/3907.skip (new, empty file)


@ -0,0 +1 @@
Uploader: Add support for uploading attachments using IPFS


@ -0,0 +1 @@
Add NSFW-detecting MRF


@ -0,0 +1 @@
Add DNSRBL MRF


@ -0,0 +1 @@
Add Anti-mention Spam MRF backported from Rebased



@ -0,0 +1 @@
HTTPSignaturePlug: Add :authorized_fetch_mode_exceptions configuration


@ -0,0 +1 @@
Add an option to reject certain domains when authorized fetch is enabled.


@ -0,0 +1 @@
Elixir 1.13 is the minimum required version.


@ -0,0 +1 @@
Implement `/api/v1/accounts/familiar_followers`


@ -0,0 +1 @@
Fix webfinger spoofing.


@ -0,0 +1 @@
Add instance rules


@ -0,0 +1 @@
Logger metadata is now attached to some logs to help with troubleshooting and analysis


@ -0,0 +1 @@
The query for marking notifications as read has been simplified


@ -0,0 +1 @@
Add new parameters to /api/v2/instance: configuration[accounts][max_pinned_statuses] and configuration[statuses][characters_reserved_per_url]


@ -0,0 +1 @@
Ensure MediaProxy HTTP requests obey all the defined connection settings


@ -0,0 +1 @@
Startup detection for configured MRF modules that are missing or incorrectly defined


@ -0,0 +1 @@
Oban queues have been refactored to simplify the queue design

changelog.d/pools.change (new file)

@ -0,0 +1 @@
HTTP connection pool adjustments


@ -0,0 +1 @@
Update the documentation for configuring Prometheus metrics.


@ -0,0 +1 @@
PromEx documentation


@ -0,0 +1 @@
Add Qdrant/OpenAI embedding search


@ -0,0 +1 @@
pleroma_ctl: Use realpath(1) instead of readlink(1)


@ -0,0 +1 @@
A 422 error is returned when attempting to reply to a deleted status


@ -0,0 +1 @@
Parsing of RichMedia TTLs for Amazon URLs when query parameters are nil


@ -0,0 +1 @@
Monitoring of search backend health to control the processing of jobs in the search indexing Oban queue


@ -0,0 +1 @@
Display reposted replies with exclude_replies: true


@ -0,0 +1 @@
Add "status" notification type


@ -0,0 +1 @@
Support honk-style attachment summaries as alt-text.


@ -0,0 +1 @@
Web Push notifications are no longer generated for muted/blocked threads and users.


@ -0,0 +1 @@
Fix validate_webfinger when running a different domain for Webfinger


@ -0,0 +1,8 @@
FROM elixir:1.13.4-otp-24
# Single RUN statement, otherwise intermediate images are created
# https://docs.docker.com/develop/develop-images/dockerfile_best-practices/#run
RUN apt-get update &&\
apt-get install -y libmagic-dev cmake libimage-exiftool-perl ffmpeg &&\
mix local.hex --force &&\
mix local.rebar --force


@ -0,0 +1 @@
docker buildx build --platform linux/amd64,linux/arm64 -t git.pleroma.social:5050/pleroma/pleroma/ci-base:elixir-1.13.4-otp-24 --push .


@ -1 +1 @@
docker buildx build --platform linux/amd64 -t git.pleroma.social:5050/pleroma/pleroma/ci-base:elixir-1.15-otp25 --push .
docker buildx build --platform linux/amd64,linux/arm64 -t git.pleroma.social:5050/pleroma/pleroma/ci-base:elixir-1.15-otp25 --push .


@ -82,6 +82,10 @@
# region: "us-east-1", # may be required for Amazon AWS
scheme: "https://"
config :pleroma, Pleroma.Uploaders.IPFS,
post_gateway_url: nil,
get_gateway_url: nil
config :pleroma, :emoji,
shortcode_globs: ["/emoji/custom/**/*.png"],
pack_extensions: [".png", ".gif"],
@ -131,13 +135,13 @@
config :logger, :console,
level: :debug,
format: "\n$time $metadata[$level] $message\n",
metadata: [:request_id]
metadata: [:actor, :path, :type, :user]
config :logger, :ex_syslogger,
level: :debug,
ident: "pleroma",
format: "$metadata[$level] $message",
metadata: [:request_id]
metadata: [:actor, :path, :type, :user]
config :mime, :types, %{
"application/xml" => ["xml"],
@ -188,6 +192,7 @@
allow_relay: true,
public: true,
quarantined_instances: [],
rejected_instances: [],
static_dir: "instance/static/",
allowed_post_formats: [
"text/plain",
@ -406,11 +411,23 @@
accept: [],
reject: []
config :pleroma, :mrf_dnsrbl,
nameserver: "127.0.0.1",
port: 53,
zone: "bl.pleroma.com"
# threshold of 7 days
config :pleroma, :mrf_object_age,
threshold: 604_800,
actions: [:delist, :strip_followers]
config :pleroma, :mrf_nsfw_api,
url: "http://127.0.0.1:5000/",
threshold: 0.7,
mark_sensitive: true,
unlist: false,
reject: false
config :pleroma, :mrf_follow_bot, follower_nickname: nil
config :pleroma, :mrf_inline_quote, template: "<bdi>RT:</bdi> {url}"
@ -419,6 +436,8 @@
mention_parent: true,
mention_quoted: true
config :pleroma, :mrf_antimentionspam, user_age_limit: 30_000
config :pleroma, :rich_media,
enabled: true,
ignore_hosts: [],
@ -501,7 +520,8 @@
sts: false,
sts_max_age: 31_536_000,
ct_max_age: 2_592_000,
referrer_policy: "same-origin"
referrer_policy: "same-origin",
allow_unsafe_eval: false
config :cors_plug,
max_age: 86_400,
@ -563,24 +583,14 @@
log: false,
queues: [
activity_expiration: 10,
token_expiration: 5,
filter_expiration: 1,
backup: 1,
federator_incoming: 5,
federator_outgoing: 5,
ingestion_queue: 50,
web_push: 50,
mailer: 10,
transmogrifier: 20,
scheduled_activities: 10,
poll_notifications: 10,
background: 5,
remote_fetcher: 2,
attachments_cleanup: 1,
new_users_digest: 1,
mute_expire: 5,
search_indexing: 10,
rich_media_expiration: 2
search_indexing: [limit: 10, paused: true],
slow: 1
],
plugins: [Oban.Plugins.Pruner],
crontab: [
@ -818,22 +828,27 @@
config :pleroma, :pools,
federation: [
size: 50,
max_waiting: 10,
size: 75,
max_waiting: 20,
recv_timeout: 10_000
],
media: [
size: 50,
size: 75,
max_waiting: 20,
recv_timeout: 15_000
],
rich_media: [
size: 25,
max_waiting: 20,
recv_timeout: 15_000
],
upload: [
size: 25,
max_waiting: 5,
max_waiting: 20,
recv_timeout: 15_000
],
default: [
size: 10,
size: 50,
max_waiting: 2,
recv_timeout: 5_000
]
@ -847,6 +862,10 @@
max_connections: 50,
timeout: 150_000
],
rich_media: [
max_connections: 50,
timeout: 150_000
],
upload: [
max_connections: 25,
timeout: 300_000
@ -892,8 +911,6 @@
process_chunk_size: 100
config :pleroma, ConcurrentLimiter, [
{Pleroma.Web.RichMedia.Helpers, [max_running: 5, max_waiting: 5]},
{Pleroma.Web.ActivityPub.MRF.MediaProxyWarmingPolicy, [max_running: 5, max_waiting: 5]},
{Pleroma.Search, [max_running: 30, max_waiting: 50]}
]
@ -915,6 +932,19 @@
config :pleroma, Pleroma.Uploaders.Uploader, timeout: 30_000
config :pleroma, Pleroma.Search.QdrantSearch,
qdrant_url: "http://127.0.0.1:6333/",
qdrant_api_key: "",
openai_url: "http://127.0.0.1:11345",
# The healthcheck url has to be set to nil when used with the real openai
# API, as it doesn't have a healthcheck endpoint.
openai_healthcheck_url: "http://127.0.0.1:11345/health",
openai_model: "snowflake/snowflake-arctic-embed-xs",
openai_api_key: "",
qdrant_index_configuration: %{
vectors: %{size: 384, distance: "Cosine"}
}
# Import environment specific config. This must remain at the bottom
# of this file so it overrides the configuration defined above.
import_config "#{Mix.env()}.exs"


@ -136,6 +136,31 @@
}
]
},
%{
group: :pleroma,
key: Pleroma.Uploaders.IPFS,
type: :group,
description: "IPFS uploader-related settings",
children: [
%{
key: :get_gateway_url,
type: :string,
description: "GET Gateway URL",
suggestions: [
"https://ipfs.mydomain.com/{CID}",
"https://{CID}.ipfs.mydomain.com/"
]
},
%{
key: :post_gateway_url,
type: :string,
description: "POST Gateway URL",
suggestions: [
"http://localhost:5001/"
]
}
]
},
%{
group: :pleroma,
key: Pleroma.Uploaders.S3,
@ -749,6 +774,18 @@
{"*.quarantined.com", "Reason"}
]
},
%{
key: :rejected_instances,
type: {:list, :tuple},
key_placeholder: "instance",
value_placeholder: "reason",
description:
"List of ActivityPub instances to reject requests from if authorized_fetch_mode is enabled",
suggestions: [
{"rejected.com", "Reason"},
{"*.rejected.com", "Reason"}
]
},
%{
key: :static_dir,
type: :string,
@ -1791,6 +1828,12 @@
type: :boolean,
description: "Require HTTP signatures for AP fetches"
},
%{
key: :authorized_fetch_mode_exceptions,
type: {:list, :string},
description:
"List of IPs (CIDR format accepted) to exempt from HTTP Signatures requirement (for example to allow debugging, you shouldn't otherwise need this)"
},
%{
key: :note_replies_output_limit,
type: :integer,


@ -35,8 +35,8 @@
# configured to run both http and https servers on
# different ports.
# Do not include metadata nor timestamps in development logs
config :logger, :console, format: "[$level] $message\n"
# Do not include timestamps in development logs
config :logger, :console, format: "$metadata[$level] $message\n"
# Set a higher stacktrace during development. Avoid configuring such
# in production as building large stacktraces may be expensive.


@ -153,6 +153,12 @@
config :pleroma, Pleroma.Upload, config_impl: Pleroma.UnstubbedConfigMock
config :pleroma, Pleroma.ScheduledActivity, config_impl: Pleroma.UnstubbedConfigMock
config :pleroma, Pleroma.Web.RichMedia.Helpers, config_impl: Pleroma.StaticStubbedConfigMock
config :pleroma, Pleroma.Uploaders.IPFS, config_impl: Pleroma.UnstubbedConfigMock
config :pleroma, Pleroma.Web.Plugs.HTTPSecurityPlug, config_impl: Pleroma.StaticStubbedConfigMock
config :pleroma, Pleroma.Web.Plugs.HTTPSignaturePlug, config_impl: Pleroma.StaticStubbedConfigMock
config :pleroma, Pleroma.Web.Plugs.HTTPSignaturePlug,
http_signatures_impl: Pleroma.StubbedHTTPSignaturesMock
peer_module =
if String.to_integer(System.otp_release()) >= 25 do


@ -41,6 +41,7 @@ To add configuration to your config file, you can copy it from the base config.
* `allow_relay`: Permits remote instances to subscribe to all public posts of your instance. This may increase the visibility of your instance.
* `public`: Makes the client API in authenticated mode-only except for user-profiles. Useful for disabling the Local Timeline and The Whole Known Network. Note that there is a dependent setting restricting or allowing unauthenticated access to specific resources, see `restrict_unauthenticated` for more details.
* `quarantined_instances`: ActivityPub instances where private (DMs, followers-only) activities will not be sent.
* `rejected_instances`: ActivityPub instances to reject requests from if authorized_fetch_mode is enabled (see the example below).
* `allowed_post_formats`: MIME-type list of formats allowed to be posted (transformed into HTML).
* `extended_nickname_format`: Set to `true` to use extended local nicknames format (allows underscores/dashes). This will break federation with
older software for these nicknames.
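An illustrative sketch of the `quarantined_instances` and `rejected_instances` lists above, using the `{host, reason}` tuple format suggested in this commit's `config/description.exs` (hostnames and reasons are placeholders):

```elixir
config :pleroma, :instance,
  # Private activities (DMs, followers-only) are not sent to these hosts
  quarantined_instances: [{"*.quarantined.example", "Reason"}],
  # Requests from these hosts are rejected when authorized_fetch_mode is enabled
  rejected_instances: [{"rejected.example", "Reason"}]
```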
@ -284,6 +285,7 @@ Notes:
* `deny_follow_blocked`: Whether to disallow following an account that has blocked the user in question
* `sign_object_fetches`: Sign object fetches with HTTP signatures
* `authorized_fetch_mode`: Require HTTP signatures for AP fetches
* `authorized_fetch_mode_exceptions`: List of IPs (CIDR format accepted) to exempt from HTTP Signatures requirement (for example to allow debugging, you shouldn't otherwise need this)
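A minimal sketch of this option together with `authorized_fetch_mode`, assuming the keys live under the `:activitypub` group as in `config/config.exs` (the CIDR range is a placeholder):

```elixir
config :pleroma, :activitypub,
  authorized_fetch_mode: true,
  # Requests from these IPs are exempt from the HTTP Signature requirement
  authorized_fetch_mode_exceptions: ["10.0.0.0/24"]
```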
## Pleroma.User
@ -472,6 +474,7 @@ This will make Pleroma listen on `127.0.0.1` port `8080` and generate urls start
* ``ct_max_age``: The maximum age for the `Expect-CT` header if sent.
* ``referrer_policy``: The referrer policy to use, either `"same-origin"` or `"no-referrer"`.
* ``report_uri``: Adds the specified url to `report-uri` and `report-to` group in CSP header.
* `allow_unsafe_eval`: Adds `wasm-unsafe-eval` to the CSP header. Needed for some non-essential frontend features like Flash emulation.
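As a hedged sketch, using key names from the `:http_security` defaults touched in this commit's `config.exs`, enabling Flash emulation support would look roughly like:

```elixir
config :pleroma, :http_security,
  referrer_policy: "same-origin",
  # Adds wasm-unsafe-eval to the CSP header, needed for Flash emulation
  allow_unsafe_eval: true
```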
### Pleroma.Web.Plugs.RemoteIp
@ -661,6 +664,19 @@ config :ex_aws, :s3,
host: "s3.eu-central-1.amazonaws.com"
```
#### Pleroma.Uploaders.IPFS
* `post_gateway_url`: URL with port of POST Gateway (unauthenticated)
* `get_gateway_url`: URL of public GET Gateway
Example:
```elixir
config :pleroma, Pleroma.Uploaders.IPFS,
post_gateway_url: "http://localhost:5001",
get_gateway_url: "http://{CID}.ipfs.mydomain.com"
```
### Upload filters
#### Pleroma.Upload.Filter.AnonymizeFilename


@ -10,6 +10,30 @@ To use built-in search that has no external dependencies, set the search module
While it has no external dependencies, it has problems with performance and relevancy.
## QdrantSearch
This uses the vector search engine [Qdrant](https://qdrant.tech) to search the posts in a vector space. This needs a way to generate embeddings and uses the [OpenAI API](https://platform.openai.com/docs/guides/embeddings/what-are-embeddings). This is implemented by several projects besides OpenAI itself, including the Python-based fastembed-server found in `supplemental/search/fastembed-api`.
The default settings will support a setup where both the fastembed server and Qdrant run on the same system as pleroma. To use it, set the search provider and run the fastembed server, see the README in `supplemental/search/fastembed-api`:
> config :pleroma, Pleroma.Search, module: Pleroma.Search.QdrantSearch
Then, start the Qdrant server, see [here](https://qdrant.tech/documentation/quick-start/) for instructions.
You will also need to create the Qdrant index once by running `mix pleroma.search.indexer create_index`. Running `mix pleroma.search.indexer index` will retroactively index the last 100_000 activities.
### Indexing and model options
To see the available configuration options, check out the QdrantSearch section in `config/config.exs`.
The default indexing options work for the default model (`snowflake-arctic-embed-xs`). To optimize for a low memory footprint, adjust the index configuration as described in the [Qdrant docs](https://qdrant.tech/documentation/guides/optimize/). See also [this blog post](https://qdrant.tech/articles/memory-consumption/) that goes into detail.
Different embedding models will need different vector size settings. You can see a list of the models supported by the fastembed server [here](https://qdrant.github.io/fastembed/examples/Supported_Models), including their vector dimensions. These vector dimensions need to be set in the `qdrant_index_configuration`.
E.g., if you want to use `sentence-transformers/all-MiniLM-L6-v2` as a model, you will not need to adjust things, because it and `snowflake-arctic-embed-xs` are both 384-dimensional models. If you want to use `snowflake/snowflake-arctic-embed-l`, you will need to adjust the `size` parameter in the `qdrant_index_configuration` to 1024, as it has a dimension of 1024.
When using a different model, you will need to drop the index and recreate it (`mix pleroma.search.indexer drop_index` and `mix pleroma.search.indexer create_index`), as the different embeddings are not compatible with each other.
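For example, a sketch of the settings to change for the larger model mentioned above, using the `Pleroma.Search.QdrantSearch` keys added to `config.exs` in this commit:

```elixir
config :pleroma, Pleroma.Search.QdrantSearch,
  openai_model: "snowflake/snowflake-arctic-embed-l",
  # snowflake-arctic-embed-l produces 1024-dimensional embeddings
  qdrant_index_configuration: %{
    vectors: %{size: 1024, distance: "Cosine"}
  }
```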
## Meilisearch
Note that it's quite a bit more memory hungry than PostgreSQL (around 4-5G for ~1.2 million


@ -1751,3 +1751,53 @@ Note that this differs from the Mastodon API variant: Mastodon API only returns
```json
{}
```
## `GET /api/v1/pleroma/admin/rules`
### List rules
- Response: JSON, list of rules
```json
[
{
"id": "1",
"priority": 1,
"text": "There are no rules",
"hint": null
}
]
```
## `POST /api/v1/pleroma/admin/rules`
### Create a rule
- Params:
- `text`: string, required, rule content
- `hint`: string, optional, rule description
- `priority`: integer, optional, rule ordering priority
- Response: JSON, a single rule
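For illustration, a successful create presumably returns the rule in the same shape as the entries of the list response above:

```json
{
  "id": "1",
  "priority": 1,
  "text": "There are no rules",
  "hint": null
}
```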
## `PATCH /api/v1/pleroma/admin/rules/:id`
### Update a rule
- Params:
- `text`: string, optional, rule content
- `hint`: string, optional, rule description
- `priority`: integer, optional, rule ordering priority
- Response: JSON, a single rule
## `DELETE /api/v1/pleroma/admin/rules/:id`
### Delete a rule
- Response: JSON, empty object
```json
{}
```


@ -295,9 +295,7 @@ See [Admin-API](admin_api.md)
"id": "9umDrYheeY451cQnEe",
"name": "Read later",
"emoji": "🕓",
"source": {
"emoji": "🕓"
}
"emoji_url": null
}
]
```


@ -1,44 +1,47 @@
# Prometheus Metrics
# Prometheus / OpenTelemetry Metrics
Pleroma includes support for exporting metrics via the [prometheus_ex](https://github.com/deadtrickster/prometheus.ex) library.
Pleroma includes support for exporting metrics via the [prom_ex](https://github.com/akoutmos/prom_ex) library.
The metrics are exposed by a dedicated webserver/port to improve privacy and security.
Config example:
```
config :prometheus, Pleroma.Web.Endpoint.MetricsExporter,
enabled: true,
auth: {:basic, "myusername", "mypassword"},
ip_whitelist: ["127.0.0.1"],
path: "/api/pleroma/app_metrics",
format: :text
```
* `enabled` (Pleroma extension) enables the endpoint
* `ip_whitelist` (Pleroma extension) could be used to restrict access only to specified IPs
* `auth` sets the authentication (`false` for no auth; configurable to HTTP Basic Auth, see [prometheus-plugs](https://github.com/deadtrickster/prometheus-plugs#exporting) documentation)
* `format` sets the output format (`:text` or `:protobuf`)
* `path` sets the path to app metrics page
## `/api/pleroma/app_metrics`
### Exports Prometheus application metrics
* Method: `GET`
* Authentication: not required by default (see configuration options above)
* Params: none
* Response: text
## Grafana
### Config example
The following is a config example to use with [Grafana](https://grafana.com)
config :pleroma, Pleroma.PromEx,
disabled: false,
manual_metrics_start_delay: :no_delay,
drop_metrics_groups: [],
grafana: [
host: System.get_env("GRAFANA_HOST", "http://localhost:3000"),
auth_token: System.get_env("GRAFANA_TOKEN"),
upload_dashboards_on_start: false,
folder_name: "BEAM",
annotate_app_lifecycle: true
],
metrics_server: [
port: 4021,
path: "/metrics",
protocol: :http,
pool_size: 5,
cowboy_opts: [],
auth_strategy: :none
],
datasource: "Prometheus"
```
- job_name: 'beam'
metrics_path: /api/pleroma/app_metrics
scheme: https
PromEx supports the ability to automatically publish dashboards to your Grafana server as well as register Annotations. If you do not wish to configure this capability you must generate the dashboard JSON files and import them directly. You can find the mix commands in the upstream [documentation](https://hexdocs.pm/prom_ex/Mix.Tasks.PromEx.Dashboard.Export.html). You can find the list of modules enabled in Pleroma for which you should generate dashboards by examining the contents of the `lib/pleroma/prom_ex.ex` module.
## prometheus.yml
The following is a bare minimum config example to use with [Prometheus](https://prometheus.io) or Prometheus-compatible software like [VictoriaMetrics](https://victoriametrics.com).
```
global:
scrape_interval: 15s
scrape_configs:
- job_name: 'pleroma'
scheme: http
static_configs:
- targets: ['pleroma.soykaf.com']
- targets: ['pleroma.soykaf.com:4021']
```


@ -14,7 +14,7 @@ Note: This article is potentially outdated because at this time we may not have
- PostgreSQL 11.0 or newer (Ubuntu 16.04 only ships 9.5, so get a newer version from [here](https://www.postgresql.org/download/linux/ubuntu/))
- `postgresql-contrib` 11.0 or newer (same as above)
- Elixir 1.8 or newer ([do not install it from the Debian repository; install it from here!](https://elixir-lang.org/install.html#unix-and-unix-like), or install it with [asdf](https://github.com/asdf-vm/asdf) as the pleroma user)
- Elixir 1.13 or newer ([do not install it from the Debian repository; install it from here!](https://elixir-lang.org/install.html#unix-and-unix-like), or install it with [asdf](https://github.com/asdf-vm/asdf) as the pleroma user)
- `erlang-dev`
- `erlang-nox`
- `git`


@ -1,7 +1,7 @@
## Required dependencies
* PostgreSQL >=11.0
* Elixir >=1.11.0 <1.15
* Elixir >=1.13.0 <1.15
* Erlang OTP >=22.2.0 (supported: <27)
* git
* file / libmagic


@ -0,0 +1,15 @@
[Unit]
Description=NSFW API
After=docker.service
Requires=docker.service
[Service]
TimeoutStartSec=0
Restart=always
ExecStartPre=-/usr/bin/docker stop %n
ExecStartPre=-/usr/bin/docker rm %n
ExecStartPre=/usr/bin/docker pull eugencepoi/nsfw_api:latest
ExecStart=/usr/bin/docker run --rm -p 127.0.0.1:5000:5000/tcp --env PORT=5000 --name %n eugencepoi/nsfw_api:latest
[Install]
WantedBy=multi-user.target


@ -0,0 +1,80 @@
# Pleroma: A lightweight social networking server
# Copyright © 2017-2021 Pleroma Authors <https://pleroma.social/>
# SPDX-License-Identifier: AGPL-3.0-only
defmodule Mix.Tasks.Pleroma.Search.Indexer do
import Mix.Pleroma
import Ecto.Query
alias Pleroma.Workers.SearchIndexingWorker
def run(["create_index"]) do
start_pleroma()
with :ok <- Pleroma.Config.get([Pleroma.Search, :module]).create_index() do
IO.puts("Index created")
else
e -> IO.puts("Could not create index: #{inspect(e)}")
end
end
def run(["drop_index"]) do
start_pleroma()
with :ok <- Pleroma.Config.get([Pleroma.Search, :module]).drop_index() do
IO.puts("Index dropped")
else
e -> IO.puts("Could not drop index: #{inspect(e)}")
end
end
def run(["index" | options]) do
{options, [], []} =
OptionParser.parse(
options,
strict: [
limit: :integer
]
)
start_pleroma()
limit = Keyword.get(options, :limit, 100_000)
per_step = 1000
chunks = max(div(limit, per_step), 1)
1..chunks
|> Enum.each(fn step ->
q =
from(a in Pleroma.Activity,
limit: ^per_step,
offset: ^per_step * (^step - 1),
select: [:id],
order_by: [desc: :id]
)
{:ok, ids} =
Pleroma.Repo.transaction(fn ->
Pleroma.Repo.stream(q, timeout: :infinity)
|> Enum.map(fn a ->
a.id
end)
end)
IO.puts("Got #{length(ids)} activities, adding to indexer")
ids
|> Enum.chunk_every(100)
|> Enum.each(fn chunk ->
IO.puts("Adding #{length(chunk)} activities to indexing queue")
chunk
|> Enum.map(fn id ->
SearchIndexingWorker.new(%{"op" => "add_to_index", "activity" => id})
end)
|> Oban.insert_all()
end)
end)
end
end


@ -14,6 +14,7 @@ defmodule Pleroma.Application do
@name Mix.Project.config()[:name]
@version Mix.Project.config()[:version]
@repository Mix.Project.config()[:source_url]
@compile_env Mix.env()
def name, do: @name
def version, do: @version
@ -51,7 +52,11 @@ def start(_type, _args) do
Pleroma.HTML.compile_scrubbers()
Pleroma.Config.Oban.warn()
Config.DeprecationWarnings.warn()
if @compile_env != :test do
Pleroma.Web.Plugs.HTTPSecurityPlug.warn_if_disabled()
end
Pleroma.ApplicationRequirements.verify!()
load_custom_modules()
Pleroma.Docs.JSON.compile()
@ -109,7 +114,8 @@ def start(_type, _args) do
streamer_registry() ++
background_migrators() ++
shout_child(shout_enabled?()) ++
[Pleroma.Gopher.Server]
[Pleroma.Gopher.Server] ++
[Pleroma.Search.Healthcheck]
# See http://elixir-lang.org/docs/stable/elixir/Supervisor.html
# for other strategies and supported options
@ -162,7 +168,8 @@ defp cachex_children do
expiration: chat_message_id_idempotency_key_expiration(),
limit: 500_000
),
build_cachex("rel_me", limit: 2500)
build_cachex("rel_me", limit: 2500),
build_cachex("host_meta", default_ttl: :timer.minutes(120), limit: 5000)
]
end


@ -28,6 +28,7 @@ def verify! do
|> check_welcome_message_config!()
|> check_rum!()
|> check_repo_pool_size!()
|> check_mrfs()
|> handle_result()
end
@ -234,4 +235,25 @@ defp check_filter(filter, command_required) do
true
end
end
defp check_mrfs(:ok) do
mrfs = Config.get!([:mrf, :policies])
missing_mrfs =
Enum.reduce(mrfs, [], fn x, acc ->
if match?({:module, _}, Code.ensure_compiled(x)) do
acc
else
acc ++ [x]
end
end)
if Enum.empty?(missing_mrfs) do
:ok
else
{:error, "The following MRF modules are configured but missing: #{inspect(missing_mrfs)}"}
end
end
defp check_mrfs(result), do: result
end


@ -19,7 +19,8 @@ defmodule Pleroma.Constants do
"context_id",
"deleted_activity_id",
"pleroma_internal",
"generator"
"generator",
"rules"
]
)


@ -16,4 +16,15 @@ def parse_address(ip) when is_binary(ip) do
def parse_address(ip) do
:inet.parse_address(ip)
end
def parse_cidr(proxy) when is_binary(proxy) do
proxy =
cond do
"/" in String.codepoints(proxy) -> proxy
InetCidr.v4?(InetCidr.parse_address!(proxy)) -> proxy <> "/32"
InetCidr.v6?(InetCidr.parse_address!(proxy)) -> proxy <> "/128"
end
InetCidr.parse_cidr!(proxy, true)
end
end


@ -0,0 +1,4 @@
defmodule Pleroma.HTTPSignaturesAPI do
@callback validate_conn(conn :: Plug.Conn.t()) :: boolean
@callback signature_for_conn(conn :: Plug.Conn.t()) :: map
end


@ -73,6 +73,7 @@ def unread_notifications_count(%User{id: user_id}) do
pleroma:report
reblog
poll
status
}
def changeset(%Notification{} = notification, attrs) do
@ -280,15 +281,10 @@ def set_read_up_to(%{id: user_id} = user, id) do
select: n.id
)
{:ok, %{ids: {_, notification_ids}}} =
Multi.new()
|> Multi.update_all(:ids, query, set: [seen: true, updated_at: NaiveDateTime.utc_now()])
|> Marker.multi_set_last_read_id(user, "notifications")
|> Repo.transaction()
for_user_query(user)
|> where([n], n.id in ^notification_ids)
|> Repo.all()
end
@spec read_one(User.t(), String.t()) ::
@ -299,10 +295,6 @@ def read_one(%User{} = user, notification_id) do
|> Multi.update(:update, changeset(notification, %{seen: true}))
|> Marker.multi_set_last_read_id(user, "notifications")
|> Repo.transaction()
|> case do
{:ok, %{update: notification}} -> {:ok, notification}
{:error, :update, changeset, _} -> {:error, changeset}
end
end
end
@ -361,37 +353,38 @@ def dismiss(%{id: user_id} = _user, id) do
end
end
@spec create_notifications(Activity.t(), keyword()) :: {:ok, [Notification.t()] | []}
def create_notifications(activity, options \\ [])
@spec create_notifications(Activity.t()) :: {:ok, [Notification.t()] | []}
def create_notifications(activity)
def create_notifications(%Activity{data: %{"to" => _, "type" => "Create"}} = activity, options) do
def create_notifications(%Activity{data: %{"to" => _, "type" => "Create"}} = activity) do
object = Object.normalize(activity, fetch: false)
if object && object.data["type"] == "Answer" do
{:ok, []}
else
do_create_notifications(activity, options)
do_create_notifications(activity)
end
end
def create_notifications(%Activity{data: %{"type" => type}} = activity, options)
def create_notifications(%Activity{data: %{"type" => type}} = activity)
when type in ["Follow", "Like", "Announce", "Move", "EmojiReact", "Flag", "Update"] do
do_create_notifications(activity, options)
do_create_notifications(activity)
end
def create_notifications(_, _), do: {:ok, []}
def create_notifications(_), do: {:ok, []}
defp do_create_notifications(%Activity{} = activity, options) do
do_send = Keyword.get(options, :do_send, true)
defp do_create_notifications(%Activity{} = activity) do
enabled_receivers = get_notified_from_activity(activity)
{enabled_receivers, disabled_receivers} = get_notified_from_activity(activity)
potential_receivers = enabled_receivers ++ disabled_receivers
enabled_subscribers = get_notified_subscribers_from_activity(activity)
notifications =
Enum.map(potential_receivers, fn user ->
do_send = do_send && user in enabled_receivers
create_notification(activity, user, do_send: do_send)
end)
(Enum.map(enabled_receivers, fn user ->
create_notification(activity, user)
end) ++
Enum.map(enabled_subscribers -- enabled_receivers, fn user ->
create_notification(activity, user, type: "status")
end))
|> Enum.reject(&is_nil/1)
{:ok, notifications}
@ -450,7 +443,6 @@ defp type_from_activity_object(%{data: %{"type" => "Create"}} = activity) do
# TODO move to sql, too.
def create_notification(%Activity{} = activity, %User{} = user, opts \\ []) do
do_send = Keyword.get(opts, :do_send, true)
type = Keyword.get(opts, :type, type_from_activity(activity))
unless skip?(activity, user, opts) do
@ -465,11 +457,6 @@ def create_notification(%Activity{} = activity, %User{} = user, opts \\ []) do
|> Marker.multi_set_last_read_id(user, "notifications")
|> Repo.transaction()
if do_send do
Streamer.stream(["user", "user:notification"], notification)
Push.send(notification)
end
notification
end
end
@ -527,13 +514,28 @@ def get_notified_from_activity(%Activity{data: %{"type" => type}} = activity, lo
|> exclude_relationship_restricted_ap_ids(activity)
|> exclude_thread_muter_ap_ids(activity)
notification_enabled_users =
Enum.filter(potential_receivers, fn u -> u.ap_id in notification_enabled_ap_ids end)
{notification_enabled_users, potential_receivers -- notification_enabled_users}
end
def get_notified_from_activity(_, _local_only), do: {[], []}
def get_notified_from_activity(_, _local_only), do: []
def get_notified_subscribers_from_activity(activity, local_only \\ true)
def get_notified_subscribers_from_activity(
%Activity{data: %{"type" => "Create"}} = activity,
local_only
) do
notification_enabled_ap_ids =
[]
|> Utils.maybe_notify_subscribers(activity)
potential_receivers =
User.get_users_from_set(notification_enabled_ap_ids, local_only: local_only)
Enum.filter(potential_receivers, fn u -> u.ap_id in notification_enabled_ap_ids end)
end
def get_notified_subscribers_from_activity(_, _), do: []
# For some activities, only notify the author of the object
def get_potential_receiver_ap_ids(%{data: %{"type" => type, "object" => object_id}})
@ -576,7 +578,6 @@ def get_potential_receiver_ap_ids(activity) do
[]
|> Utils.maybe_notify_to_recipients(activity)
|> Utils.maybe_notify_mentioned_recipients(activity)
|> Utils.maybe_notify_subscribers(activity)
|> Utils.maybe_notify_followers(activity)
|> Enum.uniq()
end
@ -643,6 +644,7 @@ def skip?(activity, user, opts \\ [])
def skip?(%Activity{} = activity, %User{} = user, opts) do
[
:self,
:internal,
:invisible,
:block_from_strangers,
:recently_followed,
@ -662,6 +664,12 @@ def skip?(:self, %Activity{} = activity, %User{} = user, opts) do
end
end
def skip?(:internal, %Activity{} = activity, _user, _opts) do
actor = activity.data["actor"]
user = User.get_cached_by_ap_id(actor)
User.internal?(user)
end
def skip?(:invisible, %Activity{} = activity, _user, _opts) do
actor = activity.data["actor"]
user = User.get_cached_by_ap_id(actor)
@ -748,4 +756,12 @@ def mark_context_as_read(%User{id: id}, context) do
)
|> Repo.update_all(set: [seen: true])
end
@spec send(list(Notification.t())) :: :ok
def send(notifications) do
Enum.each(notifications, fn notification ->
Streamer.stream(["user", "user:notification"], notification)
Push.send(notification)
end)
end
end

lib/pleroma/rule.ex (new file, 68 lines)

@ -0,0 +1,68 @@
# Pleroma: A lightweight social networking server
# Copyright © 2017-2022 Pleroma Authors <https://pleroma.social/>
# SPDX-License-Identifier: AGPL-3.0-only
defmodule Pleroma.Rule do
use Ecto.Schema
import Ecto.Changeset
import Ecto.Query
alias Pleroma.Repo
alias Pleroma.Rule
schema "rules" do
field(:priority, :integer, default: 0)
field(:text, :string)
field(:hint, :string)
timestamps()
end
def changeset(%Rule{} = rule, params \\ %{}) do
rule
|> cast(params, [:priority, :text, :hint])
|> validate_required([:text])
end
def query do
Rule
|> order_by(asc: :priority)
|> order_by(asc: :id)
end
def get(ids) when is_list(ids) do
from(r in __MODULE__, where: r.id in ^ids)
|> Repo.all()
end
def get(id), do: Repo.get(__MODULE__, id)
def exists?(id) do
from(r in __MODULE__, where: r.id == ^id)
|> Repo.exists?()
end
def create(params) do
{:ok, rule} =
%Rule{}
|> changeset(params)
|> Repo.insert()
rule
end
def update(params, id) do
{:ok, rule} =
get(id)
|> changeset(params)
|> Repo.update()
rule
end
def delete(id) do
get(id)
|> Repo.delete()
end
end


@ -204,7 +204,7 @@ def due_activities(offset \\ 0) do
def job_query(scheduled_activity_id) do
from(j in Oban.Job,
where: j.queue == "scheduled_activities",
where: j.queue == "federator_outgoing",
where: fragment("args ->> 'activity_id' = ?::text", ^to_string(scheduled_activity_id))
)
end


@ -10,8 +10,12 @@ def remove_from_index(%Pleroma.Object{id: object_id}) do
end
def search(query, options) do
search_module = Pleroma.Config.get([Pleroma.Search, :module], Pleroma.Activity)
search_module = Pleroma.Config.get([Pleroma.Search, :module])
search_module.search(options[:for_user], query, options)
end
def healthcheck_endpoints do
search_module = Pleroma.Config.get([Pleroma.Search, :module])
search_module.healthcheck_endpoints
end
end


@ -48,6 +48,15 @@ def add_to_index(_activity), do: :ok
@impl true
def remove_from_index(_object), do: :ok
@impl true
def create_index, do: :ok
@impl true
def drop_index, do: :ok
@impl true
def healthcheck_endpoints, do: nil
def maybe_restrict_author(query, %User{} = author) do
Activity.Queries.by_author(query, author)
end


@ -0,0 +1,86 @@
# Pleroma: A lightweight social networking server
# Copyright © 2017-2024 Pleroma Authors <https://pleroma.social/>
# SPDX-License-Identifier: AGPL-3.0-only
defmodule Pleroma.Search.Healthcheck do
@doc """
Monitors health of search backend to control processing of events based on health and availability.
"""
use GenServer
require Logger
@queue :search_indexing
@tick :timer.seconds(5)
@timeout :timer.seconds(2)
def start_link(_) do
GenServer.start_link(__MODULE__, [], name: __MODULE__)
end
@impl true
def init(_) do
state = %{healthy: false}
{:ok, state, {:continue, :start}}
end
@impl true
def handle_continue(:start, state) do
tick()
{:noreply, state}
end
@impl true
def handle_info(:check, state) do
urls = Pleroma.Search.healthcheck_endpoints()
new_state =
if check(urls) do
Oban.resume_queue(queue: @queue)
Map.put(state, :healthy, true)
else
Oban.pause_queue(queue: @queue)
Map.put(state, :healthy, false)
end
maybe_log_state_change(state, new_state)
tick()
{:noreply, new_state}
end
@impl true
def handle_call(:state, _from, state) do
{:reply, state, state, :hibernate}
end
def state, do: GenServer.call(__MODULE__, :state)
def check([]), do: true
def check(urls) when is_list(urls) do
Enum.all?(
urls,
fn url ->
case Pleroma.HTTP.get(url, [], recv_timeout: @timeout) do
{:ok, %{status: 200}} -> true
_ -> false
end
end
)
end
def check(_), do: true
defp tick do
Process.send_after(self(), :check, @tick)
end
defp maybe_log_state_change(%{healthy: true}, %{healthy: false}) do
Logger.error("Pausing Oban queue #{@queue} due to search backend healthcheck failure")
end
defp maybe_log_state_change(%{healthy: false}, %{healthy: true}) do
Logger.info("Resuming Oban queue #{@queue} due to search backend healthcheck pass")
end
defp maybe_log_state_change(_, _), do: :ok
end


@ -10,6 +10,12 @@ defmodule Pleroma.Search.Meilisearch do
@behaviour Pleroma.Search.SearchBackend
@impl true
def create_index, do: :ok
@impl true
def drop_index, do: :ok
defp meili_headers do
private_key = Config.get([Pleroma.Search.Meilisearch, :private_key])
@ -178,4 +184,15 @@ def add_to_index(activity) do
def remove_from_index(object) do
meili_delete("/indexes/objects/documents/#{object.id}")
end
@impl true
def healthcheck_endpoints do
endpoint =
Config.get([Pleroma.Search.Meilisearch, :url])
|> URI.parse()
|> Map.put(:path, "/health")
|> URI.to_string()
[endpoint]
end
end


@ -0,0 +1,182 @@
defmodule Pleroma.Search.QdrantSearch do
@behaviour Pleroma.Search.SearchBackend
import Ecto.Query
alias Pleroma.Activity
alias Pleroma.Config.Getting, as: Config
alias __MODULE__.OpenAIClient
alias __MODULE__.QdrantClient
import Pleroma.Search.Meilisearch, only: [object_to_search_data: 1]
import Pleroma.Search.DatabaseSearch, only: [maybe_fetch: 3]
@impl true
def create_index do
payload = Config.get([Pleroma.Search.QdrantSearch, :qdrant_index_configuration])
with {:ok, %{status: 200}} <- QdrantClient.put("/collections/posts", payload) do
:ok
else
e -> {:error, e}
end
end
@impl true
def drop_index do
with {:ok, %{status: 200}} <- QdrantClient.delete("/collections/posts") do
:ok
else
e -> {:error, e}
end
end
def get_embedding(text) do
with {:ok, %{body: %{"data" => [%{"embedding" => embedding}]}}} <-
OpenAIClient.post("/v1/embeddings", %{
input: text,
model: Config.get([Pleroma.Search.QdrantSearch, :openai_model])
}) do
{:ok, embedding}
else
_ ->
{:error, "Failed to get embedding"}
end
end
defp actor_from_activity(%{data: %{"actor" => actor}}) do
actor
end
defp actor_from_activity(_), do: nil
defp build_index_payload(activity, embedding) do
actor = actor_from_activity(activity)
published_at = activity.data["published"]
%{
points: [
%{
id: activity.id |> FlakeId.from_string() |> Ecto.UUID.cast!(),
vector: embedding,
payload: %{actor: actor, published_at: published_at}
}
]
}
end
defp build_search_payload(embedding, options) do
base = %{
vector: embedding,
limit: options[:limit] || 20,
offset: options[:offset] || 0
}
if author = options[:author] do
Map.put(base, :filter, %{
must: [%{key: "actor", match: %{value: author.ap_id}}]
})
else
base
end
end
@impl true
def add_to_index(activity) do
# This will only index public or unlisted notes
maybe_search_data = object_to_search_data(activity.object)
if activity.data["type"] == "Create" and maybe_search_data do
with {:ok, embedding} <- get_embedding(maybe_search_data.content),
{:ok, %{status: 200}} <-
QdrantClient.put(
"/collections/posts/points",
build_index_payload(activity, embedding)
) do
:ok
else
e -> {:error, e}
end
else
:ok
end
end
@impl true
def remove_from_index(object) do
activity = Activity.get_by_object_ap_id_with_object(object.data["id"])
id = activity.id |> FlakeId.from_string() |> Ecto.UUID.cast!()
with {:ok, %{status: 200}} <-
QdrantClient.post("/collections/posts/points/delete", %{"points" => [id]}) do
:ok
else
e -> {:error, e}
end
end
@impl true
def search(user, original_query, options) do
query = "Represent this sentence for searching relevant passages: #{original_query}"
with {:ok, embedding} <- get_embedding(query),
{:ok, %{body: %{"result" => result}}} <-
QdrantClient.post(
"/collections/posts/points/search",
build_search_payload(embedding, options)
) do
ids =
Enum.map(result, fn %{"id" => id} ->
Ecto.UUID.dump!(id)
end)
from(a in Activity, where: a.id in ^ids)
|> Activity.with_preloaded_object()
|> Activity.restrict_deactivated_users()
|> Ecto.Query.order_by([a], fragment("array_position(?, ?)", ^ids, a.id))
|> Pleroma.Repo.all()
|> maybe_fetch(user, original_query)
else
_ ->
[]
end
end
@impl true
def healthcheck_endpoints do
qdrant_health =
Config.get([Pleroma.Search.QdrantSearch, :qdrant_url])
|> URI.parse()
|> Map.put(:path, "/healthz")
|> URI.to_string()
openai_health = Config.get([Pleroma.Search.QdrantSearch, :openai_healthcheck_url])
[qdrant_health, openai_health] |> Enum.filter(& &1)
end
end
defmodule Pleroma.Search.QdrantSearch.OpenAIClient do
use Tesla
alias Pleroma.Config.Getting, as: Config
plug(Tesla.Middleware.BaseUrl, Config.get([Pleroma.Search.QdrantSearch, :openai_url]))
plug(Tesla.Middleware.JSON)
plug(Tesla.Middleware.Headers, [
{"Authorization",
"Bearer #{Pleroma.Config.get([Pleroma.Search.QdrantSearch, :openai_api_key])}"}
])
end
defmodule Pleroma.Search.QdrantSearch.QdrantClient do
use Tesla
alias Pleroma.Config.Getting, as: Config
plug(Tesla.Middleware.BaseUrl, Config.get([Pleroma.Search.QdrantSearch, :qdrant_url]))
plug(Tesla.Middleware.JSON)
plug(Tesla.Middleware.Headers, [
{"api-key", Pleroma.Config.get([Pleroma.Search.QdrantSearch, :qdrant_api_key])}
])
end


@ -21,4 +21,22 @@ defmodule Pleroma.Search.SearchBackend do
from index.
"""
@callback remove_from_index(object :: Pleroma.Object.t()) :: :ok | {:error, any()}
@doc """
Create the index
"""
@callback create_index() :: :ok | {:error, any()}
@doc """
Drop the index
"""
@callback drop_index() :: :ok | {:error, any()}
@doc """
Healthcheck endpoints of search backend infrastructure to monitor for controlling
processing of jobs in the Oban queue.
A 200 response is considered healthy; any other response is considered unhealthy.
"""
@callback healthcheck_endpoints :: list() | nil
end


@ -44,8 +44,7 @@ defp remove_suffix(uri, [test | rest]) do
defp remove_suffix(uri, []), do: uri
def fetch_public_key(conn) do
with %{"keyId" => kid} <- HTTPSignatures.signature_for_conn(conn),
{:ok, actor_id} <- key_id_to_actor_id(kid),
with {:ok, actor_id} <- get_actor_id(conn),
{:ok, public_key} <- User.get_public_key_for_ap_id(actor_id) do
{:ok, public_key}
else
@ -55,8 +54,7 @@ def fetch_public_key(conn) do
end
def refetch_public_key(conn) do
with %{"keyId" => kid} <- HTTPSignatures.signature_for_conn(conn),
{:ok, actor_id} <- key_id_to_actor_id(kid),
with {:ok, actor_id} <- get_actor_id(conn),
{:ok, _user} <- ActivityPub.make_user_from_ap_id(actor_id),
{:ok, public_key} <- User.get_public_key_for_ap_id(actor_id) do
{:ok, public_key}
@ -66,6 +64,16 @@ def refetch_public_key(conn) do
end
end
def get_actor_id(conn) do
with %{"keyId" => kid} <- HTTPSignatures.signature_for_conn(conn),
{:ok, actor_id} <- key_id_to_actor_id(kid) do
{:ok, actor_id}
else
e ->
{:error, e}
end
end
def sign(%User{keys: keys} = user, headers) do
with {:ok, private_key, _} <- Keys.keys_from_pem(keys) do
HTTPSignatures.sign(private_key, user.ap_id <> "#main-key", headers)


@ -239,9 +239,13 @@ defp url_from_spec(%__MODULE__{name: name}, base_url, {:file, path}) do
""
end
if String.contains?(base_url, Pleroma.Uploaders.IPFS.placeholder()) do
String.replace(base_url, Pleroma.Uploaders.IPFS.placeholder(), path)
else
[base_url, path]
|> Path.join()
end
end
defp url_from_spec(_upload, _base_url, {:url, url}), do: url
@ -277,6 +281,9 @@ def base_url do
Path.join([upload_base_url, bucket_with_namespace])
end
Pleroma.Uploaders.IPFS ->
@config_impl.get([Pleroma.Uploaders.IPFS, :get_gateway_url])
_ ->
public_endpoint || upload_base_url || Pleroma.Web.Endpoint.url() <> "/media/"
end


@ -0,0 +1,77 @@
# Pleroma: A lightweight social networking server
# Copyright © 2017-2022 Pleroma Authors <https://pleroma.social/>
# SPDX-License-Identifier: AGPL-3.0-only
defmodule Pleroma.Uploaders.IPFS do
@behaviour Pleroma.Uploaders.Uploader
require Logger
alias Tesla.Multipart
@config_impl Application.compile_env(:pleroma, [__MODULE__, :config_impl], Pleroma.Config)
defp get_final_url(method) do
config = @config_impl.get([__MODULE__])
post_base_url = Keyword.get(config, :post_gateway_url)
Path.join([post_base_url, method])
end
def put_file_endpoint do
get_final_url("/api/v0/add")
end
def delete_file_endpoint do
get_final_url("/api/v0/files/rm")
end
@placeholder "{CID}"
def placeholder, do: @placeholder
@impl true
def get_file(file) do
b_url = Pleroma.Upload.base_url()
if String.contains?(b_url, @placeholder) do
{:ok, {:url, String.replace(b_url, @placeholder, URI.decode(file))}}
else
{:error, "IPFS Get URL doesn't contain 'cid' placeholder"}
end
end
@impl true
def put_file(%Pleroma.Upload{} = upload) do
mp =
Multipart.new()
|> Multipart.add_content_type_param("charset=utf-8")
|> Multipart.add_file(upload.tempfile)
case Pleroma.HTTP.post(put_file_endpoint(), mp, [], params: ["cid-version": "1"]) do
{:ok, ret} ->
case Jason.decode(ret.body) do
{:ok, ret} ->
if Map.has_key?(ret, "Hash") do
{:ok, {:file, ret["Hash"]}}
else
{:error, "JSON doesn't contain Hash key"}
end
error ->
Logger.error("#{__MODULE__}: #{inspect(error)}")
{:error, "JSON decode failed"}
end
error ->
Logger.error("#{__MODULE__}: #{inspect(error)}")
{:error, "IPFS Gateway upload failed"}
end
end
@impl true
def delete_file(file) do
case Pleroma.HTTP.post(delete_file_endpoint(), "", [], params: [arg: file]) do
{:ok, %{status: 204}} -> :ok
error -> {:error, inspect(error)}
end
end
end


@ -1404,6 +1404,40 @@ def get_friends_ids(%User{} = user, page \\ nil) do
|> Repo.all()
end
@spec get_familiar_followers_query(User.t(), User.t(), pos_integer() | nil) :: Ecto.Query.t()
def get_familiar_followers_query(%User{} = user, %User{} = current_user, nil) do
friends =
get_friends_query(current_user)
|> where([u], not u.hide_follows)
|> select([u], u.id)
User.Query.build(%{is_active: true})
|> where([u], u.id not in ^[user.id, current_user.id])
|> join(:inner, [u], r in FollowingRelationship,
as: :followers_relationships,
on: r.following_id == ^user.id and r.follower_id == u.id
)
|> where([followers_relationships: r], r.state == ^:follow_accept)
|> where([followers_relationships: r], r.follower_id in subquery(friends))
end
def get_familiar_followers_query(%User{} = user, %User{} = current_user, page) do
user
|> get_familiar_followers_query(current_user, nil)
|> User.Query.paginate(page, 20)
end
@spec get_familiar_followers_query(User.t(), User.t()) :: Ecto.Query.t()
def get_familiar_followers_query(%User{} = user, %User{} = current_user),
do: get_familiar_followers_query(user, current_user, nil)
@spec get_familiar_followers(User.t(), User.t(), pos_integer() | nil) :: {:ok, list(User.t())}
def get_familiar_followers(%User{} = user, %User{} = current_user, page \\ nil) do
user
|> get_familiar_followers_query(current_user, page)
|> Repo.all()
end
def increase_note_count(%User{} = user) do
User
|> where(id: ^user.id)


@ -200,7 +200,8 @@ defp insert_activity_with_expiration(data, local, recipients) do
end
def notify_and_stream(activity) do
Notification.create_notifications(activity)
{:ok, notifications} = Notification.create_notifications(activity)
Notification.send(notifications)
original_activity =
case activity do
@ -978,8 +979,9 @@ defp restrict_media(query, _), do: query
defp restrict_replies(query, %{exclude_replies: true}) do
from(
[_activity, object] in query,
where: fragment("?->>'inReplyTo' is null", object.data)
[activity, object] in query,
where:
fragment("?->>'inReplyTo' is null or ?->>'type' = 'Announce'", object.data, activity.data)
)
end
@ -1259,6 +1261,15 @@ defp restrict_quote_url(query, %{quote_url: quote_url}) do
defp restrict_quote_url(query, _), do: query
defp restrict_rule(query, %{rule_id: rule_id}) do
from(
activity in query,
where: fragment("(?)->'rules' \\? (?)", activity.data, ^rule_id)
)
end
defp restrict_rule(query, _), do: query
defp exclude_poll_votes(query, %{include_poll_votes: true}), do: query
defp exclude_poll_votes(query, _) do
@ -1421,6 +1432,7 @@ def fetch_activities_query(recipients, opts \\ %{}) do
|> restrict_instance(opts)
|> restrict_announce_object_actor(opts)
|> restrict_filtered(opts)
|> restrict_rule(opts)
|> restrict_quote_url(opts)
|> maybe_restrict_deactivated_users(opts)
|> exclude_poll_votes(opts)


@ -52,6 +52,7 @@ defmodule Pleroma.Web.ActivityPub.ActivityPubController do
when action in [:activity, :object]
)
plug(:log_inbox_metadata when action in [:inbox])
plug(:set_requester_reachable when action in [:inbox])
plug(:relay_active? when action in [:relay])
@ -521,6 +522,13 @@ defp set_requester_reachable(%Plug.Conn{} = conn, _) do
conn
end
defp log_inbox_metadata(%{params: %{"actor" => actor, "type" => type}} = conn, _) do
Logger.metadata(actor: actor, type: type)
conn
end
defp log_inbox_metadata(conn, _), do: conn
def upload_media(%{assigns: %{user: %User{} = user}} = conn, %{"file" => file} = data) do
with {:ok, object} <-
ActivityPub.upload(


@ -0,0 +1,87 @@
# Pleroma: A lightweight social networking server
# Copyright © 2017-2022 Pleroma Authors <https://pleroma.social/>
# SPDX-License-Identifier: AGPL-3.0-only
defmodule Pleroma.Web.ActivityPub.MRF.AntiMentionSpamPolicy do
alias Pleroma.Config
alias Pleroma.User
require Pleroma.Constants
@behaviour Pleroma.Web.ActivityPub.MRF.Policy
defp user_has_posted?(%User{} = u), do: u.note_count > 0
defp user_has_age?(%User{} = u) do
user_age_limit = Config.get([:mrf_antimentionspam, :user_age_limit], 30_000)
diff = NaiveDateTime.utc_now() |> NaiveDateTime.diff(u.inserted_at, :millisecond)
diff >= user_age_limit
end
defp good_reputation?(%User{} = u) do
user_has_age?(u) and user_has_posted?(u)
end
# copied from HellthreadPolicy
defp get_recipient_count(message) do
recipients = (message["to"] || []) ++ (message["cc"] || [])
follower_collection =
User.get_cached_by_ap_id(message["actor"] || message["attributedTo"]).follower_address
if Enum.member?(recipients, Pleroma.Constants.as_public()) do
recipients =
recipients
|> List.delete(Pleroma.Constants.as_public())
|> List.delete(follower_collection)
{:public, length(recipients)}
else
recipients =
recipients
|> List.delete(follower_collection)
{:not_public, length(recipients)}
end
end
defp object_has_recipients?(%{"object" => object} = activity) do
{_, object_count} = get_recipient_count(object)
{_, activity_count} = get_recipient_count(activity)
object_count + activity_count > 0
end
defp object_has_recipients?(object) do
{_, count} = get_recipient_count(object)
count > 0
end
@impl true
def filter(%{"type" => "Create", "actor" => actor} = activity) do
with {:ok, %User{local: false} = u} <- User.get_or_fetch_by_ap_id(actor),
{:has_mentions, true} <- {:has_mentions, object_has_recipients?(activity)},
{:good_reputation, true} <- {:good_reputation, good_reputation?(u)} do
{:ok, activity}
else
{:ok, %User{local: true}} ->
{:ok, activity}
{:has_mentions, false} ->
{:ok, activity}
{:good_reputation, false} ->
{:reject, "[AntiMentionSpamPolicy] User rejected"}
{:error, _} ->
{:reject, "[AntiMentionSpamPolicy] Failed to get or fetch user by ap_id"}
e ->
{:reject, "[AntiMentionSpamPolicy] Unhandled error #{inspect(e)}"}
end
end
# in all other cases, pass through
def filter(message), do: {:ok, message}
@impl true
def describe, do: {:ok, %{}}
end


@ -0,0 +1,142 @@
# Pleroma: A lightweight social networking server
# Copyright © 2017-2024 Pleroma Authors <https://pleroma.social/>
# SPDX-License-Identifier: AGPL-3.0-only
defmodule Pleroma.Web.ActivityPub.MRF.DNSRBLPolicy do
@moduledoc """
Dynamic activity filtering based on an RBL database
This MRF makes queries to a custom DNS server which will
respond with values indicating the classification of the domain
the activity originated from. This method has been widely used
in the email anti-spam industry for very fast reputation checks.
e.g., if the DNS response is 127.0.0.1 or empty, the domain is OK
Other values such as 127.0.0.2 may be used for specific classifications.
Information for why the host is blocked can be stored in a corresponding TXT record.
This method is fail-open, so if the queries fail the activities are accepted.
An example of software meant for this purpose is rbldnsd, which can be found
at http://www.corpit.ru/mjt/rbldnsd.html or mirrored at
https://git.pleroma.social/feld/rbldnsd
It is highly recommended that you run your own copy of rbldnsd and use an
external mechanism to sync/share the contents of the zone file. This is
important to keep the latency on the queries as low as possible and prevent
your DNS server from being attacked so it fails and content is permitted.
"""
@behaviour Pleroma.Web.ActivityPub.MRF.Policy
alias Pleroma.Config
require Logger
@query_retries 1
@query_timeout 500
@impl true
def filter(%{"actor" => actor} = object) do
actor_info = URI.parse(actor)
with {:ok, object} <- check_rbl(actor_info, object) do
{:ok, object}
else
_ -> {:reject, "[DNSRBLPolicy]"}
end
end
@impl true
def filter(object), do: {:ok, object}
@impl true
def describe do
mrf_dnsrbl =
Config.get(:mrf_dnsrbl)
|> Enum.into(%{})
{:ok, %{mrf_dnsrbl: mrf_dnsrbl}}
end
@impl true
def config_description do
%{
key: :mrf_dnsrbl,
related_policy: "Pleroma.Web.ActivityPub.MRF.DNSRBLPolicy",
label: "MRF DNSRBL",
description: "DNS RealTime Blackhole Policy",
children: [
%{
key: :nameserver,
type: {:string},
description: "DNSRBL Nameserver to Query (IP or hostname)",
suggestions: ["127.0.0.1"]
},
%{
key: :port,
type: {:string},
description: "Nameserver port",
suggestions: ["53"]
},
%{
key: :zone,
type: {:string},
description: "Root zone for querying",
suggestions: ["bl.pleroma.com"]
}
]
}
end
defp check_rbl(%{host: actor_host}, object) do
with false <- match?(^actor_host, Pleroma.Web.Endpoint.host()),
zone when not is_nil(zone) <- Keyword.get(Config.get([:mrf_dnsrbl]), :zone) do
query =
Enum.join([actor_host, zone], ".")
|> String.to_charlist()
rbl_response = rblquery(query)
if Enum.empty?(rbl_response) do
{:ok, object}
else
Task.start(fn ->
reason = rblquery(query, :txt) || "undefined"
Logger.warning(
"DNSRBL Rejected activity from #{actor_host} for reason: #{inspect(reason)}"
)
end)
:error
end
else
_ -> {:ok, object}
end
end
defp get_rblhost_ip(rblhost) do
case rblhost |> String.to_charlist() |> :inet_parse.address() do
{:ok, _} -> rblhost |> String.to_charlist() |> :inet_parse.address()
_ -> {:ok, rblhost |> String.to_charlist() |> :inet_res.lookup(:in, :a) |> Enum.random()}
end
end
defp rblquery(query, type \\ :a) do
config = Config.get([:mrf_dnsrbl])
case get_rblhost_ip(config[:nameserver]) do
{:ok, rblnsip} ->
:inet_res.lookup(query, :in, type,
nameservers: [{rblnsip, config[:port]}],
timeout: @query_timeout,
retry: @query_retries
)
_ ->
[]
end
end
end


@ -11,11 +11,6 @@ defmodule Pleroma.Web.ActivityPub.MRF.MediaProxyWarmingPolicy do
require Logger
@adapter_options [
pool: :media,
recv_timeout: 10_000
]
@impl true
def history_awareness, do: :auto
@ -27,17 +22,14 @@ defp prefetch(url) do
Logger.debug("Prefetching #{inspect(url)} as #{inspect(prefetch_url)}")
if Pleroma.Config.get(:env) == :test do
fetch(prefetch_url)
else
ConcurrentLimiter.limit(__MODULE__, fn ->
Task.start(fn -> fetch(prefetch_url) end)
end)
end
end
end
defp fetch(url), do: HTTP.get(url, [], @adapter_options)
defp fetch(url) do
http_client_opts = Pleroma.Config.get([:media_proxy, :proxy_opts, :http], pool: :media)
HTTP.get(url, [], http_client_opts)
end
defp preload(%{"object" => %{"attachment" => attachments}} = _message) do
Enum.each(attachments, fn


@ -0,0 +1,265 @@
# Pleroma: A lightweight social networking server
# Copyright © 2017-2021 Pleroma Authors <https://pleroma.social/>
# SPDX-License-Identifier: AGPL-3.0-only
defmodule Pleroma.Web.ActivityPub.MRF.NsfwApiPolicy do
@moduledoc """
Hide, delete, or mark sensitive NSFW content with artificial intelligence.
Requires a NSFW API server, configured like so:
config :pleroma, Pleroma.Web.ActivityPub.MRF.NsfwMRF,
url: "http://127.0.0.1:5000/",
threshold: 0.7,
mark_sensitive: true,
unlist: false,
reject: false
The NSFW API server must implement an HTTP endpoint like this:
curl http://localhost:5000/?url=https://fedi.com/images/001.jpg
Returning a response like this:
{"score": 0.314}
Where a score is 0-1, with `1` being definitely NSFW.
A good API server is here: https://github.com/EugenCepoi/nsfw_api
You can run it with Docker with a one-liner:
docker run -it -p 127.0.0.1:5000:5000/tcp --env PORT=5000 eugencepoi/nsfw_api:latest
Options:
- `url`: Base URL of the API server. Default: "http://127.0.0.1:5000/"
- `threshold`: Lowest score to take action on. Default: `0.7`
- `mark_sensitive`: Mark all detected NSFW content as sensitive? Default: `true`
- `unlist`: Unlist all detected NSFW content? Default: `false`
- `reject`: Reject all detected NSFW content (takes precedence)? Default: `false`
"""
alias Pleroma.Config
alias Pleroma.Constants
alias Pleroma.HTTP
alias Pleroma.User
require Logger
require Pleroma.Constants
@behaviour Pleroma.Web.ActivityPub.MRF.Policy
@policy :mrf_nsfw_api
def build_request_url(url) do
Config.get([@policy, :url])
|> URI.parse()
|> fix_path()
|> Map.put(:query, "url=#{url}")
|> URI.to_string()
end
def parse_url(url) do
request = build_request_url(url)
with {:ok, %Tesla.Env{body: body}} <- HTTP.get(request) do
Jason.decode(body)
else
error ->
Logger.warn("""
[NsfwApiPolicy]: The API server failed. Skipping.
#{inspect(error)}
""")
error
end
end
def check_url_nsfw(url) when is_binary(url) do
threshold = Config.get([@policy, :threshold])
case parse_url(url) do
{:ok, %{"score" => score}} when score >= threshold ->
{:nsfw, %{url: url, score: score, threshold: threshold}}
{:ok, %{"score" => score}} ->
{:sfw, %{url: url, score: score, threshold: threshold}}
_ ->
{:sfw, %{url: url, score: nil, threshold: threshold}}
end
end
def check_url_nsfw(%{"href" => url}) when is_binary(url) do
check_url_nsfw(url)
end
def check_url_nsfw(url) do
threshold = Config.get([@policy, :threshold])
{:sfw, %{url: url, score: nil, threshold: threshold}}
end
def check_attachment_nsfw(%{"url" => urls} = attachment) when is_list(urls) do
if Enum.all?(urls, &match?({:sfw, _}, check_url_nsfw(&1))) do
{:sfw, attachment}
else
{:nsfw, attachment}
end
end
def check_attachment_nsfw(%{"url" => url} = attachment) when is_binary(url) do
case check_url_nsfw(url) do
{:sfw, _} -> {:sfw, attachment}
{:nsfw, _} -> {:nsfw, attachment}
end
end
def check_attachment_nsfw(attachment), do: {:sfw, attachment}
def check_object_nsfw(%{"attachment" => attachments} = object) when is_list(attachments) do
if Enum.all?(attachments, &match?({:sfw, _}, check_attachment_nsfw(&1))) do
{:sfw, object}
else
{:nsfw, object}
end
end
def check_object_nsfw(%{"object" => %{} = child_object} = object) do
case check_object_nsfw(child_object) do
{:sfw, _} -> {:sfw, object}
{:nsfw, _} -> {:nsfw, object}
end
end
def check_object_nsfw(object), do: {:sfw, object}
@impl true
def filter(object) do
with {:sfw, object} <- check_object_nsfw(object) do
{:ok, object}
else
{:nsfw, _data} -> handle_nsfw(object)
_ -> {:reject, "NSFW: Attachment rejected"}
end
end
defp handle_nsfw(object) do
if Config.get([@policy, :reject]) do
{:reject, object}
else
{:ok,
object
|> maybe_unlist()
|> maybe_mark_sensitive()}
end
end
defp maybe_unlist(object) do
if Config.get([@policy, :unlist]) do
unlist(object)
else
object
end
end
defp maybe_mark_sensitive(object) do
if Config.get([@policy, :mark_sensitive]) do
mark_sensitive(object)
else
object
end
end
def unlist(%{"to" => to, "cc" => cc, "actor" => actor} = object) do
with %User{} = user <- User.get_cached_by_ap_id(actor) do
to =
[user.follower_address | to]
|> List.delete(Constants.as_public())
|> Enum.uniq()
cc =
[Constants.as_public() | cc]
|> List.delete(user.follower_address)
|> Enum.uniq()
object
|> Map.put("to", to)
|> Map.put("cc", cc)
else
_ -> raise "[NsfwApiPolicy]: Could not find user #{actor}"
end
end
def mark_sensitive(%{"object" => child_object} = object) when is_map(child_object) do
Map.put(object, "object", mark_sensitive(child_object))
end
def mark_sensitive(object) when is_map(object) do
tags = (object["tag"] || []) ++ ["nsfw"]
object
|> Map.put("tag", tags)
|> Map.put("sensitive", true)
end
# Hackney needs a trailing slash
defp fix_path(%URI{path: path} = uri) when is_binary(path) do
path = String.trim_trailing(path, "/") <> "/"
Map.put(uri, :path, path)
end
defp fix_path(%URI{path: nil} = uri), do: Map.put(uri, :path, "/")
@impl true
def describe do
options = %{
threshold: Config.get([@policy, :threshold]),
mark_sensitive: Config.get([@policy, :mark_sensitive]),
unlist: Config.get([@policy, :unlist]),
reject: Config.get([@policy, :reject])
}
{:ok, %{@policy => options}}
end
@impl true
def config_description do
%{
key: @policy,
related_policy: to_string(__MODULE__),
label: "NSFW API Policy",
description:
"Hide, delete, or mark sensitive NSFW content with artificial intelligence. Requires running an external API server.",
children: [
%{
key: :url,
type: :string,
description: "Base URL of the API server.",
suggestions: ["http://127.0.0.1:5000/"]
},
%{
key: :threshold,
type: :float,
description: "Lowest score to take action on. Between 0 and 1.",
suggestions: [0.7]
},
%{
key: :mark_sensitive,
type: :boolean,
description: "Mark sensitive all detected NSFW content?",
suggestions: [true]
},
%{
key: :unlist,
type: :boolean,
description: "Unlist sensitive all detected NSFW content?",
suggestions: [false]
},
%{
key: :reject,
type: :boolean,
description: "Reject sensitive all detected NSFW content (takes precedence)?",
suggestions: [false]
}
]
}
end
end
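Usage note: like any MRF policy, this module only runs once it is listed in config :pleroma, :mrf, policies: [...], alongside the :mrf_nsfw_api settings shown in the moduledoc. A rough iex sketch of the helpers defined above, with illustrative values and an API server assumed to be running locally:
# Illustrative only; scores and URLs are made up.
alias Pleroma.Web.ActivityPub.MRF.NsfwApiPolicy

NsfwApiPolicy.build_request_url("https://fedi.com/images/001.jpg")
# => "http://127.0.0.1:5000/?url=https://fedi.com/images/001.jpg"

NsfwApiPolicy.check_url_nsfw("https://fedi.com/images/001.jpg")
# => {:nsfw, %{url: "https://fedi.com/images/001.jpg", score: 0.93, threshold: 0.7}}
# or {:sfw, %{url: "https://fedi.com/images/001.jpg", score: 0.31, threshold: 0.7}}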

View file

@ -15,6 +15,7 @@ defmodule Pleroma.Web.ActivityPub.ObjectValidators.AttachmentValidator do
field(:type, :string, default: "Link")
field(:mediaType, ObjectValidators.MIME, default: "application/octet-stream")
field(:name, :string)
field(:summary, :string)
field(:blurhash, :string)
embeds_many :url, UrlObjectValidator, primary_key: false do
@ -44,7 +45,7 @@ def changeset(struct, data) do
|> fix_url()
struct
|> cast(data, [:id, :type, :mediaType, :name, :blurhash])
|> cast(data, [:id, :type, :mediaType, :name, :summary, :blurhash])
|> cast_embed(:url, with: &url_changeset/2, required: true)
|> validate_inclusion(:type, ~w[Link Document Audio Image Video])
|> validate_required([:type, :mediaType])

View file

@ -21,7 +21,6 @@ defmodule Pleroma.Web.ActivityPub.SideEffects do
alias Pleroma.Web.ActivityPub.Builder
alias Pleroma.Web.ActivityPub.Pipeline
alias Pleroma.Web.ActivityPub.Utils
alias Pleroma.Web.Push
alias Pleroma.Web.Streamer
alias Pleroma.Workers.PollWorker
@ -125,7 +124,7 @@ def handle(
nil
end
{:ok, notifications} = Notification.create_notifications(object, do_send: false)
{:ok, notifications} = Notification.create_notifications(object)
meta =
meta
@ -184,7 +183,11 @@ def handle(%{data: %{"type" => "Like"}} = object, meta) do
liked_object = Object.get_by_ap_id(object.data["object"])
Utils.add_like_to_object(object, liked_object)
Notification.create_notifications(object)
{:ok, notifications} = Notification.create_notifications(object)
meta =
meta
|> add_notifications(notifications)
{:ok, object, meta}
end
@ -202,7 +205,7 @@ def handle(%{data: %{"type" => "Like"}} = object, meta) do
def handle(%{data: %{"type" => "Create"}} = activity, meta) do
with {:ok, object, meta} <- handle_object_creation(meta[:object_data], activity, meta),
%User{} = user <- User.get_cached_by_ap_id(activity.data["actor"]) do
{:ok, notifications} = Notification.create_notifications(activity, do_send: false)
{:ok, notifications} = Notification.create_notifications(activity)
{:ok, _user} = ActivityPub.increase_note_count_if_public(user, object)
{:ok, _user} = ActivityPub.update_last_status_at_if_public(user, object)
@ -256,11 +259,13 @@ def handle(%{data: %{"type" => "Announce"}} = object, meta) do
Utils.add_announce_to_object(object, announced_object)
if !User.internal?(user) do
Notification.create_notifications(object)
{:ok, notifications} = Notification.create_notifications(object)
ap_streamer().stream_out(object)
end
if !User.internal?(user), do: ap_streamer().stream_out(object)
meta =
meta
|> add_notifications(notifications)
{:ok, object, meta}
end
@ -281,7 +286,11 @@ def handle(%{data: %{"type" => "EmojiReact"}} = object, meta) do
reacted_object = Object.get_by_ap_id(object.data["object"])
Utils.add_emoji_reaction_to_object(object, reacted_object)
Notification.create_notifications(object)
{:ok, notifications} = Notification.create_notifications(object)
meta =
meta
|> add_notifications(notifications)
{:ok, object, meta}
end
@ -585,10 +594,7 @@ defp delete_object(object) do
defp send_notifications(meta) do
Keyword.get(meta, :notifications, [])
|> Enum.each(fn notification ->
Streamer.stream(["user", "user:notification"], notification)
Push.send(notification)
end)
|> Notification.send()
meta
end

View file

@ -721,14 +721,18 @@ def make_listen_data(params, additional) do
#### Flag-related helpers
@spec make_flag_data(map(), map()) :: map()
def make_flag_data(%{actor: actor, context: context, content: content} = params, additional) do
def make_flag_data(
%{actor: actor, context: context, content: content} = params,
additional
) do
%{
"type" => "Flag",
"actor" => actor.ap_id,
"content" => content,
"object" => build_flag_object(params),
"context" => context,
"state" => "open"
"state" => "open",
"rules" => Map.get(params, :rules, nil)
}
|> Map.merge(additional)
end
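For illustration, the Flag data built above ends up shaped roughly like this when a report cites instance rules (all values made up; `additional` is merged on top and usually contributes addressing fields):
%{
  "type" => "Flag",
  "actor" => "https://example.com/users/alice",
  "content" => "Please take a look at this account",
  "object" => ["https://example.com/users/bob", "https://example.com/objects/1"],
  "context" => "https://example.com/contexts/abc",
  "state" => "open",
  "rules" => ["3"]
}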

View file

@ -0,0 +1,62 @@
# Pleroma: A lightweight social networking server
# Copyright © 2017-2022 Pleroma Authors <https://pleroma.social/>
# SPDX-License-Identifier: AGPL-3.0-only
defmodule Pleroma.Web.AdminAPI.RuleController do
use Pleroma.Web, :controller
alias Pleroma.Repo
alias Pleroma.Rule
alias Pleroma.Web.Plugs.OAuthScopesPlug
import Pleroma.Web.ControllerHelper,
only: [
json_response: 3
]
plug(Pleroma.Web.ApiSpec.CastAndValidate)
plug(
OAuthScopesPlug,
%{scopes: ["admin:write"]}
when action in [:create, :update, :delete]
)
plug(OAuthScopesPlug, %{scopes: ["admin:read"]} when action == :index)
action_fallback(AdminAPI.FallbackController)
defdelegate open_api_operation(action), to: Pleroma.Web.ApiSpec.Admin.RuleOperation
def index(conn, _) do
rules =
Rule.query()
|> Repo.all()
render(conn, "index.json", rules: rules)
end
def create(%{body_params: params} = conn, _) do
rule =
params
|> Rule.create()
render(conn, "show.json", rule: rule)
end
def update(%{body_params: params} = conn, %{id: id}) do
rule =
params
|> Rule.update(id)
render(conn, "show.json", rule: rule)
end
def delete(conn, %{id: id}) do
with {:ok, _} <- Rule.delete(id) do
json(conn, %{})
else
_ -> json_response(conn, :bad_request, "")
end
end
end
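For context: the controller above delegates to Pleroma.Rule, so the same operations can be sketched from iex (field values are illustrative; create/1 and update/2 return the rule struct, which is what show.json renders):
alias Pleroma.Repo
alias Pleroma.Rule

rule = Rule.create(%{text: "No spam", hint: "Includes unsolicited advertising", priority: 1})

Rule.update(%{text: "No spam or flooding"}, rule.id)

Rule.query() |> Repo.all()   # what index/2 renders

{:ok, _} = Rule.delete(rule.id)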

View file

@ -6,9 +6,11 @@ defmodule Pleroma.Web.AdminAPI.ReportView do
use Pleroma.Web, :view
alias Pleroma.HTML
alias Pleroma.Rule
alias Pleroma.User
alias Pleroma.Web.AdminAPI
alias Pleroma.Web.AdminAPI.Report
alias Pleroma.Web.AdminAPI.RuleView
alias Pleroma.Web.CommonAPI.Utils
alias Pleroma.Web.MastodonAPI.StatusView
@ -46,7 +48,8 @@ def render("show.json", %{report: report, user: user, account: account, statuses
as: :activity
}),
state: report.data["state"],
notes: render(__MODULE__, "index_notes.json", %{notes: report.report_notes})
notes: render(__MODULE__, "index_notes.json", %{notes: report.report_notes}),
rules: rules(Map.get(report.data, "rules", nil))
}
end
@ -71,4 +74,16 @@ def render("show_note.json", %{
created_at: Utils.to_masto_date(inserted_at)
}
end
defp rules(nil) do
[]
end
defp rules(rule_ids) do
rules =
rule_ids
|> Rule.get()
render(RuleView, "index.json", rules: rules)
end
end

View file

@ -0,0 +1,22 @@
# Pleroma: A lightweight social networking server
# Copyright © 2017-2022 Pleroma Authors <https://pleroma.social/>
# SPDX-License-Identifier: AGPL-3.0-only
defmodule Pleroma.Web.AdminAPI.RuleView do
use Pleroma.Web, :view
require Pleroma.Constants
def render("index.json", %{rules: rules} = _opts) do
render_many(rules, __MODULE__, "show.json")
end
def render("show.json", %{rule: rule} = _opts) do
%{
id: to_string(rule.id),
priority: rule.priority,
text: rule.text,
hint: rule.hint
}
end
end

View file

@ -97,6 +97,7 @@ def spec(opts \\ []) do
"Frontend management",
"Instance configuration",
"Instance documents",
"Instance rule managment",
"Invites",
"MediaProxy cache",
"OAuth application management",

View file

@ -11,6 +11,7 @@ defmodule Pleroma.Web.ApiSpec.AccountOperation do
alias Pleroma.Web.ApiSpec.Schemas.ActorType
alias Pleroma.Web.ApiSpec.Schemas.ApiError
alias Pleroma.Web.ApiSpec.Schemas.BooleanLike
alias Pleroma.Web.ApiSpec.Schemas.FlakeID
alias Pleroma.Web.ApiSpec.Schemas.List
alias Pleroma.Web.ApiSpec.Schemas.Status
alias Pleroma.Web.ApiSpec.Schemas.VisibilityScope
@ -513,6 +514,48 @@ def identity_proofs_operation do
}
end
def familiar_followers_operation do
%Operation{
tags: ["Retrieve account information"],
summary: "Followers that you follow",
operationId: "AccountController.familiar_followers",
description:
"Obtain a list of all accounts that follow a given account, filtered for accounts you follow.",
security: [%{"oAuth" => ["read:follows"]}],
parameters: [
Operation.parameter(
:id,
:query,
%Schema{
oneOf: [%Schema{type: :array, items: %Schema{type: :string}}, %Schema{type: :string}]
},
"Account IDs",
example: "123"
)
],
responses: %{
200 =>
Operation.response("Accounts", "application/json", %Schema{
title: "ArrayOfAccounts",
type: :array,
items: %Schema{
title: "Account",
type: :object,
properties: %{
id: FlakeID,
accounts: %Schema{
title: "ArrayOfAccounts",
type: :array,
items: Account,
example: [Account.schema().example]
}
}
}
})
}
}
end
defp create_request do
%Schema{
title: "AccountCreateRequest",

View file

@ -30,6 +30,12 @@ def index_operation do
report_state(),
"Filter by report state"
),
Operation.parameter(
:rule_id,
:query,
%Schema{type: :string},
"Filter by selected rule id"
),
Operation.parameter(
:limit,
:query,
@ -169,6 +175,17 @@ defp report do
inserted_at: %Schema{type: :string, format: :"date-time"}
}
}
},
rules: %Schema{
type: :array,
items: %Schema{
type: :object,
properties: %{
id: %Schema{type: :string},
text: %Schema{type: :string},
hint: %Schema{type: :string, nullable: true}
}
}
}
}
}

View file

@ -0,0 +1,115 @@
# Pleroma: A lightweight social networking server
# Copyright © 2017-2022 Pleroma Authors <https://pleroma.social/>
# SPDX-License-Identifier: AGPL-3.0-only
defmodule Pleroma.Web.ApiSpec.Admin.RuleOperation do
alias OpenApiSpex.Operation
alias OpenApiSpex.Schema
alias Pleroma.Web.ApiSpec.Schemas.ApiError
import Pleroma.Web.ApiSpec.Helpers
def open_api_operation(action) do
operation = String.to_existing_atom("#{action}_operation")
apply(__MODULE__, operation, [])
end
def index_operation do
%Operation{
tags: ["Instance rule managment"],
summary: "Retrieve list of instance rules",
operationId: "AdminAPI.RuleController.index",
security: [%{"oAuth" => ["admin:read"]}],
responses: %{
200 =>
Operation.response("Response", "application/json", %Schema{
type: :array,
items: rule()
}),
403 => Operation.response("Forbidden", "application/json", ApiError)
}
}
end
def create_operation do
%Operation{
tags: ["Instance rule managment"],
summary: "Create new rule",
operationId: "AdminAPI.RuleController.create",
security: [%{"oAuth" => ["admin:write"]}],
parameters: admin_api_params(),
requestBody: request_body("Parameters", create_request(), required: true),
responses: %{
200 => Operation.response("Response", "application/json", rule()),
400 => Operation.response("Bad Request", "application/json", ApiError),
403 => Operation.response("Forbidden", "application/json", ApiError)
}
}
end
def update_operation do
%Operation{
tags: ["Instance rule managment"],
summary: "Modify existing rule",
operationId: "AdminAPI.RuleController.update",
security: [%{"oAuth" => ["admin:write"]}],
parameters: [Operation.parameter(:id, :path, :string, "Rule ID")],
requestBody: request_body("Parameters", update_request(), required: true),
responses: %{
200 => Operation.response("Response", "application/json", rule()),
400 => Operation.response("Bad Request", "application/json", ApiError),
403 => Operation.response("Forbidden", "application/json", ApiError)
}
}
end
def delete_operation do
%Operation{
tags: ["Instance rule managment"],
summary: "Delete rule",
operationId: "AdminAPI.RuleController.delete",
parameters: [Operation.parameter(:id, :path, :string, "Rule ID")],
security: [%{"oAuth" => ["admin:write"]}],
responses: %{
200 => empty_object_response(),
404 => Operation.response("Not Found", "application/json", ApiError),
403 => Operation.response("Forbidden", "application/json", ApiError)
}
}
end
defp create_request do
%Schema{
type: :object,
required: [:text],
properties: %{
priority: %Schema{type: :integer},
text: %Schema{type: :string},
hint: %Schema{type: :string}
}
}
end
defp update_request do
%Schema{
type: :object,
properties: %{
priority: %Schema{type: :integer},
text: %Schema{type: :string},
hint: %Schema{type: :string}
}
}
end
defp rule do
%Schema{
type: :object,
properties: %{
id: %Schema{type: :string},
priority: %Schema{type: :integer},
text: %Schema{type: :string},
hint: %Schema{type: :string, nullable: true}
}
}
end
end

View file

@ -46,10 +46,30 @@ def peers_operation do
}
end
def rules_operation do
%Operation{
tags: ["Instance misc"],
summary: "Retrieve list of instance rules",
operationId: "InstanceController.rules",
responses: %{
200 => Operation.response("Array of rules", "application/json", array_of_rules())
}
}
end
defp instance do
%Schema{
type: :object,
properties: %{
accounts: %Schema{
type: :object,
properties: %{
max_featured_tags: %Schema{
type: :integer,
description: "The maximum number of featured tags allowed for each account."
}
}
},
uri: %Schema{type: :string, description: "The domain name of the instance"},
title: %Schema{type: :string, description: "The title of the website"},
description: %Schema{
@ -172,7 +192,8 @@ defp instance do
"urls" => %{
"streaming_api" => "wss://lain.com"
},
"version" => "2.7.2 (compatible; Pleroma 2.0.50-536-g25eec6d7-develop)"
"version" => "2.7.2 (compatible; Pleroma 2.0.50-536-g25eec6d7-develop)",
"rules" => array_of_rules()
}
}
end
@ -272,6 +293,19 @@ defp instance2 do
type: :object,
description: "Instance configuration",
properties: %{
accounts: %Schema{
type: :object,
properties: %{
max_featured_tags: %Schema{
type: :integer,
description: "The maximum number of featured tags allowed for each account."
},
max_pinned_statuses: %Schema{
type: :integer,
description: "The maximum number of pinned statuses for each account."
}
}
},
urls: %Schema{
type: :object,
properties: %{
@ -285,6 +319,11 @@ defp instance2 do
type: :object,
description: "A map with poll limits for local statuses",
properties: %{
characters_reserved_per_url: %Schema{
type: :integer,
description:
"Each URL in a status will be assumed to be exactly this many characters."
},
max_characters: %Schema{
type: :integer,
description: "Posts character limit (CW/Subject included in the counter)"
@ -344,4 +383,18 @@ defp array_of_domains do
example: ["pleroma.site", "lain.com", "bikeshed.party"]
}
end
defp array_of_rules do
%Schema{
type: :array,
items: %Schema{
type: :object,
properties: %{
id: %Schema{type: :string},
text: %Schema{type: :string},
hint: %Schema{type: :string}
}
}
}
end
end

View file

@ -202,7 +202,8 @@ defp notification_type do
"pleroma:report",
"move",
"follow_request",
"poll"
"poll",
"status"
],
description: """
The type of event that resulted in the notification.
@ -216,6 +217,7 @@ defp notification_type do
- `pleroma:emoji_reaction` - Someone reacted with emoji to your status
- `pleroma:chat_mention` - Someone mentioned you in a chat message
- `pleroma:report` - Someone was reported
- `status` - Someone you are subscribed to created a status
"""
}
end

View file

@ -5,7 +5,6 @@
defmodule Pleroma.Web.ApiSpec.PleromaNotificationOperation do
alias OpenApiSpex.Operation
alias OpenApiSpex.Schema
alias Pleroma.Web.ApiSpec.NotificationOperation
alias Pleroma.Web.ApiSpec.Schemas.ApiError
import Pleroma.Web.ApiSpec.Helpers
@ -35,12 +34,7 @@ def mark_as_read_operation do
Operation.response(
"A Notification or array of Notifications",
"application/json",
%Schema{
anyOf: [
%Schema{type: :array, items: NotificationOperation.notification()},
NotificationOperation.notification()
]
}
%Schema{type: :string}
),
400 => Operation.response("Bad Request", "application/json", ApiError)
}

View file

@ -53,6 +53,12 @@ defp create_request do
default: false,
description:
"If the account is remote, should the report be forwarded to the remote admin?"
},
rule_ids: %Schema{
type: :array,
nullable: true,
items: %Schema{type: :string},
description: "Array of rules"
}
},
required: [:account_id],
@ -60,7 +66,8 @@ defp create_request do
"account_id" => "123",
"status_ids" => ["1337"],
"comment" => "bad status!",
"forward" => "false"
"forward" => "false",
"rule_ids" => ["3"]
}
}
end

View file

@ -50,7 +50,11 @@ defmodule Pleroma.Web.ApiSpec.Schemas.Attachment do
pleroma: %Schema{
type: :object,
properties: %{
mime_type: %Schema{type: :string, description: "mime type of the attachment"}
mime_type: %Schema{type: :string, description: "mime type of the attachment"},
name: %Schema{
type: :string,
description: "Name of the attachment, typically the filename"
}
}
}
},

View file

@ -8,6 +8,7 @@ defmodule Pleroma.Web.CommonAPI do
alias Pleroma.Formatter
alias Pleroma.ModerationLog
alias Pleroma.Object
alias Pleroma.Rule
alias Pleroma.ThreadMute
alias Pleroma.User
alias Pleroma.UserRelationship
@ -568,14 +569,16 @@ def thread_muted?(_, _), do: false
def report(user, data) do
with {:ok, account} <- get_reported_account(data.account_id),
{:ok, {content_html, _, _}} <- make_report_content_html(data[:comment]),
{:ok, statuses} <- get_report_statuses(account, data) do
{:ok, statuses} <- get_report_statuses(account, data),
rules <- get_report_rules(Map.get(data, :rule_ids, nil)) do
ActivityPub.flag(%{
context: Utils.generate_context_id(),
actor: user,
account: account,
statuses: statuses,
content: content_html,
forward: Map.get(data, :forward, false)
forward: Map.get(data, :forward, false),
rules: rules
})
end
end
@ -587,6 +590,15 @@ defp get_reported_account(account_id) do
end
end
defp get_report_rules(nil) do
nil
end
defp get_report_rules(rule_ids) do
rule_ids
|> Enum.filter(&Rule.exists?/1)
end
def update_report_state(activity_ids, state) when is_list(activity_ids) do
case Utils.update_report_state(activity_ids, state) do
:ok -> {:ok, activity_ids}
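A hypothetical call showing how rule_ids flow into the Flag activity (ids are made up; ids that do not match an existing rule are dropped by get_report_rules/1 above; `reporter`, `account` and `status` stand in for previously fetched records):
{:ok, _activity} =
  Pleroma.Web.CommonAPI.report(reporter, %{
    account_id: account.id,
    status_ids: [status.id],
    comment: "bad status!",
    forward: false,
    rule_ids: ["3"]
  })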

View file

@ -129,8 +129,22 @@ defp attachments(%{params: params} = draft) do
defp in_reply_to(%{params: %{in_reply_to_status_id: ""}} = draft), do: draft
defp in_reply_to(%{params: %{in_reply_to_status_id: id}} = draft) when is_binary(id) do
%__MODULE__{draft | in_reply_to: Activity.get_by_id(id)}
defp in_reply_to(%{params: %{in_reply_to_status_id: :deleted}} = draft) do
add_error(draft, dgettext("errors", "Cannot reply to a deleted status"))
end
defp in_reply_to(%{params: %{in_reply_to_status_id: id} = params} = draft) when is_binary(id) do
activity = Activity.get_by_id(id)
params =
if is_nil(activity) do
# Deleted activities are returned as nil
Map.put(params, :in_reply_to_status_id, :deleted)
else
Map.put(params, :in_reply_to_status_id, activity)
end
in_reply_to(%{draft | params: params})
end
defp in_reply_to(%{params: %{in_reply_to_status_id: %Activity{} = in_reply_to}} = draft) do

View file

@ -38,6 +38,8 @@ defmodule Pleroma.Web.Endpoint do
plug(Plug.Telemetry, event_prefix: [:phoenix, :endpoint])
plug(Pleroma.Web.Plugs.LoggerMetadataPath)
plug(Pleroma.Web.Plugs.SetLocalePlug)
plug(CORSPlug)
plug(Pleroma.Web.Plugs.HTTPSecurityPlug)

View file

@ -44,7 +44,7 @@ def incoming_ap_doc(%{params: params, req_headers: req_headers}) do
end
def incoming_ap_doc(%{"type" => "Delete"} = params) do
ReceiverWorker.enqueue("incoming_ap_doc", %{"params" => params}, priority: 3)
ReceiverWorker.enqueue("incoming_ap_doc", %{"params" => params}, priority: 3, queue: :slow)
end
def incoming_ap_doc(params) do

View file

@ -72,7 +72,10 @@ defmodule Pleroma.Web.MastodonAPI.AccountController do
%{scopes: ["follow", "write:blocks"]} when action in [:block, :unblock]
)
plug(OAuthScopesPlug, %{scopes: ["read:follows"]} when action == :relationships)
plug(
OAuthScopesPlug,
%{scopes: ["read:follows"]} when action in [:relationships, :familiar_followers]
)
plug(
OAuthScopesPlug,
@ -629,6 +632,35 @@ def endorsements(%{assigns: %{user: user}} = conn, params) do
)
end
@doc "GET /api/v1/accounts/familiar_followers"
def familiar_followers(
%{assigns: %{user: user}, private: %{open_api_spex: %{params: %{id: id}}}} = conn,
_id
) do
users =
User.get_all_by_ids(List.wrap(id))
|> Enum.map(&%{id: &1.id, accounts: get_familiar_followers(&1, user)})
conn
|> render("familiar_followers.json",
for: user,
users: users,
as: :user
)
end
defp get_familiar_followers(%{id: id} = user, %{id: id}) do
User.get_familiar_followers(user, user)
end
defp get_familiar_followers(%{hide_followers: true}, _current_user) do
[]
end
defp get_familiar_followers(user, current_user) do
User.get_familiar_followers(user, current_user)
end
@doc "GET /api/v1/identity_proofs"
def identity_proofs(conn, params), do: MastodonAPIController.empty_array(conn, params)
end
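Not part of this diff: a rough iex sketch of what the endpoint does, built only from functions referenced in the controller above ("alice" and "bob" are hypothetical local accounts; the output is illustrative):
alias Pleroma.User
alias Pleroma.Web.MastodonAPI.AccountView

target = User.get_cached_by_nickname("alice")   # the account being queried
viewer = User.get_cached_by_nickname("bob")     # the authenticated user

users = [%{id: target.id, accounts: User.get_familiar_followers(target, viewer)}]

AccountView.render("familiar_followers.json", %{users: users, for: viewer, as: :user})
# => [%{id: "9tKi...", accounts: [%{id: "9tLa...", acct: "carol", ...}]}]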

View file

@ -25,4 +25,9 @@ def show2(conn, _params) do
def peers(conn, _params) do
json(conn, Pleroma.Stats.get_peers())
end
@doc "GET /api/v1/instance/rules"
def rules(conn, _params) do
render(conn, "rules.json")
end
end
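The response for GET /api/v1/instance/rules looks roughly like this (ids and texts are made up; a nil hint is rendered as an empty string by "rule.json"):
[
  %{id: "1", text: "No spam", hint: ""},
  %{id: "2", text: "Mark NSFW media as sensitive", hint: "Applies to all uploads"}
]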

View file

@ -34,6 +34,7 @@ defmodule Pleroma.Web.MastodonAPI.NotificationController do
pleroma:emoji_reaction
poll
update
status
}
# GET /api/v1/notifications

View file

@ -193,6 +193,25 @@ def render("relationships.json", %{user: user, targets: targets} = opts) do
render_many(targets, AccountView, "relationship.json", render_opts)
end
def render("familiar_followers.json", %{users: users} = opts) do
opts =
opts
|> Map.merge(%{as: :user})
|> Map.delete(:users)
users
|> render_many(AccountView, "familiar_followers.json", opts)
end
def render("familiar_followers.json", %{user: %{id: id, accounts: accounts}} = opts) do
accounts =
accounts
|> render_many(AccountView, "show.json", opts)
|> Enum.filter(&Enum.any?/1)
%{id: id, accounts: accounts}
end
defp do_render("show.json", %{user: user} = opts) do
self = opts[:for] == user

View file

@ -76,12 +76,26 @@ def render("show2.json", _) do
})
end
def render("rules.json", _) do
Pleroma.Rule.query()
|> Pleroma.Repo.all()
|> render_many(__MODULE__, "rule.json", as: :rule)
end
def render("rule.json", %{rule: rule}) do
%{
id: to_string(rule.id),
text: rule.text,
hint: rule.hint || ""
}
end
defp common_information(instance) do
%{
title: Keyword.get(instance, :name),
version: "#{@mastodon_api_level} (compatible; #{Pleroma.Application.named_version()})",
languages: Keyword.get(instance, :languages, ["en"]),
rules: []
rules: render(__MODULE__, "rules.json"),
title: Keyword.get(instance, :name),
version: "#{@mastodon_api_level} (compatible; #{Pleroma.Application.named_version()})"
}
end
@ -138,6 +152,7 @@ def features do
def federation do
quarantined = Config.get([:instance, :quarantined_instances], [])
rejected = Config.get([:instance, :rejected_instances], [])
if Config.get([:mrf, :transparency]) do
{:ok, data} = MRF.describe()
@ -157,6 +172,12 @@ def federation do
|> Enum.map(fn {instance, reason} -> {instance, %{"reason" => reason}} end)
|> Map.new()
})
|> Map.put(
:rejected_instances,
rejected
|> Enum.map(fn {instance, reason} -> {instance, %{"reason" => reason}} end)
|> Map.new()
)
else
%{}
end
@ -213,6 +234,8 @@ defp configuration do
defp configuration2 do
configuration()
|> put_in([:accounts, :max_pinned_statuses], Config.get([:instance, :max_pinned_statuses], 0))
|> put_in([:statuses, :characters_reserved_per_url], 0)
|> Map.merge(%{
urls: %{
streaming: Pleroma.Web.Endpoint.websocket_url(),

View file

@ -108,6 +108,9 @@ def render(
"mention" ->
put_status(response, activity, reading_user, status_render_opts)
"status" ->
put_status(response, activity, reading_user, status_render_opts)
"favourite" ->
put_status(response, parent_activity_fn.(), reading_user, status_render_opts)

View file

@ -624,6 +624,19 @@ def render("attachment.json", %{attachment: attachment}) do
to_string(attachment["id"] || hash_id)
end
description =
if attachment["summary"] do
HTML.strip_tags(attachment["summary"])
else
attachment["name"]
end
name = if attachment["summary"], do: attachment["name"]
pleroma =
%{mime_type: media_type}
|> Maps.put_if_present(:name, name)
%{
id: attachment_id,
url: href,
@ -631,8 +644,8 @@ def render("attachment.json", %{attachment: attachment}) do
preview_url: href_preview,
text_url: href,
type: type,
description: attachment["name"],
pleroma: %{mime_type: media_type},
description: description,
pleroma: pleroma,
blurhash: attachment["blurhash"]
}
|> Maps.put_if_present(:meta, meta)
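For illustration (values made up, attachment layout simplified): an attachment carrying both a summary (alt text) and a name (filename) now renders with the summary as the Mastodon-API description and the filename exposed under pleroma.name:
attachment = %{
  "type" => "Document",
  "name" => "cat.webp",
  "summary" => "A cat sleeping on a windowsill",
  "url" => [%{"href" => "https://example.com/media/cat.webp", "mediaType" => "image/webp"}],
  "blurhash" => nil
}

Pleroma.Web.MastodonAPI.StatusView.render("attachment.json", %{attachment: attachment})
# => roughly %{
#      description: "A cat sleeping on a windowsill",
#      pleroma: %{mime_type: "image/webp", name: "cat.webp"},
#      ...
#    }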

Some files were not shown because too many files have changed in this diff.