Merge remote-tracking branch 'origin/develop' into uguu-uwu-notices-bulge

lain 2020-02-11 13:58:36 +01:00
commit 24c526a0b1
50 changed files with 733 additions and 436 deletions

View File

@ -121,7 +121,9 @@ The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/).
- MRF: `Delete` activities being exempt from MRF policies
- OTP releases: Not being able to configure OAuth expired token cleanup interval
- OTP releases: Not being able to configure HTML sanitization policy
- OTP releases: Not being able to change upload limit (again)
- Favorites timeline now ordered by favorite date instead of post date
- Support for cancellation of a follow request
<details>
<summary>API Changes</summary>

View File

@ -51,20 +51,6 @@
telemetry_event: [Pleroma.Repo.Instrumenter],
migration_lock: nil
scheduled_jobs =
with digest_config <- Application.get_env(:pleroma, :email_notifications)[:digest],
true <- digest_config[:active] do
[{digest_config[:schedule], {Pleroma.Daemons.DigestEmailDaemon, :perform, []}}]
else
_ -> []
end
config :pleroma, Pleroma.Scheduler,
global: true,
overlap: true,
timezone: :utc,
jobs: scheduled_jobs
config :pleroma, Pleroma.Captcha,
enabled: true,
seconds_valid: 300,
@ -495,6 +481,12 @@
scheduled_activities: 10,
background: 5,
attachments_cleanup: 5
],
crontab: [
{"0 0 * * *", Pleroma.Workers.Cron.ClearOauthTokenWorker},
{"0 * * * *", Pleroma.Workers.Cron.StatsWorker},
{"* * * * *", Pleroma.Workers.Cron.PurgeExpiredActivitiesWorker},
{"0 0 * * 0", Pleroma.Workers.Cron.DigestEmailsWorker}
]
config :pleroma, :workers,
@ -578,7 +570,6 @@
config :pleroma, :email_notifications,
digest: %{
active: false,
schedule: "0 0 * * 0",
interval: 7,
inactivity_threshold: 7
}
@ -586,8 +577,7 @@
config :pleroma, :oauth2,
token_expires_in: 600,
issue_new_refresh_token: true,
clean_expired_tokens: false, clean_expired_tokens: false
clean_expired_tokens_interval: 86_400_000
config :pleroma, :database, rum_enabled: false
@ -622,7 +612,6 @@
config :pleroma, configurable_from_database: false
config :swarm, node_blacklist: [~r/myhtml_.*$/]
# Import environment specific config. This must remain at the bottom
# of this file so it overrides the configuration defined above.
import_config "#{Mix.env()}.exs"

View File

@ -2519,13 +2519,6 @@
key: :clean_expired_tokens,
type: :boolean,
description: "Enable a background job to clean expired oauth tokens. Default: `false`."
},
%{
key: :clean_expired_tokens_interval,
type: :integer,
description:
"Interval to run the job to clean expired tokens. Default: 86_400_000 (24 hours).",
suggestions: [86_400_000]
}
]
},

View File

@ -66,11 +66,8 @@
config :pleroma, Oban,
queues: false,
prune: :disabled prune: :disabled,
crontab: false
config :pleroma, Pleroma.Scheduler,
jobs: [],
global: false
config :pleroma, Pleroma.ScheduledActivity,
daily_user_limit: 2,

View File

@ -88,6 +88,9 @@ Behavior has changed:
- `/api/v1/accounts/search`: Does not require authentication
## Search (global)
Unlisted posts are available in search results; they are considered public posts that shouldn't be shown in the local/federated timelines.
## Notifications

View File

@ -1,17 +1,35 @@
# Backup/Restore/Move/Remove your instance
## Backup
1. Stop the Pleroma service.
2. Go to the working directory of Pleroma (default is `/opt/pleroma`)
3. Run `sudo -Hu postgres pg_dump -d <pleroma_db> --format=custom -f </path/to/backup_location/pleroma.pgdump>` (make sure the postgres user has write access to the destination file)
4. Copy `pleroma.pgdump`, `config/prod.secret.exs` and the `uploads` folder to your backup destination. If you have other modifications, copy those changes too.
5. Restart the Pleroma service.
## Restore/Move
1. Optionally reinstall Pleroma (either on the same server or on another server if you want to move servers). Try to use the same database name.
2. Stop the Pleroma service.
3. Go to the working directory of Pleroma (default is `/opt/pleroma`)
4. Copy the above-mentioned files back to their original position.
5. Drop the existing database and recreate an empty one `sudo -Hu postgres psql -c 'DROP DATABASE <pleroma_db>;';` `sudo -Hu postgres psql -c 'CREATE DATABASE <pleroma_db>;';`
6. Run `sudo -Hu postgres pg_restore -d <pleroma_db> -v -1 </path/to/backup_location/pleroma.pgdump>`
7. If you installed a newer Pleroma version, you should run `mix ecto.migrate`[^1]. This task performs database migrations, if there were any.
8. Restart the Pleroma service.
[^1]: Prefix with `MIX_ENV=prod` to run it using the production config file.
## Remove
1. Optionally you can remove the users of your instance. This will trigger delete requests for their accounts and posts. Note that this is 'best effort' and doesn't mean that all traces of your instance will be gone from the fediverse.
* You can do this from the admin-FE where you can select all local users and delete the accounts using the *Moderate multiple users* dropdown.
* You can also list local users and delete them individually using the CLI tasks for [Managing users](./CLI_tasks/user.md).
2. Stop the Pleroma service `systemctl stop pleroma`
3. Disable pleroma from systemd `systemctl disable pleroma`
4. Remove the files and folders you created during installation (see installation guide). This includes the pleroma, nginx and systemd files and folders.
5. Reload nginx now that the configuration is removed `systemctl reload nginx`
6. Remove the database and database user `sudo -Hu postgres psql -c 'DROP DATABASE <pleroma_db>;';` `sudo -Hu postgres psql -c 'DROP USER <pleroma_db>;';`
7. Remove the system user `userdel pleroma`
8. Remove the dependencies that you don't need anymore (see installation guide). Make sure you don't remove packages that are still needed for other software that you have running!

View File

@ -37,6 +37,11 @@ Feel free to contact us to be added to this list!
- Platforms: Android
- Features: Streaming Ready, Moderation, Text Formatting
### Kyclos
- Source Code: <https://git.pleroma.social/pleroma/harbour-kyclos>
- Platforms: SailfishOS
- Features: No Streaming
### Nekonium
- Homepage: [F-Droid Repository](https://repo.gdgd.jp.net/), [Google Play](https://play.google.com/store/apps/details?id=com.apps.nekonium), [Amazon](https://www.amazon.co.jp/dp/B076FXPRBC/)
- Source: <https://gogs.gdgd.jp.net/lin/nekonium>

View File

@ -513,6 +513,7 @@ Configuration options described in [Oban readme](https://github.com/sorentwo/oba
* `verbose` - logs verbosity
* `prune` - non-retryable jobs [pruning settings](https://github.com/sorentwo/oban#pruning) (`:disabled` / `{:maxlen, value}` / `{:maxage, value}`)
* `queues` - job queues (see below)
* `crontab` - periodic jobs, see [`Oban.Cron`](#obancron)
Pleroma has the following queues:
@ -524,6 +525,12 @@ Pleroma has the following queues:
* `web_push` - Web push notifications
* `scheduled_activities` - Scheduled activities, see [`Pleroma.ScheduledActivity`](#pleromascheduledactivity)
#### Oban.Cron
Pleroma has these periodic job workers:
`Pleroma.Workers.Cron.ClearOauthTokenWorker` - a job worker that cleans up expired OAuth tokens.
Example:
```elixir
@ -534,6 +541,9 @@ config :pleroma, Oban,
queues: [
federator_incoming: 50,
federator_outgoing: 50
],
crontab: [
{"0 0 * * *", Pleroma.Workers.Cron.ClearOauthTokenWorker}
]
```
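The same `crontab` key can also switch periodic jobs off entirely; the test configuration in this commit sets it to `false`. A minimal sketch:

```elixir
# Disable all Oban periodic (cron) jobs, e.g. for tests or a minimal setup.
config :pleroma, Oban,
  crontab: false
```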
@ -816,8 +826,7 @@ Configure OAuth 2 provider capabilities:
* `token_expires_in` - The lifetime in seconds of the access token.
* `issue_new_refresh_token` - Keeps the old refresh token or generates a new refresh token when obtaining an access token.
* `clean_expired_tokens` - Enable a background job to clean expired OAuth tokens. Defaults to `false`. The cleanup interval is configured through the [`Oban.Cron`](#obancron) periodic jobs (see the example below).
* `clean_expired_tokens_interval` - Interval to run the job to clean expired tokens. Defaults to `86_400_000` (24 hours).
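For reference, a minimal sketch of the new setup, assuming the default schedule added to `config/config.exs` in this commit is kept: the boolean turns the job on, while the crontab entry decides how often `Pleroma.Workers.Cron.ClearOauthTokenWorker` runs.

```elixir
# Turn the cleanup job on; scheduling is handled by the Oban crontab below.
config :pleroma, :oauth2,
  token_expires_in: 600,
  issue_new_refresh_token: true,
  clean_expired_tokens: true

config :pleroma, Oban,
  crontab: [
    # Default entry shipped in config.exs: run the worker daily at midnight.
    {"0 0 * * *", Pleroma.Workers.Cron.ClearOauthTokenWorker}
  ]
```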
## Link parsing

View File

@ -1,5 +1,6 @@
defmodule Mix.Tasks.Pleroma.Email do
use Mix.Task
import Mix.Pleroma
@shortdoc "Simple Email test" @shortdoc "Simple Email test"
@moduledoc File.read!("docs/administration/CLI_tasks/email.md") @moduledoc File.read!("docs/administration/CLI_tasks/email.md")
@ -18,8 +19,6 @@ def run(["test" | args]) do
email = Pleroma.Emails.AdminEmail.test_email(options[:to])
{:ok, _} = Pleroma.Emails.Mailer.deliver(email)
Mix.shell().info( shell_info("Test email has been sent to #{inspect(email.to)} from #{inspect(email.from)}")
"Test email has been sent to #{inspect(email.to)} from #{inspect(email.from)}"
)
end
end

View File

@ -42,12 +42,9 @@ def start(_type, _args) do
children =
[
Pleroma.Repo,
Pleroma.Scheduler,
Pleroma.Config.TransferTask,
Pleroma.Emoji,
Pleroma.Captcha,
Pleroma.Daemons.ScheduledActivityDaemon,
Pleroma.Daemons.ActivityExpirationDaemon,
Pleroma.Plugs.RateLimiter.Supervisor
] ++
cachex_children() ++
@ -58,7 +55,6 @@ def start(_type, _args) do
{Oban, Pleroma.Config.get(Oban)}
] ++
task_children(@env) ++
oauth_cleanup_child(oauth_cleanup_enabled?()) ++
streamer_child(@env) ++
chat_child(@env, chat_enabled?()) ++
[
@ -160,20 +156,12 @@ defp build_cachex(type, opts),
defp chat_enabled?, do: Pleroma.Config.get([:chat, :enabled])
defp oauth_cleanup_enabled?,
do: Pleroma.Config.get([:oauth2, :clean_expired_tokens], false)
defp streamer_child(:test), do: []
defp streamer_child(_) do
[Pleroma.Web.Streamer.supervisor()]
end
defp oauth_cleanup_child(true),
do: [Pleroma.Web.OAuth.Token.CleanWorker]
defp oauth_cleanup_child(_), do: []
defp chat_child(_env, true) do
[Pleroma.Web.ChatChannel.ChatChannelState]
end

View File

@ -1,66 +0,0 @@
# Pleroma: A lightweight social networking server
# Copyright © 2019 Pleroma Authors <https://pleroma.social/>
# SPDX-License-Identifier: AGPL-3.0-only
defmodule Pleroma.Daemons.ActivityExpirationDaemon do
alias Pleroma.Activity
alias Pleroma.ActivityExpiration
alias Pleroma.Config
alias Pleroma.Repo
alias Pleroma.User
alias Pleroma.Web.CommonAPI
require Logger
use GenServer
import Ecto.Query
@schedule_interval :timer.minutes(1)
def start_link(_) do
GenServer.start_link(__MODULE__, nil)
end
@impl true
def init(_) do
if Config.get([ActivityExpiration, :enabled]) do
schedule_next()
{:ok, nil}
else
:ignore
end
end
def perform(:execute, expiration_id) do
try do
expiration =
ActivityExpiration
|> where([e], e.id == ^expiration_id)
|> Repo.one!()
activity = Activity.get_by_id_with_object(expiration.activity_id)
user = User.get_by_ap_id(activity.object.data["actor"])
CommonAPI.delete(activity.id, user)
rescue
error ->
Logger.error("#{__MODULE__} Couldn't delete expired activity: #{inspect(error)}")
end
end
@impl true
def handle_info(:perform, state) do
ActivityExpiration.due_expirations(@schedule_interval)
|> Enum.each(fn expiration ->
Pleroma.Workers.ActivityExpirationWorker.enqueue(
"activity_expiration",
%{"activity_expiration_id" => expiration.id}
)
end)
schedule_next()
{:noreply, state}
end
defp schedule_next do
Process.send_after(self(), :perform, @schedule_interval)
end
end

View File

@ -1,42 +0,0 @@
# Pleroma: A lightweight social networking server
# Copyright © 2017-2019 Pleroma Authors <https://pleroma.social/>
# SPDX-License-Identifier: AGPL-3.0-only
defmodule Pleroma.Daemons.DigestEmailDaemon do
alias Pleroma.Repo
alias Pleroma.Workers.DigestEmailsWorker
import Ecto.Query
def perform do
config = Pleroma.Config.get([:email_notifications, :digest])
negative_interval = -Map.fetch!(config, :interval)
inactivity_threshold = Map.fetch!(config, :inactivity_threshold)
inactive_users_query = Pleroma.User.list_inactive_users_query(inactivity_threshold)
now = NaiveDateTime.truncate(NaiveDateTime.utc_now(), :second)
from(u in inactive_users_query,
where: fragment(~s(? ->'digest' @> 'true'), u.email_notifications),
where: u.last_digest_emailed_at < datetime_add(^now, ^negative_interval, "day"),
select: u
)
|> Repo.all()
|> Enum.each(fn user ->
DigestEmailsWorker.enqueue("digest_email", %{"user_id" => user.id})
end)
end
@doc """
Send digest email to the given user.
Updates `last_digest_emailed_at` field for the user and returns the updated user.
"""
@spec perform(Pleroma.User.t()) :: Pleroma.User.t()
def perform(user) do
with %Swoosh.Email{} = email <- Pleroma.Emails.UserEmail.digest_email(user) do
Pleroma.Emails.Mailer.deliver_async(email)
end
Pleroma.User.touch_last_digest_emailed_at(user)
end
end

View File

@ -1,62 +0,0 @@
# Pleroma: A lightweight social networking server
# Copyright © 2017-2019 Pleroma Authors <https://pleroma.social/>
# SPDX-License-Identifier: AGPL-3.0-only
defmodule Pleroma.Daemons.ScheduledActivityDaemon do
@moduledoc """
Sends scheduled activities to the job queue.
"""
alias Pleroma.Config
alias Pleroma.ScheduledActivity
alias Pleroma.User
alias Pleroma.Web.CommonAPI
use GenServer
require Logger
@schedule_interval :timer.minutes(1)
def start_link(_) do
GenServer.start_link(__MODULE__, nil)
end
def init(_) do
if Config.get([ScheduledActivity, :enabled]) do
schedule_next()
{:ok, nil}
else
:ignore
end
end
def perform(:execute, scheduled_activity_id) do
try do
{:ok, scheduled_activity} = ScheduledActivity.delete(scheduled_activity_id)
%User{} = user = User.get_cached_by_id(scheduled_activity.user_id)
{:ok, _result} = CommonAPI.post(user, scheduled_activity.params)
rescue
error ->
Logger.error(
"#{__MODULE__} Couldn't create a status from the scheduled activity: #{inspect(error)}"
)
end
end
def handle_info(:perform, state) do
ScheduledActivity.due_activities(@schedule_interval)
|> Enum.each(fn scheduled_activity ->
Pleroma.Workers.ScheduledActivityWorker.enqueue(
"execute",
%{"activity_id" => scheduled_activity.id}
)
end)
schedule_next()
{:noreply, state}
end
defp schedule_next do
Process.send_after(self(), :perform, @schedule_interval)
end
end

View File

@ -13,7 +13,7 @@ def list_modules_in_dir(dir, start) do
|> Enum.filter(&String.ends_with?(&1, ".ex"))
|> Enum.map(fn filename ->
module = filename |> String.trim_trailing(".ex") |> Macro.camelize()
String.to_existing_atom(start <> module) String.to_atom(start <> module)
end)
end
end

View File

@ -58,8 +58,8 @@ def follow(%User{} = follower, %User{} = following, state \\ "accept") do
def unfollow(%User{} = follower, %User{} = following) do
case get(follower, following) do
nil -> {:ok, nil}
%__MODULE__{} = following_relationship -> Repo.delete(following_relationship)
_ -> {:ok, nil}
end
end

View File

@ -1,21 +0,0 @@
# Pleroma: A lightweight social networking server
# Copyright © 2017-2019 Pleroma Authors <https://pleroma.social/>
# SPDX-License-Identifier: AGPL-3.0-only
defmodule Pleroma.Plugs.Parsers do
@moduledoc "Initializes Plug.Parsers with upload limit set at boot time"
@behaviour Plug
def init(_opts) do
Plug.Parsers.init(
parsers: [:urlencoded, :multipart, :json],
pass: ["*/*"],
json_decoder: Jason,
length: Pleroma.Config.get([:instance, :upload_limit]),
body_reader: {Pleroma.Web.Plugs.DigestPlug, :read_body, []}
)
end
defdelegate call(conn, opts), to: Plug.Parsers
end

View File

@ -5,15 +5,19 @@
defmodule Pleroma.ScheduledActivity do
use Ecto.Schema
alias Ecto.Multi
alias Pleroma.Config
alias Pleroma.Repo
alias Pleroma.ScheduledActivity
alias Pleroma.User
alias Pleroma.Web.CommonAPI.Utils
alias Pleroma.Workers.ScheduledActivityWorker
import Ecto.Query
import Ecto.Changeset
@type t :: %__MODULE__{}
@min_offset :timer.minutes(5)
schema "scheduled_activities" do
@ -105,16 +109,32 @@ def far_enough?(scheduled_at) do
end
def new(%User{} = user, attrs) do
%ScheduledActivity{user_id: user.id} changeset(%ScheduledActivity{user_id: user.id}, attrs)
|> changeset(attrs)
end
@doc """
Creates ScheduledActivity and add to queue to perform at scheduled_at date
"""
@spec create(User.t(), map()) :: {:ok, ScheduledActivity.t()} | {:error, Ecto.Changeset.t()}
def create(%User{} = user, attrs) do
user Multi.new()
|> new(attrs) |> Multi.insert(:scheduled_activity, new(user, attrs))
|> Repo.insert() |> maybe_add_jobs(Config.get([ScheduledActivity, :enabled]))
|> Repo.transaction()
|> transaction_response
end
defp maybe_add_jobs(multi, true) do
multi
|> Multi.run(:scheduled_activity_job, fn _repo, %{scheduled_activity: activity} ->
%{activity_id: activity.id}
|> ScheduledActivityWorker.new(scheduled_at: activity.scheduled_at)
|> Oban.insert()
end)
end
defp maybe_add_jobs(multi, _), do: multi
def get(%User{} = user, scheduled_activity_id) do
ScheduledActivity
|> where(user_id: ^user.id)
@ -122,25 +142,43 @@ def get(%User{} = user, scheduled_activity_id) do
|> Repo.one()
end
def update(%ScheduledActivity{} = scheduled_activity, attrs) do @spec update(ScheduledActivity.t(), map()) ::
scheduled_activity {:ok, ScheduledActivity.t()} | {:error, Ecto.Changeset.t()}
|> update_changeset(attrs) def update(%ScheduledActivity{id: id} = scheduled_activity, attrs) do
|> Repo.update() with {:error, %Ecto.Changeset{valid?: true} = changeset} <-
{:error, update_changeset(scheduled_activity, attrs)} do
Multi.new()
|> Multi.update(:scheduled_activity, changeset)
|> Multi.update_all(:scheduled_job, job_query(id),
set: [scheduled_at: get_field(changeset, :scheduled_at)]
)
|> Repo.transaction()
|> transaction_response
end
end
def delete(%ScheduledActivity{} = scheduled_activity) do @doc "Deletes a ScheduledActivity and linked jobs."
scheduled_activity @spec delete(ScheduledActivity.t() | binary() | integer) ::
|> Repo.delete() {:ok, ScheduledActivity.t()} | {:error, Ecto.Changeset.t()}
def delete(%ScheduledActivity{id: id} = scheduled_activity) do
Multi.new()
|> Multi.delete(:scheduled_activity, scheduled_activity, stale_error_field: :id)
|> Multi.delete_all(:jobs, job_query(id))
|> Repo.transaction()
|> transaction_response
end
def delete(id) when is_binary(id) or is_integer(id) do
ScheduledActivity delete(%__MODULE__{id: id})
|> where(id: ^id) end
|> select([sa], sa)
|> Repo.delete_all() defp transaction_response(result) do
|> case do case result do
{1, [scheduled_activity]} -> {:ok, scheduled_activity} {:ok, %{scheduled_activity: scheduled_activity}} ->
_ -> :error {:ok, scheduled_activity}
{:error, _, changeset, _} ->
{:error, changeset}
end
end
@ -158,4 +196,11 @@ def due_activities(offset \\ 0) do
|> where([sa], sa.scheduled_at < ^naive_datetime)
|> Repo.all()
end
def job_query(scheduled_activity_id) do
from(j in Oban.Job,
where: j.queue == "scheduled_activities",
where: fragment("args ->> 'activity_id' = ?::text", ^to_string(scheduled_activity_id))
)
end
end

View File

@ -1,7 +0,0 @@
# Pleroma: A lightweight social networking server
# Copyright © 2017-2019 Pleroma Authors <https://pleroma.social/>
# SPDX-License-Identifier: AGPL-3.0-only
defmodule Pleroma.Scheduler do
use Quantum.Scheduler, otp_app: :pleroma
end

View File

@ -9,22 +9,43 @@ defmodule Pleroma.Stats do
use GenServer
@interval 1000 * 60 * 60 @init_state %{
peers: [],
stats: %{
domain_count: 0,
status_count: 0,
user_count: 0
}
}
def start_link(_) do
GenServer.start_link(__MODULE__, initial_data(), name: __MODULE__) GenServer.start_link(
__MODULE__,
@init_state,
name: __MODULE__
)
end
@doc "Performs a stats update."
def force_update do
GenServer.call(__MODULE__, :force_update)
end
@doc "Performs collect stats"
def do_collect do
GenServer.cast(__MODULE__, :run_update)
end
@doc "Returns stats data"
@spec get_stats() :: %{domain_count: integer(), status_count: integer(), user_count: integer()}
def get_stats do
%{stats: stats} = GenServer.call(__MODULE__, :get_state)
stats
end
@doc "Returns list peers"
@spec get_peers() :: list(String.t())
def get_peers do
%{peers: peers} = GenServer.call(__MODULE__, :get_state)
@ -32,7 +53,6 @@ def get_peers do
end
def init(args) do
Process.send(self(), :run_update, [])
{:ok, args}
end
@ -45,17 +65,12 @@ def handle_call(:get_state, _from, state) do
{:reply, state, state}
end
def handle_info(:run_update, _state) do def handle_cast(:run_update, _state) do
new_stats = get_stat_data()
Process.send_after(self(), :run_update, @interval)
{:noreply, new_stats}
end
defp initial_data do
%{peers: [], stats: %{}}
end
defp get_stat_data do
peers =
from(
@ -74,7 +89,11 @@ defp get_stat_data do
%{
peers: peers,
stats: %{domain_count: domain_count, status_count: status_count, user_count: user_count} stats: %{
domain_count: domain_count,
status_count: status_count,
user_count: user_count
}
}
end
end

View File

@ -647,10 +647,14 @@ def follow(%User{} = follower, %User{} = followed, state \\ "accept") do
end
end
def unfollow(%User{} = follower, %User{} = followed) do def unfollow(%User{ap_id: ap_id}, %User{ap_id: ap_id}) do
if following?(follower, followed) and follower.ap_id != followed.ap_id do {:error, "Not subscribed!"}
FollowingRelationship.unfollow(follower, followed) end
def unfollow(%User{} = follower, %User{} = followed) do
case get_follow_state(follower, followed) do
state when state in ["accept", "pending"] ->
FollowingRelationship.unfollow(follower, followed)
{:ok, followed} = update_follower_count(followed)
{:ok, follower} =
@ -659,13 +663,32 @@ def unfollow(%User{} = follower, %User{} = followed) do
|> set_cache()
{:ok, follower, Utils.fetch_latest_follow(follower, followed)}
else
nil ->
{:error, "Not subscribed!"} {:error, "Not subscribed!"}
end end
end end
defdelegate following?(follower, followed), to: FollowingRelationship defdelegate following?(follower, followed), to: FollowingRelationship
def get_follow_state(%User{} = follower, %User{} = following) do
following_relationship = FollowingRelationship.get(follower, following)
case {following_relationship, following.local} do
{nil, false} ->
case Utils.fetch_latest_follow(follower, following) do
%{data: %{"state" => state}} when state in ["pending", "accept"] -> state
_ -> nil
end
{%{state: state}, _} ->
state
{nil, _} ->
nil
end
end
def locked?(%User{} = user) do
user.locked || false
end

View File

@ -490,6 +490,15 @@ def fetch_latest_follow(%User{ap_id: follower_id}, %User{ap_id: followed_id}) do
|> Repo.one()
end
def fetch_latest_undo(%User{ap_id: ap_id}) do
"Undo"
|> Activity.Queries.by_type()
|> where(actor: ^ap_id)
|> order_by([activity], fragment("? desc nulls last", activity.id))
|> limit(1)
|> Repo.one()
end
def get_latest_reaction(internal_activity_id, %{ap_id: ap_id}, emoji) do
%{data: %{"object" => object_ap_id}} = Activity.get_by_id(internal_activity_id)

View File

@ -179,9 +179,9 @@ def make_poll_data(%{"poll" => %{"options" => options, "expires_in" => expires_i
end)
end_time =
NaiveDateTime.utc_now() DateTime.utc_now()
|> NaiveDateTime.add(expires_in) |> DateTime.add(expires_in)
|> NaiveDateTime.to_iso8601() |> DateTime.to_iso8601()
key = if truthy_param?(data["poll"]["multiple"]), do: "anyOf", else: "oneOf" key = if truthy_param?(data["poll"]["multiple"]), do: "anyOf", else: "oneOf"
poll = %{"type" => "Question", key => option_notes, "closed" => end_time} poll = %{"type" => "Question", key => option_notes, "closed" => end_time}

View File

@ -61,7 +61,17 @@ defmodule Pleroma.Web.Endpoint do
plug(Plug.RequestId)
plug(Plug.Logger, log: :debug)
plug(Pleroma.Plugs.Parsers) plug(Plug.Parsers,
parsers: [
:urlencoded,
{:multipart, length: {Pleroma.Config, :get, [[:instance, :upload_limit]]}},
:json
],
pass: ["*/*"],
json_decoder: Jason,
length: Pleroma.Config.get([:instance, :upload_limit]),
body_reader: {Pleroma.Web.Plugs.DigestPlug, :read_body, []}
)
plug(Plug.MethodOverride)
plug(Plug.Head)

View File

@ -124,15 +124,18 @@ def create(
) do
params = Map.put(params, "in_reply_to_status_id", params["in_reply_to_id"])
if ScheduledActivity.far_enough?(scheduled_at) do with {:far_enough, true} <- {:far_enough, ScheduledActivity.far_enough?(scheduled_at)},
with {:ok, scheduled_activity} <- attrs <- %{"params" => params, "scheduled_at" => scheduled_at},
ScheduledActivity.create(user, %{"params" => params, "scheduled_at" => scheduled_at}) do {:ok, scheduled_activity} <- ScheduledActivity.create(user, attrs) do
conn
|> put_view(ScheduledActivityView)
|> render("show.json", scheduled_activity: scheduled_activity)
end
else
{:far_enough, _} ->
create(conn, Map.drop(params, ["scheduled_at"]))
error ->
error
end
end

View File

@ -1,34 +0,0 @@
# Pleroma: A lightweight social networking server
# Copyright © 2017-2019 Pleroma Authors <https://pleroma.social/>
# SPDX-License-Identifier: AGPL-3.0-only
defmodule Pleroma.Web.OAuth.Token.CleanWorker do
@moduledoc """
The module represents functions to clean an expired oauth tokens.
"""
use GenServer
@ten_seconds 10_000
@one_day 86_400_000
alias Pleroma.Web.OAuth.Token
alias Pleroma.Workers.BackgroundWorker
def start_link(_), do: GenServer.start_link(__MODULE__, %{})
def init(_) do
Process.send_after(self(), :perform, @ten_seconds)
{:ok, nil}
end
@doc false
def handle_info(:perform, state) do
BackgroundWorker.enqueue("clean_expired_tokens", %{})
interval = Pleroma.Config.get([:oauth2, :clean_expired_tokens_interval], @one_day)
Process.send_after(self(), :perform, interval)
{:noreply, state}
end
def perform(:clean), do: Token.delete_expired_tokens()
end

View File

@ -1,18 +0,0 @@
# Pleroma: A lightweight social networking server
# Copyright © 2017-2019 Pleroma Authors <https://pleroma.social/>
# SPDX-License-Identifier: AGPL-3.0-only
defmodule Pleroma.Workers.ActivityExpirationWorker do
use Pleroma.Workers.WorkerHelper, queue: "activity_expiration"
@impl Oban.Worker
def perform(
%{
"op" => "activity_expiration",
"activity_expiration_id" => activity_expiration_id
},
_job
) do
Pleroma.Daemons.ActivityExpirationDaemon.perform(:execute, activity_expiration_id)
end
end

View File

@ -6,7 +6,6 @@ defmodule Pleroma.Workers.BackgroundWorker do
alias Pleroma.Activity
alias Pleroma.User
alias Pleroma.Web.ActivityPub.MRF.MediaProxyWarmingPolicy
alias Pleroma.Web.OAuth.Token.CleanWorker
use Pleroma.Workers.WorkerHelper, queue: "background"
@ -55,10 +54,6 @@ def perform(
User.perform(:follow_import, follower, followed_identifiers)
end
def perform(%{"op" => "clean_expired_tokens"}, _job) do
CleanWorker.perform(:clean)
end
def perform(%{"op" => "media_proxy_preload", "message" => message}, _job) do def perform(%{"op" => "media_proxy_preload", "message" => message}, _job) do
MediaProxyWarmingPolicy.perform(:preload, message) MediaProxyWarmingPolicy.perform(:preload, message)
end end

View File

@ -0,0 +1,21 @@
# Pleroma: A lightweight social networking server
# Copyright © 2017-2019 Pleroma Authors <https://pleroma.social/>
# SPDX-License-Identifier: AGPL-3.0-only
defmodule Pleroma.Workers.Cron.ClearOauthTokenWorker do
@moduledoc """
The worker to clean up expired OAuth tokens.
"""
use Oban.Worker, queue: "background"
alias Pleroma.Config
alias Pleroma.Web.OAuth.Token
@impl Oban.Worker
def perform(_opts, _job) do
if Config.get([:oauth2, :clean_expired_tokens], false) do
Token.delete_expired_tokens()
end
end
end

View File

@ -0,0 +1,58 @@
# Pleroma: A lightweight social networking server
# Copyright © 2017-2019 Pleroma Authors <https://pleroma.social/>
# SPDX-License-Identifier: AGPL-3.0-only
defmodule Pleroma.Workers.Cron.DigestEmailsWorker do
@moduledoc """
The worker to send digest emails.
"""
use Oban.Worker, queue: "digest_emails"
alias Pleroma.Config
alias Pleroma.Emails
alias Pleroma.Repo
alias Pleroma.User
import Ecto.Query
require Logger
@impl Oban.Worker
def perform(_opts, _job) do
config = Config.get([:email_notifications, :digest])
if config[:active] do
negative_interval = -Map.fetch!(config, :interval)
inactivity_threshold = Map.fetch!(config, :inactivity_threshold)
inactive_users_query = User.list_inactive_users_query(inactivity_threshold)
now = NaiveDateTime.truncate(NaiveDateTime.utc_now(), :second)
from(u in inactive_users_query,
where: fragment(~s(? ->'digest' @> 'true'), u.email_notifications),
where: u.last_digest_emailed_at < datetime_add(^now, ^negative_interval, "day"),
select: u
)
|> Repo.all()
|> send_emails
end
end
def send_emails(users) do
Enum.each(users, &send_email/1)
end
@doc """
Send digest email to the given user.
Updates `last_digest_emailed_at` field for the user and returns the updated user.
"""
@spec send_email(User.t()) :: User.t()
def send_email(user) do
with %Swoosh.Email{} = email <- Emails.UserEmail.digest_email(user) do
Emails.Mailer.deliver_async(email)
end
User.touch_last_digest_emailed_at(user)
end
end

View File

@ -0,0 +1,46 @@
# Pleroma: A lightweight social networking server
# Copyright © 2017-2019 Pleroma Authors <https://pleroma.social/>
# SPDX-License-Identifier: AGPL-3.0-only
defmodule Pleroma.Workers.Cron.PurgeExpiredActivitiesWorker do
@moduledoc """
The worker to purge expired activities.
"""
use Oban.Worker, queue: "activity_expiration"
alias Pleroma.Activity
alias Pleroma.ActivityExpiration
alias Pleroma.Config
alias Pleroma.User
alias Pleroma.Web.CommonAPI
require Logger
@interval :timer.minutes(1)
@impl Oban.Worker
def perform(_opts, _job) do
if Config.get([ActivityExpiration, :enabled]) do
Enum.each(ActivityExpiration.due_expirations(@interval), &delete_activity/1)
end
end
def delete_activity(%ActivityExpiration{activity_id: activity_id}) do
with {:activity, %Activity{} = activity} <-
{:activity, Activity.get_by_id_with_object(activity_id)},
{:user, %User{} = user} <- {:user, User.get_by_ap_id(activity.object.data["actor"])} do
CommonAPI.delete(activity.id, user)
else
{:activity, _} ->
Logger.error(
"#{__MODULE__} Couldn't delete expired activity: not found activity ##{activity_id}"
)
{:user, _} ->
Logger.error(
"#{__MODULE__} Couldn't delete expired activity: not found actorof ##{activity_id}"
)
end
end
end

View File

@ -0,0 +1,16 @@
# Pleroma: A lightweight social networking server
# Copyright © 2017-2019 Pleroma Authors <https://pleroma.social/>
# SPDX-License-Identifier: AGPL-3.0-only
defmodule Pleroma.Workers.Cron.StatsWorker do
@moduledoc """
The worker to update peer statistics.
"""
use Oban.Worker, queue: "background"
@impl Oban.Worker
def perform(_opts, _job) do
Pleroma.Stats.do_collect()
end
end

View File

@ -1,16 +0,0 @@
# Pleroma: A lightweight social networking server
# Copyright © 2017-2019 Pleroma Authors <https://pleroma.social/>
# SPDX-License-Identifier: AGPL-3.0-only
defmodule Pleroma.Workers.DigestEmailsWorker do
alias Pleroma.User
use Pleroma.Workers.WorkerHelper, queue: "digest_emails"
@impl Oban.Worker
def perform(%{"op" => "digest_email", "user_id" => user_id}, _job) do
user_id
|> User.get_cached_by_id()
|> Pleroma.Daemons.DigestEmailDaemon.perform()
end
end

View File

@ -3,10 +3,42 @@
# SPDX-License-Identifier: AGPL-3.0-only
defmodule Pleroma.Workers.ScheduledActivityWorker do
@moduledoc """
The worker to post scheduled activities.
"""
use Pleroma.Workers.WorkerHelper, queue: "scheduled_activities" use Pleroma.Workers.WorkerHelper, queue: "scheduled_activities"
alias Pleroma.Config
alias Pleroma.ScheduledActivity
alias Pleroma.User
alias Pleroma.Web.CommonAPI
require Logger
@impl Oban.Worker
def perform(%{"op" => "execute", "activity_id" => activity_id}, _job) do def perform(%{"activity_id" => activity_id}, _job) do
Pleroma.Daemons.ScheduledActivityDaemon.perform(:execute, activity_id) if Config.get([ScheduledActivity, :enabled]) do
case Pleroma.Repo.get(ScheduledActivity, activity_id) do
%ScheduledActivity{} = scheduled_activity ->
post_activity(scheduled_activity)
_ ->
Logger.error("#{__MODULE__} Couldn't find scheduled activity: #{activity_id}")
end
end
end
defp post_activity(%ScheduledActivity{user_id: user_id, params: params} = scheduled_activity) do
with {:delete, {:ok, _}} <- {:delete, ScheduledActivity.delete(scheduled_activity)},
{:user, %User{} = user} <- {:user, User.get_cached_by_id(user_id)},
{:post, {:ok, _}} <- {:post, CommonAPI.post(user, params)} do
:ok
else
error ->
Logger.error(
"#{__MODULE__} Couldn't create a status from the scheduled activity: #{inspect(error)}"
)
end
end
end

View File

@ -63,7 +63,7 @@ def copy_nginx_config(%{path: target_path} = release) do
def application do
[
mod: {Pleroma.Application, []},
extra_applications: [:logger, :runtime_tools, :comeonin, :quack, :fast_sanitize, :swarm], extra_applications: [:logger, :runtime_tools, :comeonin, :quack, :fast_sanitize],
included_applications: [:ex_syslogger]
]
end
@ -108,8 +108,7 @@ defp deps do
{:ecto_enum, "~> 1.4"}, {:ecto_enum, "~> 1.4"},
{:ecto_sql, "~> 3.3.2"}, {:ecto_sql, "~> 3.3.2"},
{:postgrex, ">= 0.13.5"}, {:postgrex, ">= 0.13.5"},
{:oban, "~> 0.12.0"}, {:oban, "~> 0.12.1"},
{:quantum, "~> 2.3"},
{:gettext, "~> 0.15"}, {:gettext, "~> 0.15"},
{:comeonin, "~> 4.1.1"}, {:comeonin, "~> 4.1.1"},
{:pbkdf2_elixir, "~> 0.12.3"}, {:pbkdf2_elixir, "~> 0.12.3"},
@ -163,7 +162,7 @@ defp deps do
{:esshd, "~> 0.1.0", runtime: Application.get_env(:esshd, :enabled, false)}, {:esshd, "~> 0.1.0", runtime: Application.get_env(:esshd, :enabled, false)},
{:ex_const, "~> 0.2"}, {:ex_const, "~> 0.2"},
{:plug_static_index_html, "~> 1.0.0"}, {:plug_static_index_html, "~> 1.0.0"},
{:excoveralls, "~> 0.11.1", only: :test}, {:excoveralls, "~> 0.12.1", only: :test},
{:flake_id, "~> 0.1.0"}, {:flake_id, "~> 0.1.0"},
{:remote_ip, {:remote_ip,
git: "https://git.pleroma.social/pleroma/remote_ip.git", git: "https://git.pleroma.social/pleroma/remote_ip.git",

View File

@ -36,9 +36,9 @@
"ex_doc": {:hex, :ex_doc, "0.21.2", "caca5bc28ed7b3bdc0b662f8afe2bee1eedb5c3cf7b322feeeb7c6ebbde089d6", [:mix], [{:earmark, "~> 1.3.3 or ~> 1.4", [hex: :earmark, repo: "hexpm", optional: false]}, {:makeup_elixir, "~> 0.14", [hex: :makeup_elixir, repo: "hexpm", optional: false]}], "hexpm"}, "ex_doc": {:hex, :ex_doc, "0.21.2", "caca5bc28ed7b3bdc0b662f8afe2bee1eedb5c3cf7b322feeeb7c6ebbde089d6", [:mix], [{:earmark, "~> 1.3.3 or ~> 1.4", [hex: :earmark, repo: "hexpm", optional: false]}, {:makeup_elixir, "~> 0.14", [hex: :makeup_elixir, repo: "hexpm", optional: false]}], "hexpm"},
"ex_machina": {:hex, :ex_machina, "2.3.0", "92a5ad0a8b10ea6314b876a99c8c9e3f25f4dde71a2a835845b136b9adaf199a", [:mix], [{:ecto, "~> 2.2 or ~> 3.0", [hex: :ecto, repo: "hexpm", optional: true]}, {:ecto_sql, "~> 3.0", [hex: :ecto_sql, repo: "hexpm", optional: true]}], "hexpm"}, "ex_machina": {:hex, :ex_machina, "2.3.0", "92a5ad0a8b10ea6314b876a99c8c9e3f25f4dde71a2a835845b136b9adaf199a", [:mix], [{:ecto, "~> 2.2 or ~> 3.0", [hex: :ecto, repo: "hexpm", optional: true]}, {:ecto_sql, "~> 3.0", [hex: :ecto_sql, repo: "hexpm", optional: true]}], "hexpm"},
"ex_syslogger": {:git, "https://github.com/slashmili/ex_syslogger.git", "f3963399047af17e038897c69e20d552e6899e1d", [tag: "1.4.0"]}, "ex_syslogger": {:git, "https://github.com/slashmili/ex_syslogger.git", "f3963399047af17e038897c69e20d552e6899e1d", [tag: "1.4.0"]},
"excoveralls": {:hex, :excoveralls, "0.11.2", "0c6f2c8db7683b0caa9d490fb8125709c54580b4255ffa7ad35f3264b075a643", [:mix], [{:hackney, "~> 1.0", [hex: :hackney, repo: "hexpm", optional: false]}, {:jason, "~> 1.0", [hex: :jason, repo: "hexpm", optional: false]}], "hexpm"}, "excoveralls": {:hex, :excoveralls, "0.12.1", "a553c59f6850d0aff3770e4729515762ba7c8e41eedde03208182a8dc9d0ce07", [:mix], [{:hackney, "~> 1.0", [hex: :hackney, repo: "hexpm", optional: false]}, {:jason, "~> 1.0", [hex: :jason, repo: "hexpm", optional: false]}], "hexpm"},
"fast_html": {:hex, :fast_html, "1.0.1", "5bc7df4dc4607ec2c314c16414e4111d79a209956c4f5df96602d194c61197f9", [:make, :mix], [], "hexpm"}, "fast_html": {:hex, :fast_html, "0.99.4", "d80812664f0429607e1d880fba0ef04da87a2e4fa596701bcaae17953535695c", [:make, :mix], [], "hexpm"},
"fast_sanitize": {:hex, :fast_sanitize, "0.1.6", "60a5ae96879956dea409a91a77f5dd2994c24cc10f80eefd8f9892ee4c0c7b25", [:mix], [{:fast_html, "~> 1.0", [hex: :fast_html, repo: "hexpm", optional: false]}, {:plug, "~> 1.8", [hex: :plug, repo: "hexpm", optional: false]}], "hexpm"}, "fast_sanitize": {:hex, :fast_sanitize, "0.1.4", "6c2e7203ca2f8275527a3021ba6e9d5d4ee213a47dc214a97c128737c9e56df1", [:mix], [{:fast_html, "~> 0.99", [hex: :fast_html, repo: "hexpm", optional: false]}, {:plug, "~> 1.8", [hex: :plug, repo: "hexpm", optional: false]}], "hexpm"},
"flake_id": {:hex, :flake_id, "0.1.0", "7716b086d2e405d09b647121a166498a0d93d1a623bead243e1f74216079ccb3", [:mix], [{:base62, "~> 1.2", [hex: :base62, repo: "hexpm", optional: false]}, {:ecto, ">= 2.0.0", [hex: :ecto, repo: "hexpm", optional: true]}], "hexpm"}, "flake_id": {:hex, :flake_id, "0.1.0", "7716b086d2e405d09b647121a166498a0d93d1a623bead243e1f74216079ccb3", [:mix], [{:base62, "~> 1.2", [hex: :base62, repo: "hexpm", optional: false]}, {:ecto, ">= 2.0.0", [hex: :ecto, repo: "hexpm", optional: true]}], "hexpm"},
"floki": {:hex, :floki, "0.23.1", "e100306ce7d8841d70a559748e5091542e2cfc67ffb3ade92b89a8435034dab1", [:mix], [{:html_entities, "~> 0.5.0", [hex: :html_entities, repo: "hexpm", optional: false]}], "hexpm"}, "floki": {:hex, :floki, "0.23.1", "e100306ce7d8841d70a559748e5091542e2cfc67ffb3ade92b89a8435034dab1", [:mix], [{:html_entities, "~> 0.5.0", [hex: :html_entities, repo: "hexpm", optional: false]}], "hexpm"},
"gen_smtp": {:hex, :gen_smtp, "0.15.0", "9f51960c17769b26833b50df0b96123605a8024738b62db747fece14eb2fbfcc", [:rebar3], [], "hexpm"}, "gen_smtp": {:hex, :gen_smtp, "0.15.0", "9f51960c17769b26833b50df0b96123605a8024738b62db747fece14eb2fbfcc", [:rebar3], [], "hexpm"},
@ -77,7 +77,7 @@
"phoenix_html": {:hex, :phoenix_html, "2.13.3", "850e292ff6e204257f5f9c4c54a8cb1f6fbc16ed53d360c2b780a3d0ba333867", [:mix], [{:plug, "~> 1.5", [hex: :plug, repo: "hexpm", optional: false]}], "hexpm"}, "phoenix_html": {:hex, :phoenix_html, "2.13.3", "850e292ff6e204257f5f9c4c54a8cb1f6fbc16ed53d360c2b780a3d0ba333867", [:mix], [{:plug, "~> 1.5", [hex: :plug, repo: "hexpm", optional: false]}], "hexpm"},
"phoenix_pubsub": {:hex, :phoenix_pubsub, "1.1.2", "496c303bdf1b2e98a9d26e89af5bba3ab487ba3a3735f74bf1f4064d2a845a3e", [:mix], [], "hexpm"}, "phoenix_pubsub": {:hex, :phoenix_pubsub, "1.1.2", "496c303bdf1b2e98a9d26e89af5bba3ab487ba3a3735f74bf1f4064d2a845a3e", [:mix], [], "hexpm"},
"phoenix_swoosh": {:hex, :phoenix_swoosh, "0.2.0", "a7e0b32077cd6d2323ae15198839b05d9caddfa20663fd85787479e81f89520e", [:mix], [{:phoenix, "~> 1.0", [hex: :phoenix, repo: "hexpm", optional: false]}, {:phoenix_html, "~> 2.2", [hex: :phoenix_html, repo: "hexpm", optional: false]}, {:swoosh, "~> 0.1", [hex: :swoosh, repo: "hexpm", optional: false]}], "hexpm"}, "phoenix_swoosh": {:hex, :phoenix_swoosh, "0.2.0", "a7e0b32077cd6d2323ae15198839b05d9caddfa20663fd85787479e81f89520e", [:mix], [{:phoenix, "~> 1.0", [hex: :phoenix, repo: "hexpm", optional: false]}, {:phoenix_html, "~> 2.2", [hex: :phoenix_html, repo: "hexpm", optional: false]}, {:swoosh, "~> 0.1", [hex: :swoosh, repo: "hexpm", optional: false]}], "hexpm"},
"plug": {:hex, :plug, "1.8.3", "12d5f9796dc72e8ac9614e94bda5e51c4c028d0d428e9297650d09e15a684478", [:mix], [{:mime, "~> 1.0", [hex: :mime, repo: "hexpm", optional: false]}, {:plug_crypto, "~> 1.0", [hex: :plug_crypto, repo: "hexpm", optional: false]}, {:telemetry, "~> 0.4", [hex: :telemetry, repo: "hexpm", optional: true]}], "hexpm"}, "plug": {:hex, :plug, "1.9.0", "8d7c4e26962283ff9f8f3347bd73838e2413fbc38b7bb5467d5924f68f3a5a4a", [:mix], [{:mime, "~> 1.0", [hex: :mime, repo: "hexpm", optional: false]}, {:plug_crypto, "~> 1.0", [hex: :plug_crypto, repo: "hexpm", optional: false]}, {:telemetry, "~> 0.4", [hex: :telemetry, repo: "hexpm", optional: true]}], "hexpm"},
"plug_cowboy": {:hex, :plug_cowboy, "2.1.0", "b75768153c3a8a9e8039d4b25bb9b14efbc58e9c4a6e6a270abff1cd30cbe320", [:mix], [{:cowboy, "~> 2.5", [hex: :cowboy, repo: "hexpm", optional: false]}, {:plug, "~> 1.7", [hex: :plug, repo: "hexpm", optional: false]}], "hexpm"}, "plug_cowboy": {:hex, :plug_cowboy, "2.1.0", "b75768153c3a8a9e8039d4b25bb9b14efbc58e9c4a6e6a270abff1cd30cbe320", [:mix], [{:cowboy, "~> 2.5", [hex: :cowboy, repo: "hexpm", optional: false]}, {:plug, "~> 1.7", [hex: :plug, repo: "hexpm", optional: false]}], "hexpm"},
"plug_crypto": {:hex, :plug_crypto, "1.0.0", "18e49317d3fa343f24620ed22795ec29d4a5e602d52d1513ccea0b07d8ea7d4d", [:mix], [], "hexpm"}, "plug_crypto": {:hex, :plug_crypto, "1.0.0", "18e49317d3fa343f24620ed22795ec29d4a5e602d52d1513ccea0b07d8ea7d4d", [:mix], [], "hexpm"},
"plug_static_index_html": {:hex, :plug_static_index_html, "1.0.0", "840123d4d3975585133485ea86af73cb2600afd7f2a976f9f5fd8b3808e636a0", [:mix], [{:plug, "~> 1.0", [hex: :plug, repo: "hexpm", optional: false]}], "hexpm"}, "plug_static_index_html": {:hex, :plug_static_index_html, "1.0.0", "840123d4d3975585133485ea86af73cb2600afd7f2a976f9f5fd8b3808e636a0", [:mix], [{:plug, "~> 1.0", [hex: :plug, repo: "hexpm", optional: false]}], "hexpm"},
@ -90,12 +90,10 @@
"prometheus_phoenix": {:hex, :prometheus_phoenix, "1.3.0", "c4b527e0b3a9ef1af26bdcfbfad3998f37795b9185d475ca610fe4388fdd3bb5", [:mix], [{:phoenix, "~> 1.4", [hex: :phoenix, repo: "hexpm", optional: false]}, {:prometheus_ex, "~> 1.3 or ~> 2.0 or ~> 3.0", [hex: :prometheus_ex, repo: "hexpm", optional: false]}], "hexpm"}, "prometheus_phoenix": {:hex, :prometheus_phoenix, "1.3.0", "c4b527e0b3a9ef1af26bdcfbfad3998f37795b9185d475ca610fe4388fdd3bb5", [:mix], [{:phoenix, "~> 1.4", [hex: :phoenix, repo: "hexpm", optional: false]}, {:prometheus_ex, "~> 1.3 or ~> 2.0 or ~> 3.0", [hex: :prometheus_ex, repo: "hexpm", optional: false]}], "hexpm"},
"prometheus_plugs": {:hex, :prometheus_plugs, "1.1.5", "25933d48f8af3a5941dd7b621c889749894d8a1082a6ff7c67cc99dec26377c5", [:mix], [{:accept, "~> 0.1", [hex: :accept, repo: "hexpm", optional: false]}, {:plug, "~> 1.0", [hex: :plug, repo: "hexpm", optional: false]}, {:prometheus_ex, "~> 1.1 or ~> 2.0 or ~> 3.0", [hex: :prometheus_ex, repo: "hexpm", optional: false]}, {:prometheus_process_collector, "~> 1.1", [hex: :prometheus_process_collector, repo: "hexpm", optional: true]}], "hexpm"}, "prometheus_plugs": {:hex, :prometheus_plugs, "1.1.5", "25933d48f8af3a5941dd7b621c889749894d8a1082a6ff7c67cc99dec26377c5", [:mix], [{:accept, "~> 0.1", [hex: :accept, repo: "hexpm", optional: false]}, {:plug, "~> 1.0", [hex: :plug, repo: "hexpm", optional: false]}, {:prometheus_ex, "~> 1.1 or ~> 2.0 or ~> 3.0", [hex: :prometheus_ex, repo: "hexpm", optional: false]}, {:prometheus_process_collector, "~> 1.1", [hex: :prometheus_process_collector, repo: "hexpm", optional: true]}], "hexpm"},
"quack": {:hex, :quack, "0.1.1", "cca7b4da1a233757fdb44b3334fce80c94785b3ad5a602053b7a002b5a8967bf", [:mix], [{:poison, ">= 1.0.0", [hex: :poison, repo: "hexpm", optional: false]}, {:tesla, "~> 1.2.0", [hex: :tesla, repo: "hexpm", optional: false]}], "hexpm"}, "quack": {:hex, :quack, "0.1.1", "cca7b4da1a233757fdb44b3334fce80c94785b3ad5a602053b7a002b5a8967bf", [:mix], [{:poison, ">= 1.0.0", [hex: :poison, repo: "hexpm", optional: false]}, {:tesla, "~> 1.2.0", [hex: :tesla, repo: "hexpm", optional: false]}], "hexpm"},
"quantum": {:hex, :quantum, "2.3.4", "72a0e8855e2adc101459eac8454787cb74ab4169de6ca50f670e72142d4960e9", [:mix], [{:calendar, "~> 0.17", [hex: :calendar, repo: "hexpm", optional: true]}, {:crontab, "~> 1.1", [hex: :crontab, repo: "hexpm", optional: false]}, {:gen_stage, "~> 0.12", [hex: :gen_stage, repo: "hexpm", optional: false]}, {:swarm, "~> 3.3", [hex: :swarm, repo: "hexpm", optional: false]}, {:timex, "~> 3.1", [hex: :timex, repo: "hexpm", optional: true]}], "hexpm"},
"ranch": {:hex, :ranch, "1.7.1", "6b1fab51b49196860b733a49c07604465a47bdb78aa10c1c16a3d199f7f8c881", [:rebar3], [], "hexpm"}, "ranch": {:hex, :ranch, "1.7.1", "6b1fab51b49196860b733a49c07604465a47bdb78aa10c1c16a3d199f7f8c881", [:rebar3], [], "hexpm"},
"recon": {:git, "https://github.com/ferd/recon.git", "75d70c7c08926d2f24f1ee6de14ee50fe8a52763", [tag: "2.4.0"]}, "recon": {:git, "https://github.com/ferd/recon.git", "75d70c7c08926d2f24f1ee6de14ee50fe8a52763", [tag: "2.4.0"]},
"remote_ip": {:git, "https://git.pleroma.social/pleroma/remote_ip.git", "825dc00aaba5a1b7c4202a532b696b595dd3bcb3", [ref: "825dc00aaba5a1b7c4202a532b696b595dd3bcb3"]}, "remote_ip": {:git, "https://git.pleroma.social/pleroma/remote_ip.git", "825dc00aaba5a1b7c4202a532b696b595dd3bcb3", [ref: "825dc00aaba5a1b7c4202a532b696b595dd3bcb3"]},
"ssl_verify_fun": {:hex, :ssl_verify_fun, "1.1.5", "6eaf7ad16cb568bb01753dbbd7a95ff8b91c7979482b95f38443fe2c8852a79b", [:make, :mix, :rebar3], [], "hexpm"}, "ssl_verify_fun": {:hex, :ssl_verify_fun, "1.1.5", "6eaf7ad16cb568bb01753dbbd7a95ff8b91c7979482b95f38443fe2c8852a79b", [:make, :mix, :rebar3], [], "hexpm"},
"swarm": {:hex, :swarm, "3.4.0", "64f8b30055d74640d2186c66354b33b999438692a91be275bb89cdc7e401f448", [:mix], [{:gen_state_machine, "~> 2.0", [hex: :gen_state_machine, repo: "hexpm", optional: false]}, {:libring, "~> 1.0", [hex: :libring, repo: "hexpm", optional: false]}], "hexpm"},
"sweet_xml": {:hex, :sweet_xml, "0.6.6", "fc3e91ec5dd7c787b6195757fbcf0abc670cee1e4172687b45183032221b66b8", [:mix], [], "hexpm"}, "sweet_xml": {:hex, :sweet_xml, "0.6.6", "fc3e91ec5dd7c787b6195757fbcf0abc670cee1e4172687b45183032221b66b8", [:mix], [], "hexpm"},
"swoosh": {:hex, :swoosh, "0.23.5", "bfd9404bbf5069b1be2ffd317923ce57e58b332e25dbca2a35dedd7820dfee5a", [:mix], [{:cowboy, "~> 1.0.1 or ~> 1.1 or ~> 2.4", [hex: :cowboy, repo: "hexpm", optional: true]}, {:gen_smtp, "~> 0.13", [hex: :gen_smtp, repo: "hexpm", optional: true]}, {:hackney, "~> 1.9", [hex: :hackney, repo: "hexpm", optional: false]}, {:jason, "~> 1.0", [hex: :jason, repo: "hexpm", optional: false]}, {:mail, "~> 0.2", [hex: :mail, repo: "hexpm", optional: true]}, {:mime, "~> 1.1", [hex: :mime, repo: "hexpm", optional: false]}, {:plug_cowboy, ">= 1.0.0", [hex: :plug_cowboy, repo: "hexpm", optional: true]}], "hexpm"}, "swoosh": {:hex, :swoosh, "0.23.5", "bfd9404bbf5069b1be2ffd317923ce57e58b332e25dbca2a35dedd7820dfee5a", [:mix], [{:cowboy, "~> 1.0.1 or ~> 1.1 or ~> 2.4", [hex: :cowboy, repo: "hexpm", optional: true]}, {:gen_smtp, "~> 0.13", [hex: :gen_smtp, repo: "hexpm", optional: true]}, {:hackney, "~> 1.9", [hex: :hackney, repo: "hexpm", optional: false]}, {:jason, "~> 1.0", [hex: :jason, repo: "hexpm", optional: false]}, {:mail, "~> 0.2", [hex: :mail, repo: "hexpm", optional: true]}, {:mime, "~> 1.1", [hex: :mime, repo: "hexpm", optional: false]}, {:plug_cowboy, ">= 1.0.0", [hex: :plug_cowboy, repo: "hexpm", optional: true]}], "hexpm"},
"syslog": {:git, "https://github.com/Vagabond/erlang-syslog.git", "4a6c6f2c996483e86c1320e9553f91d337bcb6aa", [tag: "1.0.5"]}, "syslog": {:git, "https://github.com/Vagabond/erlang-syslog.git", "4a6c6f2c996483e86c1320e9553f91d337bcb6aa", [tag: "1.0.5"]},

View File

@ -7,6 +7,8 @@ defmodule Pleroma.ActivityExpirationTest do
alias Pleroma.ActivityExpiration
import Pleroma.Factory
clear_config([ActivityExpiration, :enabled])
test "finds activities due to be deleted only" do test "finds activities due to be deleted only" do
activity = insert(:note_activity) activity = insert(:note_activity)
expiration_due = insert(:expiration_in_the_past, %{activity_id: activity.id}) expiration_due = insert(:expiration_in_the_past, %{activity_id: activity.id})
@ -24,4 +26,27 @@ test "denies expirations that don't live long enough" do
now = NaiveDateTime.utc_now()
assert {:error, _} = ActivityExpiration.create(activity, now)
end
test "deletes an expiration activity" do
Pleroma.Config.put([ActivityExpiration, :enabled], true)
activity = insert(:note_activity)
naive_datetime =
NaiveDateTime.add(
NaiveDateTime.utc_now(),
-:timer.minutes(2),
:millisecond
)
expiration =
insert(
:expiration_in_the_past,
%{activity_id: activity.id, scheduled_at: naive_datetime}
)
Pleroma.Workers.Cron.PurgeExpiredActivitiesWorker.perform(:ops, :pid)
refute Pleroma.Repo.get(Pleroma.Activity, activity.id)
refute Pleroma.Repo.get(Pleroma.ActivityExpiration, expiration.id)
end
end

View File

@ -1,17 +0,0 @@
# Pleroma: A lightweight social networking server
# Copyright © 2017-2019 Pleroma Authors <https://pleroma.social/>
# SPDX-License-Identifier: AGPL-3.0-only
defmodule Pleroma.ActivityExpirationWorkerTest do
use Pleroma.DataCase
alias Pleroma.Activity
import Pleroma.Factory
test "deletes an activity" do
activity = insert(:note_activity)
expiration = insert(:expiration_in_the_past, %{activity_id: activity.id})
Pleroma.Daemons.ActivityExpirationDaemon.perform(:execute, expiration.id)
refute Repo.get(Activity, activity.id)
end
end

View File

@ -1,19 +0,0 @@
# Pleroma: A lightweight social networking server
# Copyright © 2017-2019 Pleroma Authors <https://pleroma.social/>
# SPDX-License-Identifier: AGPL-3.0-only
defmodule Pleroma.ScheduledActivityDaemonTest do
use Pleroma.DataCase
alias Pleroma.ScheduledActivity
import Pleroma.Factory
test "creates a status from the scheduled activity" do
user = insert(:user)
scheduled_activity = insert(:scheduled_activity, user: user, params: %{status: "hi"})
Pleroma.Daemons.ScheduledActivityDaemon.perform(:execute, scheduled_activity.id)
refute Repo.get(ScheduledActivity, scheduled_activity.id)
activity = Repo.all(Pleroma.Activity) |> Enum.find(&(&1.actor == user.ap_id))
assert Pleroma.Object.normalize(activity).data["content"] == "hi"
end
end

View File

@ -8,11 +8,51 @@ defmodule Pleroma.ScheduledActivityTest do
alias Pleroma.ScheduledActivity
import Pleroma.Factory
clear_config([ScheduledActivity, :enabled])
setup context do
DataCase.ensure_local_uploader(context)
end
describe "creation" do
test "scheduled activities with jobs when ScheduledActivity enabled" do
Pleroma.Config.put([ScheduledActivity, :enabled], true)
user = insert(:user)
today =
NaiveDateTime.utc_now()
|> NaiveDateTime.add(:timer.minutes(6), :millisecond)
|> NaiveDateTime.to_iso8601()
attrs = %{params: %{}, scheduled_at: today}
{:ok, sa1} = ScheduledActivity.create(user, attrs)
{:ok, sa2} = ScheduledActivity.create(user, attrs)
jobs =
Repo.all(from(j in Oban.Job, where: j.queue == "scheduled_activities", select: j.args))
assert jobs == [%{"activity_id" => sa1.id}, %{"activity_id" => sa2.id}]
end
test "scheduled activities without jobs when ScheduledActivity disabled" do
Pleroma.Config.put([ScheduledActivity, :enabled], false)
user = insert(:user)
today =
NaiveDateTime.utc_now()
|> NaiveDateTime.add(:timer.minutes(6), :millisecond)
|> NaiveDateTime.to_iso8601()
attrs = %{params: %{}, scheduled_at: today}
{:ok, _sa1} = ScheduledActivity.create(user, attrs)
{:ok, _sa2} = ScheduledActivity.create(user, attrs)
jobs =
Repo.all(from(j in Oban.Job, where: j.queue == "scheduled_activities", select: j.args))
assert jobs == []
end
test "when daily user limit is exceeded" do test "when daily user limit is exceeded" do
user = insert(:user) user = insert(:user)
@ -24,6 +64,7 @@ test "when daily user limit is exceeded" do
attrs = %{params: %{}, scheduled_at: today}
{:ok, _} = ScheduledActivity.create(user, attrs)
{:ok, _} = ScheduledActivity.create(user, attrs)
{:error, changeset} = ScheduledActivity.create(user, attrs)
assert changeset.errors == [scheduled_at: {"daily limit exceeded", []}]
end

View File

@ -54,6 +54,12 @@ defmacro __using__(_opts) do
clear_config_all: 2
]
def to_datetime(naive_datetime) do
naive_datetime
|> DateTime.from_naive!("Etc/UTC")
|> DateTime.truncate(:second)
end
def collect_ids(collection) do
collection
|> Enum.map(& &1.id)

View File

@ -1174,6 +1174,23 @@ test "creates an undo activity for the last follow" do
assert embedded_object["object"] == followed.ap_id assert embedded_object["object"] == followed.ap_id
assert embedded_object["id"] == follow_activity.data["id"] assert embedded_object["id"] == follow_activity.data["id"]
end end
test "creates an undo activity for a pending follow request" do
follower = insert(:user)
followed = insert(:user, %{locked: true})
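# Following a locked account leaves the Follow pending; unfollowing should still produce an Undo of that Follow.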
{:ok, follow_activity} = ActivityPub.follow(follower, followed)
{:ok, activity} = ActivityPub.unfollow(follower, followed)
assert activity.data["type"] == "Undo"
assert activity.data["actor"] == follower.ap_id
embedded_object = activity.data["object"]
assert is_map(embedded_object)
assert embedded_object["type"] == "Follow"
assert embedded_object["object"] == followed.ap_id
assert embedded_object["id"] == follow_activity.data["id"]
end
end
describe "blocking / unblocking" do

View File

@ -551,6 +551,50 @@ test "also unsubscribes a user" do
refute User.subscribed_to?(follower, followed)
end
test "cancels a pending follow for a local user" do
follower = insert(:user)
followed = insert(:user, locked: true)
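# A follow request against a locked account stays in the "pending" state; cancelling it marks the Follow as "cancelled".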
assert {:ok, follower, followed, %{id: activity_id, data: %{"state" => "pending"}}} =
CommonAPI.follow(follower, followed)
assert User.get_follow_state(follower, followed) == "pending"
assert {:ok, follower} = CommonAPI.unfollow(follower, followed)
assert User.get_follow_state(follower, followed) == nil
assert %{id: ^activity_id, data: %{"state" => "cancelled"}} =
Pleroma.Web.ActivityPub.Utils.fetch_latest_follow(follower, followed)
assert %{
data: %{
"type" => "Undo",
"object" => %{"type" => "Follow", "state" => "cancelled"}
}
} = Pleroma.Web.ActivityPub.Utils.fetch_latest_undo(follower)
end
test "cancels a pending follow for a remote user" do
follower = insert(:user)
followed = insert(:user, locked: true, local: false, ap_enabled: true)
assert {:ok, follower, followed, %{id: activity_id, data: %{"state" => "pending"}}} =
CommonAPI.follow(follower, followed)
assert User.get_follow_state(follower, followed) == "pending"
assert {:ok, follower} = CommonAPI.unfollow(follower, followed)
assert User.get_follow_state(follower, followed) == nil
assert %{id: ^activity_id, data: %{"state" => "cancelled"}} =
Pleroma.Web.ActivityPub.Utils.fetch_latest_follow(follower, followed)
assert %{
data: %{
"type" => "Undo",
"object" => %{"type" => "Follow", "state" => "cancelled"}
}
} = Pleroma.Web.ActivityPub.Utils.fetch_latest_undo(follower)
end
end
describe "accept_follow_request/2" do

View File

@ -457,6 +457,16 @@ test "following / unfollowing a user", %{conn: conn} do
assert id == to_string(other_user.id)
end
test "cancelling follow request", %{conn: conn} do
%{id: other_user_id} = insert(:user, %{locked: true})
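# Following a locked account only creates a follow request; unfollowing should cancel it.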
assert %{"id" => ^other_user_id, "following" => false, "requested" => true} =
conn |> post("/api/v1/accounts/#{other_user_id}/follow") |> json_response(:ok)
assert %{"id" => ^other_user_id, "following" => false, "requested" => false} =
conn |> post("/api/v1/accounts/#{other_user_id}/unfollow") |> json_response(:ok)
end
test "following without reblogs" do test "following without reblogs" do
%{conn: conn} = oauth_access(["follow", "read:statuses"]) %{conn: conn} = oauth_access(["follow", "read:statuses"])
followed = insert(:user) followed = insert(:user)

View File

@ -9,6 +9,9 @@ defmodule Pleroma.Web.MastodonAPI.ScheduledActivityControllerTest do
alias Pleroma.ScheduledActivity
import Pleroma.Factory
import Ecto.Query
clear_config([ScheduledActivity, :enabled])
test "shows scheduled activities" do test "shows scheduled activities" do
%{user: user, conn: conn} = oauth_access(["read:statuses"]) %{user: user, conn: conn} = oauth_access(["read:statuses"])
@ -52,11 +55,26 @@ test "shows a scheduled activity" do
end
test "updates a scheduled activity" do
Pleroma.Config.put([ScheduledActivity, :enabled], true)
%{user: user, conn: conn} = oauth_access(["write:statuses"])
scheduled_activity = insert(:scheduled_activity, user: user)
new_scheduled_at =
NaiveDateTime.add(NaiveDateTime.utc_now(), :timer.minutes(120), :millisecond)
scheduled_at = Timex.shift(NaiveDateTime.utc_now(), minutes: 60)
{:ok, scheduled_activity} =
ScheduledActivity.create(
user,
%{
scheduled_at: scheduled_at,
params: build(:note).data
}
)
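# Creating the scheduled activity should enqueue an Oban job scheduled for the requested time.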
job = Repo.one(from(j in Oban.Job, where: j.queue == "scheduled_activities"))
assert job.args == %{"activity_id" => scheduled_activity.id}
assert DateTime.truncate(job.scheduled_at, :second) == to_datetime(scheduled_at)
new_scheduled_at = Timex.shift(NaiveDateTime.utc_now(), minutes: 120)
res_conn =
put(conn, "/api/v1/scheduled_statuses/#{scheduled_activity.id}", %{
@ -65,6 +83,9 @@ test "updates a scheduled activity" do
assert %{"scheduled_at" => expected_scheduled_at} = json_response(res_conn, 200) assert %{"scheduled_at" => expected_scheduled_at} = json_response(res_conn, 200)
assert expected_scheduled_at == Pleroma.Web.CommonAPI.Utils.to_masto_date(new_scheduled_at) assert expected_scheduled_at == Pleroma.Web.CommonAPI.Utils.to_masto_date(new_scheduled_at)
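# Updating the scheduled activity should also reschedule its Oban job.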
job = refresh_record(job)
assert DateTime.truncate(job.scheduled_at, :second) == to_datetime(new_scheduled_at)
res_conn = put(conn, "/api/v1/scheduled_statuses/404", %{scheduled_at: new_scheduled_at})
@ -72,8 +93,22 @@ test "updates a scheduled activity" do
end
test "deletes a scheduled activity" do
Pleroma.Config.put([ScheduledActivity, :enabled], true)
%{user: user, conn: conn} = oauth_access(["write:statuses"])
scheduled_activity = insert(:scheduled_activity, user: user)
scheduled_at = Timex.shift(NaiveDateTime.utc_now(), minutes: 60)
{:ok, scheduled_activity} =
ScheduledActivity.create(
user,
%{
scheduled_at: scheduled_at,
params: build(:note).data
}
)
job = Repo.one(from(j in Oban.Job, where: j.queue == "scheduled_activities"))
assert job.args == %{"activity_id" => scheduled_activity.id}
res_conn =
conn
@ -81,7 +116,8 @@ test "deletes a scheduled activity" do
|> delete("/api/v1/scheduled_statuses/#{scheduled_activity.id}") |> delete("/api/v1/scheduled_statuses/#{scheduled_activity.id}")
assert %{} = json_response(res_conn, 200) assert %{} = json_response(res_conn, 200)
assert nil == Repo.get(ScheduledActivity, scheduled_activity.id) refute Repo.get(ScheduledActivity, scheduled_activity.id)
refute Repo.get(Oban.Job, job.id)
res_conn =
conn

View File

@ -370,6 +370,11 @@ test "posting a poll", %{conn: conn} do
assert NaiveDateTime.diff(NaiveDateTime.from_iso8601!(response["poll"]["expires_at"]), time) in 420..430
refute response["poll"]["expred"]
question = Object.get_by_id(response["poll"]["id"])
# closed contains utc timezone
assert question.data["closed"] =~ "Z"
end
test "option limit is enforced", %{conn: conn} do

View File

@ -6,6 +6,7 @@ defmodule Pleroma.Web.NodeInfoTest do
use Pleroma.Web.ConnCase
import Pleroma.Factory
clear_config([:mrf_simple])
test "GET /.well-known/nodeinfo", %{conn: conn} do test "GET /.well-known/nodeinfo", %{conn: conn} do
links = links =

View File

@ -0,0 +1,22 @@
# Pleroma: A lightweight social networking server
# Copyright © 2017-2019 Pleroma Authors <https://pleroma.social/>
# SPDX-License-Identifier: AGPL-3.0-only
defmodule Pleroma.Workers.Cron.ClearOauthTokenWorkerTest do
use Pleroma.DataCase
import Pleroma.Factory
alias Pleroma.Workers.Cron.ClearOauthTokenWorker
clear_config([:oauth2, :clean_expired_tokens])
test "deletes expired tokens" do
insert(:oauth_token,
valid_until: NaiveDateTime.add(NaiveDateTime.utc_now(), -60 * 10)
)
Pleroma.Config.put([:oauth2, :clean_expired_tokens], true)
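# With cleanup enabled, performing the worker should remove the expired token inserted above.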
ClearOauthTokenWorker.perform(:opts, :job)
assert Pleroma.Repo.all(Pleroma.Web.OAuth.Token) == []
end
end

View File

@ -2,16 +2,24 @@
# Copyright © 2017-2019 Pleroma Authors <https://pleroma.social/>
# SPDX-License-Identifier: AGPL-3.0-only
defmodule Pleroma.DigestEmailDaemonTest do
defmodule Pleroma.Workers.Cron.DigestEmailsWorkerTest do
use Pleroma.DataCase
import Pleroma.Factory
alias Pleroma.Daemons.DigestEmailDaemon
alias Pleroma.Tests.ObanHelpers
alias Pleroma.User
alias Pleroma.Web.CommonAPI
clear_config([:email_notifications, :digest])
test "it sends digest emails" do test "it sends digest emails" do
Pleroma.Config.put([:email_notifications, :digest], %{
active: true,
inactivity_threshold: 7,
interval: 7
})
user = insert(:user)
date =
@ -23,8 +31,7 @@ test "it sends digest emails" do
{:ok, _} = User.switch_email_notifications(user2, "digest", true)
CommonAPI.post(user, %{"status" => "hey @#{user2.nickname}!"})
DigestEmailDaemon.perform()
Pleroma.Workers.Cron.DigestEmailsWorker.perform(:opts, :pid)
ObanHelpers.perform_all()
# Performing job(s) enqueued at previous step
ObanHelpers.perform_all()

View File

@ -0,0 +1,56 @@
# Pleroma: A lightweight social networking server
# Copyright © 2017-2019 Pleroma Authors <https://pleroma.social/>
# SPDX-License-Identifier: AGPL-3.0-only
defmodule Pleroma.Workers.Cron.PurgeExpiredActivitiesWorkerTest do
use Pleroma.DataCase
alias Pleroma.ActivityExpiration
alias Pleroma.Workers.Cron.PurgeExpiredActivitiesWorker
import Pleroma.Factory
import ExUnit.CaptureLog
clear_config([ActivityExpiration, :enabled])
test "deletes an expiration activity" do
Pleroma.Config.put([ActivityExpiration, :enabled], true)
activity = insert(:note_activity)
naive_datetime =
NaiveDateTime.add(
NaiveDateTime.utc_now(),
-:timer.minutes(2),
:millisecond
)
expiration =
insert(
:expiration_in_the_past,
%{activity_id: activity.id, scheduled_at: naive_datetime}
)
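# Performing the worker should delete both the expired activity and its expiration record.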
Pleroma.Workers.Cron.PurgeExpiredActivitiesWorker.perform(:ops, :pid)
refute Pleroma.Repo.get(Pleroma.Activity, activity.id)
refute Pleroma.Repo.get(Pleroma.ActivityExpiration, expiration.id)
end
describe "delete_activity/1" do
test "adds log message if activity isn't find" do
assert capture_log([level: :error], fn ->
PurgeExpiredActivitiesWorker.delete_activity(%ActivityExpiration{
activity_id: "test-activity"
})
end) =~ "Couldn't delete expired activity: not found activity"
end
test "adds log message if actor isn't find" do
assert capture_log([level: :error], fn ->
PurgeExpiredActivitiesWorker.delete_activity(%ActivityExpiration{
activity_id: "test-activity"
})
end) =~ "Couldn't delete expired activity: not found activity"
end
end
end

View File

@ -0,0 +1,52 @@
# Pleroma: A lightweight social networking server
# Copyright © 2017-2019 Pleroma Authors <https://pleroma.social/>
# SPDX-License-Identifier: AGPL-3.0-only
defmodule Pleroma.Workers.ScheduledActivityWorkerTest do
use Pleroma.DataCase
alias Pleroma.ScheduledActivity
alias Pleroma.Workers.ScheduledActivityWorker
import Pleroma.Factory
import ExUnit.CaptureLog
clear_config([ScheduledActivity, :enabled])
test "creates a status from the scheduled activity" do
Pleroma.Config.put([ScheduledActivity, :enabled], true)
user = insert(:user)
naive_datetime =
NaiveDateTime.add(
NaiveDateTime.utc_now(),
-:timer.minutes(2),
:millisecond
)
scheduled_activity =
insert(
:scheduled_activity,
scheduled_at: naive_datetime,
user: user,
params: %{status: "hi"}
)
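# Performing the job should publish the status and remove the ScheduledActivity record.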
ScheduledActivityWorker.perform(
%{"activity_id" => scheduled_activity.id},
:pid
)
refute Repo.get(ScheduledActivity, scheduled_activity.id)
activity = Repo.all(Pleroma.Activity) |> Enum.find(&(&1.actor == user.ap_id))
assert Pleroma.Object.normalize(activity).data["content"] == "hi"
end
test "adds log message if ScheduledActivity isn't find" do
Pleroma.Config.put([ScheduledActivity, :enabled], true)
assert capture_log([level: :error], fn ->
ScheduledActivityWorker.perform(%{"activity_id" => 42}, :pid)
end) =~ "Couldn't find scheduled activity"
end
end