Compare commits

...

20 Commits

Author SHA1 Message Date
Mark Felder
acd7b7ada4 Invalidation.enabled function is boolean 2020-06-09 10:19:55 -05:00
Mark Felder
2222b21eb7 Merge branch 'develop' into fix/mediaproxy-http-invalidation 2020-06-09 09:37:46 -05:00
Maksim Pechnikov
a602886508 added test 2020-06-06 21:12:35 +03:00
Maksim Pechnikov
0eff690f44 fix typo 2020-06-05 08:06:29 +03:00
Maksim Pechnikov
a0fab4e9b6 update config/description.exs 2020-06-05 06:37:16 +03:00
Mark Felder
55825b46be Merge branch 'develop' into fix/mediaproxy-http-invalidation 2020-06-04 17:29:20 -05:00
Mark Felder
9d2c2a83a4 Do not insert cache invalidations for URLs that bypass MediaProxy 2020-06-04 17:26:11 -05:00
Maksim Pechnikov
03369b5c4a fix cache keys 2020-06-04 17:01:45 +03:00
Maksim Pechnikov
3fd2795412 fix purge script 2020-06-03 21:10:41 +03:00
Maksim Pechnikov
64b7b0ee3b added filters deleted media urls 2020-06-03 09:42:26 +03:00
Maksim Pechnikov
5402fa9fb0 remove_deleted_attachements_from_cache -> maybe_remove_mediaproxy_invalidation 2020-05-30 09:09:44 +03:00
Maksim Pechnikov
dc5647d4da added test 2020-05-29 17:09:25 +03:00
Maksim Pechnikov
e4e96ea914 remove deleted media urls from cache 2020-05-28 21:35:46 +03:00
Maksim Pechnikov
ddb91106b6 update attachments_cleanup_worker.ex 2020-05-28 09:30:34 +03:00
Maksim Pechnikov
1a7ed04f9c Merge branch 'develop' into fix/mediaproxy-http-invalidation 2020-05-26 17:12:26 +03:00
Maksim Pechnikov
04a26ab0a8 added deleted_urls in AttachmentsCleanupWorker 2020-05-26 09:49:20 +03:00
Maksim Pechnikov
65d9692975 fix tests 2020-05-26 06:38:45 +03:00
Maksim Pechnikov
6e9e21d6fc added deleted_urls_cache 2020-05-25 20:55:42 +03:00
Maksim Pechnikov
755bf36437 fix mediaproxy invalidations 2020-05-23 21:22:12 +03:00
Mark Felder
aa06fc584b Fix MediaProxy Invalidation for Http method 2020-05-22 13:53:03 -05:00
20 changed files with 500 additions and 119 deletions

View File

@ -406,6 +406,13 @@ config :pleroma, :media_proxy,
],
whitelist: []
config :pleroma, Pleroma.Web.MediaProxy.Invalidation.Http,
method: :purge,
headers: [],
options: []
config :pleroma, Pleroma.Web.MediaProxy.Invalidation.Script, script_path: nil
config :pleroma, :chat, enabled: true
config :phoenix, :format_encoders, json: Jason

View File

@ -1638,6 +1638,31 @@ config :pleroma, :config_description, [
suggestions: ["https://example.com"]
},
%{
key: :invalidation,
type: :keyword,
description: "",
suggestions: [
enabled: true,
provider: Pleroma.Web.MediaProxy.Invalidation.Script
],
children: [
%{
key: :enabled,
type: :boolean,
description: "Enables invalidate media cache"
},
%{
key: :provider,
type: :module,
description: "Module which will be used to cache purge.",
suggestions: [
Pleroma.Web.MediaProxy.Invalidation.Script,
Pleroma.Web.MediaProxy.Invalidation.Http
]
}
]
},
%{
key: :proxy_opts, key: :proxy_opts,
type: :keyword, type: :keyword,
description: "Options for Pleroma.ReverseProxy", description: "Options for Pleroma.ReverseProxy",
@ -1711,6 +1736,45 @@ config :pleroma, :config_description, [
},
%{
group: :pleroma,
key: Pleroma.Web.MediaProxy.Invalidation.Http,
type: :group,
description: "HTTP invalidate settings",
children: [
%{
key: :method,
type: :atom,
description: "HTTP method of request. Default: :purge"
},
%{
key: :headers,
type: {:list, :tuple},
description: "HTTP headers of request.",
suggestions: [{"x-refresh", 1}]
},
%{
key: :options,
type: :keyword,
description: "Request options.",
suggestions: [params: %{ts: "xxx"}]
}
]
},
%{
group: :pleroma,
key: Pleroma.Web.MediaProxy.Invalidation.Script,
type: :group,
description: "Script invalidate settings",
children: [
%{
key: :script_path,
type: :string,
description: "Path to shell script. Which will run purge cache.",
suggestions: ["./installation/nginx-cache-purge.sh.example"]
}
]
},
%{
group: :pleroma,
key: :gopher,
type: :group,
description: "Gopher settings",

View File

@ -262,7 +262,7 @@ This section describe PWA manifest instance-specific values. Currently this opti
#### Pleroma.Web.MediaProxy.Invalidation.Script

This strategy runs an external shell script to purge the cache.
The URLs of the deleted attachments are passed to the script as arguments.

* `script_path`: path to the external script.
@ -278,8 +278,8 @@ config :pleroma, Pleroma.Web.MediaProxy.Invalidation.Script,
This strategy performs a custom HTTP request to purge the cache.

* `method`: HTTP method. Default is `purge`.
* `headers`: HTTP headers.
* `options`: request options.

Example:
```elixir
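# A minimal sketch of this configuration; the values below simply mirror the
# defaults added to config/config.exs earlier in this changeset.
config :pleroma, Pleroma.Web.MediaProxy.Invalidation.Http,
  method: :purge,
  headers: [],
  options: []
```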

View File

@ -13,7 +13,7 @@ CACHE_DIRECTORY="/tmp/pleroma-media-cache"
## $3 - (optional) the number of parallel processes to run for grep.
get_cache_files() {
local max_parallel=${3-16}
find $2 -maxdepth 2 -type d | xargs -P $max_parallel -n 1 grep -E -Rl "^KEY:.*$1" | sort -u
}
## Removes an item from the given cache zone.
@ -33,8 +33,9 @@ purge() {
do
echo "$SCRIPTNAME delete \`$url\` from cache ($CACHE_DIRECTORY)"
purge_item $url $CACHE_DIRECTORY
echo "\n"
done
}
purge $@
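This script is what the Script invalidation provider shells out to; a minimal configuration sketch wiring it up (the path is illustrative, taken from the suggestion in `config/description.exs` above):

```elixir
# Sketch only: enable cache invalidation and point the Script provider at a
# purge script such as installation/nginx-cache-purge.sh.
config :pleroma, :media_proxy,
  invalidation: [
    enabled: true,
    provider: Pleroma.Web.MediaProxy.Invalidation.Script
  ]

config :pleroma, Pleroma.Web.MediaProxy.Invalidation.Script,
  script_path: "./installation/nginx-cache-purge.sh.example"
```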

View File

@ -148,7 +148,8 @@ defmodule Pleroma.Application do
build_cachex("idempotency", expiration: idempotency_expiration(), limit: 2500), build_cachex("idempotency", expiration: idempotency_expiration(), limit: 2500),
build_cachex("web_resp", limit: 2500), build_cachex("web_resp", limit: 2500),
build_cachex("emoji_packs", expiration: emoji_packs_expiration(), limit: 10), build_cachex("emoji_packs", expiration: emoji_packs_expiration(), limit: 10),
build_cachex("failed_proxy_url", limit: 2500) build_cachex("failed_proxy_url", limit: 2500),
build_cachex("deleted_urls", default_ttl: :timer.hours(24 * 30), limit: 5_000)
]
end

View File

@ -10,6 +10,8 @@ defmodule Pleroma.Plugs.UploadedMedia do
import Pleroma.Web.Gettext
require Logger
alias Pleroma.Web.MediaProxy
@behaviour Plug
# no slashes
@path "media"
@ -35,8 +37,7 @@ defmodule Pleroma.Plugs.UploadedMedia do
%{query_params: %{"name" => name}} = conn ->
name = String.replace(name, "\"", "\\\"")
put_resp_header(conn, "content-disposition", "filename=\"#{name}\"")
conn ->
conn
@ -47,7 +48,8 @@ defmodule Pleroma.Plugs.UploadedMedia do
with uploader <- Keyword.fetch!(config, :uploader),
proxy_remote = Keyword.get(config, :proxy_remote, false),
{:ok, get_method} <- uploader.get_file(file),
false <- media_is_deleted(conn, get_method) do
get_media(conn, get_method, proxy_remote, opts)
else
_ ->
@ -59,6 +61,14 @@ defmodule Pleroma.Plugs.UploadedMedia do
def call(conn, _opts), do: conn
defp media_is_deleted(%{request_path: path} = _conn, {:static_dir, _}) do
MediaProxy.in_deleted_urls(Pleroma.Web.base_url() <> path)
end
defp media_is_deleted(_, {:url, url}), do: MediaProxy.in_deleted_urls(url)
defp media_is_deleted(_, _), do: false
defp get_media(conn, {:static_dir, directory}, _, opts) do
static_opts =
Map.get(opts, :static_plug_opts)

View File

@ -20,6 +20,7 @@ defmodule Pleroma.Web.ActivityPub.ActivityPub do
alias Pleroma.User
alias Pleroma.Web.ActivityPub.MRF
alias Pleroma.Web.ActivityPub.Transmogrifier
alias Pleroma.Web.MediaProxy
alias Pleroma.Web.Streamer
alias Pleroma.Web.WebFinger
alias Pleroma.Workers.BackgroundWorker
@ -83,6 +84,22 @@ defmodule Pleroma.Web.ActivityPub.ActivityPub do
defp check_remote_limit(_), do: true
@spec maybe_remove_mediaproxy_invalidation(boolean, map()) :: :ok
def maybe_remove_mediaproxy_invalidation(true, %{
"object" => %{"attachment" => [_ | _] = attachments}
}) do
attachments
|> Enum.flat_map(fn
%{"url" => urls} -> Enum.map(urls, & &1["href"])
_ -> []
end)
|> MediaProxy.remove_from_deleted_urls()
:ok
end
def maybe_remove_mediaproxy_invalidation(_, _), do: :ok
defp increase_note_count_if_public(actor, object) do
if is_public?(object), do: User.increase_note_count(actor), else: {:ok, actor}
end
@ -259,16 +276,15 @@ defmodule Pleroma.Web.ActivityPub.ActivityPub do
{:fake, false, activity} <- {:fake, fake, activity},
_ <- increase_replies_count_if_reply(create_data),
_ <- increase_poll_votes_if_vote(create_data),
_ <-
maybe_remove_mediaproxy_invalidation(MediaProxy.Invalidation.enabled?(), create_data),
{:quick_insert, false, activity} <- {:quick_insert, quick_insert?, activity},
{:ok, _actor} <- increase_note_count_if_public(actor, activity),
_ <- notify_and_stream(activity),
:ok <- maybe_federate(activity) do
{:ok, activity}
else
{error, true, activity} when error in [:fake, :quick_insert] ->
{:ok, activity}
{:error, message} ->
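For orientation, a small sketch of the activity shape that the new `maybe_remove_mediaproxy_invalidation/2` clause above walks; the attachment map is illustrative and follows the `"url"`/`"href"` layout used elsewhere in this changeset:

```elixir
# Sketch only: each attachment's "url" entries contribute an "href", and those
# hrefs are dropped from the deleted-URLs cache. Assumes the :deleted_urls_cache
# Cachex child is running.
create_data = %{
  "object" => %{
    "attachment" => [
      %{"url" => [%{"href" => "http://example.com/media/an_image.jpg", "type" => "Link"}]}
    ]
  }
}

:ok =
  Pleroma.Web.ActivityPub.ActivityPub.maybe_remove_mediaproxy_invalidation(true, create_data)
```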

View File

@ -5,22 +5,33 @@
defmodule Pleroma.Web.MediaProxy.Invalidation do
@moduledoc false
@callback purge(list(String.t()), Keyword.t()) :: {:ok, list(String.t())} | {:error, String.t()}
alias Pleroma.Config
alias Pleroma.Web.MediaProxy
@spec enabled?() :: boolean()
def enabled?, do: Config.get([:media_proxy, :invalidation, :enabled])
@spec purge(list(String.t()) | String.t()) :: {:ok, list(String.t())} | {:error, String.t()}
def purge(urls) do
prepared_urls = prepare_urls(urls)
if enabled?() do
do_purge(prepared_urls)
else
{:ok, prepared_urls}
end
end
defp do_purge(urls) do
provider = Config.get([:media_proxy, :invalidation, :provider])
provider.purge(urls, Config.get(provider))
end
def prepare_urls(urls) do
urls
|> List.wrap()
|> Enum.map(&MediaProxy.url(&1))
end
end
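A quick sketch of the reworked module in use, following the configuration pattern from the tests later in this changeset and the `Invalidation.Mock` provider added there:

```elixir
# Sketch: with invalidation enabled and the Mock provider configured, purge/1
# now returns the prepared URL list rather than the old "success" string.
# The media proxy itself is disabled here so prepare_urls/1 leaves the URL unchanged.
alias Pleroma.Web.MediaProxy.Invalidation

Pleroma.Config.put([:media_proxy, :enabled], false)
Pleroma.Config.put([:media_proxy, :invalidation, :enabled], true)
Pleroma.Config.put([:media_proxy, :invalidation, :provider], Invalidation.Mock)

{:ok, ["http://example.com/media/example.jpg"]} =
  Invalidation.purge("http://example.com/media/example.jpg")
```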

View File

@ -10,9 +10,9 @@ defmodule Pleroma.Web.MediaProxy.Invalidation.Http do
@impl Pleroma.Web.MediaProxy.Invalidation
def purge(urls, opts) do
method = Keyword.get(opts, :method, :purge)
headers = Keyword.get(opts, :headers, [])
options = Keyword.get(opts, :options, [])
Logger.debug("Running cache purge: #{inspect(urls)}")
@ -22,7 +22,7 @@ defmodule Pleroma.Web.MediaProxy.Invalidation.Http do
end
end)
{:ok, urls}
end
defp do_purge(method, url, headers, options) do

View File

@ -10,32 +10,34 @@ defmodule Pleroma.Web.MediaProxy.Invalidation.Script do
require Logger
@impl Pleroma.Web.MediaProxy.Invalidation
def purge(urls, opts) do
args =
urls
|> List.wrap()
|> Enum.uniq()
|> Enum.join(" ")
opts
|> Keyword.get(:script_path, nil)
|> do_purge([args])
|> handle_result(urls)
end
defp do_purge(script_path, args) when is_binary(script_path) do
path = Path.expand(script_path)
Logger.debug("Running cache purge: #{inspect(args)}, #{inspect(path)}")
Logger.debug("Running cache purge: #{inspect(urls)}, #{path}")
case do_purge(path, [args]) do
{result, exit_status} when exit_status > 0 ->
Logger.error("Error while cache purge: #{inspect(result)}")
{:error, inspect(result)}
_ ->
{:ok, "success"}
end
end
def purge(_, _), do: {:error, "not found script path"}
defp do_purge(path, args) do
System.cmd(path, args)
rescue
error -> error
end
defp do_purge(_, _), do: {:error, "not found script path"}
defp handle_result({_result, 0}, urls), do: {:ok, urls}
defp handle_result({:error, error}, urls), do: handle_result(error, urls)
defp handle_result(error, _) do
Logger.error("Error while cache purge: #{inspect(error)}")
{:error, inspect(error)}
end
end

View File

@ -6,20 +6,53 @@ defmodule Pleroma.Web.MediaProxy do
alias Pleroma.Config
alias Pleroma.Upload
alias Pleroma.Web
alias Pleroma.Web.MediaProxy.Invalidation
@base64_opts [padding: false]
@spec in_deleted_urls(String.t()) :: boolean()
def in_deleted_urls(url), do: elem(Cachex.exists?(:deleted_urls_cache, url(url)), 1)
def remove_from_deleted_urls(urls) when is_list(urls) do
Cachex.execute!(:deleted_urls_cache, fn cache ->
Enum.each(Invalidation.prepare_urls(urls), &Cachex.del(cache, &1))
end)
end
def remove_from_deleted_urls(url) when is_binary(url) do
Cachex.del(:deleted_urls_cache, url(url))
end
def put_in_deleted_urls(urls) when is_list(urls) do
Cachex.execute!(:deleted_urls_cache, fn cache ->
Enum.each(Invalidation.prepare_urls(urls), &Cachex.put(cache, &1, true))
end)
end
def put_in_deleted_urls(url) when is_binary(url) do
Cachex.put(:deleted_urls_cache, url(url), true)
end
def url(url) when is_nil(url) or url == "", do: nil
def url("/" <> _ = url), do: url
def url(url) do
if disabled?() or not is_url_proxiable?(url) do
url
else
encode_url(url)
end
end
@spec is_url_proxiable?(String.t()) :: boolean()
def is_url_proxiable?(url) do
if local?(url) or whitelisted?(url) do
false
else
true
end
end
defp disabled?, do: !Config.get([:media_proxy, :enabled], false)
defp local?(url), do: String.starts_with?(url, Pleroma.Web.base_url())
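A short sketch of the new deleted-URLs helpers in use (assumes the `:deleted_urls_cache` Cachex child added in `application.ex` above is running):

```elixir
# Sketch: mark a URL as deleted, check it, then clear it again. Both put and
# lookup run the URL through url/1, so the cache key is consistent either way.
url = "http://example.com/media/example.jpg"

Pleroma.Web.MediaProxy.put_in_deleted_urls(url)
true = Pleroma.Web.MediaProxy.in_deleted_urls(url)

Pleroma.Web.MediaProxy.remove_from_deleted_urls(url)
false = Pleroma.Web.MediaProxy.in_deleted_urls(url)
```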

View File

@ -14,10 +14,11 @@ defmodule Pleroma.Web.MediaProxy.MediaProxyController do
with config <- Pleroma.Config.get([:media_proxy], []),
true <- Keyword.get(config, :enabled, false),
{:ok, url} <- MediaProxy.decode_url(sig64, url64),
{_, false} <- {:in_deleted_urls, MediaProxy.in_deleted_urls(url)},
:ok <- filename_matches(params, conn.request_path, url) do
ReverseProxy.call(conn, url, Keyword.get(config, :proxy_opts, @default_proxy_opts))
else
error when error in [false, {:in_deleted_urls, true}] ->
send_resp(conn, 404, Plug.Conn.Status.reason_phrase(404))
{:error, :invalid_signature} ->

View File

@ -7,6 +7,7 @@ defmodule Pleroma.Workers.AttachmentsCleanupWorker do
alias Pleroma.Object
alias Pleroma.Repo
alias Pleroma.Web.MediaProxy
use Pleroma.Workers.WorkerHelper, queue: "attachments_cleanup"
@ -23,8 +24,6 @@ defmodule Pleroma.Workers.AttachmentsCleanupWorker do
Enum.map(attachment["url"], & &1["href"]) Enum.map(attachment["url"], & &1["href"])
end) end)
names = Enum.map(attachments, & &1["name"])
uploader = Pleroma.Config.get([Pleroma.Upload, :uploader])
prefix =
@ -40,21 +39,57 @@ defmodule Pleroma.Workers.AttachmentsCleanupWorker do
)
# find all objects for copies of the attachments, name and actor doesn't matter here
{object_ids, attachment_urls, exclude_urls} =
hrefs
|> fetch_objects
|> prepare_objects(actor, Enum.map(attachments, & &1["name"]))
|> Enum.reduce({[], [], []}, fn {href, %{id: id, count: count}},
{ids, hrefs, exclude_urls} ->
with 1 <- count do
{ids ++ [id], hrefs ++ [href], exclude_urls}
else
_ -> {ids ++ [id], hrefs, exclude_urls ++ [href]}
end
end)
lock_attachments(MediaProxy.Invalidation.enabled?(), hrefs -- exclude_urls)
Enum.each(attachment_urls, fn href ->
href
|> String.trim_leading("#{base_url}/#{prefix}")
|> uploader.delete_file()
end)
delete_objects(object_ids)
cache_purge(MediaProxy.Invalidation.enabled?(), hrefs -- exclude_urls)
{:ok, :success}
end
def perform(%{"op" => "cleanup_attachments", "object" => _object}, _job), do: {:ok, :skip}
defp delete_objects([_ | _] = object_ids) do
Repo.delete_all(from(o in Object, where: o.id in ^object_ids))
end
defp delete_objects(_), do: :ok
defp cache_purge(true, [_ | _] = urls), do: MediaProxy.Invalidation.purge(urls)
defp cache_purge(_, _), do: :ok
defp lock_attachments(true, [_ | _] = urls) do
urls
|> Enum.filter(&MediaProxy.is_url_proxiable?(&1))
|> MediaProxy.put_in_deleted_urls()
end
defp lock_attachments(_, _), do: :ok
# we should delete 1 object for any given attachment, but don't delete
# files if there are more than 1 object for it
def prepare_objects(objects, actor, names) do
objects
|> Enum.reduce(%{}, fn %{
id: id,
data: %{
@ -76,31 +111,20 @@ defmodule Pleroma.Workers.AttachmentsCleanupWorker do
end
end)
end)
|> Enum.map(fn {href, %{id: id, count: count}} ->
# only delete files that have single instance
with 1 <- count do
href
|> String.trim_leading("#{base_url}/#{prefix}")
|> uploader.delete_file()
{id, href}
else
_ -> {id, nil}
end
end)
object_ids = Enum.map(object_ids_and_hrefs, fn {id, _} -> id end)
from(o in Object, where: o.id in ^object_ids)
|> Repo.delete_all()
object_ids_and_hrefs
|> Enum.filter(fn {_, href} -> not is_nil(href) end)
|> Enum.map(&elem(&1, 1))
|> Pleroma.Web.MediaProxy.Invalidation.purge()
{:ok, :success}
end
def fetch_objects(hrefs) do
from(o in Object,
where:
fragment(
"to_jsonb(array(select jsonb_array_elements((?)#>'{url}') ->> 'href' where jsonb_typeof((?)#>'{url}') = 'array'))::jsonb \\?| (?)",
o.data,
o.data,
^hrefs
)
)
# The query above can be time consumptive on large instances until we
# refactor how uploads are stored
|> Repo.all(timeout: :infinity)
end
end

View File

@ -9,10 +9,12 @@ defmodule Pleroma.ObjectTest do
import Pleroma.Factory
import Tesla.Mock
alias Pleroma.Activity
alias Pleroma.Config
alias Pleroma.Object
alias Pleroma.Repo
alias Pleroma.Tests.ObanHelpers
alias Pleroma.Web.CommonAPI
alias Pleroma.Web.MediaProxy.Invalidation
setup do
mock(fn env -> apply(HttpRequestMock, :request, [env]) end)
@ -76,6 +78,7 @@ defmodule Pleroma.ObjectTest do
describe "delete attachments" do describe "delete attachments" do
setup do: clear_config([Pleroma.Upload]) setup do: clear_config([Pleroma.Upload])
setup do: clear_config([:instance, :cleanup_attachments]) setup do: clear_config([:instance, :cleanup_attachments])
setup do: clear_config([:media_proxy])
test "Disabled via config" do test "Disabled via config" do
Pleroma.Config.put([Pleroma.Upload, :uploader], Pleroma.Uploaders.Local) Pleroma.Config.put([Pleroma.Upload, :uploader], Pleroma.Uploaders.Local)
@ -109,11 +112,14 @@ defmodule Pleroma.ObjectTest do
refute Object.get_by_id(attachment.id) == nil
assert {:ok, ["an_image.jpg"]} == File.ls("#{uploads_dir}/#{path}")
refute Pleroma.Web.MediaProxy.in_deleted_urls(href)
end
test "in subdirectories" do
Config.put([Pleroma.Upload, :uploader], Pleroma.Uploaders.Local)
Config.put([:instance, :cleanup_attachments], true)
Config.put([:media_proxy, :invalidation, :enabled], true)
Config.put([:media_proxy, :invalidation, :provider], Invalidation.Mock)
file = %Plug.Upload{
content_type: "image/jpg",
@ -143,12 +149,15 @@ defmodule Pleroma.ObjectTest do
assert Object.get_by_id(attachment.id) == nil
assert {:ok, []} == File.ls("#{uploads_dir}/#{path}")
refute Pleroma.Web.MediaProxy.in_deleted_urls(href)
end
test "with dedupe enabled" do
Config.put([Pleroma.Upload, :uploader], Pleroma.Uploaders.Local)
Config.put([Pleroma.Upload, :filters], [Pleroma.Upload.Filter.Dedupe])
Config.put([:instance, :cleanup_attachments], true)
Config.put([:media_proxy, :invalidation, :enabled], true)
Config.put([:media_proxy, :invalidation, :provider], Invalidation.Mock)
uploads_dir = Pleroma.Config.get!([Pleroma.Uploaders.Local, :uploads])
@ -181,11 +190,14 @@ defmodule Pleroma.ObjectTest do
assert Object.get_by_id(attachment.id) == nil
assert {:ok, files} = File.ls(uploads_dir)
refute filename in files
refute Pleroma.Web.MediaProxy.in_deleted_urls(href)
end
test "with objects that have legacy data.url attribute" do
Config.put([Pleroma.Upload, :uploader], Pleroma.Uploaders.Local)
Config.put([:instance, :cleanup_attachments], true)
Config.put([:media_proxy, :invalidation, :enabled], true)
Config.put([:media_proxy, :invalidation, :provider], Invalidation.Mock)
file = %Plug.Upload{
content_type: "image/jpg",
@ -217,12 +229,15 @@ defmodule Pleroma.ObjectTest do
assert Object.get_by_id(attachment.id) == nil
assert {:ok, []} == File.ls("#{uploads_dir}/#{path}")
refute Pleroma.Web.MediaProxy.in_deleted_urls(href)
end
test "With custom base_url" do
Config.put([Pleroma.Upload, :uploader], Pleroma.Uploaders.Local)
Config.put([Pleroma.Upload, :base_url], "https://sub.domain.tld/dir/")
Config.put([:instance, :cleanup_attachments], true)
Config.put([:media_proxy, :invalidation, :enabled], true)
Config.put([:media_proxy, :invalidation, :provider], Invalidation.Mock)
file = %Plug.Upload{
content_type: "image/jpg",
@ -252,6 +267,7 @@ defmodule Pleroma.ObjectTest do
assert Object.get_by_id(attachment.id) == nil
assert {:ok, []} == File.ls("#{uploads_dir}/#{path}")
refute Pleroma.Web.MediaProxy.in_deleted_urls(href)
end
end

View File

@ -0,0 +1,10 @@
defmodule Pleroma.Web.MediaProxy.Invalidation.Mock do
@moduledoc false
@behaviour Pleroma.Web.MediaProxy.Invalidation
@impl Pleroma.Web.MediaProxy.Invalidation
def purge(urls, _opts) do
{:ok, urls}
end
end

View File

@ -0,0 +1,65 @@
defmodule Pleroma.Web.MediaProxy.InvalidationTest do
use ExUnit.Case
use Pleroma.Tests.Helpers
alias Pleroma.Config
alias Pleroma.Web.MediaProxy.Invalidation
import ExUnit.CaptureLog
import Mock
import Tesla.Mock
setup do: clear_config([:media_proxy])
setup do
on_exit(fn -> Cachex.clear(:deleted_urls_cache) end)
:ok
end
describe "Invalidation.Http" do
test "perform request to clear cache" do
Config.put([:media_proxy, :enabled], false)
Config.put([:media_proxy, :invalidation, :enabled], true)
Config.put([:media_proxy, :invalidation, :provider], Invalidation.Http)
Config.put([Invalidation.Http], method: :purge, headers: [{"x-refresh", 1}])
image_url = "http://example.com/media/example.jpg"
Pleroma.Web.MediaProxy.put_in_deleted_urls(image_url)
mock(fn
%{
method: :purge,
url: "http://example.com/media/example.jpg",
headers: [{"x-refresh", 1}]
} ->
%Tesla.Env{status: 200}
end)
assert capture_log(fn ->
assert Pleroma.Web.MediaProxy.in_deleted_urls(image_url)
assert Invalidation.purge([image_url]) == {:ok, [image_url]}
assert Pleroma.Web.MediaProxy.in_deleted_urls(image_url)
end) =~ "Running cache purge: [\"#{image_url}\"]"
end
end
describe "Invalidation.Script" do
test "run script to clear cache" do
Config.put([:media_proxy, :enabled], false)
Config.put([:media_proxy, :invalidation, :enabled], true)
Config.put([:media_proxy, :invalidation, :provider], Invalidation.Script)
Config.put([Invalidation.Script], script_path: "purge-nginx")
image_url = "http://example.com/media/example.jpg"
Pleroma.Web.MediaProxy.put_in_deleted_urls(image_url)
with_mocks [{System, [], [cmd: fn _, _ -> {"ok", 0} end]}] do
assert capture_log(fn ->
assert Pleroma.Web.MediaProxy.in_deleted_urls(image_url)
assert Invalidation.purge([image_url]) == {:ok, [image_url]}
assert Pleroma.Web.MediaProxy.in_deleted_urls(image_url)
end) =~ "Running cache purge: [\"#{image_url}\"]"
end
end
end
end

View File

@ -5,6 +5,11 @@ defmodule Pleroma.Web.MediaProxy.Invalidation.HttpTest do
import ExUnit.CaptureLog
import Tesla.Mock
setup do
on_exit(fn -> Cachex.clear(:deleted_urls_cache) end)
:ok
end
test "logs hasn't error message when request is valid" do test "logs hasn't error message when request is valid" do
mock(fn mock(fn
%{method: :purge, url: "http://example.com/media/example.jpg"} -> %{method: :purge, url: "http://example.com/media/example.jpg"} ->
@ -14,8 +19,8 @@ defmodule Pleroma.Web.MediaProxy.Invalidation.HttpTest do
refute capture_log(fn ->
assert Invalidation.Http.purge(
["http://example.com/media/example.jpg"],
[]
) == {:ok, ["http://example.com/media/example.jpg"]}
end) =~ "Error while cache purge"
end
@ -28,8 +33,8 @@ defmodule Pleroma.Web.MediaProxy.Invalidation.HttpTest do
assert capture_log(fn ->
assert Invalidation.Http.purge(
["http://example.com/media/example1.jpg"],
[]
) == {:ok, ["http://example.com/media/example1.jpg"]}
end) =~ "Error while cache purge: url - http://example.com/media/example1.jpg"
end
end

View File

@ -4,17 +4,24 @@ defmodule Pleroma.Web.MediaProxy.Invalidation.ScriptTest do
import ExUnit.CaptureLog
setup do
on_exit(fn -> Cachex.clear(:deleted_urls_cache) end)
:ok
end
test "it logger error when script not found" do test "it logger error when script not found" do
assert capture_log(fn -> assert capture_log(fn ->
assert Invalidation.Script.purge( assert Invalidation.Script.purge(
["http://example.com/media/example.jpg"], ["http://example.com/media/example.jpg"],
%{script_path: "./example"} script_path: "./example"
) == {:error, "\"%ErlangError{original: :enoent}\""} ) == {:error, "%ErlangError{original: :enoent}"}
end) =~ "Error while cache purge: \"%ErlangError{original: :enoent}\"" end) =~ "Error while cache purge: %ErlangError{original: :enoent}"
capture_log(fn ->
assert Invalidation.Script.purge( assert Invalidation.Script.purge(
["http://example.com/media/example.jpg"], ["http://example.com/media/example.jpg"],
%{} []
) == {:error, "not found script path"} ) == {:error, "\"not found script path\""}
end)
end end
end end

View File

@ -10,6 +10,11 @@ defmodule Pleroma.Web.MediaProxy.MediaProxyControllerTest do
setup do: clear_config(:media_proxy)
setup do: clear_config([Pleroma.Web.Endpoint, :secret_key_base])
setup do
on_exit(fn -> Cachex.clear(:deleted_urls_cache) end)
:ok
end
test "it returns 404 when MediaProxy disabled", %{conn: conn} do test "it returns 404 when MediaProxy disabled", %{conn: conn} do
Config.put([:media_proxy, :enabled], false) Config.put([:media_proxy, :enabled], false)
@ -66,4 +71,16 @@ defmodule Pleroma.Web.MediaProxy.MediaProxyControllerTest do
assert %Plug.Conn{status: :success} = get(conn, url)
end
end
test "it returns 404 when url contains in deleted_urls cache", %{conn: conn} do
Config.put([:media_proxy, :enabled], true)
Config.put([Pleroma.Web.Endpoint, :secret_key_base], "00000000000")
url = Pleroma.Web.MediaProxy.encode_url("https://google.fn/test.png")
Pleroma.Web.MediaProxy.put_in_deleted_urls("https://google.fn/test.png")
with_mock Pleroma.ReverseProxy,
call: fn _conn, _url, _opts -> %Plug.Conn{status: :success} end do
assert %Plug.Conn{status: 404, resp_body: "Not Found"} = get(conn, url)
end
end
end

View File

@ -0,0 +1,91 @@
# Pleroma: A lightweight social networking server
# Copyright © 2017-2020 Pleroma Authors <https://pleroma.social/>
# SPDX-License-Identifier: AGPL-3.0-only
defmodule Pleroma.Workers.AttachmentsCleanupWorkerTest do
use Pleroma.DataCase
alias Pleroma.Config
alias Pleroma.Web.MediaProxy
alias Pleroma.Workers.AttachmentsCleanupWorker
import Mock
import Pleroma.Factory
describe "delete attachments" do
setup do: clear_config([Pleroma.Upload])
setup do: clear_config([:instance, :cleanup_attachments])
setup do: clear_config([:media_proxy])
test "deletes attachment objects and run purge cache" do
Config.put([Pleroma.Upload, :uploader], Pleroma.Uploaders.Local)
Config.put([:instance, :cleanup_attachments], true)
Config.put([:media_proxy, :invalidation, :enabled], true)
Config.put([:media_proxy, :invalidation, :provider], MediaProxy.Invalidation.Mock)
file = %Plug.Upload{
content_type: "image/jpg",
path: Path.absname("test/fixtures/image.jpg"),
filename: "an_image.jpg"
}
user = insert(:user)
{:ok, %Pleroma.Object{} = attachment} =
Pleroma.Web.ActivityPub.ActivityPub.upload(file, actor: user.ap_id)
remote_url = "http://example.com/media/d6661b98ae72e39.jpg"
%{data: %{"url" => [%{"href" => local_url}]}} = attachment
note =
insert(:note, %{
user: user,
data: %{
"attachment" => [
attachment.data,
%{
"actor" => user.ap_id,
"name" => "v_image.jpg",
"type" => "Document",
"url" => [
%{"href" => remote_url, "mediaType" => "image/jpeg", "type" => "Link"}
]
}
]
}
})
uploads_dir = Pleroma.Config.get!([Pleroma.Uploaders.Local, :uploads])
path = local_url |> Path.dirname() |> Path.basename()
assert {:ok, ["an_image.jpg"]} == File.ls("#{uploads_dir}/#{path}")
with_mocks [
{MediaProxy.Invalidation, [:passthrough],
[purge: fn [^local_url, ^remote_url] -> :ok end]}
] do
assert AttachmentsCleanupWorker.perform(
%{"op" => "cleanup_attachments", "object" => %{"data" => note.data}},
:job
) == {:ok, :success}
end
refute Pleroma.Object.get_by_id(attachment.id)
assert {:ok, []} == File.ls("#{uploads_dir}/#{path}")
refute Pleroma.Web.MediaProxy.in_deleted_urls(local_url)
assert Pleroma.Web.MediaProxy.in_deleted_urls(remote_url)
end
test "skip execution" do
assert AttachmentsCleanupWorker.perform(
%{
"op" => "cleanup_attachments",
"object" => %{}
},
:job
) == {:ok, :skip}
end
end
end