# Pleroma: A lightweight social networking server
# Copyright © 2017-2018 Pleroma Authors <https://pleroma.social/>
# SPDX-License-Identifier: AGPL-3.0-only

defmodule Pleroma.ObjectTest do
  use Pleroma.DataCase

  import ExUnit.CaptureLog
  import Pleroma.Factory
  import Tesla.Mock

  alias Pleroma.Object
  alias Pleroma.Repo

  setup do
    # Route every HTTP call made by the code under test through the shared
    # request mock so no test hits the network.
    mock(fn env -> apply(HttpRequestMock, :request, [env]) end)
    :ok
  end

  test "returns an object by it's AP id" do
    object = insert(:note)
    found_object = Object.get_by_ap_id(object.data["id"])

    assert object == found_object
  end

  describe "generic changeset" do
    test "it ensures uniqueness of the id" do
      object = insert(:note)
      cs = Object.change(%Object{}, %{data: %{id: object.data["id"]}})
      assert cs.valid?

      # The changeset itself is valid, but inserting a second object with the
      # same AP id must fail on the database's uniqueness constraint.
      {:error, _result} = Repo.insert(cs)
    end
  end

  describe "deletion function" do
    test "deletes an object" do
      object = insert(:note)
      found_object = Object.get_by_ap_id(object.data["id"])

      assert object == found_object

      Object.delete(found_object)

      found_object = Object.get_by_ap_id(object.data["id"])

      refute object == found_object

      # Deletion does not remove the row: it replaces the data with a
      # Tombstone, as ActivityPub expects.
      assert found_object.data["type"] == "Tombstone"
    end

    test "ensures cache is cleared for the object" do
      object = insert(:note)
      cached_object = Object.get_cached_by_ap_id(object.data["id"])

      assert object == cached_object

      # Seed the web response cache so we can verify deletion purges it too.
      Cachex.put(:web_resp_cache, URI.parse(object.data["id"]).path, "cofe")

      Object.delete(cached_object)

      # Both the object cache and the web response cache must be emptied.
      {:ok, nil} = Cachex.get(:object_cache, "object:#{object.data["id"]}")
      {:ok, nil} = Cachex.get(:web_resp_cache, URI.parse(object.data["id"]).path)

      cached_object = Object.get_cached_by_ap_id(object.data["id"])

      refute object == cached_object

      assert cached_object.data["type"] == "Tombstone"
    end
  end

  describe "normalizer" do
    test "fetches unknown objects by default" do
      %Object{} =
        object = Object.normalize("http://mastodon.example.org/@admin/99541947525187367")

      assert object.data["url"] == "http://mastodon.example.org/@admin/99541947525187367"
    end

    test "fetches unknown objects when fetch_remote is explicitly true" do
      %Object{} =
        object = Object.normalize("http://mastodon.example.org/@admin/99541947525187367", true)

      assert object.data["url"] == "http://mastodon.example.org/@admin/99541947525187367"
    end

    test "does not fetch unknown objects when fetch_remote is false" do
      assert is_nil(
               Object.normalize("http://mastodon.example.org/@admin/99541947525187367", false)
             )
    end
  end

  describe "get_by_id_and_maybe_refetch" do
    test "refetches if the time since the last refetch is greater than the interval" do
      mock(fn
        %{method: :get, url: "https://patch.cx/objects/9a172665-2bc5-452d-8428-2361d4c33b1d"} ->
          %Tesla.Env{status: 200, body: File.read!("test/fixtures/tesla_mock/poll_original.json")}

        env ->
          apply(HttpRequestMock, :request, [env])
      end)

      %Object{} =
        object = Object.normalize("https://patch.cx/objects/9a172665-2bc5-452d-8428-2361d4c33b1d")

      assert Enum.at(object.data["oneOf"], 0)["replies"]["totalItems"] == 4
      assert Enum.at(object.data["oneOf"], 1)["replies"]["totalItems"] == 0

      # Swap the mock to serve the updated poll for the refetch.
      mock(fn
        %{method: :get, url: "https://patch.cx/objects/9a172665-2bc5-452d-8428-2361d4c33b1d"} ->
          %Tesla.Env{status: 200, body: File.read!("test/fixtures/tesla_mock/poll_modified.json")}

        env ->
          apply(HttpRequestMock, :request, [env])
      end)

      # interval: -1 makes any object look stale, forcing a refetch.
      updated_object = Object.get_by_id_and_maybe_refetch(object.id, interval: -1)
      assert Enum.at(updated_object.data["oneOf"], 0)["replies"]["totalItems"] == 8
      assert Enum.at(updated_object.data["oneOf"], 1)["replies"]["totalItems"] == 3
    end

    test "returns the old object if refetch fails" do
      mock(fn
        %{method: :get, url: "https://patch.cx/objects/9a172665-2bc5-452d-8428-2361d4c33b1d"} ->
          %Tesla.Env{status: 200, body: File.read!("test/fixtures/tesla_mock/poll_original.json")}

        env ->
          apply(HttpRequestMock, :request, [env])
      end)

      %Object{} =
        object = Object.normalize("https://patch.cx/objects/9a172665-2bc5-452d-8428-2361d4c33b1d")

      assert Enum.at(object.data["oneOf"], 0)["replies"]["totalItems"] == 4
      assert Enum.at(object.data["oneOf"], 1)["replies"]["totalItems"] == 0

      # A failing (404) refetch must keep the stored object intact and log
      # an error, which we capture and assert on.
      assert capture_log(fn ->
               mock(fn
                 %{
                   method: :get,
                   url: "https://patch.cx/objects/9a172665-2bc5-452d-8428-2361d4c33b1d"
                 } ->
                   %Tesla.Env{status: 404, body: ""}

                 env ->
                   apply(HttpRequestMock, :request, [env])
               end)

               updated_object = Object.get_by_id_and_maybe_refetch(object.id, interval: -1)
               assert Enum.at(updated_object.data["oneOf"], 0)["replies"]["totalItems"] == 4
               assert Enum.at(updated_object.data["oneOf"], 1)["replies"]["totalItems"] == 0
             end) =~
               "[error] Couldn't refresh https://patch.cx/objects/9a172665-2bc5-452d-8428-2361d4c33b1d"
    end

    # NOTE(review): this test was previously named "…greater than the interval",
    # copy-pasted from the refetch test above. With interval: 100 the elapsed
    # time is *less* than the interval, which is why no refetch happens.
    test "does not refetch if the time since the last refetch is less than the interval" do
      mock(fn
        %{method: :get, url: "https://patch.cx/objects/9a172665-2bc5-452d-8428-2361d4c33b1d"} ->
          %Tesla.Env{status: 200, body: File.read!("test/fixtures/tesla_mock/poll_original.json")}

        env ->
          apply(HttpRequestMock, :request, [env])
      end)

      %Object{} =
        object = Object.normalize("https://patch.cx/objects/9a172665-2bc5-452d-8428-2361d4c33b1d")

      assert Enum.at(object.data["oneOf"], 0)["replies"]["totalItems"] == 4
      assert Enum.at(object.data["oneOf"], 1)["replies"]["totalItems"] == 0

      mock(fn
        %{method: :get, url: "https://patch.cx/objects/9a172665-2bc5-452d-8428-2361d4c33b1d"} ->
          %Tesla.Env{status: 200, body: File.read!("test/fixtures/tesla_mock/poll_modified.json")}

        env ->
          apply(HttpRequestMock, :request, [env])
      end)

      # interval: 100 (seconds) has not elapsed, so the original data is kept
      # even though the mock now serves the modified poll.
      updated_object = Object.get_by_id_and_maybe_refetch(object.id, interval: 100)
      assert Enum.at(updated_object.data["oneOf"], 0)["replies"]["totalItems"] == 4
      assert Enum.at(updated_object.data["oneOf"], 1)["replies"]["totalItems"] == 0
    end
  end
end