Use finch everywhere (#33)
Reviewed-on: https://akkoma.dev/AkkomaGang/akkoma/pulls/33
parent 058bf96798
commit 364b6969eb
31 changed files with 261 additions and 994 deletions
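The whole change hinges on one adapter swap: Tesla's HTTP adapter moves from Hackney/Gun to Finch, with a single shared pool named MyFinch. A minimal sketch of that setup outside this codebase (the config line mirrors the diff below; the supervision and request lines are illustrative assumptions, not taken from this commit):

# config/config.exs, as in the diff:
#   config :tesla, :adapter, {Tesla.Adapter.Finch, name: MyFinch}

# The named Finch pool must be running before any request is made,
# for example as a supervised child:
children = [
  {Finch, name: MyFinch}
]

{:ok, _sup} = Supervisor.start_link(children, strategy: :one_for_one)

# With the adapter configured globally, plain Tesla calls go through Finch:
{:ok, %Tesla.Env{status: status}} = Tesla.get("https://example.com")
IO.puts("fetched with HTTP status #{status}")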
@@ -70,8 +70,6 @@ config :pleroma, :rate_limit,

 config :pleroma, :http_security, report_uri: "https://endpoint.com"

-config :pleroma, :http, send_user_agent: false
-
 rum_enabled = System.get_env("RUM_ENABLED") == "true"
 config :pleroma, :database, rum_enabled: rum_enabled
 IO.puts("RUM enabled: #{rum_enabled}")
@@ -175,12 +175,11 @@ config :mime, :types, %{
   "application/ld+json" => ["activity+json"]
 }

-config :tesla, adapter: Tesla.Adapter.Hackney
+config :tesla, :adapter, {Tesla.Adapter.Finch, name: MyFinch}

 # Configures http settings, upstream proxy etc.
 config :pleroma, :http,
   proxy_url: nil,
-  send_user_agent: true,
   user_agent: :default,
   adapter: []

@@ -440,11 +439,7 @@ config :pleroma, :media_proxy,
     redirect_on_failure: false,
     max_body_length: 25 * 1_048_576,
     # Note: max_read_duration defaults to Pleroma.ReverseProxy.max_read_duration_default/1
-    max_read_duration: 30_000,
-    http: [
-      follow_redirect: true,
-      pool: :media
-    ]
+    max_read_duration: 30_000
   ],
   whitelist: []

@@ -765,51 +760,6 @@ config :pleroma, Pleroma.Repo,
   parameters: [gin_fuzzy_search_limit: "500"],
   prepare: :unnamed

-config :pleroma, :connections_pool,
-  reclaim_multiplier: 0.1,
-  connection_acquisition_wait: 250,
-  connection_acquisition_retries: 5,
-  max_connections: 250,
-  max_idle_time: 30_000,
-  retry: 0,
-  connect_timeout: 5_000
-
-config :pleroma, :pools,
-  federation: [
-    size: 50,
-    max_waiting: 10,
-    recv_timeout: 10_000
-  ],
-  media: [
-    size: 50,
-    max_waiting: 20,
-    recv_timeout: 15_000
-  ],
-  upload: [
-    size: 25,
-    max_waiting: 5,
-    recv_timeout: 15_000
-  ],
-  default: [
-    size: 10,
-    max_waiting: 2,
-    recv_timeout: 5_000
-  ]
-
-config :pleroma, :hackney_pools,
-  federation: [
-    max_connections: 50,
-    timeout: 150_000
-  ],
-  media: [
-    max_connections: 50,
-    timeout: 150_000
-  ],
-  upload: [
-    max_connections: 25,
-    timeout: 300_000
-  ]
-
 config :pleroma, :majic_pool, size: 2

 private_instance? = :if_instance_is_private
@@ -826,8 +776,6 @@ config :pleroma, :mrf,
   transparency: true,
   transparency_exclusions: []

-config :tzdata, :http_client, Pleroma.HTTP.Tzdata
-
 config :ex_aws, http_client: Pleroma.HTTP.ExAws

 config :web_push_encryption, http_client: Pleroma.HTTP.WebPush
@@ -2625,10 +2625,6 @@ config :pleroma, :config_description, [
         description: "Proxy URL",
         suggestions: ["localhost:9020", {:socks5, :localhost, 3090}]
       },
-      %{
-        key: :send_user_agent,
-        type: :boolean
-      },
       %{
         key: :user_agent,
         type: [:string, :atom],
@@ -2941,147 +2937,6 @@ config :pleroma, :config_description, [
       }
     ]
   },
-  %{
-    group: :pleroma,
-    key: :connections_pool,
-    type: :group,
-    description: "Advanced settings for `Gun` connections pool",
-    children: [
-      %{
-        key: :connection_acquisition_wait,
-        type: :integer,
-        description:
-          "Timeout to acquire a connection from pool. The total max time is this value multiplied by the number of retries. Default: 250ms.",
-        suggestions: [250]
-      },
-      %{
-        key: :connection_acquisition_retries,
-        type: :integer,
-        description:
-          "Number of attempts to acquire the connection from the pool if it is overloaded. Default: 5",
-        suggestions: [5]
-      },
-      %{
-        key: :max_connections,
-        type: :integer,
-        description: "Maximum number of connections in the pool. Default: 250 connections.",
-        suggestions: [250]
-      },
-      %{
-        key: :connect_timeout,
-        type: :integer,
-        description: "Timeout while `gun` will wait until connection is up. Default: 5000ms.",
-        suggestions: [5000]
-      },
-      %{
-        key: :reclaim_multiplier,
-        type: :integer,
-        description:
-          "Multiplier for the number of idle connection to be reclaimed if the pool is full. For example if the pool maxes out at 250 connections and this setting is set to 0.3, the pool will reclaim at most 75 idle connections if it's overloaded. Default: 0.1",
-        suggestions: [0.1]
-      }
-    ]
-  },
-  %{
-    group: :pleroma,
-    key: :pools,
-    type: :group,
-    description: "Advanced settings for `Gun` workers pools",
-    children:
-      Enum.map([:federation, :media, :upload, :default], fn pool_name ->
-        %{
-          key: pool_name,
-          type: :keyword,
-          description: "Settings for #{pool_name} pool.",
-          children: [
-            %{
-              key: :size,
-              type: :integer,
-              description: "Maximum number of concurrent requests in the pool.",
-              suggestions: [50]
-            },
-            %{
-              key: :max_waiting,
-              type: :integer,
-              description:
-                "Maximum number of requests waiting for other requests to finish. After this number is reached, the pool will start returning errrors when a new request is made",
-              suggestions: [10]
-            },
-            %{
-              key: :recv_timeout,
-              type: :integer,
-              description: "Timeout for the pool while gun will wait for response",
-              suggestions: [10_000]
-            }
-          ]
-        }
-      end)
-  },
-  %{
-    group: :pleroma,
-    key: :hackney_pools,
-    type: :group,
-    description: "Advanced settings for `Hackney` connections pools",
-    children: [
-      %{
-        key: :federation,
-        type: :keyword,
-        description: "Settings for federation pool.",
-        children: [
-          %{
-            key: :max_connections,
-            type: :integer,
-            description: "Number workers in the pool.",
-            suggestions: [50]
-          },
-          %{
-            key: :timeout,
-            type: :integer,
-            description: "Timeout while `hackney` will wait for response.",
-            suggestions: [150_000]
-          }
-        ]
-      },
-      %{
-        key: :media,
-        type: :keyword,
-        description: "Settings for media pool.",
-        children: [
-          %{
-            key: :max_connections,
-            type: :integer,
-            description: "Number workers in the pool.",
-            suggestions: [50]
-          },
-          %{
-            key: :timeout,
-            type: :integer,
-            description: "Timeout while `hackney` will wait for response.",
-            suggestions: [150_000]
-          }
-        ]
-      },
-      %{
-        key: :upload,
-        type: :keyword,
-        description: "Settings for upload pool.",
-        children: [
-          %{
-            key: :max_connections,
-            type: :integer,
-            description: "Number workers in the pool.",
-            suggestions: [25]
-          },
-          %{
-            key: :timeout,
-            type: :integer,
-            description: "Timeout while `hackney` will wait for response.",
-            suggestions: [300_000]
-          }
-        ]
-      }
-    ]
-  },
   %{
     group: :pleroma,
     key: :restrict_unauthenticated,
@@ -45,7 +45,7 @@ config :pleroma, Pleroma.Repo,
   adapter: Ecto.Adapters.Postgres,
   username: "postgres",
   password: "postgres",
-  database: "pleroma_test",
+  database: "akkoma_test",
   hostname: System.get_env("DB_HOST") || "localhost",
   pool: Ecto.Adapters.SQL.Sandbox,
   pool_size: 50,
@@ -104,12 +104,8 @@ IO.puts("RUM enabled: #{rum_enabled}")

 config :joken, default_signer: "yU8uHKq+yyAkZ11Hx//jcdacWc8yQ1bxAAGrplzB0Zwwjkp35v0RK9SO8WTPr6QZ"

-config :pleroma, Pleroma.ReverseProxy.Client, Pleroma.ReverseProxy.ClientMock
-
 config :pleroma, :modules, runtime_dir: "test/fixtures/modules"

-config :pleroma, Pleroma.Gun, Pleroma.GunMock
-
 config :pleroma, Pleroma.Emails.NewUsersDigestEmail, enabled: true

 config :pleroma, Pleroma.Web.Plugs.RemoteIp, enabled: false
@@ -23,21 +23,14 @@ defmodule Mix.Pleroma do
     Pleroma.Config.Oban.warn()
     Pleroma.Application.limiters_setup()
     Application.put_env(:phoenix, :serve_endpoints, false, persistent: true)
+
+    Finch.start_link(name: MyFinch)

     unless System.get_env("DEBUG") do
       Logger.remove_backend(:console)
     end

-    adapter = Application.get_env(:tesla, :adapter)
-
-    apps =
-      if adapter == Tesla.Adapter.Gun do
-        [:gun | @apps]
-      else
-        [:hackney | @apps]
-      end
-
-    Enum.each(apps, &Application.ensure_all_started/1)
+    Enum.each(@apps, &Application.ensure_all_started/1)

     oban_config = [
       crontab: [],
@@ -57,7 +50,6 @@ defmodule Mix.Pleroma do
       {Majic.Pool,
        [name: Pleroma.MajicPool, pool_size: Pleroma.Config.get([:majic_pool, :size], 2)]}
     ] ++
-      http_children(adapter) ++
       elasticsearch_children()

     cachex_children = Enum.map(@cachex_children, &Pleroma.Application.build_cachex(&1, []))
@@ -131,13 +123,6 @@ defmodule Mix.Pleroma do
     ~S(') <> String.replace(path, ~S('), ~S(\')) <> ~S(')
   end

-  defp http_children(Tesla.Adapter.Gun) do
-    Pleroma.Gun.ConnectionPool.children() ++
-      [{Task, &Pleroma.HTTP.AdapterHelper.Gun.limiter_setup/0}]
-  end
-
-  defp http_children(_), do: []
-
   def elasticsearch_children do
     config = Pleroma.Config.get([Pleroma.Search, :module])

@@ -74,40 +74,4 @@ defmodule Mix.Tasks.Pleroma.Benchmark do
       inputs: inputs
     )
   end
-
-  def run(["adapters"]) do
-    start_pleroma()
-
-    :ok =
-      Pleroma.Gun.Conn.open(
-        "https://httpbin.org/stream-bytes/1500",
-        :gun_connections
-      )
-
-    Process.sleep(1_500)
-
-    Benchee.run(
-      %{
-        "Without conn and without pool" => fn ->
-          {:ok, %Tesla.Env{}} =
-            Pleroma.HTTP.get("https://httpbin.org/stream-bytes/1500", [],
-              pool: :no_pool,
-              receive_conn: false
-            )
-        end,
-        "Without conn and with pool" => fn ->
-          {:ok, %Tesla.Env{}} =
-            Pleroma.HTTP.get("https://httpbin.org/stream-bytes/1500", [], receive_conn: false)
-        end,
-        "With reused conn and without pool" => fn ->
-          {:ok, %Tesla.Env{}} =
-            Pleroma.HTTP.get("https://httpbin.org/stream-bytes/1500", [], pool: :no_pool)
-        end,
-        "With reused conn and with pool" => fn ->
-          {:ok, %Tesla.Env{}} = Pleroma.HTTP.get("https://httpbin.org/stream-bytes/1500")
-        end
-      },
-      parallel: 10
-    )
-  end
 end
@@ -59,34 +59,8 @@ defmodule Pleroma.Application do
     Pleroma.Docs.JSON.compile()
     limiters_setup()

-    adapter = Application.get_env(:tesla, :adapter)
-
-    if match?({Tesla.Adapter.Finch, _}, adapter) do
     Logger.info("Starting Finch")
     Finch.start_link(name: MyFinch)
-    end
-
-    if adapter == Tesla.Adapter.Gun do
-      if version = Pleroma.OTPVersion.version() do
-        [major, minor] =
-          version
-          |> String.split(".")
-          |> Enum.map(&String.to_integer/1)
-          |> Enum.take(2)
-
-        if (major == 22 and minor < 2) or major < 22 do
-          raise "
-          !!!OTP VERSION WARNING!!!
-          You are using gun adapter with OTP version #{version}, which doesn't support correct handling of unordered certificates chains. Please update your Erlang/OTP to at least 22.2.
-          "
-        end
-      else
-        raise "
-        !!!OTP VERSION WARNING!!!
-        To support correct handling of unordered certificates chains - OTP version must be > 22.2.
-        "
-      end
-    end
-
     # Define workers and child supervisors to be supervised
     children =
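For context, calling Finch.start_link(name: MyFinch) directly inside start/2 links the pool to the caller; a common equivalent is to run Finch as a supervised child. A hedged sketch under that assumption (module names here are illustrative, not Akkoma's):

defmodule MyApp.Application do
  use Application

  @impl true
  def start(_type, _args) do
    children = [
      # one shared Finch pool for all outgoing HTTP requests
      {Finch, name: MyFinch}
      # ... the rest of the supervision tree
    ]

    Supervisor.start_link(children, strategy: :one_for_one, name: MyApp.Supervisor)
  end
end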
@@ -97,7 +71,6 @@ defmodule Pleroma.Application do
         Pleroma.Web.Plugs.RateLimiter.Supervisor
       ] ++
         cachex_children() ++
-        http_children(adapter, @mix_env) ++
         [
           Pleroma.Stats,
           Pleroma.JobQueueMonitor,
@@ -276,34 +249,6 @@ defmodule Pleroma.Application do
     ]
   end

-  # start hackney and gun pools in tests
-  defp http_children(_, :test) do
-    http_children(Tesla.Adapter.Hackney, nil) ++ http_children(Tesla.Adapter.Gun, nil)
-  end
-
-  defp http_children(Tesla.Adapter.Hackney, _) do
-    pools = [:federation, :media]
-
-    pools =
-      if Config.get([Pleroma.Upload, :proxy_remote]) do
-        [:upload | pools]
-      else
-        pools
-      end
-
-    for pool <- pools do
-      options = Config.get([:hackney_pools, pool])
-      :hackney_pool.child_spec(pool, options)
-    end
-  end
-
-  defp http_children(Tesla.Adapter.Gun, _) do
-    Pleroma.Gun.ConnectionPool.children() ++
-      [{Task, &Pleroma.HTTP.AdapterHelper.Gun.limiter_setup/0}]
-  end
-
-  defp http_children(_, _), do: []
-
   def elasticsearch_children do
     config = Config.get([Pleroma.Search, :module])

@@ -173,7 +173,6 @@ defmodule Pleroma.Config.DeprecationWarnings do
      check_old_mrf_config(),
      check_media_proxy_whitelist_config(),
      check_welcome_message_config(),
-     check_gun_pool_options(),
      check_activity_expiration_config(),
      check_remote_ip_plug_name(),
      check_uploders_s3_public_endpoint(),
@@ -257,51 +256,6 @@ defmodule Pleroma.Config.DeprecationWarnings do
     end
   end

-  def check_gun_pool_options do
-    pool_config = Config.get(:connections_pool)
-
-    if timeout = pool_config[:await_up_timeout] do
-      Logger.warn("""
-      !!!DEPRECATION WARNING!!!
-      Your config is using old setting `config :pleroma, :connections_pool, await_up_timeout`. Please change to `config :pleroma, :connections_pool, connect_timeout` to ensure compatibility with future releases.
-      """)
-
-      Config.put(:connections_pool, Keyword.put_new(pool_config, :connect_timeout, timeout))
-    end
-
-    pools_configs = Config.get(:pools)
-
-    warning_preface = """
-    !!!DEPRECATION WARNING!!!
-    Your config is using old setting name `timeout` instead of `recv_timeout` in pool settings. Setting should work for now, but you are advised to change format to scheme with port to prevent possible issues later.
-    """
-
-    updated_config =
-      Enum.reduce(pools_configs, [], fn {pool_name, config}, acc ->
-        if timeout = config[:timeout] do
-          Keyword.put(acc, pool_name, Keyword.put_new(config, :recv_timeout, timeout))
-        else
-          acc
-        end
-      end)
-
-    if updated_config != [] do
-      pool_warnings =
-        updated_config
-        |> Keyword.keys()
-        |> Enum.map(fn pool_name ->
-          "\n* `:timeout` options in #{pool_name} pool is now `:recv_timeout`"
-        end)
-
-      Logger.warn(Enum.join([warning_preface | pool_warnings]))
-
-      Config.put(:pools, updated_config)
-      :error
-    else
-      :ok
-    end
-  end
-
   @spec check_activity_expiration_config() :: :ok | nil
   def check_activity_expiration_config do
     warning_preface = """
@@ -15,14 +15,11 @@ defmodule Pleroma.Config.TransferTask do

   defp reboot_time_keys,
     do: [
-      {:pleroma, :hackney_pools},
       {:pleroma, :shout},
       {:pleroma, Oban},
       {:pleroma, :rate_limit},
       {:pleroma, :markup},
-      {:pleroma, :streamer},
-      {:pleroma, :pools},
-      {:pleroma, :connections_pool}
+      {:pleroma, :streamer}
     ]

   defp reboot_time_subkeys,
@@ -542,7 +542,7 @@ defmodule Pleroma.Emoji.Pack do
   defp http_get(%URI{} = url), do: url |> to_string() |> http_get()

   defp http_get(url) do
-    with {:ok, %{body: body}} <- Pleroma.HTTP.get(url, [], pool: :default) do
+    with {:ok, %{body: body}} <- Pleroma.HTTP.get(url, [], []) do
       Jason.decode(body)
     end
   end
@@ -93,7 +93,7 @@ defmodule Pleroma.Frontend do
     url = String.replace(frontend_info["build_url"], "${ref}", frontend_info["ref"])

     with {:ok, %{status: 200, body: zip_body}} <-
-           Pleroma.HTTP.get(url, [], pool: :media, recv_timeout: 120_000) do
+           Pleroma.HTTP.get(url, [], recv_timeout: 120_000) do
       unzip(zip_body, dest)
     else
       {:error, e} -> {:error, e}
@@ -24,7 +24,7 @@ defmodule Pleroma.Helpers.MediaHelper do
   def image_resize(url, options) do
     with executable when is_binary(executable) <- System.find_executable("convert"),
          {:ok, args} <- prepare_image_resize_args(options),
-         {:ok, env} <- HTTP.get(url, [], pool: :media),
+         {:ok, env} <- HTTP.get(url, [], []),
          {:ok, fifo_path} <- mkfifo() do
       args = List.flatten([fifo_path, args])
       run_fifo(fifo_path, env, executable, args)
@@ -73,7 +73,7 @@ defmodule Pleroma.Helpers.MediaHelper do
   # Note: video thumbnail is intentionally not resized (always has original dimensions)
   def video_framegrab(url) do
     with executable when is_binary(executable) <- System.find_executable("ffmpeg"),
-         {:ok, env} <- HTTP.get(url, [], pool: :media),
+         {:ok, env} <- HTTP.get(url, [], []),
          {:ok, fifo_path} <- mkfifo(),
          args = [
            "-y",
@@ -66,17 +66,9 @@ defmodule Pleroma.HTTP do
     params = options[:params] || []
     request = build_request(method, headers, options, url, body, params)

-    adapter = Application.get_env(:tesla, :adapter)
-
-    client = Tesla.client(adapter_middlewares(adapter), adapter)
-
-    maybe_limit(
-      fn ->
-        request(client, request)
-      end,
-      adapter,
-      adapter_opts
-    )
+    client = Tesla.client([Tesla.Middleware.FollowRedirects])
+
+    request(client, request)
   end

   @spec request(Client.t(), keyword()) :: {:ok, Env.t()} | {:error, any()}
|
@ -92,19 +84,4 @@ defmodule Pleroma.HTTP do
|
||||||
|> Builder.add_param(:query, :query, params)
|
|> Builder.add_param(:query, :query, params)
|
||||||
|> Builder.convert_to_keyword()
|
|> Builder.convert_to_keyword()
|
||||||
end
|
end
|
||||||
|
|
||||||
@prefix Pleroma.Gun.ConnectionPool
|
|
||||||
defp maybe_limit(fun, Tesla.Adapter.Gun, opts) do
|
|
||||||
ConcurrentLimiter.limit(:"#{@prefix}.#{opts[:pool] || :default}", fun)
|
|
||||||
end
|
|
||||||
|
|
||||||
defp maybe_limit(fun, _, _) do
|
|
||||||
fun.()
|
|
||||||
end
|
|
||||||
|
|
||||||
defp adapter_middlewares(Tesla.Adapter.Gun) do
|
|
||||||
[Tesla.Middleware.FollowRedirects, Pleroma.Tesla.Middleware.ConnectionPool]
|
|
||||||
end
|
|
||||||
|
|
||||||
defp adapter_middlewares(_), do: []
|
|
||||||
end
|
end
|
||||||
|
|
|
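After these two hunks, Pleroma.HTTP builds one static Tesla client whose only middleware is redirect following and leaves transport to the globally configured Finch adapter. A rough sketch of the resulting request path (the URL and option values are illustrative):

client = Tesla.client([Tesla.Middleware.FollowRedirects])

request = [method: :get, url: "https://example.com", headers: [], query: []]

{:ok, %Tesla.Env{} = env} = Tesla.request(client, request)
IO.inspect(env.status, label: "status")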
@@ -6,7 +6,7 @@ defmodule Pleroma.HTTP.AdapterHelper do
   @moduledoc """
   Configure Tesla.Client with default and customized adapter options.
   """
-  @defaults [pool: :federation, connect_timeout: 5_000, recv_timeout: 5_000]
+  @defaults [name: MyFinch, connect_timeout: 5_000, recv_timeout: 5_000]

   @type proxy_type() :: :socks4 | :socks5
   @type host() :: charlist() | :inet.ip_address()
@@ -43,17 +43,7 @@ defmodule Pleroma.HTTP.AdapterHelper do
   def options(%URI{} = uri, opts \\ []) do
     @defaults
     |> Keyword.merge(opts)
-    |> adapter_helper().options(uri)
-  end
-
-  defp adapter, do: Application.get_env(:tesla, :adapter)
-
-  defp adapter_helper do
-    case adapter() do
-      Tesla.Adapter.Gun -> AdapterHelper.Gun
-      Tesla.Adapter.Hackney -> AdapterHelper.Hackney
-      _ -> AdapterHelper.Default
-    end
+    |> AdapterHelper.Default.options(uri)
   end

   @spec parse_proxy(String.t() | tuple() | nil) ::
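With a single adapter left, options/2 no longer dispatches on the configured adapter; it simply merges the static defaults with per-call overrides. A small sketch of that behaviour (the defaults mirror @defaults above):

defaults = [name: MyFinch, connect_timeout: 5_000, recv_timeout: 5_000]

# per-call options win over the defaults
Keyword.merge(defaults, recv_timeout: 120_000)
#=> [name: MyFinch, connect_timeout: 5_000, recv_timeout: 120_000]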
@@ -11,8 +11,6 @@ defmodule Pleroma.HTTP.ExAws do

   @impl true
   def request(method, url, body \\ "", headers \\ [], http_opts \\ []) do
-    http_opts = Keyword.put_new(http_opts, :pool, :upload)
-
     case HTTP.request(method, url, body, headers, http_opts) do
       {:ok, env} ->
         {:ok, %{status_code: env.status, headers: env.headers, body: env.body}}
@@ -10,6 +10,8 @@ defmodule Pleroma.HTTP.RequestBuilder do
   alias Pleroma.HTTP.Request
   alias Tesla.Multipart

+  @mix_env Mix.env()
+
   @doc """
   Creates new request
   """
@@ -33,14 +35,7 @@ defmodule Pleroma.HTTP.RequestBuilder do
   """
   @spec headers(Request.t(), Request.headers()) :: Request.t()
   def headers(request, headers) do
-    headers_list =
-      with true <- Pleroma.Config.get([:http, :send_user_agent]),
-           nil <- Enum.find(headers, fn {key, _val} -> String.downcase(key) == "user-agent" end) do
-        [{"user-agent", Pleroma.Application.user_agent()} | headers]
-      else
-        _ ->
-          headers
-      end
+    headers_list = maybe_add_user_agent(headers, @mix_env)

     %{request | headers: headers_list}
   end
@@ -92,4 +87,16 @@ defmodule Pleroma.HTTP.RequestBuilder do
     |> Map.from_struct()
     |> Enum.into([])
   end
+
+  defp maybe_add_user_agent(headers, :test) do
+    with true <- Pleroma.Config.get([:http, :send_user_agent]) do
+      [{"user-agent", Pleroma.Application.user_agent()} | headers]
+    else
+      _ ->
+        headers
+    end
+  end
+
+  defp maybe_add_user_agent(headers, _),
+    do: [{"user-agent", Pleroma.Application.user_agent()} | headers]
 end
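The new maybe_add_user_agent/2 consults the :send_user_agent setting only when compiled for the :test environment; everywhere else the header is always prepended. A standalone sketch of the same shape (this toy module takes the flag as an argument instead of reading Pleroma.Config, and the user-agent string is made up):

defmodule UserAgentSketch do
  @user_agent "illustrative user agent string"

  def maybe_add_user_agent(headers, :test, send_user_agent?) do
    if send_user_agent?, do: [{"user-agent", @user_agent} | headers], else: headers
  end

  def maybe_add_user_agent(headers, _env, _send_user_agent?) do
    [{"user-agent", @user_agent} | headers]
  end
end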
@@ -11,8 +11,6 @@ defmodule Pleroma.HTTP.Tzdata do

   @impl true
   def get(url, headers, options) do
-    options = Keyword.put_new(options, :pool, :default)
-
     with {:ok, %Tesla.Env{} = env} <- HTTP.get(url, headers, options) do
       {:ok, {env.status, env.headers, env.body}}
     end
@@ -20,8 +18,6 @@ defmodule Pleroma.HTTP.Tzdata do

   @impl true
   def head(url, headers, options) do
-    options = Keyword.put_new(options, :pool, :default)
-
     with {:ok, %Tesla.Env{} = env} <- HTTP.head(url, headers, options) do
       {:ok, {env.status, env.headers}}
     end
@@ -170,7 +170,7 @@ defmodule Pleroma.Instances.Instance do
     try do
       with {_, true} <- {:reachable, reachable?(instance_uri.host)},
            {:ok, %Tesla.Env{body: html}} <-
-             Pleroma.HTTP.get(to_string(instance_uri), [{"accept", "text/html"}], pool: :media),
+             Pleroma.HTTP.get(to_string(instance_uri), [{"accept", "text/html"}], []),
            {_, [favicon_rel | _]} when is_binary(favicon_rel) <-
              {:parse,
               html |> Floki.parse_document!() |> Floki.attribute("link[rel=icon]", "href")},
@@ -9,7 +9,7 @@ defmodule Pleroma.ReverseProxy do
   @resp_cache_headers ~w(etag date last-modified)
   @keep_resp_headers @resp_cache_headers ++
                        ~w(content-length content-type content-disposition content-encoding) ++
-                       ~w(content-range accept-ranges vary)
+                       ~w(content-range accept-ranges vary expires)
   @default_cache_control_header "public, max-age=1209600"
   @valid_resp_codes [200, 206, 304]
   @max_read_duration :timer.seconds(30)
@@ -59,11 +59,7 @@ defmodule Pleroma.ReverseProxy do

   * `req_headers`, `resp_headers` additional headers.

-  * `http`: options for [hackney](https://github.com/benoitc/hackney) or [gun](https://github.com/ninenines/gun).
-
   """
-  @default_options [pool: :media]
-
   @inline_content_types [
     "image/gif",
     "image/jpeg",
@@ -94,7 +90,7 @@ defmodule Pleroma.ReverseProxy do
   def call(_conn, _url, _opts \\ [])

   def call(conn = %{method: method}, url, opts) when method in @methods do
-    client_opts = Keyword.merge(@default_options, Keyword.get(opts, :http, []))
+    client_opts = Keyword.get(opts, :http, [])

     req_headers = build_req_headers(conn.req_headers, opts)

@@ -106,32 +102,39 @@ defmodule Pleroma.ReverseProxy do
     end

     with {:ok, nil} <- @cachex.get(:failed_proxy_url_cache, url),
-         {:ok, code, headers, client} <- request(method, url, req_headers, client_opts),
+         {:ok, status, headers, body} <- request(method, url, req_headers, client_opts),
          :ok <-
            header_length_constraint(
              headers,
              Keyword.get(opts, :max_body_length, @max_body_length)
            ) do
-      response(conn, client, url, code, headers, opts)
+      conn
+      |> put_private(:proxied_url, url)
+      |> response(body, status, headers, opts)
     else
       {:ok, true} ->
         conn
-        |> error_or_redirect(url, 500, "Request failed", opts)
+        |> error_or_redirect(500, "Request failed", opts)
         |> halt()

-      {:ok, code, headers} ->
-        head_response(conn, url, code, headers, opts)
+      {:ok, status, headers} ->
+        conn
+        |> put_private(:proxied_url, url)
+        |> head_response(status, headers, opts)
         |> halt()

-      {:error, {:invalid_http_response, code}} ->
-        Logger.error("#{__MODULE__}: request to #{inspect(url)} failed with HTTP status #{code}")
-        track_failed_url(url, code, opts)
+      {:error, {:invalid_http_response, status}} ->
+        Logger.error(
+          "#{__MODULE__}: request to #{inspect(url)} failed with HTTP status #{status}"
+        )
+
+        track_failed_url(url, status, opts)

         conn
+        |> put_private(:proxied_url, url)
         |> error_or_redirect(
-          url,
-          code,
-          "Request failed: " <> Plug.Conn.Status.reason_phrase(code),
+          status,
+          "Request failed: " <> Plug.Conn.Status.reason_phrase(status),
           opts
         )
         |> halt()
@@ -141,7 +144,8 @@ defmodule Pleroma.ReverseProxy do
         track_failed_url(url, error, opts)

         conn
-        |> error_or_redirect(url, 500, "Request failed", opts)
+        |> put_private(:proxied_url, url)
+        |> error_or_redirect(500, "Request failed", opts)
         |> halt()
     end
   end
@@ -156,93 +160,48 @@ defmodule Pleroma.ReverseProxy do
     Logger.debug("#{__MODULE__} #{method} #{url} #{inspect(headers)}")
     method = method |> String.downcase() |> String.to_existing_atom()

-    case client().request(method, url, headers, "", opts) do
-      {:ok, code, headers, client} when code in @valid_resp_codes ->
-        {:ok, code, downcase_headers(headers), client}
-
-      {:ok, code, headers} when code in @valid_resp_codes ->
-        {:ok, code, downcase_headers(headers)}
+    opts = opts ++ [receive_timeout: @max_read_duration]
+
+    case Pleroma.HTTP.request(method, url, "", headers, opts) do
+      {:ok, %Tesla.Env{status: status, headers: headers, body: body}}
+      when status in @valid_resp_codes ->
+        {:ok, status, downcase_headers(headers), body}

-      {:ok, code, _, _} ->
-        {:error, {:invalid_http_response, code}}
+      {:ok, %Tesla.Env{status: status, headers: headers}} when status in @valid_resp_codes ->
+        {:ok, status, downcase_headers(headers)}

-      {:ok, code, _} ->
-        {:error, {:invalid_http_response, code}}
+      {:ok, %Tesla.Env{status: status}} ->
+        {:error, {:invalid_http_response, status}}

       {:error, error} ->
         {:error, error}
     end
   end

-  defp response(conn, client, url, status, headers, opts) do
-    Logger.debug("#{__MODULE__} #{status} #{url} #{inspect(headers)}")
-
-    result =
-      conn
-      |> put_resp_headers(build_resp_headers(headers, opts))
-      |> send_chunked(status)
-      |> chunk_reply(client, opts)
-
-    case result do
-      {:ok, conn} ->
-        halt(conn)
-
-      {:error, :closed, conn} ->
-        client().close(client)
-        halt(conn)
-
-      {:error, error, conn} ->
-        Logger.warn(
-          "#{__MODULE__} request to #{url} failed while reading/chunking: #{inspect(error)}"
-        )
-
-        client().close(client)
-        halt(conn)
-    end
-  end
-
-  defp chunk_reply(conn, client, opts) do
-    chunk_reply(conn, client, opts, 0, 0)
-  end
-
-  defp chunk_reply(conn, client, opts, sent_so_far, duration) do
-    with {:ok, duration} <-
-           check_read_duration(
-             duration,
-             Keyword.get(opts, :max_read_duration, @max_read_duration)
-           ),
-         {:ok, data, client} <- client().stream_body(client),
-         {:ok, duration} <- increase_read_duration(duration),
-         sent_so_far = sent_so_far + byte_size(data),
-         :ok <-
-           body_size_constraint(
-             sent_so_far,
-             Keyword.get(opts, :max_body_length, @max_body_length)
-           ),
-         {:ok, conn} <- chunk(conn, data) do
-      chunk_reply(conn, client, opts, sent_so_far, duration)
-    else
-      :done -> {:ok, conn}
-      {:error, error} -> {:error, error, conn}
-    end
-  end
-
-  defp head_response(conn, url, code, headers, opts) do
-    Logger.debug("#{__MODULE__} #{code} #{url} #{inspect(headers)}")
+  defp response(conn, body, status, headers, opts) do
+    Logger.debug("#{__MODULE__} #{status} #{conn.private[:proxied_url]} #{inspect(headers)}")

     conn
     |> put_resp_headers(build_resp_headers(headers, opts))
-    |> send_resp(code, "")
+    |> send_resp(status, body)
   end

-  defp error_or_redirect(conn, url, code, body, opts) do
+  defp head_response(conn, status, headers, opts) do
+    Logger.debug("#{__MODULE__} #{status} #{conn.private[:proxied_url]} #{inspect(headers)}")
+
+    conn
+    |> put_resp_headers(build_resp_headers(headers, opts))
+    |> send_resp(status, "")
+  end
+
+  defp error_or_redirect(conn, status, body, opts) do
     if Keyword.get(opts, :redirect_on_failure, false) do
       conn
-      |> Phoenix.Controller.redirect(external: url)
+      |> Phoenix.Controller.redirect(external: conn.private[:proxied_url])
      |> halt()
     else
       conn
-      |> send_resp(code, body)
+      |> send_resp(status, body)
       |> halt
     end
   end
@@ -272,7 +231,6 @@ defmodule Pleroma.ReverseProxy do
     |> downcase_headers()
     |> Enum.filter(fn {k, _} -> k in @keep_req_headers end)
     |> build_req_range_or_encoding_header(opts)
-    |> build_req_user_agent_header(opts)
     |> Keyword.merge(Keyword.get(opts, :req_headers, []))
   end

@@ -287,15 +245,6 @@ defmodule Pleroma.ReverseProxy do
     end
   end

-  defp build_req_user_agent_header(headers, _opts) do
-    List.keystore(
-      headers,
-      "user-agent",
-      0,
-      {"user-agent", Pleroma.Application.user_agent()}
-    )
-  end
-
   defp build_resp_headers(headers, opts) do
     headers
     |> Enum.filter(fn {k, _} -> k in @keep_resp_headers end)
@@ -382,37 +331,6 @@ defmodule Pleroma.ReverseProxy do

   defp header_length_constraint(_, _), do: :ok

-  defp body_size_constraint(size, limit) when is_integer(limit) and limit > 0 and size >= limit do
-    {:error, :body_too_large}
-  end
-
-  defp body_size_constraint(_, _), do: :ok
-
-  defp check_read_duration(nil = _duration, max), do: check_read_duration(@max_read_duration, max)
-
-  defp check_read_duration(duration, max)
-       when is_integer(duration) and is_integer(max) and max > 0 do
-    if duration > max do
-      {:error, :read_duration_exceeded}
-    else
-      {:ok, {duration, :erlang.system_time(:millisecond)}}
-    end
-  end
-
-  defp check_read_duration(_, _), do: {:ok, :no_duration_limit, :no_duration_limit}
-
-  defp increase_read_duration({previous_duration, started})
-       when is_integer(previous_duration) and is_integer(started) do
-    duration = :erlang.system_time(:millisecond) - started
-    {:ok, previous_duration + duration}
-  end
-
-  defp increase_read_duration(_) do
-    {:ok, :no_duration_limit, :no_duration_limit}
-  end
-
-  defp client, do: Pleroma.ReverseProxy.Client.Wrapper
-
   defp track_failed_url(url, error, opts) do
     ttl =
       unless error in [:body_too_large, 400, 204] do
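Taken together, the ReverseProxy changes drop streaming entirely: instead of pulling the upstream body chunk by chunk through a hackney/gun client handle, the proxy now makes one buffered request via Pleroma.HTTP.request/5 and sends the whole body at once, which is why the chunking, read-duration and body-size helpers above disappear. A hedged sketch of that buffered flow (only Pleroma.HTTP.request/5 and the receive_timeout option come from the diff; everything else here is illustrative):

defmodule BufferedProxySketch do
  import Plug.Conn

  # one buffered request, one complete response, no chunked streaming
  def proxy(conn, url, req_headers) do
    case Pleroma.HTTP.request(:get, url, "", req_headers, receive_timeout: 30_000) do
      {:ok, %Tesla.Env{status: status, headers: headers, body: body}} ->
        conn
        |> merge_resp_headers(headers)
        |> send_resp(status, body)

      {:error, reason} ->
        send_resp(conn, 502, "upstream error: #{inspect(reason)}")
    end
  end
end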
@@ -8,11 +8,7 @@ defmodule Pleroma.Telemetry.Logger do
   require Logger

   @events [
-    [:pleroma, :connection_pool, :reclaim, :start],
-    [:pleroma, :connection_pool, :reclaim, :stop],
-    [:pleroma, :connection_pool, :provision_failure],
-    [:pleroma, :connection_pool, :client, :dead],
-    [:pleroma, :connection_pool, :client, :add]
+    [:pleroma, :repo, :query]
   ]
   def attach do
     :telemetry.attach_many(
@@ -28,68 +24,62 @@ defmodule Pleroma.Telemetry.Logger do
   # out anyway due to higher log level configured

   def handle_event(
-        [:pleroma, :connection_pool, :reclaim, :start],
-        _,
-        %{max_connections: max_connections, reclaim_max: reclaim_max},
-        _
+        [:pleroma, :repo, :query] = _name,
+        %{query_time: query_time} = measurements,
+        %{source: source} = metadata,
+        config
       ) do
-    Logger.debug(fn ->
-      "Connection pool is exhausted (reached #{max_connections} connections). Starting idle connection cleanup to reclaim as much as #{reclaim_max} connections"
-    end)
-  end
-
-  def handle_event(
-        [:pleroma, :connection_pool, :reclaim, :stop],
-        %{reclaimed_count: 0},
-        _,
-        _
-      ) do
-    Logger.error(fn ->
-      "Connection pool failed to reclaim any connections due to all of them being in use. It will have to drop requests for opening connections to new hosts"
-    end)
-  end
-
-  def handle_event(
-        [:pleroma, :connection_pool, :reclaim, :stop],
-        %{reclaimed_count: reclaimed_count},
-        _,
-        _
-      ) do
-    Logger.debug(fn -> "Connection pool cleaned up #{reclaimed_count} idle connections" end)
-  end
-
-  def handle_event(
-        [:pleroma, :connection_pool, :provision_failure],
-        %{opts: [key | _]},
-        _,
-        _
-      ) do
-    Logger.error(fn ->
-      "Connection pool had to refuse opening a connection to #{key} due to connection limit exhaustion"
-    end)
-  end
-
-  def handle_event(
-        [:pleroma, :connection_pool, :client, :dead],
-        %{client_pid: client_pid, reason: reason},
-        %{key: key},
-        _
-      ) do
+    logging_config = Pleroma.Config.get([:telemetry, :slow_queries_logging], [])
+
+    if logging_config[:enabled] &&
+         logging_config[:min_duration] &&
+         query_time > logging_config[:min_duration] and
+         (is_nil(logging_config[:exclude_sources]) or
+            source not in logging_config[:exclude_sources]) do
+      log_slow_query(measurements, metadata, config)
+    else
+      :ok
+    end
+  end
+
+  defp log_slow_query(
+         %{query_time: query_time} = _measurements,
+         %{source: _source, query: query, params: query_params, repo: repo} = _metadata,
+         _config
+       ) do
+    sql_explain =
+      with {:ok, %{rows: explain_result_rows}} <-
+             repo.query("EXPLAIN " <> query, query_params, log: false) do
+        Enum.map_join(explain_result_rows, "\n", & &1)
+      end
+
+    {:current_stacktrace, stacktrace} = Process.info(self(), :current_stacktrace)
+
+    pleroma_stacktrace =
+      Enum.filter(stacktrace, fn
+        {__MODULE__, _, _, _} ->
+          false
+
+        {mod, _, _, _} ->
+          mod
+          |> to_string()
+          |> String.starts_with?("Elixir.Pleroma.")
+      end)
+
     Logger.warn(fn ->
-      "Pool worker for #{key}: Client #{inspect(client_pid)} died before releasing the connection with #{inspect(reason)}"
+      """
+      Slow query!
+
+      Total time: #{round(query_time / 1_000)} ms
+
+      #{query}
+
+      #{inspect(query_params, limit: :infinity)}
+
+      #{sql_explain}
+
+      #{Exception.format_stacktrace(pleroma_stacktrace)}
+      """
     end)
   end
-
-  def handle_event(
-        [:pleroma, :connection_pool, :client, :add],
-        %{clients: [_, _ | _] = clients},
-        %{key: key, protocol: :http},
-        _
-      ) do
-    Logger.info(fn ->
-      "Pool worker for #{key}: #{length(clients)} clients are using an HTTP1 connection at the same time, head-of-line blocking might occur."
-    end)
-  end
-
-  def handle_event([:pleroma, :connection_pool, :client, :add], _, _, _), do: :ok
 end
@@ -30,23 +30,12 @@ defmodule Pleroma.Uploaders.S3 do

     op =
       if streaming do
-        op =
-          upload.tempfile
-          |> ExAws.S3.Upload.stream_file()
-          |> ExAws.S3.upload(bucket, s3_name, [
-            {:acl, :public_read},
-            {:content_type, upload.content_type}
-          ])
-
-        if Application.get_env(:tesla, :adapter) == Tesla.Adapter.Gun do
-          # set s3 upload timeout to respect :upload pool timeout
-          # timeout should be slightly larger, so s3 can retry upload on fail
-          timeout = Pleroma.HTTP.AdapterHelper.Gun.pool_timeout(:upload) + 1_000
-          opts = Keyword.put(op.opts, :timeout, timeout)
-          Map.put(op, :opts, opts)
-        else
-          op
-        end
+        upload.tempfile
+        |> ExAws.S3.Upload.stream_file()
+        |> ExAws.S3.upload(bucket, s3_name, [
+          {:acl, :public_read},
+          {:content_type, upload.content_type}
+        ])
       else
         {:ok, file_data} = File.read(upload.tempfile)

@@ -12,7 +12,6 @@ defmodule Pleroma.Web.ActivityPub.MRF.MediaProxyWarmingPolicy do
   require Logger

   @adapter_options [
-    pool: :media,
     recv_timeout: 10_000
   ]

|
@ -54,7 +54,7 @@ defmodule Pleroma.Web.MediaProxy.MediaProxyController do
|
||||||
media_proxy_url = MediaProxy.url(url)
|
media_proxy_url = MediaProxy.url(url)
|
||||||
|
|
||||||
with {:ok, %{status: status} = head_response} when status in 200..299 <-
|
with {:ok, %{status: status} = head_response} when status in 200..299 <-
|
||||||
Pleroma.HTTP.request("head", media_proxy_url, [], [], pool: :media) do
|
Pleroma.HTTP.request("head", media_proxy_url, [], [], name: MyFinch) do
|
||||||
content_type = Tesla.get_header(head_response, "content-type")
|
content_type = Tesla.get_header(head_response, "content-type")
|
||||||
content_length = Tesla.get_header(head_response, "content-length")
|
content_length = Tesla.get_header(head_response, "content-length")
|
||||||
content_length = content_length && String.to_integer(content_length)
|
content_length = content_length && String.to_integer(content_length)
|
||||||
|
|
|
@@ -47,10 +47,9 @@ defmodule Pleroma.Web.Plugs.UploadedMedia do
     config = Pleroma.Config.get(Pleroma.Upload)

     with uploader <- Keyword.fetch!(config, :uploader),
-         proxy_remote = Keyword.get(config, :proxy_remote, false),
          {:ok, get_method} <- uploader.get_file(file),
          false <- media_is_banned(conn, get_method) do
-      get_media(conn, get_method, proxy_remote, opts)
+      get_media(conn, get_method, opts)
     else
       _ ->
         conn
@@ -69,7 +68,7 @@ defmodule Pleroma.Web.Plugs.UploadedMedia do

   defp media_is_banned(_, _), do: false

-  defp get_media(conn, {:static_dir, directory}, _, opts) do
+  defp get_media(conn, {:static_dir, directory}, opts) do
     static_opts =
       Map.get(opts, :static_plug_opts)
       |> Map.put(:at, [@path])
@@ -86,25 +85,13 @@ defmodule Pleroma.Web.Plugs.UploadedMedia do
     end
   end

-  defp get_media(conn, {:url, url}, true, _) do
-    proxy_opts = [
-      http: [
-        follow_redirect: true,
-        pool: :upload
-      ]
-    ]
-
-    conn
-    |> Pleroma.ReverseProxy.call(url, proxy_opts)
-  end
-
-  defp get_media(conn, {:url, url}, _, _) do
+  defp get_media(conn, {:url, url}, _) do
     conn
     |> Phoenix.Controller.redirect(external: url)
     |> halt()
   end

-  defp get_media(conn, unknown, _, _) do
+  defp get_media(conn, unknown, _) do
     Logger.error("#{__MODULE__}: Unknown get startegy: #{inspect(unknown)}")

     conn
@@ -4,7 +4,6 @@

 defmodule Pleroma.Web.RelMe do
   @options [
-    pool: :media,
     max_body: 2_000_000,
     recv_timeout: 2_000
   ]
@@ -10,7 +10,6 @@ defmodule Pleroma.Web.RichMedia.Helpers do
   alias Pleroma.Web.RichMedia.Parser

   @options [
-    pool: :media,
     max_body: 2_000_000,
     recv_timeout: 2_000
   ]
mix.exs
@@ -215,7 +215,6 @@ defmodule Pleroma.Mixfile do
       {:mock, "~> 0.3.5", only: :test},
       # temporary downgrade for excoveralls, hackney until hackney max_connections bug will be fixed
       {:excoveralls, "0.12.3", only: :test},
-      {:hackney, "~> 1.18.0", override: true},
       {:mox, "~> 1.0", only: :test},
       {:websocket_client, git: "https://github.com/jeremyong/websocket_client.git", only: :test}
     ] ++ oauth_deps()

@@ -280,50 +280,6 @@ defmodule Pleroma.Config.DeprecationWarningsTest do
              "Your config is using the old setting for controlling the URL of media uploaded to your S3 bucket."
   end

-  describe "check_gun_pool_options/0" do
-    test "await_up_timeout" do
-      config = Config.get(:connections_pool)
-      clear_config(:connections_pool, Keyword.put(config, :await_up_timeout, 5_000))
-
-      assert capture_log(fn ->
-               DeprecationWarnings.check_gun_pool_options()
-             end) =~
-               "Your config is using old setting `config :pleroma, :connections_pool, await_up_timeout`."
-    end
-
-    test "pool timeout" do
-      old_config = [
-        federation: [
-          size: 50,
-          max_waiting: 10,
-          timeout: 10_000
-        ],
-        media: [
-          size: 50,
-          max_waiting: 10,
-          timeout: 10_000
-        ],
-        upload: [
-          size: 25,
-          max_waiting: 5,
-          timeout: 15_000
-        ],
-        default: [
-          size: 10,
-          max_waiting: 2,
-          timeout: 5_000
-        ]
-      ]
-
-      clear_config(:pools, old_config)
-
-      assert capture_log(fn ->
-               DeprecationWarnings.check_gun_pool_options()
-             end) =~
-               "Your config is using old setting name `timeout` instead of `recv_timeout` in pool settings"
-    end
-  end
-
   test "check_old_chat_shoutbox/0" do
     clear_config([:instance, :chat_limit], 1_000)
     clear_config([:chat, :enabled], true)
@@ -1,100 +0,0 @@
-# Pleroma: A lightweight social networking server
-# Copyright © 2017-2021 Pleroma Authors <https://pleroma.social/>
-# SPDX-License-Identifier: AGPL-3.0-only
-
-defmodule Pleroma.Gun.ConnectionPoolTest do
-  use Pleroma.DataCase
-
-  import Mox
-  import ExUnit.CaptureLog
-  alias Pleroma.Gun.ConnectionPool
-
-  defp gun_mock(_) do
-    Pleroma.GunMock
-    |> stub(:open, fn _, _, _ -> Task.start_link(fn -> Process.sleep(100) end) end)
-    |> stub(:await_up, fn _, _ -> {:ok, :http} end)
-    |> stub(:set_owner, fn _, _ -> :ok end)
-
-    :ok
-  end
-
-  setup :gun_mock
-
-  test "gives the same connection to 2 concurrent requests" do
-    Enum.map(
-      [
-        "http://www.korean-books.com.kp/KBMbooks/en/periodic/pictorial/20200530163914.pdf",
-        "http://www.korean-books.com.kp/KBMbooks/en/periodic/pictorial/20200528183427.pdf"
-      ],
-      fn uri ->
-        uri = URI.parse(uri)
-        task_parent = self()
-
-        Task.start_link(fn ->
-          {:ok, conn} = ConnectionPool.get_conn(uri, [])
-          ConnectionPool.release_conn(conn)
-          send(task_parent, conn)
-        end)
-      end
-    )
-
-    [pid, pid] =
-      for _ <- 1..2 do
-        receive do
-          pid -> pid
-        end
-      end
-  end
-
-  @tag :erratic
-  test "connection limit is respected with concurrent requests" do
-    clear_config([:connections_pool, :max_connections]) do
-      clear_config([:connections_pool, :max_connections], 1)
-      # The supervisor needs a reboot to apply the new config setting
-      Process.exit(Process.whereis(Pleroma.Gun.ConnectionPool.WorkerSupervisor), :kill)
-
-      on_exit(fn ->
-        Process.exit(Process.whereis(Pleroma.Gun.ConnectionPool.WorkerSupervisor), :kill)
-      end)
-    end
-
-    capture_log(fn ->
-      Enum.map(
-        [
-          "https://ninenines.eu/",
-          "https://youtu.be/PFGwMiDJKNY"
-        ],
-        fn uri ->
-          uri = URI.parse(uri)
-          task_parent = self()
-
-          Task.start_link(fn ->
-            result = ConnectionPool.get_conn(uri, [])
-            # Sleep so that we don't end up with a situation,
-            # where request from the second process gets processed
-            # only after the first process already released the connection
-            Process.sleep(50)
-
-            case result do
-              {:ok, pid} ->
-                ConnectionPool.release_conn(pid)
-
-              _ ->
-                nil
-            end
-
-            send(task_parent, result)
-          end)
-        end
-      )
-
-      [{:error, :pool_full}, {:ok, _pid}] =
-        for _ <- 1..2 do
-          receive do
-            result -> result
-          end
-        end
-        |> Enum.sort()
-    end)
-  end
-end
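These gun connection-pool tests disappear because pooling is now handled by Finch itself, which keeps a pool per scheme/host/port. If per-destination pool sizing is still wanted, it would be expressed on the Finch child spec rather than through the old :pleroma, :pools settings. A minimal sketch, with the pool name and sizes as illustrative assumptions rather than values from this commit:

    children = [
      {Finch,
       name: MyFinch,
       pools: %{
         # fallback pool used for any origin without a dedicated entry
         :default => [size: 10, count: 1],
         # a larger pool for one busy upstream
         "https://example.com" => [size: 50, count: 1]
       }}
    ]

    Supervisor.start_link(children, strategy: :one_for_one)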
@@ -5,112 +5,57 @@
 defmodule Pleroma.ReverseProxyTest do
   use Pleroma.Web.ConnCase
   import ExUnit.CaptureLog
-  import Mox

   alias Pleroma.ReverseProxy
-  alias Pleroma.ReverseProxy.ClientMock
   alias Plug.Conn

-  setup_all do
-    {:ok, _} = Registry.start_link(keys: :unique, name: ClientMock)
-    :ok
-  end
-
-  setup :verify_on_exit!
-
-  defp request_mock(invokes) do
-    ClientMock
-    |> expect(:request, fn :get, url, headers, _body, _opts ->
-      Registry.register(ClientMock, url, 0)
-      body = headers |> Enum.into(%{}) |> Jason.encode!()
-
-      {:ok, 200,
-       [
-         {"content-type", "application/json"},
-         {"content-length", byte_size(body) |> to_string()}
-       ], %{url: url, body: body}}
-    end)
-    |> expect(:stream_body, invokes, fn %{url: url, body: body} = client ->
-      case Registry.lookup(ClientMock, url) do
-        [{_, 0}] ->
-          Registry.update_value(ClientMock, url, &(&1 + 1))
-          {:ok, body, client}
-
-        [{_, 1}] ->
-          Registry.unregister(ClientMock, url)
-          :done
-      end
-    end)
-  end
-
   describe "reverse proxy" do
     test "do not track successful request", %{conn: conn} do
-      request_mock(2)
       url = "/success"

+      Tesla.Mock.mock(fn %{url: ^url} ->
+        %Tesla.Env{
+          status: 200,
+          body: ""
+        }
+      end)
+
       conn = ReverseProxy.call(conn, url)

-      assert conn.status == 200
+      assert response(conn, 200)
       assert Cachex.get(:failed_proxy_url_cache, url) == {:ok, nil}
     end
-  end

     test "use Pleroma's user agent in the request; don't pass the client's", %{conn: conn} do
-      request_mock(2)
+      clear_config([:http, :send_user_agent], true)
+      # Mock will fail if the client's user agent isn't filtered
+      wanted_headers = [{"user-agent", Pleroma.Application.user_agent()}]

+      Tesla.Mock.mock(fn %{url: "/user-agent", headers: ^wanted_headers} ->
+        %Tesla.Env{
+          status: 200,
+          body: ""
+        }
+      end)
+
       conn =
         conn
         |> Plug.Conn.put_req_header("user-agent", "fake/1.0")
         |> ReverseProxy.call("/user-agent")

-      assert json_response(conn, 200) == %{"user-agent" => Pleroma.Application.user_agent()}
+      assert response(conn, 200)
-  end
-
-  test "closed connection", %{conn: conn} do
-    ClientMock
-    |> expect(:request, fn :get, "/closed", _, _, _ -> {:ok, 200, [], %{}} end)
-    |> expect(:stream_body, fn _ -> {:error, :closed} end)
-    |> expect(:close, fn _ -> :ok end)
-
-    conn = ReverseProxy.call(conn, "/closed")
-    assert conn.halted
-  end
-
-  defp stream_mock(invokes, with_close? \\ false) do
-    ClientMock
-    |> expect(:request, fn :get, "/stream-bytes/" <> length, _, _, _ ->
-      Registry.register(ClientMock, "/stream-bytes/" <> length, 0)
-
-      {:ok, 200, [{"content-type", "application/octet-stream"}],
-       %{url: "/stream-bytes/" <> length}}
-    end)
-    |> expect(:stream_body, invokes, fn %{url: "/stream-bytes/" <> length} = client ->
-      max = String.to_integer(length)
-
-      case Registry.lookup(ClientMock, "/stream-bytes/" <> length) do
-        [{_, current}] when current < max ->
-          Registry.update_value(
-            ClientMock,
-            "/stream-bytes/" <> length,
-            &(&1 + 10)
-          )
-
-          {:ok, "0123456789", client}
-
-        [{_, ^max}] ->
-          Registry.unregister(ClientMock, "/stream-bytes/" <> length)
-          :done
-      end
-    end)
-
-    if with_close? do
-      expect(ClientMock, :close, fn _ -> :ok end)
     end
   end

   describe "max_body" do
     test "length returns error if content-length more than option", %{conn: conn} do
-      request_mock(0)
+      Tesla.Mock.mock(fn %{url: "/huge-file"} ->
+        %Tesla.Env{
+          status: 200,
+          headers: [{"content-length", "100"}],
+          body: "This body is too large."
+        }
+      end)

       assert capture_log(fn ->
                ReverseProxy.call(conn, "/huge-file", max_body_length: 4)
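The rewritten tests above and below use Tesla's built-in mock adapter instead of the Mox-driven ClientMock: Tesla.Mock.mock/1 installs a function for the current test process, and every request the reverse proxy sends through Tesla is answered by whichever clause matches the %Tesla.Env{} request; a request that matches no clause errors out, which is how the "Mock will fail if ..." comments work. A minimal sketch of the pattern, assuming the test environment configures Tesla.Mock as the adapter (the module name and URL here are illustrative):

    defmodule MyApp.TeslaMockPatternTest do
      use ExUnit.Case, async: true

      test "only a matching request is answered" do
        url = "/demo"

        Tesla.Mock.mock(fn
          # ^url pins the variable, so only a request to exactly this URL matches;
          # the reverse-proxy tests use the same trick on `headers:` to assert that
          # unwanted request headers were filtered out before the request was sent.
          %Tesla.Env{method: :get, url: ^url} ->
            %Tesla.Env{status: 200, body: "ok"}
        end)

        assert {:ok, %Tesla.Env{status: 200, body: "ok"}} = Tesla.get(Tesla.client([]), url)
      end
    end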
@@ -123,22 +68,16 @@ defmodule Pleroma.ReverseProxyTest do
                ReverseProxy.call(conn, "/huge-file", max_body_length: 4)
              end) == ""
     end
-
-    test "max_body_length returns error if streaming body more than that option", %{conn: conn} do
-      stream_mock(3, true)
-
-      assert capture_log(fn ->
-               ReverseProxy.call(conn, "/stream-bytes/50", max_body_length: 30)
-             end) =~
-               "Elixir.Pleroma.ReverseProxy request to /stream-bytes/50 failed while reading/chunking: :body_too_large"
-    end
   end

   describe "HEAD requests" do
     test "common", %{conn: conn} do
-      ClientMock
-      |> expect(:request, fn :head, "/head", _, _, _ ->
-        {:ok, 200, [{"content-type", "text/html; charset=utf-8"}]}
+      Tesla.Mock.mock(fn %{method: :head, url: "/head"} ->
+        %Tesla.Env{
+          status: 200,
+          headers: [{"content-type", "text/html; charset=utf-8"}],
+          body: ""
+        }
       end)

       conn = ReverseProxy.call(Map.put(conn, :method, "HEAD"), "/head")
@@ -146,18 +85,17 @@ defmodule Pleroma.ReverseProxyTest do
     end
   end

-  defp error_mock(status) when is_integer(status) do
-    ClientMock
-    |> expect(:request, fn :get, "/status/" <> _, _, _, _ ->
-      {:error, status}
-    end)
-  end
-
   describe "returns error on" do
     test "500", %{conn: conn} do
-      error_mock(500)
       url = "/status/500"

+      Tesla.Mock.mock(fn %{url: ^url} ->
+        %Tesla.Env{
+          status: 500,
+          body: ""
+        }
+      end)
+
       capture_log(fn -> ReverseProxy.call(conn, url) end) =~
         "[error] Elixir.Pleroma.ReverseProxy: request to /status/500 failed with HTTP status 500"

@@ -168,9 +106,15 @@ defmodule Pleroma.ReverseProxyTest do
     end

     test "400", %{conn: conn} do
-      error_mock(400)
       url = "/status/400"

+      Tesla.Mock.mock(fn %{url: ^url} ->
+        %Tesla.Env{
+          status: 400,
+          body: ""
+        }
+      end)
+
       capture_log(fn -> ReverseProxy.call(conn, url) end) =~
         "[error] Elixir.Pleroma.ReverseProxy: request to /status/400 failed with HTTP status 400"

@@ -179,9 +123,15 @@ defmodule Pleroma.ReverseProxyTest do
     end

     test "403", %{conn: conn} do
-      error_mock(403)
       url = "/status/403"

+      Tesla.Mock.mock(fn %{url: ^url} ->
+        %Tesla.Env{
+          status: 403,
+          body: ""
+        }
+      end)
+
       capture_log(fn ->
         ReverseProxy.call(conn, url, failed_request_ttl: :timer.seconds(120))
       end) =~
@@ -190,57 +140,17 @@ defmodule Pleroma.ReverseProxyTest do
       {:ok, ttl} = Cachex.ttl(:failed_proxy_url_cache, url)
       assert ttl > 100_000
     end
-
-    test "204", %{conn: conn} do
-      url = "/status/204"
-      expect(ClientMock, :request, fn :get, _url, _, _, _ -> {:ok, 204, [], %{}} end)
-
-      capture_log(fn ->
-        conn = ReverseProxy.call(conn, url)
-        assert conn.resp_body == "Request failed: No Content"
-        assert conn.halted
-      end) =~
-        "[error] Elixir.Pleroma.ReverseProxy: request to \"/status/204\" failed with HTTP status 204"
-
-      assert Cachex.get(:failed_proxy_url_cache, url) == {:ok, true}
-      assert Cachex.ttl(:failed_proxy_url_cache, url) == {:ok, nil}
-    end
-  end
-
-  test "streaming", %{conn: conn} do
-    stream_mock(21)
-    conn = ReverseProxy.call(conn, "/stream-bytes/200")
-    assert conn.state == :chunked
-    assert byte_size(conn.resp_body) == 200
-    assert Conn.get_resp_header(conn, "content-type") == ["application/octet-stream"]
-  end
-
-  defp headers_mock(_) do
-    ClientMock
-    |> expect(:request, fn :get, "/headers", headers, _, _ ->
-      Registry.register(ClientMock, "/headers", 0)
-      {:ok, 200, [{"content-type", "application/json"}], %{url: "/headers", headers: headers}}
-    end)
-    |> expect(:stream_body, 2, fn %{url: url, headers: headers} = client ->
-      case Registry.lookup(ClientMock, url) do
-        [{_, 0}] ->
-          Registry.update_value(ClientMock, url, &(&1 + 1))
-          headers = for {k, v} <- headers, into: %{}, do: {String.capitalize(k), v}
-          {:ok, Jason.encode!(%{headers: headers}), client}
-
-        [{_, 1}] ->
-          Registry.unregister(ClientMock, url)
-          :done
-      end
-    end)
-
-    :ok
   end

   describe "keep request headers" do
-    setup [:headers_mock]
-
     test "header passes", %{conn: conn} do
+      Tesla.Mock.mock(fn %{url: "/headers"} ->
+        %Tesla.Env{
+          status: 200,
+          body: ""
+        }
+      end)
+
       conn =
         Conn.put_req_header(
           conn,
@@ -249,68 +159,76 @@ defmodule Pleroma.ReverseProxyTest do
         )
         |> ReverseProxy.call("/headers")

-      %{"headers" => headers} = json_response(conn, 200)
-      assert headers["Accept"] == "text/html"
+      assert response(conn, 200)
+      assert {"accept", "text/html"} in conn.req_headers
     end

     test "header is filtered", %{conn: conn} do
+      # Mock will fail if the accept-language header isn't filtered
+      wanted_headers = [{"accept-encoding", "*"}]

+      Tesla.Mock.mock(fn %{url: "/headers", headers: ^wanted_headers} ->
+        %Tesla.Env{
+          status: 200,
+          body: ""
+        }
+      end)
+
       conn =
-        Conn.put_req_header(
-          conn,
-          "accept-language",
-          "en-US"
-        )
+        conn
+        |> Conn.put_req_header("accept-language", "en-US")
+        |> Conn.put_req_header("accept-encoding", "*")
         |> ReverseProxy.call("/headers")

-      %{"headers" => headers} = json_response(conn, 200)
-      refute headers["Accept-Language"]
+      assert response(conn, 200)
     end
   end

   test "returns 400 on non GET, HEAD requests", %{conn: conn} do
+    Tesla.Mock.mock(fn %{url: "/ip"} ->
+      %Tesla.Env{
+        status: 200,
+        body: ""
+      }
+    end)
+
     conn = ReverseProxy.call(Map.put(conn, :method, "POST"), "/ip")
-    assert conn.status == 400
+    assert response(conn, 400)
   end

-  describe "cache resp headers" do
+  describe "cache resp headers not filtered" do
     test "add cache-control", %{conn: conn} do
-      ClientMock
-      |> expect(:request, fn :get, "/cache", _, _, _ ->
-        {:ok, 200, [{"ETag", "some ETag"}], %{}}
+      Tesla.Mock.mock(fn %{url: "/cache"} ->
+        %Tesla.Env{
+          status: 200,
+          headers: [
+            {"cache-control", "public, max-age=1209600"},
+            {"etag", "some ETag"},
+            {"expires", "Wed, 21 Oct 2015 07:28:00 GMT"}
+          ],
+          body: ""
+        }
       end)
-      |> expect(:stream_body, fn _ -> :done end)

       conn = ReverseProxy.call(conn, "/cache")
       assert {"cache-control", "public, max-age=1209600"} in conn.resp_headers
+      assert {"etag", "some ETag"} in conn.resp_headers
+      assert {"expires", "Wed, 21 Oct 2015 07:28:00 GMT"} in conn.resp_headers
     end
   end

-  defp disposition_headers_mock(headers) do
-    ClientMock
-    |> expect(:request, fn :get, "/disposition", _, _, _ ->
-      Registry.register(ClientMock, "/disposition", 0)
-
-      {:ok, 200, headers, %{url: "/disposition"}}
-    end)
-    |> expect(:stream_body, 2, fn %{url: "/disposition"} = client ->
-      case Registry.lookup(ClientMock, "/disposition") do
-        [{_, 0}] ->
-          Registry.update_value(ClientMock, "/disposition", &(&1 + 1))
-          {:ok, "", client}
-
-        [{_, 1}] ->
-          Registry.unregister(ClientMock, "/disposition")
-          :done
-      end
-    end)
-  end
-
   describe "response content disposition header" do
-    test "not atachment", %{conn: conn} do
-      disposition_headers_mock([
-        {"content-type", "image/gif"},
-        {"content-length", "0"}
-      ])
+    test "not attachment", %{conn: conn} do
+      Tesla.Mock.mock(fn %{url: "/disposition"} ->
+        %Tesla.Env{
+          status: 200,
+          headers: [
+            {"content-type", "image/gif"},
+            {"content-length", "0"}
+          ],
+          body: ""
+        }
+      end)

       conn = ReverseProxy.call(conn, "/disposition")

@@ -318,10 +236,16 @@ defmodule Pleroma.ReverseProxyTest do
     end

     test "with content-disposition header", %{conn: conn} do
-      disposition_headers_mock([
-        {"content-disposition", "attachment; filename=\"filename.jpg\""},
-        {"content-length", "0"}
-      ])
+      Tesla.Mock.mock(fn %{url: "/disposition"} ->
+        %Tesla.Env{
+          status: 200,
+          headers: [
+            {"content-disposition", "attachment; filename=\"filename.jpg\""},
+            {"content-length", "0"}
+          ],
+          body: ""
+        }
+      end)

       conn = ReverseProxy.call(conn, "/disposition")

@@ -7,9 +7,6 @@ ExUnit.start(exclude: [:federated, :erratic] ++ os_exclude)

 Ecto.Adapters.SQL.Sandbox.mode(Pleroma.Repo, :manual)

-Mox.defmock(Pleroma.ReverseProxy.ClientMock, for: Pleroma.ReverseProxy.Client)
-Mox.defmock(Pleroma.GunMock, for: Pleroma.Gun)
-
 {:ok, _} = Application.ensure_all_started(:ex_machina)

 ExUnit.after_suite(fn _results ->
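With the Mox mock definitions gone from the test helper, the reverse-proxy tests depend on Tesla.Mock being the configured adapter for the test environment. A sketch of the corresponding test configuration; the file name is the conventional location and an assumption, not something shown in this diff:

    # config/test.exs
    import Config

    config :tesla, adapter: Tesla.Mock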