change the structure of image ttl parser

Sachin Joshi 2019-07-19 11:43:42 +05:45
parent 18234cc44e
commit de9906ad56
5 changed files with 85 additions and 36 deletions

View File

@@ -4,7 +4,7 @@
 Richmedia are cached without the ttl but the rich media may have image which can expire, like aws signed url.
 In such cases the old image url (expired) is returned from the media cache.
-So to avoid such situation we can define a moddule that will set ttl based no image.
+So to avoid such situation we can define a moddule that will set ttl based on image.
 The module must have a `run` function and it should be registered in the config.
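For reference, registering a TTL setter might look like the sketch below. `MyModule` is the placeholder name used in the parser docstring further down; `[:rich_media, :ttl_setters]` is the key the parser reads, and the rest of this commit moves the callback from `run/2` to `ttl/2`.

```elixir
# Sketch of the config entry (placing it in config/config.exs is an assumption);
# the list is read by the parser via Pleroma.Config.get([:rich_media, :ttl_setters]).
config :pleroma, :rich_media,
  ttl_setters: [MyModule]
```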

View File

@@ -35,17 +35,17 @@ def parse(url) do
   @doc """
   Set the rich media cache based on the expiration time of image.
-  Define a module that has `run` function
+  Adopt behaviour `Pleroma.Web.RichMedia.Parser.TTL`

   ## Example

       defmodule MyModule do
-        def run(data, url) do
+        @behaviour Pleroma.Web.RichMedia.Parser.TTL
+        def ttl(data, url) do
           image_url = Map.get(data, :image)
           # do some parsing in the url and get the ttl of the image
-          # ttl is unix time
-          ttl = parse_ttl_from_url(image_url)
-          Cachex.expire_at(:rich_media_cache, url, ttl * 1000)
+          # and return ttl is unix time
+          parse_ttl_from_url(image_url)
         end
       end
@@ -55,22 +55,26 @@ def run(data, url) do
       ttl_setters: [MyModule]
   """
   def set_ttl_based_on_image({:ok, data}, url) do
-    case Cachex.ttl(:rich_media_cache, url) do
-      {:ok, nil} ->
-        modules = Pleroma.Config.get([:rich_media, :ttl_setters])
-
-        if Enum.count(modules) > 0 do
-          Enum.each(modules, & &1.run(data, url))
-        end
-
-        {:ok, data}
+    with {:ok, nil} <- Cachex.ttl(:rich_media_cache, url) do
+      ttl = get_ttl_from_image(data, url)
+      Cachex.expire_at(:rich_media_cache, url, ttl * 1000)
+      {:ok, data}
+    else
       _ ->
         {:ok, data}
     end
   end

   def set_ttl_based_on_image(data, _url), do: data

+  defp get_ttl_from_image(data, url) do
+    Pleroma.Config.get([:rich_media, :ttl_setters])
+    |> Enum.reduce({:ok, nil}, fn
+      module, {:ok, _ttl} ->
+        module.ttl(data, url)
+
+      _, error ->
+        error
+    end)
+  end
+
   defp parse_url(url) do
     try do
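A self-contained sketch of how that `Enum.reduce` walks the configured setters: each setter's `ttl/2` result replaces the accumulator as long as it still matches `{:ok, _}`, and the first value that does not match (an error) is carried through to the end. The two modules below are hypothetical stand-ins, not part of Pleroma.

```elixir
defmodule FirstSetter do
  # pretends to find an expiry: returns a unix timestamp wrapped in :ok
  def ttl(_data, _url), do: {:ok, 1_563_271_230}
end

defmodule SecondSetter do
  # pretends to fail
  def ttl(_data, _url), do: {:error, "no expiry found"}
end

result =
  [FirstSetter, SecondSetter]
  |> Enum.reduce({:ok, nil}, fn
    module, {:ok, _ttl} -> module.ttl(%{image: "https://example.com/img.png"}, "https://example.com")
    _module, error -> error
  end)

# result == {:error, "no expiry found"} — once an error is in the accumulator,
# any remaining setters are skipped over without being called.
```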

View File

@@ -1,5 +1,8 @@
 defmodule Pleroma.Web.RichMedia.Parser.TTL.AwsSignedUrl do
-  def run(data, url) do
+  @behaviour Pleroma.Web.RichMedia.Parser.TTL
+
+  @impl Pleroma.Web.RichMedia.Parser.TTL
+  def ttl(data, _url) do
     image = Map.get(data, :image)

     if is_aws_signed_url(image) do
@@ -7,7 +10,6 @@ def run(data, url) do
       |> parse_query_params()
       |> format_query_params()
       |> get_expiration_timestamp()
-      |> set_ttl(url)
     end
   end
@@ -47,8 +49,4 @@ defp get_expiration_timestamp(params) when is_map(params) do
     Timex.to_unix(date) + String.to_integer(Map.get(params, "X-Amz-Expires"))
   end
-
-  defp set_ttl(ttl, url) do
-    Cachex.expire_at(:rich_media_cache, url, ttl * 1000)
-  end
 end
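A worked sketch of the expiration arithmetic in `get_expiration_timestamp/1` above, using an illustrative `X-Amz-Date` value (the `{ISO:Basic:Z}` parse format is the same one used in the test below):

```elixir
params = %{"X-Amz-Date" => "20190716T100000Z", "X-Amz-Expires" => "30"}

expires_at =
  params
  |> Map.get("X-Amz-Date")
  |> Timex.parse!("{ISO:Basic:Z}")
  |> Timex.to_unix()
  |> Kernel.+(String.to_integer(Map.get(params, "X-Amz-Expires")))

# 2019-07-16T10:00:00Z is unix time 1_563_271_200, so expires_at == 1_563_271_230:
# the signed URL stops being valid 30 seconds after it was issued.
```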

View File

@@ -0,0 +1,3 @@
+defmodule Pleroma.Web.RichMedia.Parser.TTL do
+  @callback ttl(Map.t(), String.t()) :: {:ok, Integer.t()} | {:error, String.t()}
+end
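A minimal sketch of a module adopting this behaviour; `MyTTLSetter` and its fixed one-hour window are hypothetical and only illustrate the callback shape declared above.

```elixir
defmodule MyTTLSetter do
  @behaviour Pleroma.Web.RichMedia.Parser.TTL

  @impl Pleroma.Web.RichMedia.Parser.TTL
  def ttl(data, _url) do
    case Map.get(data, :image) do
      # no image in the rich media metadata, nothing to base a TTL on
      nil -> {:error, "no image found"}
      # otherwise expire one hour from now (unix time, in seconds)
      _image_url -> {:ok, DateTime.to_unix(DateTime.utc_now()) + 3600}
    end
  end
end
```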

View File

@@ -5,7 +5,7 @@
 defmodule Pleroma.Web.RichMedia.TTL.AwsSignedUrlTest do
   use ExUnit.Case, async: true

-  test "amazon signed url is parsed and correct ttl is set for rich media" do
+  test "s3 signed url is parsed correct for expiration time" do
     url = "https://pleroma.social/amz"

     {:ok, timestamp} =
@@ -16,22 +16,66 @@ test "amazon signed url is parsed and correct ttl is set for rich media" do
     # in seconds
     valid_till = 30

-    data = %{
-      image:
-        "https://pleroma.s3.ap-southeast-1.amazonaws.com/sachin%20%281%29%20_a%20-%25%2Aasdasd%20BNN%20bnnn%20.png?X-Amz-Algorithm=AWS4-HMAC-SHA256&X-Amz-Credential=AKIAIBLWWK6RGDQXDLJQ%2F20190716%2Fap-southeast-1%2Fs3%2Faws4_request&X-Amz-Date=#{
-          timestamp
-        }&X-Amz-Expires=#{valid_till}&X-Amz-Signature=04ffd6b98634f4b1bbabc62e0fac4879093cd54a6eed24fe8eb38e8369526bbf&X-Amz-SignedHeaders=host",
-      locale: "en_US",
-      site_name: "Pleroma",
-      title: "PLeroma",
-      url: url
-    }
-
-    Cachex.put(:rich_media_cache, url, data)
-
-    assert {:ok, _} = Pleroma.Web.RichMedia.Parser.TTL.AwsSignedUrl.run(data, url)
+    metadata = construct_metadata(timestamp, valid_till, url)
+
+    expire_time =
+      Timex.parse!(timestamp, "{ISO:Basic:Z}") |> Timex.to_unix() |> Kernel.+(valid_till)
+
+    assert expire_time == Pleroma.Web.RichMedia.Parser.TTL.AwsSignedUrl.ttl(metadata, url)
+  end
+
+  test "s3 signed url is parsed and correct ttl is set for rich media" do
+    url = "https://pleroma.social/amz"
+
+    {:ok, timestamp} =
+      Timex.now()
+      |> DateTime.truncate(:second)
+      |> Timex.format("{ISO:Basic:Z}")
+
+    # in seconds
+    valid_till = 30
+
+    metadata = construct_metadata(timestamp, valid_till, url)
+
+    body = """
+    <meta name="twitter:card" content="Pleroma" />
+    <meta name="twitter:site" content="Pleroma" />
+    <meta name="twitter:title" content="Pleroma" />
+    <meta name="twitter:description" content="Pleroma" />
+    <meta name="twitter:image" content="#{Map.get(metadata, :image)}" />
+    """
+
+    Tesla.Mock.mock(fn
+      %{
+        method: :get,
+        url: "https://pleroma.social/amz"
+      } ->
+        %Tesla.Env{status: 200, body: body}
+    end)
+
+    Cachex.put(:rich_media_cache, url, metadata)
+
+    Pleroma.Web.RichMedia.Parser.set_ttl_based_on_image({:ok, metadata}, url)

     {:ok, cache_ttl} = Cachex.ttl(:rich_media_cache, url)

     # as there is delay in setting and pulling the data from cache we ignore 1 second
     assert_in_delta(valid_till * 1000, cache_ttl, 1000)
   end
+
+  defp construct_s3_url(timestamp, valid_till) do
+    "https://pleroma.s3.ap-southeast-1.amazonaws.com/sachin%20%281%29%20_a%20-%25%2Aasdasd%20BNN%20bnnn%20.png?X-Amz-Algorithm=AWS4-HMAC-SHA256&X-Amz-Credential=AKIAIBLWWK6RGDQXDLJQ%2F20190716%2Fap-southeast-1%2Fs3%2Faws4_request&X-Amz-Date=#{
+      timestamp
+    }&X-Amz-Expires=#{valid_till}&X-Amz-Signature=04ffd6b98634f4b1bbabc62e0fac4879093cd54a6eed24fe8eb38e8369526bbf&X-Amz-SignedHeaders=host"
+  end
+
+  defp construct_metadata(timestamp, valid_till, url) do
+    %{
+      image: construct_s3_url(timestamp, valid_till),
+      site: "Pleroma",
+      title: "Pleroma",
+      description: "Pleroma",
+      url: url
+    }
+  end
 end