defmodule PlugMicropub.Handler do
  @moduledoc """
  Dispatches authenticated Micropub actions and queries to the handler
  module configured under `conn.private[:plug_micropub][:handler]`.
  """

  import Plug.Conn
  alias PlugMicropub.{Response, Parser, Properties}

  @doc """
  Handles a Micropub action (`:create`, `:update`, `:delete`, `:undelete`).

  For `:create`, the request body is parsed according to its content type,
  normalized via `Properties.parse/1`, and handed to the configured
  handler's `handle_create/3`. On success, responds with the handler's
  status code and a `location` header; any failure is rendered through
  `Response.send_error/2`.
  """
  def handle_action(:create, access_token, conn) do
    content_type = conn |> get_req_header("content-type") |> List.first()
    handler = conn.private[:plug_micropub][:handler]

    # Use `<-` (not `=`) so a `Properties.parse/1` failure falls through to
    # the `else` branch instead of raising a MatchError.
    with {:ok, type, raw_properties} <- Parser.parse_create_body(content_type, conn.body_params),
         {:ok, properties} <- Properties.parse(raw_properties),
         {:ok, code, url} <- handler.handle_create(type, properties, access_token) do
      conn
      |> put_resp_header("location", url)
      |> send_resp(code, "")
    else
      error -> Response.send_error(conn, error)
    end
  end

  # Updates require a JSON body with a "url" plus replace/add/delete maps.
  def handle_action(:update, access_token, conn) do
    content_type = conn |> get_req_header("content-type") |> List.first()

    with "application/json" <- content_type,
         {url, properties} when is_binary(url) <- Map.pop(conn.body_params, "url"),
         {:ok, replace, add, delete} <- Parser.parse_update_properties(properties),
         do: do_update(conn, access_token, url, replace, add, delete),
         else: (_ -> Response.send_error(conn, {:error, :invalid_request}))
  end

  def handle_action(:delete, access_token, conn) do
    with {:ok, url} <- Map.fetch(conn.body_params, "url"),
         do: do_delete(conn, access_token, url),
         else: (_ -> Response.send_error(conn, {:error, :invalid_request}))
  end

  def handle_action(:undelete, access_token, conn) do
    with {:ok, url} <- Map.fetch(conn.body_params, "url"),
         do: do_undelete(conn, access_token, url),
         else: (_ -> Response.send_error(conn, {:error, :invalid_request}))
  end

  @doc """
  Handles a Micropub query (`:config`, `:source`, `:"syndicate-to"`),
  delegating to the configured handler and sending its result as JSON.
  """
  def handle_query(:config, access_token, conn) do
    handler = conn.private[:plug_micropub][:handler]

    case handler.handle_config_query(access_token) do
      {:ok, content} -> Response.send_content(conn, content)
      error -> Response.send_error(conn, error)
    end
  end

  def handle_query(:source, access_token, conn) do
    with {:ok, url} <- Map.fetch(conn.query_params, "url"),
         do: do_source_query(conn, access_token, url),
         else: (_ -> Response.send_error(conn, {:error, :invalid_request}))
  end

  def handle_query(:"syndicate-to", access_token, conn) do
    handler = conn.private[:plug_micropub][:handler]

    case handler.handle_syndicate_to_query(access_token) do
      {:ok, content} -> Response.send_content(conn, content)
      error -> Response.send_error(conn, error)
    end
  end

  # Delegates an update to the handler. `:ok` -> 204 No Content;
  # `{:ok, url}` -> 201 Created with a `location` header (the post moved).
  defp do_update(conn, access_token, url, replace, add, delete) do
    handler = conn.private[:plug_micropub][:handler]

    case handler.handle_update(url, replace, add, delete, access_token) do
      :ok ->
        send_resp(conn, :no_content, "")

      {:ok, url} ->
        conn
        |> put_resp_header("location", url)
        |> send_resp(:created, "")

      error ->
        Response.send_error(conn, error)
    end
  end

  defp do_delete(conn, access_token, url) do
    handler = conn.private[:plug_micropub][:handler]

    case handler.handle_delete(url, access_token) do
      :ok -> send_resp(conn, :no_content, "")
      error -> Response.send_error(conn, error)
    end
  end

  # Like do_update/6: undeletion may answer 204, or 201 with the new URL.
  defp do_undelete(conn, access_token, url) do
    handler = conn.private[:plug_micropub][:handler]

    case handler.handle_undelete(url, access_token) do
      :ok ->
        send_resp(conn, :no_content, "")

      {:ok, url} ->
        conn
        |> put_resp_header("location", url)
        |> send_resp(:created, "")

      error ->
        Response.send_error(conn, error)
    end
  end

  # Fetches the (optionally property-filtered) source of a post.
  defp do_source_query(conn, access_token, url) do
    handler = conn.private[:plug_micropub][:handler]
    properties = Map.get(conn.query_params, "properties", [])

    case handler.handle_source_query(url, properties, access_token) do
      {:ok, content} -> Response.send_content(conn, content)
      error -> Response.send_error(conn, error)
    end
  end
end
defmodule PlugMicropub.Parser do
  @moduledoc """
  Extracts Micropub actions, queries, tokens and post bodies from a
  `Plug.Conn`, returning tagged tuples for the router to act on.
  """

  import Plug.Conn

  @doc """
  Pops the "action" field from the body params.

  Returns `{:ok, action_atom, conn}` (defaulting to `:create` when no
  action is supplied) or an invalid-request error for unknown actions.
  """
  def get_action(conn) do
    {raw_action, remaining} = Map.pop(conn.body_params, "action")
    conn = %Plug.Conn{conn | body_params: remaining}

    cond do
      is_nil(raw_action) ->
        {:ok, :create, conn}

      raw_action in ["delete", "undelete", "update"] ->
        # Known action names only, so no new atoms can be created.
        {:ok, String.to_existing_atom(raw_action), conn}

      true ->
        {:error, :invalid_request, "Invalid action supplied."}
    end
  end

  @doc """
  Reads the "q" query parameter and returns one of the supported query
  atoms, or an invalid-request error.
  """
  def get_query(conn) do
    with {:ok, query} when query in ["config", "source", "syndicate-to"] <-
           Map.fetch(conn.query_params, "q") do
      {:ok, String.to_existing_atom(query)}
    else
      _ -> {:error, :invalid_request, "Invalid query supplied."}
    end
  end

  @doc """
  Fetches the uploaded "file" from the body params for the media endpoint.
  """
  def get_file(conn) do
    case conn.body_params do
      %{"file" => file} -> {:ok, file}
      _ -> {:error, :invalid_request, "Invalid file supplied."}
    end
  end

  @doc """
  Extracts the access token, preferring a body parameter and falling back
  to the `Authorization: Bearer …` header.
  """
  def get_access_token(conn) do
    {token, remaining} = Map.pop(conn.body_params, "access_token")
    conn = %Plug.Conn{conn | body_params: remaining}

    if is_nil(token) do
      parse_auth_header(conn)
    else
      {:ok, token, conn}
    end
  end

  defp parse_auth_header(conn) do
    case get_req_header(conn, "authorization") do
      ["Bearer" <> token] -> {:ok, String.trim(token), conn}
      _ -> {:error, :unauthorized, "Authentication from header failed."}
    end
  end

  @doc """
  Parses a create request body into `{:ok, type, properties}`.

  The JSON variant expects a `"type"` of `["h-<type>"]` and a map of
  `"properties"`; the form-encoded variant expects an `"h"` field and
  wraps every remaining value in a list.
  """
  def parse_create_body("application/json", params) do
    case params do
      %{"type" => ["h-" <> type], "properties" => properties} when is_map(properties) ->
        {:ok, type, Map.new(properties)}

      _ ->
        {:error, :invalid_request}
    end
  end

  def parse_create_body(_content_type, params) do
    case Map.pop(params, "h") do
      {type, rest} when is_binary(type) ->
        properties = Map.new(rest, fn {key, value} -> {key, List.wrap(value)} end)
        {:ok, type, properties}

      _ ->
        {:error, :invalid_request}
    end
  end

  @doc """
  Validates the replace/add/delete operations of an update request,
  returning `{:ok, replace, add, delete}` (each defaulting to `%{}`)
  or `:error` when any operation is malformed.
  """
  def parse_update_properties(properties) do
    operations = Map.take(properties, ["replace", "add", "delete"])

    if Enum.all?(operations, &valid_update_operation?/1) do
      {:ok,
       Map.get(operations, "replace", %{}),
       Map.get(operations, "add", %{}),
       Map.get(operations, "delete", %{})}
    else
      :error
    end
  end

  # "delete" may be a plain list of property names …
  defp valid_update_operation?({"delete", props}) when is_list(props),
    do: Enum.all?(props, &is_binary/1)

  # … while every operation (including "delete") may be a map of
  # property-name => list-of-values.
  defp valid_update_operation?({_op, props}) when is_map(props),
    do: Enum.all?(props, fn {_key, values} -> is_list(values) end)

  defp valid_update_operation?(_), do: false
end
json_encoder: json_encoder, + scopes: scopes, + token_endpoint: token_endpoint, + user_agent: user_agent + ] end @doc false @@ -33,290 +66,86 @@ defmodule PlugMicropub do # Routes post "/" do - with {:ok, access_token, conn} <- get_access_token(conn), - {:ok, action, conn} <- get_action(conn) do - handle_action(action, access_token, conn) + token_endpoint = get_config(conn, :token_endpoint) + supported_scopes = get_config(conn, :scopes) + hostname = get_config(conn, :hostname) + user_agent = get_config(conn, :user_agent) + token_handler = get_config(conn, :token_handler) + + with {:ok, access_token, conn} <- Parser.get_access_token(conn), + {:ok, action, conn} <- Parser.get_action(conn), + :ok <- + token_handler.verify( + access_token, + token_endpoint, + Atom.to_string(action), + supported_scopes, + hostname, + user_agent + ) do + Handler.handle_action(action, access_token, conn) else - error -> send_error(conn, error) + error -> Response.send_error(conn, error) end end get "/" do - with {:ok, access_token, conn} <- get_access_token(conn), - {:ok, query} <- get_query(conn) do - handle_query(query, access_token, conn) + token_endpoint = get_config(conn, :token_endpoint) + supported_scopes = get_config(conn, :scopes) + hostname = get_config(conn, :hostname) + user_agent = get_config(conn, :user_agent) + token_handler = get_config(conn, :token_handler) + + with {:ok, access_token, conn} <- Parser.get_access_token(conn), + {:ok, query} <- Parser.get_query(conn), + :ok <- + token_handler.verify( + access_token, + token_endpoint, + "source", + supported_scopes, + hostname, + user_agent + ) do + Handler.handle_query(query, access_token, conn) else - error -> send_error(conn, error) + error -> Response.send_error(conn, error) end end post "/media" do - handler = conn.private[:plug_micropub][:handler] + handler = get_config(conn, :handler) + token_endpoint = get_config(conn, :token_endpoint) + supported_scopes = get_config(conn, :scopes) + hostname = get_config(conn, 
:hostname) + user_agent = get_config(conn, :user_agent) + token_handler = get_config(conn, :token_handler) - with {:ok, access_token, conn} <- get_access_token(conn), - {:ok, file} <- get_file(conn), - {:ok, url} <- handler.handle_media(file, access_token) do + with {:ok, access_token, conn} <- Parser.get_access_token(conn), + {:ok, file} <- Parser.get_file(conn), + {:ok, url} <- handler.handle_media(file, access_token), + :ok <- + token_handler.verify( + access_token, + token_endpoint, + "media", + supported_scopes, + hostname, + user_agent + ) do conn |> put_resp_header("location", url) |> send_resp(:created, "") else - error -> send_error(conn, error) + error -> Response.send_error(conn, error) end end match _ do - send_error(conn, {:error, :invalid_request}) + Response.send_error(conn, {:error, :invalid_request, "Request did not match any route."}) end - # Internal Functions - - defp send_content(conn, content) do - json_encoder = conn.private[:plug_micropub][:json_encoder] - body = json_encoder.encode!(content) - - conn - |> put_resp_content_type("application/json") - |> send_resp(:ok, body) - end - - defp send_error(conn, {:error, error}) do - body = %{error: error} - _send_error(conn, body) - end - - defp send_error(conn, {:error, error, description}) do - body = %{error: error, error_description: description} - _send_error(conn, body) - end - - defp _send_error(conn, body) do - json_encoder = conn.private[:plug_micropub][:json_encoder] - - code = get_error_code(body.error) - body = json_encoder.encode!(body) - - conn - |> put_resp_content_type("application/json") - |> send_resp(code, body) - end - - defp get_error_code(:insufficient_scope), do: :unauthorized - defp get_error_code(:invalid_request), do: :bad_request - defp get_error_code(code), do: code - - defp get_action(conn) do - {action, body_params} = Map.pop(conn.body_params, "action") - conn = %Plug.Conn{conn | body_params: body_params} - - case action do - nil -> - {:ok, :create, conn} - - action 
when action in ["delete", "undelete", "update"] -> - {:ok, String.to_existing_atom(action), conn} - - _ -> - {:error, :invalid_request} - end - end - - defp get_query(conn) do - case Map.fetch(conn.query_params, "q") do - {:ok, query} when query in ["config", "source", "syndicate-to"] -> - {:ok, String.to_existing_atom(query)} - - _ -> - {:error, :invalid_request} - end - end - - defp get_file(conn) do - case Map.fetch(conn.body_params, "file") do - {:ok, file} -> {:ok, file} - :error -> {:error, :invalid_request} - end - end - - defp get_access_token(conn) do - {access_token, body_params} = Map.pop(conn.body_params, "access_token") - conn = %Plug.Conn{conn | body_params: body_params} - - case access_token do - nil -> parse_auth_header(conn) - access_token -> {:ok, access_token, conn} - end - end - - defp parse_auth_header(conn) do - with [header] <- get_req_header(conn, "authorization"), - _ = IO.inspect(header), - "Bearer" <> token <- header, - do: {:ok, String.trim(token), conn}, - else: (_ -> {:error, :unauthorized}) - end - - defp handle_action(:create, access_token, conn) do - content_type = conn |> get_req_header("content-type") |> List.first() - handler = conn.private[:plug_micropub][:handler] - - with {:ok, type, properties} <- parse_create_body(content_type, conn.body_params), - {:ok, code, url} <- handler.handle_create(type, properties, access_token) do - conn - |> put_resp_header("location", url) - |> send_resp(code, "") - else - error -> send_error(conn, error) - end - end - - defp handle_action(:update, access_token, conn) do - content_type = conn |> get_req_header("content-type") |> List.first() - - with "application/json" <- content_type, - {url, properties} when is_binary(url) <- Map.pop(conn.body_params, "url"), - {:ok, replace, add, delete} <- parse_update_properties(properties), - do: do_update(conn, access_token, url, replace, add, delete), - else: (_ -> send_error(conn, {:error, :invalid_request})) - end - - defp handle_action(:delete, 
access_token, conn) do - with {:ok, url} <- Map.fetch(conn.body_params, "url"), - do: do_delete(conn, access_token, url), - else: (_ -> send_error(conn, {:error, :invalid_request})) - end - - defp handle_action(:undelete, access_token, conn) do - with {:ok, url} <- Map.fetch(conn.body_params, "url"), - do: do_undelete(conn, access_token, url), - else: (_ -> send_error(conn, {:error, :invalid_request})) - end - - defp handle_query(:config, access_token, conn) do - handler = conn.private[:plug_micropub][:handler] - - case handler.handle_config_query(access_token) do - {:ok, content} -> send_content(conn, content) - error -> send_error(conn, error) - end - end - - defp handle_query(:source, access_token, conn) do - with {:ok, url} <- Map.fetch(conn.query_params, "url"), - do: do_source_query(conn, access_token, url), - else: (_ -> send_error(conn, {:error, :invalid_request})) - end - - defp handle_query(:"syndicate-to", access_token, conn) do - handler = conn.private[:plug_micropub][:handler] - - case handler.handle_syndicate_to_query(access_token) do - {:ok, content} -> send_content(conn, content) - error -> send_error(conn, error) - end - end - - defp parse_update_properties(properties) do - properties = Map.take(properties, ["replace", "add", "delete"]) - - valid? = - Enum.all?(properties, fn - {"delete", prop} when is_list(prop) -> - Enum.all?(prop, &is_binary/1) - - {_k, prop} when is_map(prop) -> - Enum.all?(prop, fn - {_k, v} when is_list(v) -> true - _ -> false - end) - - _ -> - false - end) - - if valid? 
do - replace = Map.get(properties, "replace", %{}) - add = Map.get(properties, "add", %{}) - delete = Map.get(properties, "delete", %{}) - {:ok, replace, add, delete} - else - :error - end - end - - defp do_update(conn, access_token, url, replace, add, delete) do - handler = conn.private[:plug_micropub][:handler] - - case handler.handle_update(url, replace, add, delete, access_token) do - :ok -> - send_resp(conn, :no_content, "") - - {:ok, url} -> - conn - |> put_resp_header("location", url) - |> send_resp(:created, "") - - error -> - send_error(conn, error) - end - end - - defp do_delete(conn, access_token, url) do - handler = conn.private[:plug_micropub][:handler] - - case handler.handle_delete(url, access_token) do - :ok -> send_resp(conn, :no_content, "") - error -> send_error(conn, error) - end - end - - defp do_undelete(conn, access_token, url) do - handler = conn.private[:plug_micropub][:handler] - - case handler.handle_undelete(url, access_token) do - :ok -> - send_resp(conn, :no_content, "") - - {:ok, url} -> - conn - |> put_resp_header("location", url) - |> send_resp(:created, "") - - error -> - send_error(conn, error) - end - end - - defp do_source_query(conn, access_token, url) do - handler = conn.private[:plug_micropub][:handler] - properties = Map.get(conn.query_params, "properties", []) - - case handler.handle_source_query(url, properties, access_token) do - {:ok, content} -> send_content(conn, content) - error -> send_error(conn, error) - end - end - - defp parse_create_body("application/json", params) do - with {:ok, ["h-" <> type]} <- Map.fetch(params, "type"), - {:ok, properties} when is_map(properties) <- Map.fetch(params, "properties") do - properties = Map.new(properties) - - {:ok, type, properties} - else - _ -> {:error, :invalid_request} - end - end - - defp parse_create_body(_, params) do - with {type, params} when is_binary(type) <- Map.pop(params, "h") do - properties = - params - |> Enum.map(fn {k, v} -> {k, List.wrap(v)} end) - |> 
Map.new() - - {:ok, type, properties} - else - _ -> {:error, :invalid_request} - end + defp get_config(conn, name) do + conn.private[:plug_micropub][name] end end diff --git a/lib/post.ex b/lib/post.ex new file mode 100644 index 0000000..f54014c --- /dev/null +++ b/lib/post.ex @@ -0,0 +1,3 @@ +defmodule PlugMicropub.Post do + defstruct [:type, :title, :content] +end diff --git a/lib/properties.ex b/lib/properties.ex new file mode 100644 index 0000000..e4f02b2 --- /dev/null +++ b/lib/properties.ex @@ -0,0 +1,85 @@ +defmodule PlugMicropub.Properties do + def parse(properties) do + {:ok, type} = get_post_type(properties) + content = get_content(properties) + title = get_title(properties) + + case type do + :note -> + {:ok, + %PlugMicropub.Post{ + type: type, + title: title, + content: content + }} + + :unknown -> + {:error, :parse_error} + end + end + + def get_post_type(properties) do + cond do + Map.has_key?(properties, "like-of") -> + {:ok, :like} + + Map.has_key?(properties, "bookmark-of") -> + {:ok, :bookmark} + + Map.has_key?(properties, "content") -> + {:ok, :note} + + true -> + {:ok, :unknown} + end + end + + def get_tags(%{"category" => [""]} = _props), do: [] + def get_tags(%{"category" => tags} = _props), do: tags + def get_tags(_props), do: [] + + def get_title(%{"name" => [title]} = _props), do: title + def get_title(_props), do: nil + + def get_content(%{"content" => [%{"html" => content_html}]} = _props), do: content_html + def get_content(%{"content" => [content]} = _props), do: content + def get_content(_props), do: nil + + def get_bookmarked_url(%{"bookmark-of" => [url]} = _props), do: url + def get_bookmarked_url(_props), do: nil + + def get_reposted_url(%{"repost-of" => [url]} = _props), do: url + def get_reposted_url(_props), do: nil + + def get_liked_url(%{"like-of" => [url]} = _props), do: url + def get_liked_url(_props), do: nil + + def get_read_url(%{"read-of" => [url]} = _props), do: url + def get_read_url(_props), do: nil + + def 
get_watched_url(%{"watch-of" => [url]} = _props), do: url + def get_watched_url(_props), do: nil + + def get_listened_url(%{"listen-of" => [url]} = _props), do: url + def get_listened_url(_props), do: nil + + def get_reply_to(%{"in-reply-to" => [reply_to]} = _props), do: reply_to + def get_reply_to(_props), do: nil + + def is_published?(%{"post-status" => ["draft"]} = _props), do: false + def is_published?(_props), do: true + + def get_photo(%{"photo" => [photo]} = _props), do: photo + def get_photo(_props), do: nil + + def get_syndication_targets(%{"mp-syndicate-to" => targets} = _props), do: targets + def get_syndication_targets(_props), do: [] + + def get_channel(%{"mp-channel" => [channel]} = _props), do: channel + def get_channel(_props), do: nil + + def has_target?(%{"mp-syndicate-to" => targets} = _props, name), + do: Enum.any?(targets, fn t -> t == name end) + + def has_target?(_props, _name), do: false +end diff --git a/lib/response.ex b/lib/response.ex new file mode 100644 index 0000000..f1a2c32 --- /dev/null +++ b/lib/response.ex @@ -0,0 +1,38 @@ +defmodule PlugMicropub.Response do + import Plug.Conn + + def send_content(conn, content) do + json_encoder = conn.private[:plug_micropub][:json_encoder] + body = json_encoder.encode!(content) + + conn + |> put_resp_content_type("application/json") + |> send_resp(:ok, body) + end + + def send_error(conn, {:error, error}) do + body = %{error: error} + _send_error(conn, body) + end + + def send_error(conn, {:error, error, description}) do + body = %{error: error, error_description: description} + _send_error(conn, body) + end + + defp _send_error(conn, body) do + json_encoder = conn.private[:plug_micropub][:json_encoder] + + code = get_error_code(body.error) + body = json_encoder.encode!(body) + + conn + |> put_resp_content_type("application/json") + |> send_resp(code, body) + end + + defp get_error_code(:insufficient_scope), do: :unauthorized + defp get_error_code(:invalid_request), do: :bad_request + defp 
defmodule TestToken do
  @moduledoc false
  # Token verifier stub for the test suite: accepts every token.
  def verify(_, _, _, _, _, _), do: :ok
end

defmodule PlugMicropub.Token do
  @moduledoc """
  Default token verifier: introspects the bearer token against the
  configured token endpoint and checks hostname and scope.
  """

  require Logger

  @doc """
  Verifies `access_token` against `token_endpoint`.

  Returns `:ok` when the token's `me` hostname matches `own_hostname` and
  `required_scope` is both supported and granted; otherwise returns
  `{:error, name, reason}`.
  """
  def verify(
        access_token,
        token_endpoint,
        required_scope,
        supported_scopes,
        own_hostname,
        user_agent
      ) do
    case do_verify_token(access_token, token_endpoint, user_agent) do
      {:ok, %{status: 200, body: body}} ->
        verify_token_response(body, required_scope, supported_scopes, own_hostname)

      {:ok, %{status: status}} ->
        {:error, :request_error, status}

      {:error, %{code: code}} ->
        Logger.error("Token endpoint responded with unexpected code: #{inspect(code)}")
        {:error, :request_error, code}

      {:error, %{reason: reason}} ->
        Logger.error("Could not reach token endpoint: #{inspect(reason)}")
        {:error, :request_error, reason}

      error ->
        Logger.error("Unexpected error: #{inspect(error)}")
        {:error, :request_error, "Internal Server Error"}
    end
  end

  # GETs the token endpoint with the bearer token; the JSON middleware
  # decodes the response body for us.
  defp do_verify_token(access_token, token_endpoint, user_agent) do
    client =
      Tesla.client([
        Tesla.Middleware.JSON,
        {Tesla.Middleware.Headers,
         [
           {"User-Agent", user_agent},
           {"Authorization", "Bearer #{access_token}"},
           {"Accept", "application/json"}
         ]}
      ])

    Tesla.get(client, token_endpoint)
  end

  # Tesla.Middleware.JSON decodes response bodies with STRING keys by
  # default, so the body must be matched with string keys — the previous
  # atom-key pattern (%{me: …}) could never match a decoded response and
  # every verification raised a FunctionClauseError.
  defp verify_token_response(
         %{
           "me" => host_uri,
           "scope" => scope,
           "client_id" => client_id,
           "issued_at" => _issued_at,
           "issued_by" => _issued_by,
           "nonce" => _nonce
         },
         required_scope,
         supported_scopes,
         own_hostname
       ) do
    Logger.info("Host-URI: '#{host_uri}'")
    Logger.info("ClientId: '#{client_id}'")
    Logger.info("Scopes: '#{scope}'")

    with :ok <- verify_hostname_match(host_uri, own_hostname),
         :ok <- verify_scope_support(scope, required_scope, supported_scopes) do
      :ok
    else
      {:error, name, reason} ->
        Logger.error("Could not verify token response: #{reason}")
        {:error, name, reason}
    end
  end

  # Any other body shape is reported instead of crashing with a
  # FunctionClauseError.
  defp verify_token_response(_body, _required_scope, _supported_scopes, _own_hostname) do
    {:error, "verify_token_response", "unexpected token endpoint response"}
  end

  defp verify_hostname_match(host_uri, own_hostname) do
    if get_hostname(host_uri) == own_hostname do
      :ok
    else
      Logger.warning("Hostnames do not match: Given #{host_uri}, Actual: #{own_hostname}")
      {:error, "verify_hostname_match", "hostname does not match"}
    end
  end

  defp get_hostname(host_uri) do
    host_uri |> URI.parse() |> Map.get(:host)
  end

  # No scope required -> nothing to check.
  defp verify_scope_support(_scopes, nil, _supported_scopes), do: :ok

  defp verify_scope_support(scopes, required_scope, supported_scopes)
       when not is_nil(required_scope) do
    required = Enum.member?(supported_scopes, required_scope)
    requested = Enum.member?(String.split(scopes), required_scope)

    cond do
      required && requested ->
        :ok

      !required ->
        {:error, "verify_scope_support", "scope '#{required_scope}' is not supported"}

      !requested ->
        {:error, "verify_scope_support", "scope '#{required_scope}' was not requested"}
    end
  end
end
do + true -> + :ok + + _ -> + Logger.warning("Hostnames do not match: Given #{host_uri}, Actual: #{own_hostname}") + {:error, "verify_hostname_match", "hostname does not match"} + end + end + + defp get_hostname(host_uri) do + host_uri |> URI.parse() |> Map.get(:host) + end + + defp verify_scope_support(_scopes, nil, _supported_scopes), do: :ok + + defp verify_scope_support(scopes, required_scope, supported_scopes) + when not is_nil(required_scope) do + required = Enum.member?(supported_scopes, required_scope) + requested = Enum.member?(String.split(scopes), required_scope) + + cond do + required && requested -> + :ok + + !required -> + {:error, "verify_scope_support", "scope '#{required_scope}' is not supported"} + + !requested -> + {:error, "verify_scope_support", "scope '#{required_scope}' was not requested"} + end + end +end diff --git a/mix.exs b/mix.exs index a2996b7..0149c43 100644 --- a/mix.exs +++ b/mix.exs @@ -3,20 +3,21 @@ defmodule PlugMicropub.MixProject do def project do [ - app: :plug_micropub, + app: :plug_indie, version: "0.1.0", elixir: "~> 1.6", start_permanent: Mix.env() == :prod, deps: deps(), - name: "PlugMicropub", - description: "A small library for building a Plug-based Micropub server.", - source_url: "https://github.com/bismark/plug_micropub", + name: "PlugIndie", + description: + "A small library for building a Plug-based IndieWeb server. 
Forked from bismark/plug_micropub", + source_url: "https://github.com/inhji/plug_indie", docs: [main: "readme", extras: ["README.md"]], package: [ - name: "plug_micropub", + name: "plug_indie", licenses: ["BSD 3-Clause"], - maintainers: ["Ryan Johnson"], - links: %{github: "https://github.com/bismark/plug_micropub"} + maintainers: ["Jonathan Jenne"], + links: %{github: "https://github.com/inhji/plug_indie"} ] ] end @@ -29,8 +30,12 @@ defmodule PlugMicropub.MixProject do defp deps do [ - {:plug, "~> 1.5"}, - {:ex_doc, "~> 0.18.3", only: :dev, runtime: false} + {:tesla, "~> 1.13"}, + {:mint, "~> 1.0"}, + {:jason, "~> 1.4"}, + {:plug, "~> 1.16"}, + {:ex_doc, "~> 0.35.1", only: :dev, runtime: false}, + {:mix_test_watch, "~> 1.0", only: [:dev, :test], runtime: false} ] end end diff --git a/mix.lock b/mix.lock index 9f25dcc..3bfcda3 100644 --- a/mix.lock +++ b/mix.lock @@ -1,6 +1,19 @@ %{ - "earmark": {:hex, :earmark, "1.2.4", "99b637c62a4d65a20a9fb674b8cffb8baa771c04605a80c911c4418c69b75439", [:mix], [], "hexpm"}, - "ex_doc": {:hex, :ex_doc, "0.18.3", "f4b0e4a2ec6f333dccf761838a4b253d75e11f714b85ae271c9ae361367897b7", [:mix], [{:earmark, "~> 1.1", [hex: :earmark, repo: "hexpm", optional: false]}], "hexpm"}, - "mime": {:hex, :mime, "1.2.0", "78adaa84832b3680de06f88f0997e3ead3b451a440d183d688085be2d709b534", [:mix], [], "hexpm"}, - "plug": {:hex, :plug, "1.5.0", "224b25b4039bedc1eac149fb52ed456770b9678bbf0349cdd810460e1e09195b", [:mix], [{:cowboy, "~> 1.0.1 or ~> 1.1 or ~> 2.1", [hex: :cowboy, repo: "hexpm", optional: true]}, {:mime, "~> 1.0", [hex: :mime, repo: "hexpm", optional: false]}], "hexpm"}, + "earmark": {:hex, :earmark, "1.2.4", "99b637c62a4d65a20a9fb674b8cffb8baa771c04605a80c911c4418c69b75439", [:mix], [], "hexpm", "1b34655872366414f69dd987cb121c049f76984b6ac69f52fff6d8fd64d29cfd"}, + "earmark_parser": {:hex, :earmark_parser, "1.4.41", "ab34711c9dc6212dda44fcd20ecb87ac3f3fce6f0ca2f28d4a00e4154f8cd599", [:mix], [], "hexpm", 
"a81a04c7e34b6617c2792e291b5a2e57ab316365c2644ddc553bb9ed863ebefa"}, + "ex_doc": {:hex, :ex_doc, "0.35.1", "de804c590d3df2d9d5b8aec77d758b00c814b356119b3d4455e4b8a8687aecaf", [:mix], [{:earmark_parser, "~> 1.4.39", [hex: :earmark_parser, repo: "hexpm", optional: false]}, {:makeup_c, ">= 0.1.0", [hex: :makeup_c, repo: "hexpm", optional: true]}, {:makeup_elixir, "~> 0.14 or ~> 1.0", [hex: :makeup_elixir, repo: "hexpm", optional: false]}, {:makeup_erlang, "~> 0.1 or ~> 1.0", [hex: :makeup_erlang, repo: "hexpm", optional: false]}, {:makeup_html, ">= 0.1.0", [hex: :makeup_html, repo: "hexpm", optional: true]}], "hexpm", "2121c6402c8d44b05622677b761371a759143b958c6c19f6558ff64d0aed40df"}, + "file_system": {:hex, :file_system, "1.0.1", "79e8ceaddb0416f8b8cd02a0127bdbababe7bf4a23d2a395b983c1f8b3f73edd", [:mix], [], "hexpm", "4414d1f38863ddf9120720cd976fce5bdde8e91d8283353f0e31850fa89feb9e"}, + "hpax": {:hex, :hpax, "1.0.0", "28dcf54509fe2152a3d040e4e3df5b265dcb6cb532029ecbacf4ce52caea3fd2", [:mix], [], "hexpm", "7f1314731d711e2ca5fdc7fd361296593fc2542570b3105595bb0bc6d0fad601"}, + "jason": {:hex, :jason, "1.4.4", "b9226785a9aa77b6857ca22832cffa5d5011a667207eb2a0ad56adb5db443b8a", [:mix], [{:decimal, "~> 1.0 or ~> 2.0", [hex: :decimal, repo: "hexpm", optional: true]}], "hexpm", "c5eb0cab91f094599f94d55bc63409236a8ec69a21a67814529e8d5f6cc90b3b"}, + "makeup": {:hex, :makeup, "1.2.1", "e90ac1c65589ef354378def3ba19d401e739ee7ee06fb47f94c687016e3713d1", [:mix], [{:nimble_parsec, "~> 1.4", [hex: :nimble_parsec, repo: "hexpm", optional: false]}], "hexpm", "d36484867b0bae0fea568d10131197a4c2e47056a6fbe84922bf6ba71c8d17ce"}, + "makeup_elixir": {:hex, :makeup_elixir, "1.0.0", "74bb8348c9b3a51d5c589bf5aebb0466a84b33274150e3b6ece1da45584afc82", [:mix], [{:makeup, "~> 1.0", [hex: :makeup, repo: "hexpm", optional: false]}, {:nimble_parsec, "~> 1.2.3 or ~> 1.3", [hex: :nimble_parsec, repo: "hexpm", optional: false]}], "hexpm", 
"49159b7d7d999e836bedaf09dcf35ca18b312230cf901b725a64f3f42e407983"}, + "makeup_erlang": {:hex, :makeup_erlang, "1.0.1", "c7f58c120b2b5aa5fd80d540a89fdf866ed42f1f3994e4fe189abebeab610839", [:mix], [{:makeup, "~> 1.0", [hex: :makeup, repo: "hexpm", optional: false]}], "hexpm", "8a89a1eeccc2d798d6ea15496a6e4870b75e014d1af514b1b71fa33134f57814"}, + "mime": {:hex, :mime, "2.0.6", "8f18486773d9b15f95f4f4f1e39b710045fa1de891fada4516559967276e4dc2", [:mix], [], "hexpm", "c9945363a6b26d747389aac3643f8e0e09d30499a138ad64fe8fd1d13d9b153e"}, + "mint": {:hex, :mint, "1.6.2", "af6d97a4051eee4f05b5500671d47c3a67dac7386045d87a904126fd4bbcea2e", [:mix], [{:castore, "~> 0.1.0 or ~> 1.0", [hex: :castore, repo: "hexpm", optional: true]}, {:hpax, "~> 0.1.1 or ~> 0.2.0 or ~> 1.0", [hex: :hpax, repo: "hexpm", optional: false]}], "hexpm", "5ee441dffc1892f1ae59127f74afe8fd82fda6587794278d924e4d90ea3d63f9"}, + "mix_test_watch": {:hex, :mix_test_watch, "1.2.0", "1f9acd9e1104f62f280e30fc2243ae5e6d8ddc2f7f4dc9bceb454b9a41c82b42", [:mix], [{:file_system, "~> 0.2 or ~> 1.0", [hex: :file_system, repo: "hexpm", optional: false]}], "hexpm", "278dc955c20b3fb9a3168b5c2493c2e5cffad133548d307e0a50c7f2cfbf34f6"}, + "nimble_parsec": {:hex, :nimble_parsec, "1.4.0", "51f9b613ea62cfa97b25ccc2c1b4216e81df970acd8e16e8d1bdc58fef21370d", [:mix], [], "hexpm", "9c565862810fb383e9838c1dd2d7d2c437b3d13b267414ba6af33e50d2d1cf28"}, + "plug": {:hex, :plug, "1.16.1", "40c74619c12f82736d2214557dedec2e9762029b2438d6d175c5074c933edc9d", [:mix], [{:mime, "~> 1.0 or ~> 2.0", [hex: :mime, repo: "hexpm", optional: false]}, {:plug_crypto, "~> 1.1.1 or ~> 1.2 or ~> 2.0", [hex: :plug_crypto, repo: "hexpm", optional: false]}, {:telemetry, "~> 0.4.3 or ~> 1.0", [hex: :telemetry, repo: "hexpm", optional: false]}], "hexpm", "a13ff6b9006b03d7e33874945b2755253841b238c34071ed85b0e86057f8cddc"}, + "plug_crypto": {:hex, :plug_crypto, "2.1.0", "f44309c2b06d249c27c8d3f65cfe08158ade08418cf540fd4f72d4d6863abb7b", [:mix], [], "hexpm", 
"131216a4b030b8f8ce0f26038bc4421ae60e4bb95c5cf5395e1421437824c4fa"}, + "telemetry": {:hex, :telemetry, "1.3.0", "fedebbae410d715cf8e7062c96a1ef32ec22e764197f70cda73d82778d61e7a2", [:rebar3], [], "hexpm", "7015fc8919dbe63764f4b4b87a95b7c0996bd539e0d499be6ec9d7f3875b79e6"}, + "tesla": {:hex, :tesla, "1.13.2", "85afa342eb2ac0fee830cf649dbd19179b6b359bec4710d02a3d5d587f016910", [:mix], [{:castore, "~> 0.1 or ~> 1.0", [hex: :castore, repo: "hexpm", optional: true]}, {:exjsx, ">= 3.0.0", [hex: :exjsx, repo: "hexpm", optional: true]}, {:finch, "~> 0.13", [hex: :finch, repo: "hexpm", optional: true]}, {:fuse, "~> 2.4", [hex: :fuse, repo: "hexpm", optional: true]}, {:gun, ">= 1.0.0", [hex: :gun, repo: "hexpm", optional: true]}, {:hackney, "~> 1.6", [hex: :hackney, repo: "hexpm", optional: true]}, {:ibrowse, "4.4.2", [hex: :ibrowse, repo: "hexpm", optional: true]}, {:jason, ">= 1.0.0", [hex: :jason, repo: "hexpm", optional: true]}, {:mime, "~> 1.0 or ~> 2.0", [hex: :mime, repo: "hexpm", optional: false]}, {:mint, "~> 1.0", [hex: :mint, repo: "hexpm", optional: true]}, {:mox, "~> 1.0", [hex: :mox, repo: "hexpm", optional: true]}, {:msgpax, "~> 2.3", [hex: :msgpax, repo: "hexpm", optional: true]}, {:poison, ">= 1.0.0", [hex: :poison, repo: "hexpm", optional: true]}, {:telemetry, "~> 0.4 or ~> 1.0", [hex: :telemetry, repo: "hexpm", optional: true]}], "hexpm", "960609848f1ef654c3cdfad68453cd84a5febecb6ed9fed9416e36cd9cd724f9"}, } diff --git a/test/parser_test.exs b/test/parser_test.exs new file mode 100644 index 0000000..0ca6818 --- /dev/null +++ b/test/parser_test.exs @@ -0,0 +1,17 @@ +defmodule ParserTest do + use ExUnit.Case + doctest PlugMicropub.Parser + import PlugMicropub.Parser, only: [parse_create_body: 2] + + test "parse_create_body with content-type json" do + params = %{ + "type" => ["h-entry"], + "properties" => %{ + "content" => "Hello World!" 
defmodule PlugMicropubTest do
  use ExUnit.Case, async: true
  use Plug.Test
  doctest PlugMicropub

  # Plug options built once at compile time with the test doubles.
  @opts PlugMicropub.init(
          hostname: "example.com",
          handler: TestHandler,
          token_endpoint: "http://example.com/token",
          json_encoder: Jason,
          user_agent: "ExUnit",
          token_handler: TestToken
        )

  test "creates a new note using json" do
    conn =
      "/"
      |> build_request(%{"content" => ["Hello World!"]})
      |> PlugMicropub.call(@opts)

    # The plug must have sent a 201 Created with the new note's location.
    assert conn.state == :sent
    assert conn.status == 201
    assert ["/notes" <> _] = get_resp_header(conn, "location")
  end

  # Builds an authenticated h-entry POST request carrying `properties`.
  defp build_request(path, properties, content_type \\ "application/json") do
    :post
    |> conn(path, %{"type" => ["h-entry"], "properties" => properties})
    |> put_req_header("authorization", "Bearer 1234567890")
    |> put_req_header("content-type", content_type)
  end
end