diff --git a/CHANGELOG.md b/CHANGELOG.md index 17e525a8..128c3715 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -7,6 +7,27 @@ * `Pow.Plug.Session` now stores a keyword list with metadata for the session rather than just the timestamp * `Pow.Phoenix.Router` now only filters routes that has equal number of bindings * `Pow.Phoenix.Routes.user_not_authenticated_path/1` now only puts the `:request_path` param if the request is using "GET" method +* The stores have been refactored so the commands conform with the ETS store. This means that put commands now accept `{key, value}` record element(s), and keys may be lists or tuples for easier lookup. + * `Pow.Store.Backend.Base` behaviour now requires implementations to: + * Accept `Pow.Store.Backend.Base.record/0` values for `put/2` + * Accept `Pow.Store.Backend.Base.key/0` for `delete/2` and `get/2` + * Implement `all/2` + * Remove `keys/1` + * Remove `put/3` + * `Pow.Store.Backend.EtsCache.keys/1` deprecated + * `Pow.Store.Backend.MnesiaCache.keys/1` deprecated + * `Pow.Store.Backend.EtsCache` and `Pow.Store.Backend.MnesiaCache` now use `:ordered_set` instead of `:set` for efficiency + * `Pow.Store.Base` behaviour now requires implementations to: + * Accept Erlang term values for keys in all methods + * Remove `keys/2` + * Remove `put/4` + * `Pow.Store.Base.all/3` added + * `Pow.Store.Base.keys/2` deprecated + * `Pow.Store.Base.put/4` deprecated + * Added `Pow.Store.CredentialsCache.users/3` + * Added `Pow.Store.CredentialsCache.put/3` + * Deprecated `Pow.Store.CredentialsCache.put/4` + * Deprecated `Pow.Store.CredentialsCache.user_session_keys/3` ## v1.0.13 (2019-08-25) diff --git a/guides/redis_cache_store_backend.md b/guides/redis_cache_store_backend.md index 252aee04..3808c8a9 100644 --- a/guides/redis_cache_store_backend.md +++ b/guides/redis_cache_store_backend.md @@ -23,26 +23,45 @@ defmodule MyAppWeb.PowRedisCache do @redix_instance_name :redix - def put(config, key, value) do - key = redis_key(config, key) - ttl = Config.get(config, :ttl) - value = :erlang.term_to_binary(value) - command = put_command(key, value, ttl) - - Redix.noreply_command(@redix_instance_name, command) + @impl true + def put(config, record_or_records) do + ttl = Config.get(config, :ttl) || raise_ttl_error() + commands = + record_or_records + |> List.wrap() + |> Enum.map(fn {key, value} -> + {redis_key(config, key), value} + end) + |> Enum.map(fn {key, value} -> + put_command(key, value, ttl) + end) + + Redix.noreply_pipeline(@redix_instance_name, commands) end - defp put_command(key, value, ttl) when is_integer(ttl) and ttl > 0, do: ["SET", key, value, "PX", ttl] - defp put_command(key, value, _ttl), do: ["SET", key, value] + defp put_command(key, value, ttl) do + key = to_binary_redis_key(key) + value = :erlang.term_to_binary(value) + + ["SET", key, value, "PX", ttl] + end + @impl true def delete(config, key) do - key = redis_key(config, key) + key = + config + |> redis_key(key) + |> to_binary_redis_key() Redix.noreply_command(@redix_instance_name, ["DEL", key]) end + @impl true def get(config, key) do - key = redis_key(config, key) + key = + config + |> redis_key(key) + |> to_binary_redis_key() case Redix.command(@redix_instance_name, ["GET", key]) do {:ok, nil} -> :not_found @@ -50,23 +69,94 @@ defmodule MyAppWeb.PowRedisCache do end end - def keys(config) do - namespace = redis_key(config, "") - length = String.length(namespace) + @impl true + def all(config, match_spec) do + compiled_match_spec = :ets.match_spec_compile([{match_spec, [], [:"$_"]}]) + + Stream.resource( + fn -> do_scan(config, 
compiled_match_spec, "0") end, + &stream_scan(config, compiled_match_spec, &1), + fn _ -> :ok end) + |> Enum.to_list() + |> case do + [] -> [] + keys -> fetch_values_for_keys(keys, config) + end + end + + defp fetch_values_for_keys(keys, config) do + binary_keys = Enum.map(keys, &binary_redis_key(config, &1)) + + case Redix.command(@redix_instance_name, ["MGET"] ++ binary_keys) do + {:ok, values} -> + values = Enum.map(values, &:erlang.binary_to_term/1) + + keys + |> Enum.zip(values) + |> Enum.reject(fn {_key, value} -> is_nil(value) end) + end + end + + defp stream_scan(_config, _compiled_match_spec, {[], "0"}), do: {:halt, nil} + defp stream_scan(config, compiled_match_spec, {[], iterator}), do: do_scan(config, compiled_match_spec, iterator) + defp stream_scan(_config, _compiled_match_spec, {keys, iterator}), do: {keys, {[], iterator}} + + defp do_scan(config, compiled_match_spec, iterator) do + prefix = to_binary_redis_key([namespace(config)]) <> ":*" - {:ok, values} = Redix.command(@redix_instance_name, ["KEYS", "#{namespace}*"]) + case Redix.command(@redix_instance_name, ["SCAN", iterator, "MATCH", prefix]) do + {:ok, [iterator, res]} -> {filter_or_load_value(compiled_match_spec, res), iterator} + end + end - Enum.map(values, &String.slice(&1, length..-1)) + defp filter_or_load_value(compiled_match_spec, keys) do + keys + |> Enum.map(&convert_key/1) + |> Enum.sort() + |> :ets.match_spec_run(compiled_match_spec) + end + + defp convert_key(key) do + key + |> from_binary_redis_key() + |> unwrap() + end + + defp unwrap([_namespace, key]), do: key + defp unwrap([_namespace | key]), do: key + + defp binary_redis_key(config, key) do + config + |> redis_key(key) + |> to_binary_redis_key() end defp redis_key(config, key) do - namespace = Config.get(config, :namespace, "cache") + [namespace(config)] ++ List.wrap(key) + end + + defp namespace(config), do: Config.get(config, :namespace, "cach") + + defp to_binary_redis_key(key) do + key + |> Enum.map(&:erlang.term_to_binary/1) + |> Enum.join(":") + end - "#{namespace}:#{key}" + defp from_binary_redis_key(key) do + key + |> String.split(":") + |> Enum.map(&:erlang.binary_to_term/1) end + + @spec raise_ttl_error :: no_return + defp raise_ttl_error, + do: Config.raise_error("`:ttl` configuration option is required for #{inspect(__MODULE__)}") end ``` +We are converting keys to binary keys since we can't directly use the Erlang terms as with ETS and Mnesia. 
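+For illustration, here is a minimal standalone sketch of what the `to_binary_redis_key/1` and `from_binary_redis_key/1` helpers above do with a key (the key value `["credentials", "abc123"]` is just a hypothetical example):
+
+```elixir
+# Each element of the key list is serialized with `:erlang.term_to_binary/1`,
+# and the parts are joined with ":" to form the binary Redis key.
+key = ["credentials", "abc123"]
+
+binary_key =
+  key
+  |> Enum.map(&:erlang.term_to_binary/1)
+  |> Enum.join(":")
+
+# `all/2` reverses this for the keys returned by SCAN, so they can be matched
+# against the match spec as plain Erlang terms again.
+decoded =
+  binary_key
+  |> String.split(":")
+  |> Enum.map(&:erlang.binary_to_term/1)
+
+decoded == key
+# => true
+```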
+ We'll need to start the Redix application on our app startup, so in `application.ex` add `{Redix, name: :redix}` to your supervision tree: ```elixir @@ -107,10 +197,17 @@ defmodule MyAppWeb.PowRedisCacheTest do @default_config [namespace: "test", ttl: :timer.hours(1)] + setup do + start_supervised!({Redix, host: "localhost", port: 6379, name: :redix}) + Redix.command!(:redix, ["FLUSHALL"]) + + :ok + end + test "can put, get and delete records" do assert PowRedisCache.get(@default_config, "key") == :not_found - PowRedisCache.put(@default_config, "key", "value") + PowRedisCache.put(@default_config, {"key", "value"}) :timer.sleep(100) assert PowRedisCache.get(@default_config, "key") == "value" @@ -119,22 +216,39 @@ defmodule MyAppWeb.PowRedisCacheTest do assert PowRedisCache.get(@default_config, "key") == :not_found end - test "fetch keys" do - PowRedisCache.put(@default_config, "key1", "value") - PowRedisCache.put(@default_config, "key2", "value") + test "can put multiple records at once" do + PowRedisCache.put(@default_config, [{"key1", "1"}, {"key2", "2"}]) + :timer.sleep(100) + assert PowRedisCache.get(@default_config, "key1") == "1" + assert PowRedisCache.get(@default_config, "key2") == "2" + end + + test "can match fetch all" do + PowRedisCache.put(@default_config, {"key1", "value"}) + PowRedisCache.put(@default_config, {"key2", "value"}) :timer.sleep(100) - assert Enum.sort(PowRedisCache.keys(@default_config)) == ["key1", "key2"] + assert PowRedisCache.all(@default_config, :_) == [{"key1", "value"}, {"key2", "value"}] + + PowRedisCache.put(@default_config, {["namespace", "key"], "value"}) + :timer.sleep(100) + + assert PowRedisCache.all(@default_config, ["namespace", :_]) == [{["namespace", "key"], "value"}] end test "records auto purge" do config = Keyword.put(@default_config, :ttl, 100) - PowRedisCache.put(config, "key", "value") + PowRedisCache.put(config, {"key", "value"}) + PowRedisCache.put(config, [{"key1", "1"}, {"key2", "2"}]) :timer.sleep(50) assert PowRedisCache.get(config, "key") == "value" + assert PowRedisCache.get(config, "key1") == "1" + assert PowRedisCache.get(config, "key2") == "2" :timer.sleep(100) assert PowRedisCache.get(config, "key") == :not_found + assert PowRedisCache.get(config, "key1") == :not_found + assert PowRedisCache.get(config, "key2") == :not_found end end -``` \ No newline at end of file +``` diff --git a/lib/pow/store/backend/base.ex b/lib/pow/store/backend/base.ex index b11d28c4..0afffd62 100644 --- a/lib/pow/store/backend/base.ex +++ b/lib/pow/store/backend/base.ex @@ -12,8 +12,12 @@ defmodule Pow.Store.Backend.Base do """ alias Pow.Config - @callback put(Config.t(), binary(), any()) :: :ok - @callback delete(Config.t(), binary()) :: :ok - @callback get(Config.t(), binary()) :: any() | :not_found - @callback keys(Config.t()) :: [any()] + @type key() :: [binary() | atom()] | binary() + @type record() :: {key(), any()} + @type key_match() :: [atom() | binary()] + + @callback put(Config.t(), record() | [record()]) :: :ok + @callback delete(Config.t(), key()) :: :ok + @callback get(Config.t(), key()) :: any() | :not_found + @callback all(Config.t(), key_match()) :: [record()] end diff --git a/lib/pow/store/backend/ets_cache.ex b/lib/pow/store/backend/ets_cache.ex index 759ed64f..e3c2ac32 100644 --- a/lib/pow/store/backend/ets_cache.ex +++ b/lib/pow/store/backend/ets_cache.ex @@ -11,8 +11,7 @@ defmodule Pow.Store.Backend.EtsCache do * `:ttl` - integer value in milliseconds for ttl of records. 
If this value is not provided, or is set to nil, the records will never expire. - * `:namespace` - string value to use for namespacing keys. Defaults to - "cache". + * `:namespace` - value to use for namespacing keys. Defaults to "cache". """ use GenServer alias Pow.{Config, Store.Backend.Base} @@ -26,27 +25,23 @@ defmodule Pow.Store.Backend.EtsCache do end @impl Base - @spec put(Config.t(), binary(), any()) :: :ok - def put(config, key, value) do - GenServer.cast(__MODULE__, {:cache, config, key, value}) + def put(config, record_or_records) do + GenServer.cast(__MODULE__, {:cache, config, record_or_records}) end @impl Base - @spec delete(Config.t(), binary()) :: :ok def delete(config, key) do GenServer.cast(__MODULE__, {:delete, config, key}) end @impl Base - @spec get(Config.t(), binary()) :: any() | :not_found def get(config, key) do - table_get(config, key) + table_get(key, config) end @impl Base - @spec keys(Config.t()) :: [any()] - def keys(config) do - table_keys(config) + def all(config, match) do + table_all(match, config) end # Callbacks @@ -60,91 +55,111 @@ defmodule Pow.Store.Backend.EtsCache do end @impl GenServer - @spec handle_cast({:cache, Config.t(), binary(), any()}, map()) :: {:noreply, map()} - def handle_cast({:cache, config, key, value}, %{invalidators: invalidators} = state) do - invalidators = update_invalidators(config, invalidators, key) - table_update(config, key, value) + @spec handle_cast({:cache, Config.t(), Base.record() | [Base.record()]}, map()) :: {:noreply, map()} + def handle_cast({:cache, config, record_or_records}, %{invalidators: invalidators} = state) do + invalidators = + record_or_records + |> table_insert(config) + |> Enum.reduce(invalidators, &append_invalidator(elem(&1, 0), &2, config)) {:noreply, %{state | invalidators: invalidators}} end - @spec handle_cast({:delete, Config.t(), binary()}, map()) :: {:noreply, map()} + @spec handle_cast({:delete, Config.t(), Base.key()}, map()) :: {:noreply, map()} def handle_cast({:delete, config, key}, %{invalidators: invalidators} = state) do - invalidators = clear_invalidator(invalidators, key) - table_delete(config, key) + invalidators = + key + |> table_delete(config) + |> clear_invalidator(invalidators) {:noreply, %{state | invalidators: invalidators}} end @impl GenServer - @spec handle_info({:invalidate, Config.t(), binary()}, map()) :: {:noreply, map()} + @spec handle_info({:invalidate, Config.t(), Base.key()}, map()) :: {:noreply, map()} def handle_info({:invalidate, config, key}, %{invalidators: invalidators} = state) do - invalidators = clear_invalidator(invalidators, key) - - table_delete(config, key) + invalidators = + key + |> table_delete(config) + |> clear_invalidator(invalidators) {:noreply, %{state | invalidators: invalidators}} end - defp update_invalidators(config, invalidators, key) do - case Config.get(config, :ttl) do - nil -> - invalidators - - ttl -> - invalidators = clear_invalidator(invalidators, key) - invalidator = Process.send_after(self(), {:invalidate, config, key}, ttl) + defp table_get(key, config) do + ets_key = ets_key(config, key) - Map.put(invalidators, key, invalidator) + @ets_cache_tab + |> :ets.lookup(ets_key) + |> case do + [{^ets_key, value} | _rest] -> value + [] -> :not_found end end - defp clear_invalidator(invalidators, key) do - case Map.get(invalidators, key) do - nil -> nil - invalidator -> Process.cancel_timer(invalidator) - end + defp table_all(key_match, config) do + ets_key_match = ets_key(config, key_match) - Map.drop(invalidators, [key]) + @ets_cache_tab 
+ |> :ets.select([{{ets_key_match, :_}, [], [:"$_"]}]) + |> Enum.map(fn {keys, value} -> {unwrap(keys), value} end) end - defp table_get(config, key) do - ets_key = ets_key(config, key) + defp unwrap([_namespace, key]), do: key + defp unwrap([_namespace | key]), do: key - @ets_cache_tab - |> :ets.lookup(ets_key) - |> case do - [{^ets_key, value} | _rest] -> value - [] -> :not_found - end + defp table_insert(record_or_records, config) do + records = List.wrap(record_or_records) + ets_records = Enum.map(records, fn {key, value} -> + {ets_key(config, key), value} + end) + + :ets.insert(@ets_cache_tab, ets_records) + + records end - defp table_update(config, key, value), - do: :ets.insert(@ets_cache_tab, {ets_key(config, key), value}) + defp table_delete(key, config) do + ets_key = ets_key(config, key) + + :ets.delete(@ets_cache_tab, ets_key) - defp table_delete(config, key), do: :ets.delete(@ets_cache_tab, ets_key(config, key)) + key + end defp init_table do - :ets.new(@ets_cache_tab, [:set, :protected, :named_table]) + :ets.new(@ets_cache_tab, [:ordered_set, :protected, :named_table]) + end + + defp ets_key(config, key) do + Enum.concat([namespace(config)], List.wrap(key)) end - defp table_keys(config) do - namespace = ets_key(config, "") - length = String.length(namespace) + defp namespace(config), do: Config.get(config, :namespace, "cache") - Stream.resource( - fn -> :ets.first(@ets_cache_tab) end, - fn :"$end_of_table" -> {:halt, nil} - previous_key -> {[previous_key], :ets.next(@ets_cache_tab, previous_key)} end, - fn _ -> :ok - end) - |> Enum.filter(&String.starts_with?(&1, namespace)) - |> Enum.map(&String.slice(&1, length..-1)) + defp append_invalidator(key, invalidators, config) do + case Config.get(config, :ttl) do + nil -> + invalidators + + ttl -> + invalidators = clear_invalidator(key, invalidators) + invalidator = trigger_ttl(key, ttl, config) + + Map.put(invalidators, key, invalidator) + end end - defp ets_key(config, key) do - namespace = Config.get(config, :namespace, "cache") + defp trigger_ttl(key, ttl, config) do + Process.send_after(self(), {:invalidate, config, key}, ttl) + end + + defp clear_invalidator(key, invalidators) do + case Map.get(invalidators, key) do + nil -> nil + invalidator -> Process.cancel_timer(invalidator) + end - "#{namespace}:#{key}" + Map.delete(invalidators, key) end end diff --git a/lib/pow/store/backend/mnesia_cache.ex b/lib/pow/store/backend/mnesia_cache.ex index 278f2978..b0be821b 100644 --- a/lib/pow/store/backend/mnesia_cache.ex +++ b/lib/pow/store/backend/mnesia_cache.ex @@ -102,27 +102,25 @@ defmodule Pow.Store.Backend.MnesiaCache do end @impl Base - @spec put(Config.t(), binary(), any()) :: :ok - def put(config, key, value) do - GenServer.cast(__MODULE__, {:cache, config, key, value, ttl(config)}) + def put(config, record_or_records) do + ttl = ttl!(config) + + GenServer.cast(__MODULE__, {:cache, config, record_or_records, ttl}) end @impl Base - @spec delete(Config.t(), binary()) :: :ok def delete(config, key) do GenServer.cast(__MODULE__, {:delete, config, key}) end @impl Base - @spec get(Config.t(), binary()) :: any() | :not_found def get(config, key) do - table_get(config, key) + table_get(key, config) end @impl Base - @spec keys(Config.t()) :: [any()] - def keys(config) do - table_keys(config) + def all(config, match) do + table_all(match, config) end # Callbacks @@ -136,62 +134,74 @@ defmodule Pow.Store.Backend.MnesiaCache do end @impl GenServer - @spec handle_cast({:cache, Config.t(), binary(), any(), integer()}, map()) :: {:noreply, 
map()} - def handle_cast({:cache, config, key, value, ttl}, %{invalidators: invalidators} = state) do - table_update(config, key, value, ttl) + @spec handle_cast({:cache, Config.t(), Base.record() | [Base.record()], integer()}, map()) :: {:noreply, map()} + def handle_cast({:cache, config, record_or_records, ttl}, %{invalidators: invalidators} = state) do + invalidators = + record_or_records + |> table_insert(ttl, config) + |> Enum.reduce(invalidators, fn {key, _}, invalidators -> + append_invalidator(key, invalidators, ttl, config) + end) - invalidators = update_invalidators(config, invalidators, key, ttl) refresh_invalidators_in_cluster(config) {:noreply, %{state | invalidators: invalidators}} end - @spec handle_cast({:delete, Config.t(), binary()}, map()) :: {:noreply, map()} + @spec handle_cast({:delete, Config.t(), Base.key() | [Base.key()]}, map()) :: {:noreply, map()} def handle_cast({:delete, config, key}, %{invalidators: invalidators} = state) do - invalidators = clear_invalidator(invalidators, key) - table_delete(config, key) + invalidators = + key + |> table_delete(config) + |> clear_invalidator(invalidators) {:noreply, %{state | invalidators: invalidators}} end @spec handle_cast({:refresh_invalidators, Config.t()}, map()) :: {:noreply, map()} def handle_cast({:refresh_invalidators, config}, %{invalidators: invalidators} = state) do - clear_invalidators(invalidators) - - {:noreply, %{state | invalidators: init_invalidators(config)}} + {:noreply, %{state | invalidators: init_invalidators(config, invalidators)}} end @impl GenServer - @spec handle_info({:invalidate, Config.t(), binary()}, map()) :: {:noreply, map()} + @spec handle_info({:invalidate, Config.t(), [Base.key()]}, map()) :: {:noreply, map()} def handle_info({:invalidate, config, key}, %{invalidators: invalidators} = state) do - invalidators = clear_invalidator(invalidators, key) - invalidators = - config - |> fetch(key) - |> delete_or_reschedule(config, invalidators) + invalidators = delete_or_reschedule(key, invalidators, config) {:noreply, %{state | invalidators: invalidators}} end - defp delete_or_reschedule(nil, _config, invalidators), do: invalidators - defp delete_or_reschedule({key, _value, key_config, expire}, config, invalidators) do - case Enum.max([expire - timestamp(), 0]) do - 0 -> - table_delete(config, key) - + defp delete_or_reschedule(key, invalidators, config) do + config + |> fetch(key) + |> case do + nil -> invalidators - ttl -> - update_invalidators(key_config, invalidators, key, ttl) + + {_value, expire} -> + case Enum.max([expire - timestamp(), 0]) do + 0 -> + key + |> table_delete(config) + |> clear_invalidator(invalidators) + + ttl -> + append_invalidator(key, invalidators, ttl, config) + end end end - defp update_invalidators(config, invalidators, key, ttl) do - invalidators = clear_invalidator(invalidators, key) - invalidator = trigger_ttl(config, key, ttl) + defp append_invalidator(key, invalidators, ttl, config) do + invalidators = clear_invalidator(key, invalidators) + invalidator = trigger_ttl(key, ttl, config) Map.put(invalidators, key, invalidator) end + defp trigger_ttl(key, ttl, config) do + Process.send_after(self(), {:invalidate, config, key}, ttl) + end + defp refresh_invalidators_in_cluster(config) do :running_db_nodes |> :mnesia.system_info() @@ -199,27 +209,21 @@ defmodule Pow.Store.Backend.MnesiaCache do |> Enum.each(&:rpc.call(&1, GenServer, :cast, [__MODULE__, {:refresh_invalidators, config}])) end - defp clear_invalidators(invalidators) do - Enum.reduce(invalidators, 
invalidators, fn {key, _ref}, invalidators -> - clear_invalidator(invalidators, key) - end) - end - - defp clear_invalidator(invalidators, key) do + defp clear_invalidator(key, invalidators) do case Map.get(invalidators, key) do nil -> nil invalidator -> Process.cancel_timer(invalidator) end - Map.drop(invalidators, [key]) + Map.delete(invalidators, key) end - defp table_get(config, key) do + defp table_get(key, config) do config |> fetch(key) |> case do - {_key, value, _config, _expire} -> value - nil -> :not_found + {value, _expire} -> value + nil -> :not_found end end @@ -229,54 +233,50 @@ defmodule Pow.Store.Backend.MnesiaCache do {@mnesia_cache_tab, mnesia_key} |> :mnesia.dirty_read() |> case do - [{@mnesia_cache_tab, ^mnesia_key, {_key, value, config, expire}} | _rest] -> {key, value, config, expire} - [] -> nil + [{@mnesia_cache_tab, ^mnesia_key, value} | _rest] -> value + [] -> nil end end - defp table_update(config, key, value, ttl) do - mnesia_key = mnesia_key(config, key) - expire = timestamp() + ttl - value = {key, value, config, expire} + defp table_all(key_match, config) do + mnesia_key_match = mnesia_key(config, key_match) - :mnesia.sync_transaction(fn -> - :mnesia.write({@mnesia_cache_tab, mnesia_key, value}) - end) + @mnesia_cache_tab + |> :mnesia.dirty_select([{{@mnesia_cache_tab, mnesia_key_match, :_}, [], [:"$_"]}]) + |> Enum.map(fn {@mnesia_cache_tab, key, {value, _expire}} -> {unwrap(key), value} end) end - defp table_delete(config, key) do - mnesia_key = mnesia_key(config, key) + defp unwrap([_namespace, key]), do: key + defp unwrap([_namespace | key]), do: key - :mnesia.sync_transaction(fn -> - :mnesia.delete({@mnesia_cache_tab, mnesia_key}) - end) - end + defp table_insert(record_or_records, ttl, config) do + expire = timestamp() + ttl + records = List.wrap(record_or_records) + + {:atomic, _result} = + :mnesia.sync_transaction(fn -> + Enum.map(records, fn {key, value} -> + mnesia_key = mnesia_key(config, key) + value = {value, expire} - defp table_keys(config, opts \\ []) do - namespace = mnesia_key(config, "") + :mnesia.write({@mnesia_cache_tab, mnesia_key, value}) + end) + end) - sync_all_keys() - |> Enum.filter(&String.starts_with?(&1, namespace)) - |> maybe_remove_namespace(namespace, opts) + records end - defp sync_all_keys do - {:atomic, keys} = :mnesia.sync_transaction(fn -> - :mnesia.all_keys(@mnesia_cache_tab) - end) + defp table_delete(key, config) do + {:atomic, key} = + :mnesia.sync_transaction(fn -> + mnesia_key = mnesia_key(config, key) - keys - end + :mnesia.delete({@mnesia_cache_tab, mnesia_key}) - defp maybe_remove_namespace(keys, namespace, opts) do - case Keyword.get(opts, :remove_namespace, true) do - true -> - start = String.length(namespace) - Enum.map(keys, &String.slice(&1, start..-1)) + key + end) - _ -> - keys - end + key end defp init_mnesia(config) do @@ -370,7 +370,7 @@ defmodule Pow.Store.Backend.MnesiaCache do defp create_table(config) do table_opts = Config.get(config, :table_opts, [disc_copies: [node()]]) - table_def = Keyword.merge(table_opts, [type: :set]) + table_def = Keyword.merge(table_opts, [type: :ordered_set]) case :mnesia.create_table(@mnesia_cache_tab, table_def) do {:atomic, :ok} -> :ok @@ -402,42 +402,51 @@ defmodule Pow.Store.Backend.MnesiaCache do end defp mnesia_key(config, key) do - namespace = Config.get(config, :namespace, "cache") - - "#{namespace}:#{key}" + Enum.concat([namespace(config)], List.wrap(key)) end - defp init_invalidators(config) do - config - |> table_keys(remove_namespace: false) - |> 
Enum.map(&init_invalidator(config, &1)) - |> Enum.reject(&is_nil/1) - |> Enum.into(%{}) - end + defp namespace(config), do: Config.get(config, :namespace, "cache") - defp init_invalidator(_config, key) do - {@mnesia_cache_tab, key} - |> :mnesia.dirty_read() - |> case do - [{@mnesia_cache_tab, ^key, {_key_id, _value, _config, nil}} | _rest] -> - nil + defp init_invalidators(config, existing_invalidators \\ %{}) do + clear_all_invalidators(existing_invalidators) - [{@mnesia_cache_tab, ^key, {key_id, _value, config, expire}} | _rest] -> - ttl = Enum.max([expire - timestamp(), 0]) + {:atomic, invalidators} = + :mnesia.sync_transaction(fn -> + :mnesia.foldl(fn + {@mnesia_cache_tab, key, {_value, expire}}, invalidators when is_list(key) -> + ttl = Enum.max([expire - timestamp(), 0]) - {key, trigger_ttl(config, key_id, ttl)} + key + |> unwrap() + |> append_invalidator(invalidators, ttl, config) - [] -> nil - end + # TODO: Remove by 1.1.0 + {@mnesia_cache_tab, key, {_value, expire}}, invalidators when is_binary(key) and is_number(expire) -> + :mnesia.delete({@mnesia_cache_tab, key}) + + invalidators + + _any, invalidators -> + invalidators + end, + %{}, + @mnesia_cache_tab) + end) + + invalidators end - defp trigger_ttl(config, key, ttl) do - Process.send_after(self(), {:invalidate, config, key}, ttl) + defp clear_all_invalidators(invalidators) do + invalidators + |> Map.keys() + |> Enum.reduce(invalidators, fn key, invalidators -> + clear_invalidator(key, invalidators) + end) end defp timestamp, do: :os.system_time(:millisecond) - defp ttl(config) do + defp ttl!(config) do Config.get(config, :ttl) || raise_ttl_error() end diff --git a/lib/pow/store/base.ex b/lib/pow/store/base.ex index a7e37ebf..1305df82 100644 --- a/lib/pow/store/base.ex +++ b/lib/pow/store/base.ex @@ -11,40 +11,42 @@ defmodule Pow.Store.Base do @impl true def put(config, backend_config, key, value) do - Pow.Store.Base.put(config, backend_config, key, value) + Pow.Store.Base.put(config, backend_config, {key, value}) end end """ - alias Pow.{Config, Store.Backend.EtsCache} + alias Pow.Config + alias Pow.Store.Backend.{EtsCache, MnesiaCache, Base} - @callback put(Config.t(), Config.t(), binary(), any()) :: :ok - @callback delete(Config.t(), Config.t(), binary()) :: :ok - @callback get(Config.t(), Config.t(), binary()) :: any() | :not_found - @callback keys(Config.t(), Config.t()) :: [any()] + @type key :: Base.key() + @type record :: Base.record() + @type key_match :: Base.key_match() + + @callback put(Config.t(), Config.t(), key(), any()) :: :ok + @callback delete(Config.t(), Config.t(), key()) :: :ok + @callback get(Config.t(), Config.t(), key()) :: any() | :not_found + @callback all(Config.t(), Config.t(), key_match()) :: [record()] @doc false defmacro __using__(defaults) do quote do @behaviour unquote(__MODULE__) - # TODO: Remove by 1.1.0 - @behaviour Pow.Store.Backend.Base - - @spec put(Config.t(), binary(), any()) :: :ok + @spec put(Config.t(), unquote(__MODULE__).key(), any()) :: :ok def put(config, key, value), do: put(config, backend_config(config), key, value) - @spec delete(Config.t(), binary()) :: :ok + @spec delete(Config.t(), unquote(__MODULE__).key()) :: :ok def delete(config, key), do: delete(config, backend_config(config), key) - @spec get(Config.t(), binary()) :: any() | :not_found + @spec get(Config.t(), unquote(__MODULE__).key()) :: any() | :not_found def get(config, key), do: get(config, backend_config(config), key) - @spec keys(Config.t()) :: [any()] - def keys(config), - do: keys(config, 
backend_config(config)) + @spec all(Config.t(), unquote(__MODULE__).key_match()) :: [unquote(__MODULE__).record()] + def all(config, key_match), + do: all(config, backend_config(config), key_match) defp backend_config(config) do [ @@ -53,43 +55,129 @@ defmodule Pow.Store.Base do ] end - defdelegate put(config, backend_config, key, value), to: unquote(__MODULE__) + @impl unquote(__MODULE__) + def put(config, backend_config, key, value), do: unquote(__MODULE__).put(config, backend_config, {key, value}) + defdelegate delete(config, backend_config, key), to: unquote(__MODULE__) defdelegate get(config, backend_config, key), to: unquote(__MODULE__) - defdelegate keys(config, backend_config), to: unquote(__MODULE__) + defdelegate all(config, backend_config, key_match), to: unquote(__MODULE__) defoverridable unquote(__MODULE__) # TODO: Remove by 1.1.0 - defoverridable Pow.Store.Backend.Base + defoverridable put: 3, delete: 2, get: 2, all: 2 end end - @doc false - @spec put(Config.t(), Config.t(), binary(), any()) :: :ok - def put(config, backend_config, key, value) do - store(config).put(backend_config, key, value) + @spec put(Config.t(), Config.t(), record() | [record()]) :: :ok + def put(config, backend_config, record_or_records) do + # TODO: Update by 1.1.0 + backwards_compatible_call(store(config), :put, [backend_config, record_or_records]) end @doc false - @spec delete(Config.t(), Config.t(), binary()) :: :ok + @spec delete(Config.t(), Config.t(), key()) :: :ok def delete(config, backend_config, key) do - store(config).delete(backend_config, key) + # TODO: Update by 1.1.0 + backwards_compatible_call(store(config), :delete, [backend_config, key]) end @doc false - @spec get(Config.t(), Config.t(), binary()) :: any() | :not_found + @spec get(Config.t(), Config.t(), key()) :: any() | :not_found def get(config, backend_config, key) do - store(config).get(backend_config, key) + # TODO: Update by 1.1.0 + backwards_compatible_call(store(config), :get, [backend_config, key]) end @doc false - @spec keys(Config.t(), Config.t()) :: [any()] - def keys(config, backend_config) do - store(config).keys(backend_config) + @spec all(Config.t(), Config.t(), key_match()) :: [record()] + def all(config, backend_config, key_match) do + # TODO: Update by 1.1.0 + backwards_compatible_call(store(config), :all, [backend_config, key_match]) end defp store(config) do Config.get(config, :backend, EtsCache) end + + # TODO: Remove by 1.1.0 + defp backwards_compatible_call(store, method, args) do + store + |> has_binary_keys?() + |> case do + false -> + apply(store, method, args) + + true -> + IO.warn("binary key for backend stores is deprecated, update `#{store}` to accept Erlang terms instead") + + case method do + :put -> binary_key_put(store, args) + :get -> binary_key_get(store, args) + :delete -> binary_key_delete(store, args) + :all -> binary_key_all(store, args) + end + end + end + + # TODO: Remove by 1.1.0 + defp has_binary_keys?(store) when store in [EtsCache, MnesiaCache], do: false + defp has_binary_keys?(store), do: function_exported?(store, :keys, 1) + + # TODO: Remove by 1.1.0 + defp binary_key_put(store, [backend_config, record_or_records]) do + record_or_records + |> List.wrap() + |> Enum.each(fn {key, value} -> + key = binary_key(key) + + store.put(backend_config, key, value) + end) + end + + # TODO: Remove by 1.1.0 + defp binary_key_get(store, [backend_config, key]) do + key = binary_key(key) + + store.get(backend_config, key) + end + + # TODO: Remove by 1.1.0 + defp binary_key_delete(store, [backend_config, 
key]) do + key = binary_key(key) + + store.delete(backend_config, key) + end + + # TODO: Remove by 1.1.0 + defp binary_key_all(store, [backend_config, match_spec]) do + match_spec = :ets.match_spec_compile([{match_spec, [], [:"$_"]}]) + + backend_config + |> store.keys() + |> Enum.map(&:erlang.binary_to_term/1) + |> :ets.match_spec_run(match_spec) + |> Enum.map(&{&1, binary_key_get(store, [backend_config, &1])}) + end + + # TODO: Remove by 1.1.0 + defp binary_key(key) do + key + |> List.wrap() + |> :erlang.term_to_binary() + end + + # TODO: Remove by 1.1.0 + @doc false + @deprecated "Use `put/3` instead" + def put(config, backend_config, key, value) do + put(config, backend_config, {key, value}) + end + + # TODO: Remove by 1.1.0 + @doc false + @deprecated "Use `all/3` instead" + def keys(config, backend_config) do + store(config).keys(backend_config) + end end diff --git a/lib/pow/store/credentials_cache.ex b/lib/pow/store/credentials_cache.ex index 2b601e9c..cf551527 100644 --- a/lib/pow/store/credentials_cache.ex +++ b/lib/pow/store/credentials_cache.ex @@ -21,17 +21,17 @@ defmodule Pow.Store.CredentialsCache do namespace: "credentials" @doc """ - List all user session keys stored for a certain user struct. + List all users for a certain user struct. - Each user session key can be used to look up all sessions for that user. + Sessions for a user can be looked up with `sessions/3`. """ - @spec user_session_keys(Config.t(), Config.t(), module()) :: [any()] - def user_session_keys(config, backend_config, struct) do - namespace = "#{Macro.underscore(struct)}_sessions_" - + @spec users(Config.t(), Config.t(), module()) :: [any()] + def users(config, backend_config, struct) do config - |> Base.keys(backend_config) - |> Enum.filter(&String.starts_with?(&1, namespace)) + |> Base.all(backend_config, [struct, :user, :_]) + |> Enum.map(fn {[^struct, :user, _id], user} -> + user + end) end @doc """ @@ -39,32 +39,43 @@ defmodule Pow.Store.CredentialsCache do """ @spec sessions(Config.t(), Config.t(), map()) :: [binary()] def sessions(config, backend_config, user) do - case Base.get(config, backend_config, user_session_list_key(user)) do - :not_found -> [] - %{sessions: sessions} -> sessions - end + {struct, id} = user_to_struct_id(user) + + config + |> Base.all(backend_config, [struct, :user, id, :session, :_]) + |> Enum.map(fn {[^struct, :user, ^id, :session, session_id], _value} -> + session_id + end) end @doc """ Add user credentials with the session id to the backend store. - This will either create or update the current user credentials in the - backend store. The session id will be appended to the session list for the - user credentials. + This will create these three records: + + - `{session_id, {[user_struct, :user, user_id], metadata}}` + - `{[user_struct, :user, user_id], user}` + - `{[user_struct, :user, user_id, :session, session_id], inserted_at}` The credentials are expected to be in the format of `{credentials, metadata}`. 
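+ + For example (an illustrative sketch; the `MyApp.User` struct, the id `1`, and the session id `"abc"` are hypothetical), calling `put(config, backend_config, "abc", {%MyApp.User{id: 1}, metadata})` would store roughly: + + {"abc", {[MyApp.User, :user, 1], metadata}} + {[MyApp.User, :user, 1], %MyApp.User{id: 1}} + {[MyApp.User, :user, 1, :session, "abc"], inserted_at}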
""" @impl true - @spec put(Config.t(), Config.t(), binary(), {map(), list()}) :: :ok def put(config, backend_config, session_id, {user, metadata}) do - key = append_to_session_list(config, backend_config, session_id, user) - - Base.put(config, backend_config, session_id, {key, metadata}) + {struct, id} = user_to_struct_id(user) + user_key = [struct, :user, id] + session_key = [struct, :user, id, :session, session_id] + records = [ + {session_id, {user_key, metadata}}, + {user_key, user}, + {session_key, :os.system_time(:millisecond)} + ] + + Base.put(config, backend_config, records) end @doc """ - Delete the sesison id from the backend store. + Delete the session id from the backend store. This will delete the session id from the session list for the user credentials in the backend store. If the session id is the only one in the @@ -72,15 +83,20 @@ defmodule Pow.Store.CredentialsCache do store. """ @impl true - @spec delete(Config.t(), Config.t(), binary()) :: :ok def delete(config, backend_config, session_id) do - with {key, _metadata} when is_binary(key) <- Base.get(config, backend_config, session_id), - :ok <- delete_from_session_list(config, backend_config, session_id, key) do - Base.delete(config, backend_config, session_id) - else + case Base.get(config, backend_config, session_id) do + {[struct, :user, key_id], _metadata} -> + session_key = [struct, :user, key_id, :session, session_id] + + Base.delete(config, backend_config, session_id) + Base.delete(config, backend_config, session_key) + # TODO: Remove by 1.1.0 - {user, _metadata} when is_map(user) -> Base.delete(config, backend_config, session_id) - :not_found -> :ok + {user, _metadata} when is_map(user) -> + Base.delete(config, backend_config, session_id) + + :not_found -> + :ok end end @@ -90,8 +106,8 @@ defmodule Pow.Store.CredentialsCache do @impl true @spec get(Config.t(), Config.t(), binary()) :: {map(), list()} | :not_found def get(config, backend_config, session_id) do - with {key, metadata} when is_binary(key) <- Base.get(config, backend_config, session_id), - %{user: user} <- Base.get(config, backend_config, key) do + with {user_key, metadata} when is_list(user_key) <- Base.get(config, backend_config, session_id), + user when is_map(user) <- Base.get(config, backend_config, user_key) do {user, metadata} else # TODO: Remove by 1.1.0 @@ -100,72 +116,43 @@ defmodule Pow.Store.CredentialsCache do end end - defp append_to_session_list(config, backend_config, session_id, user) do - new_list = - config - |> sessions(backend_config, user) - |> Enum.reject(&get(config, backend_config, &1) == :not_found) - |> Enum.concat([session_id]) - |> Enum.uniq() - - update_session_list(config, backend_config, user, new_list) - end - - defp delete_from_session_list(config, backend_config, session_id, key) do - %{user: user} = Base.get(config, backend_config, key) - - config - |> sessions(backend_config, user) - |> Enum.filter(&(&1 != session_id)) - |> case do - [] -> - Base.delete(config, backend_config, key) - - new_list -> - update_session_list(config, backend_config, user, new_list) - - :ok + defp user_to_struct_id(%struct{} = user) do + key_value = case function_exported?(struct, :__schema__, 1) do + true -> key_value_from_primary_keys(user) + false -> primary_keys_to_keyword_list!([:id], user) end - end - - defp update_session_list(config, backend_config, user, list) do - key = user_session_list_key(user) - - Base.put(config, backend_config, key, %{user: user, sessions: list}) - key + {struct, key_value} end - - defp 
user_session_list_key(%struct{} = user) do - key_value = - case function_exported?(struct, :__schema__, 1) do - true -> key_value_from_primary_keys(user) - false -> primary_keys_to_binary!([:id], user) - end - - "#{Macro.underscore(struct)}_sessions_#{key_value}" - end - defp user_session_list_key(_user), do: raise "Only structs can be stored as credentials" + defp user_to_struct_id(_user), do: raise "Only structs can be stored as credentials" defp key_value_from_primary_keys(%struct{} = user) do :primary_key |> struct.__schema__() |> Enum.sort() - |> primary_keys_to_binary!(user) + |> primary_keys_to_keyword_list!(user) end - defp primary_keys_to_binary!([], %struct{}), do: raise "No primary keys found for #{inspect struct}" - defp primary_keys_to_binary!([key], user), do: get_primary_key_value!(key, user) - defp primary_keys_to_binary!(keys, user) do - keys - |> Enum.map(&"#{&1}:#{get_primary_key_value!(&1, user)}") - |> Enum.join("_") + defp primary_keys_to_keyword_list!([], %struct{}), do: raise "No primary keys found for #{inspect struct}" + defp primary_keys_to_keyword_list!([key], user), do: get_primary_key_value!(user, key) + defp primary_keys_to_keyword_list!(keys, user) do + Enum.map(keys, &{&1, get_primary_key_value!(user, &1)}) end - defp get_primary_key_value!(key, %struct{} = user) do + defp get_primary_key_value!(%struct{} = user, key) do case Map.get(user, key) do nil -> raise "Primary key value for key `#{inspect key}` in #{inspect struct} can't be `nil`" val -> val end end + + # TODO: Remove by 1.1.0 + @deprecated "Use `users/3` or `sessions/3` instead" + def user_session_keys(config, backend_config, struct) do + config + |> Base.all(backend_config, [struct, :user, :_, :session, :_]) + |> Enum.map(fn {key, _value} -> + key + end) + end end diff --git a/test/extensions/persistent_session/plug/cookie_test.exs b/test/extensions/persistent_session/plug/cookie_test.exs index ac0265e3..988c128a 100644 --- a/test/extensions/persistent_session/plug/cookie_test.exs +++ b/test/extensions/persistent_session/plug/cookie_test.exs @@ -114,7 +114,7 @@ defmodule PowPersistentSession.Plug.CookieTest do config = Keyword.put(config, :persistent_session_ttl, 1000) conn = Cookie.create(conn, %User{id: 1}, config) - assert_received {:ets, :put, _key, _value, config} + assert_received {:ets, :put, [{_key, _value} | _rest], config} assert config[:ttl] == 1000 assert %{max_age: 1, path: "/"} = conn.resp_cookies["persistent_session_cookie"] diff --git a/test/extensions/reset_password/phoenix/controllers/reset_password_controller_test.exs b/test/extensions/reset_password/phoenix/controllers/reset_password_controller_test.exs index 07a52704..f8a05718 100644 --- a/test/extensions/reset_password/phoenix/controllers/reset_password_controller_test.exs +++ b/test/extensions/reset_password/phoenix/controllers/reset_password_controller_test.exs @@ -41,7 +41,7 @@ defmodule PowResetPassword.Phoenix.ResetPasswordControllerTest do test "with valid params", %{conn: conn, ets: ets} do conn = post conn, Routes.pow_reset_password_reset_password_path(conn, :create, @valid_params) - [token] = ResetTokenCache.keys([backend: ets]) + [{token, _}] = ResetTokenCache.all([backend: ets], [:_]) assert_received {:mail_mock, mail} diff --git a/test/pow/phoenix/controllers/registration_controller_test.exs b/test/pow/phoenix/controllers/registration_controller_test.exs index 848f64df..50b2c19f 100644 --- a/test/pow/phoenix/controllers/registration_controller_test.exs +++ 
b/test/pow/phoenix/controllers/registration_controller_test.exs @@ -187,7 +187,7 @@ defmodule Pow.Phoenix.RegistrationControllerTest do conn = post conn, Routes.pow_registration_path(conn, :create, @valid_params) assert %{id: 1} = Plug.current_user(conn) assert conn.private[:plug_session]["auth"] - assert_receive {:ets, :put, _key, _value, _opts} + assert_receive {:ets, :put, [{_key, _value} | _rest], _opts} conn end diff --git a/test/pow/phoenix/controllers/session_controller_test.exs b/test/pow/phoenix/controllers/session_controller_test.exs index 50513e36..4c5cca9a 100644 --- a/test/pow/phoenix/controllers/session_controller_test.exs +++ b/test/pow/phoenix/controllers/session_controller_test.exs @@ -107,7 +107,7 @@ defmodule Pow.Phoenix.SessionControllerTest do conn = post conn, Routes.pow_session_path(conn, :create, @valid_params) assert %{id: 1} = Plug.current_user(conn) assert conn.private[:plug_session]["auth"] - assert_receive {:ets, :put, _key, _value, _opts} + assert_receive {:ets, :put, [{_key, _value} | _rest], _opts} conn = delete(conn, Routes.pow_session_path(conn, :delete)) assert redirected_to(conn) == "/signed_out" diff --git a/test/pow/plug/session_test.exs b/test/pow/plug/session_test.exs index 66636e31..9afad5d6 100644 --- a/test/pow/plug/session_test.exs +++ b/test/pow/plug/session_test.exs @@ -123,7 +123,7 @@ defmodule Pow.Plug.SessionTest do @store_config |> Keyword.put(:namespace, "credentials") - |> EtsCacheMock.put("token", {@user, stale_timestamp}) + |> EtsCacheMock.put({"token", {@user, stale_timestamp}}) opts = Session.init(config) conn = diff --git a/test/pow/plug_test.exs b/test/pow/plug_test.exs index 80fe703f..92f42692 100644 --- a/test/pow/plug_test.exs +++ b/test/pow/plug_test.exs @@ -100,7 +100,7 @@ defmodule Pow.PlugTest do assert user = Plug.current_user(conn) assert session_id = conn.private[:plug_session]["auth"] assert {key, _metadata} = EtsCacheMock.get([namespace: "credentials"], session_id) - assert %{user: ^user} = EtsCacheMock.get([namespace: "credentials"], key) + assert EtsCacheMock.get([namespace: "credentials"], key) == user {:ok, conn} = Plug.clear_authenticated_user(conn) refute Plug.current_user(conn) diff --git a/test/pow/store/backend/ets_cache_test.exs b/test/pow/store/backend/ets_cache_test.exs index b99cacb4..14131b61 100644 --- a/test/pow/store/backend/ets_cache_test.exs +++ b/test/pow/store/backend/ets_cache_test.exs @@ -9,7 +9,7 @@ defmodule Pow.Store.Backend.EtsCacheTest do test "can put, get and delete records" do assert EtsCache.get(@default_config, "key") == :not_found - EtsCache.put(@default_config, "key", "value") + EtsCache.put(@default_config, {"key", "value"}) :timer.sleep(100) assert EtsCache.get(@default_config, "key") == "value" @@ -18,10 +18,17 @@ defmodule Pow.Store.Backend.EtsCacheTest do assert EtsCache.get(@default_config, "key") == :not_found end + test "can put multiple records at once" do + EtsCache.put(@default_config, [{"key1", "1"}, {"key2", "2"}]) + :timer.sleep(100) + assert EtsCache.get(@default_config, "key1") == "1" + assert EtsCache.get(@default_config, "key2") == "2" + end + test "with no `:ttl` option" do config = [namespace: "pow:test"] - EtsCache.put(config, "key", "value") + EtsCache.put(config, {"key", "value"}) :timer.sleep(100) assert EtsCache.get(config, "key") == "value" @@ -29,21 +36,28 @@ defmodule Pow.Store.Backend.EtsCacheTest do :timer.sleep(100) end - test "fetch keys" do - EtsCache.put(@default_config, "key1", "value") - EtsCache.put(@default_config, "key2", "value") + test "can match 
fetch all" do + EtsCache.put(@default_config, {"key1", "value"}) + EtsCache.put(@default_config, {"key2", "value"}) + EtsCache.put(@default_config, {["namespace", "key"], "value"}) :timer.sleep(100) - assert Enum.sort(EtsCache.keys(@default_config)) == ["key1", "key2"] + assert EtsCache.all(@default_config, :_) == [{"key1", "value"}, {"key2", "value"}] + assert EtsCache.all(@default_config, ["namespace", :_]) == [{["namespace", "key"], "value"}] end test "records auto purge" do config = Config.put(@default_config, :ttl, 100) - EtsCache.put(config, "key", "value") + EtsCache.put(config, {"key", "value"}) + EtsCache.put(config, [{"key1", "1"}, {"key2", "2"}]) :timer.sleep(50) assert EtsCache.get(config, "key") == "value" + assert EtsCache.get(config, "key1") == "1" + assert EtsCache.get(config, "key2") == "2" :timer.sleep(100) assert EtsCache.get(config, "key") == :not_found + assert EtsCache.get(config, "key1") == :not_found + assert EtsCache.get(config, "key2") == :not_found end end diff --git a/test/pow/store/backend/mnesia_cache_test.exs b/test/pow/store/backend/mnesia_cache_test.exs index 21df99a9..3eda14f6 100644 --- a/test/pow/store/backend/mnesia_cache_test.exs +++ b/test/pow/store/backend/mnesia_cache_test.exs @@ -33,7 +33,7 @@ defmodule Pow.Store.Backend.MnesiaCacheTest do test "can put, get and delete records with persistent storage" do assert MnesiaCache.get(@default_config, "key") == :not_found - MnesiaCache.put(@default_config, "key", "value") + MnesiaCache.put(@default_config, {"key", "value"}) :timer.sleep(100) assert MnesiaCache.get(@default_config, "key") == "value" @@ -46,35 +46,71 @@ defmodule Pow.Store.Backend.MnesiaCacheTest do assert MnesiaCache.get(@default_config, "key") == :not_found end + test "can put multiple records" do + assert MnesiaCache.get(@default_config, "key") == :not_found + + MnesiaCache.put(@default_config, [{"key1", "1"}, {"key2", "2"}]) + :timer.sleep(100) + assert MnesiaCache.get(@default_config, "key1") == "1" + assert MnesiaCache.get(@default_config, "key2") == "2" + + restart(@default_config) + + assert MnesiaCache.get(@default_config, "key1") == "1" + assert MnesiaCache.get(@default_config, "key2") == "2" + end + test "with no `:ttl` config option" do assert_raise ConfigError, "`:ttl` configuration option is required for Pow.Store.Backend.MnesiaCache", fn -> - MnesiaCache.put([namespace: "pow:test"], "key", "value") + MnesiaCache.put([namespace: "pow:test"], {"key", "value"}) end end - test "fetch keys" do - MnesiaCache.put(@default_config, "key1", "value") - MnesiaCache.put(@default_config, "key2", "value") + test "can match fetch all" do + MnesiaCache.put(@default_config, {"key1", "value"}) + MnesiaCache.put(@default_config, {"key2", "value"}) + MnesiaCache.put(@default_config, {["namespace", "key"], "value"}) :timer.sleep(100) - assert MnesiaCache.keys(@default_config) == ["key1", "key2"] + assert MnesiaCache.all(@default_config, :_) == [{"key1", "value"}, {"key2", "value"}] + assert MnesiaCache.all(@default_config, ["namespace", :_]) == [{["namespace", "key"], "value"}] end test "records auto purge with persistent storage" do config = Config.put(@default_config, :ttl, 100) - MnesiaCache.put(config, "key", "value") + MnesiaCache.put(config, {"key", "value"}) + MnesiaCache.put(config, [{"key1", "1"}, {"key2", "2"}]) :timer.sleep(50) assert MnesiaCache.get(config, "key") == "value" + assert MnesiaCache.get(config, "key1") == "1" + assert MnesiaCache.get(config, "key2") == "2" :timer.sleep(100) assert MnesiaCache.get(config, "key") == 
:not_found + assert MnesiaCache.get(config, "key1") == :not_found + assert MnesiaCache.get(config, "key2") == :not_found - MnesiaCache.put(config, "key", "value") + # After restart + MnesiaCache.put(config, {"key", "value"}) + MnesiaCache.put(config, [{"key1", "1"}, {"key2", "2"}]) :timer.sleep(50) restart(config) assert MnesiaCache.get(config, "key") == "value" + assert MnesiaCache.get(config, "key1") == "1" + assert MnesiaCache.get(config, "key2") == "2" :timer.sleep(100) assert MnesiaCache.get(config, "key") == :not_found + assert MnesiaCache.get(config, "key1") == :not_found + assert MnesiaCache.get(config, "key2") == :not_found + + # After record expiration updated reschedules + MnesiaCache.put(config, {"key", "value"}) + :timer.sleep(50) + :mnesia.dirty_write({MnesiaCache, ["pow:test", "key"], {"value", :os.system_time(:millisecond) + 150}}) + :timer.sleep(100) + assert MnesiaCache.get(config, "key") == "value" + :timer.sleep(50) + assert MnesiaCache.get(config, "key") == :not_found end end @@ -110,7 +146,7 @@ defmodule Pow.Store.Backend.MnesiaCacheTest do assert :rpc.call(node_a, :mnesia, :table_info, [MnesiaCache, :storage_type]) == :disc_copies assert :rpc.call(node_a, :mnesia, :system_info, [:extra_db_nodes]) == [] assert :rpc.call(node_a, :mnesia, :system_info, [:running_db_nodes]) == [node_a] - assert :rpc.call(node_a, MnesiaCache, :put, [@default_config, "key_set_on_a", "value"]) + assert :rpc.call(node_a, MnesiaCache, :put, [@default_config, {"key_set_on_a", "value"}]) :timer.sleep(50) assert :rpc.call(node_a, MnesiaCache, :get, [@default_config, "key_set_on_a"]) == "value" @@ -124,13 +160,13 @@ defmodule Pow.Store.Backend.MnesiaCacheTest do assert :rpc.call(node_b, MnesiaCache, :get, [@default_config, "key_set_on_a"]) == "value" # Write to node b can be fetched on node a - assert :rpc.call(node_b, MnesiaCache, :put, [@default_config, "key_set_on_b", "value"]) + assert :rpc.call(node_b, MnesiaCache, :put, [@default_config, {"key_set_on_b", "value"}]) :timer.sleep(50) assert :rpc.call(node_a, MnesiaCache, :get, [@default_config, "key_set_on_b"]) == "value" # Set short TTL on node a config = Config.put(@default_config, :ttl, 150) - assert :rpc.call(node_a, MnesiaCache, :put, [config, "short_ttl_key_set_on_a", "value"]) + assert :rpc.call(node_a, MnesiaCache, :put, [config, {"short_ttl_key_set_on_a", "value"}]) :timer.sleep(50) # Stop node a @@ -145,7 +181,7 @@ defmodule Pow.Store.Backend.MnesiaCacheTest do # Continue writing to node b with short TTL config = Config.put(@default_config, :ttl, @startup_wait_time + 100) - assert :rpc.call(node_b, MnesiaCache, :put, [config, "short_ttl_key_2_set_on_b", "value"]) + assert :rpc.call(node_b, MnesiaCache, :put, [config, {"short_ttl_key_2_set_on_b", "value"}]) :timer.sleep(50) assert :rpc.call(node_b, MnesiaCache, :get, [config, "short_ttl_key_2_set_on_b"]) == "value" @@ -188,7 +224,7 @@ defmodule Pow.Store.Backend.MnesiaCacheTest do :ok = :rpc.call(node_b, :mnesia, :dirty_write, [{:node_b_table, :key, "b"}]) # Ensure that data writing on node a is replicated on node b - assert :rpc.call(node_a, MnesiaCache, :put, [@default_config, "key_1", "value"]) + assert :rpc.call(node_a, MnesiaCache, :put, [@default_config, {"key_1", "value"}]) :timer.sleep(50) assert :rpc.call(node_a, MnesiaCache, :get, [@default_config, "key_1"]) == "value" assert :rpc.call(node_b, MnesiaCache, :get, [@default_config, "key_1"]) == "value" @@ -197,10 +233,10 @@ defmodule Pow.Store.Backend.MnesiaCacheTest do disconnect(node_b, node_a) # Continue writing on node a 
and node b - assert :rpc.call(node_a, MnesiaCache, :put, [@default_config, "key_1", "a"]) - assert :rpc.call(node_a, MnesiaCache, :put, [@default_config, "key_1_a", "value"]) - assert :rpc.call(node_b, MnesiaCache, :put, [@default_config, "key_1", "b"]) - assert :rpc.call(node_b, MnesiaCache, :put, [@default_config, "key_1_b", "value"]) + assert :rpc.call(node_a, MnesiaCache, :put, [@default_config, {"key_1", "a"}]) + assert :rpc.call(node_a, MnesiaCache, :put, [@default_config, {"key_1_a", "value"}]) + assert :rpc.call(node_b, MnesiaCache, :put, [@default_config, {"key_1", "b"}]) + assert :rpc.call(node_b, MnesiaCache, :put, [@default_config, {"key_1_b", "value"}]) :timer.sleep(50) assert :rpc.call(node_a, MnesiaCache, :get, [@default_config, "key_1"]) == "a" assert :rpc.call(node_a, MnesiaCache, :get, [@default_config, "key_1_a"]) == "value" @@ -291,4 +327,33 @@ defmodule Pow.Store.Backend.MnesiaCacheTest do true = :rpc.call(node_a, Node, :connect, [node_b]) :timer.sleep(500) end + + # TODO: Remove by 1.1.0 + describe "backwards compatible" do + setup do + :mnesia.kill() + + File.rm_rf!("tmp/mnesia") + File.mkdir_p!("tmp/mnesia") + + :ok + end + + test "removes old entries" do + :ok = :mnesia.start() + {:atomic, :ok} = :mnesia.change_table_copy_type(:schema, node(), :disc_copies) + {:atomic, :ok} = :mnesia.create_table(MnesiaCache, type: :set, disc_copies: [node()]) + :ok = :mnesia.wait_for_tables([MnesiaCache], :timer.seconds(15)) + + key = "#{@default_config[:namespace]}:key1" + + :ok = :mnesia.dirty_write({MnesiaCache, key, {"test", :os.system_time(:millisecond) + 100}}) + + :stopped = :mnesia.stop() + + start(@default_config) + + assert :mnesia.dirty_read({MnesiaCache, key}) == [] + end + end end diff --git a/test/pow/store/base_test.exs b/test/pow/store/base_test.exs index c5008e4a..6e26d3d3 100644 --- a/test/pow/store/base_test.exs +++ b/test/pow/store/base_test.exs @@ -40,4 +40,49 @@ defmodule Pow.Store.BaseTest do assert BaseMock.get(default_config, :test) == :value assert BaseMock.get(override_config, :test) == :not_found end + + defmodule BackwardsCompabilityMock do + def put(config, key, value) do + send(self(), {:put, key(config, key), value}) + end + + def get(config, key) do + send(self(), {:get, key(config, key)}) + + :value + end + + def delete(config, key) do + send(self(), {:delete, key(config, key)}) + + :ok + end + + def keys(_config) do + [:erlang.term_to_binary([BackwardsCompabilityMock, :id, 2])] + end + + defp key(config, key) do + "#{Pow.Config.get(config, :namespace, "cache")}:#{key}" + end + end + + # TODO: Remove by 1.1.0 + test "backwards compatible with binary keys support" do + config = [backend: BackwardsCompabilityMock] + + assert BaseMock.put(config, [BackwardsCompabilityMock, :id, 2], :value) == :ok + binary_key = "default_namespace:#{:erlang.term_to_binary([BackwardsCompabilityMock, :id, 2])}" + assert_received {:put, ^binary_key, :value} + + assert BaseMock.get(config, [BackwardsCompabilityMock, :id, 2]) == :value + assert_received {:get, ^binary_key} + + assert BaseMock.delete(config, [BackwardsCompabilityMock, :id, 2]) == :ok + assert_received {:delete, ^binary_key} + + assert BaseMock.all(config, [BackwardsCompabilityMock | :_]) == [{[BackwardsCompabilityMock, :id, 2], :value}] + assert BaseMock.all(config, [BackwardsCompabilityMock, :id, :_]) == [{[BackwardsCompabilityMock, :id, 2], :value}] + assert BaseMock.all(config, [BackwardsCompabilityMock, :id, 3]) == [] + end end diff --git a/test/pow/store/credentials_cache_test.exs 
b/test/pow/store/credentials_cache_test.exs index e1ab011d..89a94c20 100644 --- a/test/pow/store/credentials_cache_test.exs +++ b/test/pow/store/credentials_cache_test.exs @@ -30,15 +30,16 @@ defmodule Pow.Store.CredentialsCacheTest do assert CredentialsCache.get(@config, @backend_config, "key_3") == {user_2, a: 3} assert CredentialsCache.get(@config, @backend_config, "key_4") == {user_3, a: 4} - assert Enum.sort(CredentialsCache.user_session_keys(@config, @backend_config, User)) == ["pow/test/ecto/users/user_sessions_1", "pow/test/ecto/users/user_sessions_2"] - assert CredentialsCache.user_session_keys(@config, @backend_config, UsernameUser) == ["pow/test/ecto/users/username_user_sessions_1"] + assert Enum.sort(CredentialsCache.users(@config, @backend_config, User)) == [user_1, user_2] + assert CredentialsCache.users(@config, @backend_config, UsernameUser) == [user_3] assert CredentialsCache.sessions(@config, @backend_config, user_1) == ["key_1", "key_2"] assert CredentialsCache.sessions(@config, @backend_config, user_2) == ["key_3"] assert CredentialsCache.sessions(@config, @backend_config, user_3) == ["key_4"] - assert EtsCacheMock.get(@backend_config, "key_1") == {"#{Macro.underscore(User)}_sessions_1", a: 1} - assert EtsCacheMock.get(@backend_config, "#{Macro.underscore(User)}_sessions_1") == %{user: user_1, sessions: ["key_1", "key_2"]} + assert EtsCacheMock.get(@backend_config, "key_1") == {[User, :user, 1], a: 1} + assert EtsCacheMock.get(@backend_config, [User, :user, 1]) == user_1 + assert EtsCacheMock.get(@backend_config, [User, :user, 1, :session, "key_1"]) CredentialsCache.put(@config, @backend_config, "key_2", {%{user_1 | email: :updated}, a: 5}) assert CredentialsCache.get(@config, @backend_config, "key_1") == {%{user_1 | email: :updated}, a: 1} @@ -47,10 +48,16 @@ defmodule Pow.Store.CredentialsCacheTest do assert CredentialsCache.get(@config, @backend_config, "key_1") == :not_found assert CredentialsCache.sessions(@config, @backend_config, user_1) == ["key_2"] + assert EtsCacheMock.get(@backend_config, "key_1") == :not_found + assert EtsCacheMock.get(@backend_config, [User, :user, 1]) == %{user_1 | email: :updated} + assert EtsCacheMock.get(@backend_config, [User, :user, 1, :session, "key_1"]) == :not_found + assert CredentialsCache.delete(@config, @backend_config, "key_2") == :ok assert CredentialsCache.sessions(@config, @backend_config, user_1) == [] - assert EtsCacheMock.get(@backend_config, "#{Macro.underscore(User)}_sessions_1") == :not_found + assert EtsCacheMock.get(@backend_config, "key_1") == :not_found + assert EtsCacheMock.get(@backend_config, [User, :user, 1]) == %{user_1 | email: :updated} + assert EtsCacheMock.get(@backend_config, [User, :user, 1, :session, "key_1"]) == :not_found end test "raises for nil primary key value" do @@ -91,9 +98,11 @@ defmodule Pow.Store.CredentialsCacheTest do CredentialsCache.put(@config, @backend_config, "key_1", {%CompositePrimaryFieldsUser{}, a: 1}) end - CredentialsCache.put(@config, @backend_config, "key_1", {%CompositePrimaryFieldsUser{some_id: 1, another_id: 2}, a: 1}) + user = %CompositePrimaryFieldsUser{some_id: 1, another_id: 2} + + CredentialsCache.put(@config, @backend_config, "key_1", {user, a: 1}) - assert CredentialsCache.user_session_keys(@config, @backend_config, CompositePrimaryFieldsUser) == ["pow/store/credentials_cache_test/composite_primary_fields_user_sessions_another_id:2_some_id:1"] + assert CredentialsCache.users(@config, @backend_config, CompositePrimaryFieldsUser) == [user] end defmodule NonEctoUser do 
@@ -105,9 +114,11 @@ defmodule Pow.Store.CredentialsCacheTest do CredentialsCache.put(@config, @backend_config, "key_1", {%NonEctoUser{}, a: 1}) end - assert CredentialsCache.put(@config, @backend_config, "key_1", {%NonEctoUser{id: 1}, a: 1}) + user = %NonEctoUser{id: 1} + + assert CredentialsCache.put(@config, @backend_config, "key_1", {user, a: 1}) - assert CredentialsCache.user_session_keys(@config, @backend_config, NonEctoUser) == ["pow/store/credentials_cache_test/non_ecto_user_sessions_1"] + assert CredentialsCache.users(@config, @backend_config, NonEctoUser) == [user] end # TODO: Remove by 1.1.0 @@ -115,11 +126,22 @@ defmodule Pow.Store.CredentialsCacheTest do user_1 = %User{id: 1} timestamp = :os.system_time(:millisecond) - EtsCacheMock.put(@backend_config, "key_1", {user_1, inserted_at: timestamp}) + EtsCacheMock.put(@backend_config, {"key_1", {user_1, inserted_at: timestamp}}) assert CredentialsCache.get(@config, @backend_config, "key_1") == {user_1, inserted_at: timestamp} assert CredentialsCache.delete(@config, @backend_config, "key_1") == :ok assert CredentialsCache.get(@config, @backend_config, "key_1") == :not_found + + assert CredentialsCache.user_session_keys(@config, @backend_config, User) == [] + + user_2 = %UsernameUser{id: 1} + + CredentialsCache.put(@config, @backend_config, "key_1", {user_1, a: 1}) + CredentialsCache.put(@config, @backend_config, "key_2", {user_1, a: 1}) + CredentialsCache.put(@config, @backend_config, "key_3", {user_2, a: 1}) + + assert CredentialsCache.user_session_keys(@config, @backend_config, User) == [[Pow.Test.Ecto.Users.User, :user, 1, :session, "key_1"], [Pow.Test.Ecto.Users.User, :user, 1, :session, "key_2"]] + assert CredentialsCache.user_session_keys(@config, @backend_config, UsernameUser) == [[Pow.Test.Ecto.Users.UsernameUser, :user, 1, :session, "key_3"]] end describe "with EtsCache backend" do @@ -140,7 +162,7 @@ defmodule Pow.Store.CredentialsCacheTest do :timer.sleep(50) assert CredentialsCache.get(config, backend_config, "key_1") == :not_found assert CredentialsCache.get(config, backend_config, "key_2") == {user_1, a: 2} - assert CredentialsCache.sessions(config, backend_config, user_1) == ["key_1", "key_2"] + assert CredentialsCache.sessions(config, backend_config, user_1) == ["key_2"] CredentialsCache.put(config, backend_config ++ [ttl: 100], "key_2", {user_1, a: 3}) :timer.sleep(50) diff --git a/test/support/ets_cache_mock.ex b/test/support/ets_cache_mock.ex index 187d8beb..80d43316 100644 --- a/test/support/ets_cache_mock.ex +++ b/test/support/ets_cache_mock.ex @@ -2,7 +2,7 @@ defmodule Pow.Test.EtsCacheMock do @moduledoc false @tab __MODULE__ - def init, do: :ets.new(@tab, [:set, :protected, :named_table]) + def init, do: :ets.new(@tab, [:ordered_set, :protected, :named_table]) def get(config, key) do ets_key = ets_key(config, key) @@ -21,28 +21,25 @@ defmodule Pow.Test.EtsCacheMock do :ok end - def put(config, key, value) do - send(self(), {:ets, :put, key, value, config}) - :ets.insert(@tab, {ets_key(config, key), value}) + def put(config, record_or_records) do + records = List.wrap(record_or_records) + ets_records = Enum.map(records, fn {key, value} -> + {ets_key(config, key), value} + end) + + send(self(), {:ets, :put, records, config}) + :ets.insert(@tab, ets_records) end - def keys(config) do - namespace = ets_key(config, "") - length = String.length(namespace) + def all(config, match) do + ets_key_match = ets_key(config, match) - Stream.resource( - fn -> :ets.first(@tab) end, - fn :"$end_of_table" -> {:halt, nil} - 
previous_key -> {[previous_key], :ets.next(@tab, previous_key)} end, - fn _ -> :ok - end) - |> Enum.filter(&String.starts_with?(&1, namespace)) - |> Enum.map(&String.slice(&1, length..-1)) + @tab + |> :ets.select([{{ets_key_match, :_}, [], [:"$_"]}]) + |> Enum.map(fn {[_namespace | keys], value} -> {keys, value} end) end defp ets_key(config, key) do - namespace = Pow.Config.get(config, :namespace, "cache") - - "#{namespace}:#{key}" + [Keyword.get(config, :namespace, "cache")] ++ List.wrap(key) end end