|
defmodule Sqlitex.Server.StatementCache do
  @moduledoc """
  Implements a least-recently used (LRU) cache for prepared SQLite statements.

  Caches a fixed number of prepared statements and purges the statements which
  were least-recently used when that limit is exceeded.
  """

  # `cached_stmts` maps SQL strings to their prepared statements; `lru` holds
  # the same SQL strings ordered oldest-first, so the eviction victim is the head.
  defstruct db: false, size: 0, limit: 1, cached_stmts: %{}, lru: []

  @doc """
  Creates a new prepared statement cache.
  """
  def new({:connection, _, _} = db, limit) when is_integer(limit) and limit > 0 do
    %__MODULE__{db: db, limit: limit}
  end

  @doc """
  Given a statement cache and an SQL statement (string), returns a tuple containing
  the updated statement cache and a prepared SQL statement.

  If possible, reuses an existing prepared statement; if not, prepares the statement
  and adds it to the cache, possibly removing the least-recently used prepared
  statement if the designated cache size limit would be exceeded.

  Will return `{:error, reason}` if SQLite is unable to prepare the statement.
  """
  def prepare(%__MODULE__{cached_stmts: cached_stmts} = cache, sql)
      when is_binary(sql) and byte_size(sql) > 0 do
    if Map.has_key?(cached_stmts, sql) do
      {mark_recently_used(cache, sql), Map.fetch!(cached_stmts, sql)}
    else
      prepare_new_statement(cache, sql)
    end
  end

  # Prepares `sql` against the database, stores the result, evicts the oldest
  # entry if the cache overflowed, and marks the new key as most recently used.
  # A failure from Sqlitex.Statement.prepare/2 falls through the `with` and is
  # returned to the caller unchanged.
  defp prepare_new_statement(%__MODULE__{db: db} = cache, sql) do
    with {:ok, prepared} <- Sqlitex.Statement.prepare(db, sql) do
      updated =
        cache
        |> add_statement(sql, prepared)
        |> maybe_evict_oldest()
        |> mark_recently_used(sql)

      {updated, prepared}
    end
  end

  # Records a freshly prepared statement under its SQL text and bumps the count.
  defp add_statement(%__MODULE__{size: size, cached_stmts: stmts} = cache, sql, prepared) do
    %{cache | size: size + 1, cached_stmts: Map.put(stmts, sql, prepared)}
  end

  # Drops the least-recently used entry when the cache exceeds its limit.
  # The newly inserted key is never the victim because it is only appended to
  # the LRU list after this step runs.
  defp maybe_evict_oldest(
         %__MODULE__{size: size, limit: limit, cached_stmts: stmts, lru: [oldest | rest]} = cache
       )
       when size > limit do
    %{cache | size: size - 1, cached_stmts: Map.delete(stmts, oldest), lru: rest}
  end

  defp maybe_evict_oldest(cache), do: cache

  # Moves `sql` to the most-recently used (tail) end of the LRU list.
  defp mark_recently_used(%__MODULE__{lru: lru} = cache, sql) do
    remaining = Enum.reject(lru, fn entry -> entry == sql end)
    %{cache | lru: remaining ++ [sql]}
  end
end
0 commit comments