From 8fecbe1e6ac3dd0c5ec62713db40f441f87c8d35 Mon Sep 17 00:00:00 2001 From: zentourist Date: Thu, 2 Oct 2025 21:02:17 -0400 Subject: [PATCH] Mnesia DL: internal layout and in-mnesia Filter support --- .../dsls/DSL-Ash.DataLayer.Mnesia.md | 11 +- lib/ash/data_layer/mnesia/matchspec.ex | 253 ++++++++++++ lib/ash/data_layer/mnesia/mnesia.ex | 186 ++++++--- .../mnesia/transformers/define_records.ex | 258 ++++++++++++ test/ash/data_layer/mnesia/matchspec_test.exs | 388 ++++++++++++++++++ test/ash/data_layer/mnesia_test.exs | 3 +- 6 files changed, 1027 insertions(+), 72 deletions(-) create mode 100644 lib/ash/data_layer/mnesia/matchspec.ex create mode 100644 lib/ash/data_layer/mnesia/transformers/define_records.ex create mode 100644 test/ash/data_layer/mnesia/matchspec_test.exs diff --git a/documentation/dsls/DSL-Ash.DataLayer.Mnesia.md b/documentation/dsls/DSL-Ash.DataLayer.Mnesia.md index 032eb19b32..47be55f38f 100644 --- a/documentation/dsls/DSL-Ash.DataLayer.Mnesia.md +++ b/documentation/dsls/DSL-Ash.DataLayer.Mnesia.md @@ -9,9 +9,14 @@ In your application initialization, you will need to call `Mnesia.create_schema( Additionally, you will want to create your mnesia tables there. -This data layer is *unoptimized*, fetching all records from a table and filtering them -in memory. For that reason, it is not recommended to use it with large amounts of data. It can be -great for prototyping or light usage, though. +## Performance + +This data layer uses Mnesia matchspecs to push filter operations down to the database +level when possible. Supported filters (equality, comparison, boolean logic, etc.) are +converted to matchspecs for efficient querying. Unsupported filters fall back to +runtime filtering in memory. Due to this limitation, unsupported filtering may +have an effect on performance as filtering occurs in memory. For more on the +supported options see `Ash.DataLayer.Mnesia.MatchSpec`. ## mnesia diff --git a/lib/ash/data_layer/mnesia/matchspec.ex b/lib/ash/data_layer/mnesia/matchspec.ex new file mode 100644 index 0000000000..d30c7e299c --- /dev/null +++ b/lib/ash/data_layer/mnesia/matchspec.ex @@ -0,0 +1,253 @@ +defmodule Ash.DataLayer.Mnesia.MatchSpec do + @moduledoc """ + Converts an `Ash.Filter` to Mnesia matchspecs for efficient querying. + + ## Supported Operators + + * Equality: `Eq`, `NotEq`, `IsNil` + * Comparison: `GreaterThan`, `GreaterThanOrEqual`, `LessThan`, `LessThanOrEqual` + * Logical: `And`, `Or`, `Not` + * Membership: `In` + + ## Limitations and room for improvement: + + * Right now we are building up a Guard and using wildcards for the MatchHead. + This will be much faster than the [Runtime + filters](lib/ash/filter/runtime.ex), but it could be improved upon by + determining when to use a simple MatchHead if the filters are sufficiently + simple. + + * We are always returning the full record (:"$_"). We could optimize for + `select` queries by returning only the necessary fields. + + * There is no support for [Has queries](lib/ash/query/operator/has.ex). + """ + + @typedoc """ + A guard expression used in Mnesia matchspecs. + Guards are tuples representing conditional expressions. + + The grammar for the MatchSpec Guards is available on the + [erts matchspec page](https://www.erlang.org/doc/apps/erts/match_spec). + """ + @type guard :: tuple() | atom() | boolean() + + @typedoc """ + The MatchSpec uses positional arguments to represent the fields being matched. + This is a mapping of our `Ash.Resource` fields to matchspec variables (e.g., + :"$1", :"$2"). 
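+
+  For example, a resource whose first two attributes are `:id` and `:name`
+  would (assuming that attribute order in the Mnesia table) produce a field
+  map like `%{id: :"$1", name: :"$2"}`.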
+ """ + @type field_map :: %{atom() => atom()} + + @typedoc """ + A complete Mnesia matchspec in the format [{match_head, [guards], [result]}]. + + For more information about the parts of a MatchSpec, see Mnesia's + [select/3](https://www.erlang.org/doc/apps/mnesia/mnesia.html#select/3). + """ + @type matchspec :: [{tuple(), [guard()], [term()]}] + + alias Ash.Query + + alias Ash.Query.Operator.{ + Eq, + GreaterThan, + GreaterThanOrEqual, + In, + IsNil, + LessThan, + LessThanOrEqual, + NotEq + } + + @doc """ + Converts an Ash.Filter to a complete Mnesia matchspec. + + Returns a matchspec in the format: [{match_head, [guards], [result]}] + where the result is the full record (:"$_"). + + ## Examples + + iex> filter = %Ash.Filter{resource: MyResource, expression: ...} + iex> to_matchspec(filter) + {:ok, [{match_head, [guards], [:"$_"]}]} + + """ + @spec to_matchspec(Ash.Filter.t() | nil) :: {:ok, matchspec()} | {:error, String.t()} + def to_matchspec(%Ash.Filter{expression: expression, resource: resource}) do + field_map = field_to_number_map(resource) + match_head = build_match_head(resource) + + case parse(expression, field_map) do + {:ok, guards} -> + # Return the full record + result = :"$_" + {:ok, [{match_head, [guards], [result]}]} + + {:error, _} = error -> + error + end + end + + def to_matchspec(nil) do + {:ok, [{:_, [], [:"$_"]}]} + end + + @spec field_to_number_map(Ash.Resource.t()) :: field_map() + defp field_to_number_map(resource) do + attribute_names = Ash.Resource.Info.attributes(resource) |> Enum.map(& &1.name) + + Ash.DataLayer.Mnesia.Info.table(resource) + |> :mnesia.table_info(:wild_pattern) + |> Tuple.to_list() + |> Enum.with_index() + |> Enum.reduce(%{}, fn val, acc -> + case val do + # NOTE: Short of adding a large list of atoms in a module attribute, we + # will get a Sobelow warning for this "unsafe" string to atom conversion + {:_, index} -> Map.put(acc, Enum.at(attribute_names, index - 1), :"$#{index}") + _ -> acc + end + end) + end + + @spec build_match_head(Ash.Resource.t()) :: tuple() + defp build_match_head(resource) do + resource.to_ex_record(%{}, :position) + end + + @doc """ + Parses an Ash.Filter or filter expression into a matchspec guard. 
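+
+  Boolean expressions are handled recursively, so with a field map of
+  `%{name: :"$1", age: :"$2"}` a filter such as `name == "John" and age > 18`
+  becomes the nested guard `{:andalso, {:==, :"$1", "John"}, {:>, :"$2", 18}}`.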
+ + ## Examples + + iex> filter = %Ash.Filter{expression: expr, resource: MyResource} + iex> field_map = %{name: :"$1", age: :"$2"} + iex> parse(filter, field_map) + {:ok, guard} + + """ + @spec parse(term(), field_map()) :: {:ok, guard()} | {:error, String.t()} + def parse(expression, field_map) + + def parse(%Query.BooleanExpression{op: :and, left: left, right: right} = _expression, field_map) do + with {:ok, left_match} <- parse(left, field_map), + {:ok, right_match} <- parse(right, field_map) do + {:ok, {:andalso, left_match, right_match}} + end + end + + def parse(%Query.BooleanExpression{op: :or, left: left, right: right} = _expression, field_map) do + with {:ok, left_match} <- parse(left, field_map), + {:ok, right_match} <- parse(right, field_map) do + {:ok, {:orelse, left_match, right_match}} + end + end + + def parse(%Query.Not{expression: expression}, field_map) do + with {:ok, match} <- parse(expression, field_map) do + {:ok, {:not, match}} + end + end + + def parse(%Eq{left: field, right: value}, field_map) do + with {:ok, field_var} <- get_field_var(field, field_map) do + {:ok, {:==, field_var, value}} + end + end + + def parse(%NotEq{left: field, right: value}, field_map) do + with {:ok, field_var} <- get_field_var(field, field_map) do + {:ok, {:"=/=", field_var, value}} + end + end + + def parse(%GreaterThan{left: field, right: value}, field_map) do + with {:ok, field_var} <- get_field_var(field, field_map) do + {:ok, {:>, field_var, value}} + end + end + + def parse(%GreaterThanOrEqual{left: field, right: value}, field_map) do + with {:ok, field_var} <- get_field_var(field, field_map) do + {:ok, {:>=, field_var, value}} + end + end + + def parse(%LessThan{left: field, right: value}, field_map) do + with {:ok, field_var} <- get_field_var(field, field_map) do + {:ok, {:<, field_var, value}} + end + end + + def parse(%LessThanOrEqual{left: field, right: value}, field_map) do + with {:ok, field_var} <- get_field_var(field, field_map) do + {:ok, {:<=, field_var, value}} + end + end + + def parse(%In{left: _field, right: []}, _field_map) do + {:ok, false} + end + + def parse(%In{left: field, right: values}, field_map) do + with {:ok, guards} <- collect_results(values, field, field_map) do + case guards do + [] -> + {:ok, false} + + [single] -> + {:ok, single} + + [first | rest] -> + result = + Enum.reduce(rest, first, fn guard, acc -> + {:orelse, acc, guard} + end) + + {:ok, result} + end + end + end + + def parse(%IsNil{left: left}, field_map) do + parse(%Eq{left: left, right: nil}, field_map) + end + + def parse(expression, _field_map) do + {:error, "Unsupported filter expression: #{inspect(expression)}"} + end + + # Used with the `In` parser to properly handle errors and return a list of + # guards. 
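+  #
+  # For example, an `In` filter on `name` with values `["John", "Jane", "Bob"]`
+  # parses each value as an `Eq`, and the `In` clause above folds the results
+  # into `{:orelse, {:orelse, {:==, name_var, "John"}, {:==, name_var, "Jane"}}, {:==, name_var, "Bob"}}`,
+  # where `name_var` stands for the matchspec variable bound to `name` (e.g. `:"$2"`).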
+ defp collect_results(values, field, field_map) do + values + |> Enum.reduce_while({:ok, []}, fn value, {:ok, acc} -> + case parse(%Eq{left: field, right: value}, field_map) do + {:ok, guard} -> {:cont, {:ok, [guard | acc]}} + {:error, _} = error -> {:halt, error} + end + end) + |> case do + {:ok, guards} -> {:ok, Enum.reverse(guards)} + error -> error + end + end + + defp get_field_var(%Query.Ref{relationship_path: path}, _field_map) + when path != [] do + {:error, "Relationship traversal not supported in Mnesia matchspecs"} + end + + defp get_field_var(field, field_map) do + field_name = Query.Ref.name(field) + + case Map.fetch(field_map, field_name) do + {:ok, var} -> + {:ok, var} + + :error -> + {:error, "Unknown field: #{field_name}"} + end + end +end diff --git a/lib/ash/data_layer/mnesia/mnesia.ex b/lib/ash/data_layer/mnesia/mnesia.ex index b60905d30c..9ceba0baeb 100644 --- a/lib/ash/data_layer/mnesia/mnesia.ex +++ b/lib/ash/data_layer/mnesia/mnesia.ex @@ -1,5 +1,6 @@ defmodule Ash.DataLayer.Mnesia do @behaviour Ash.DataLayer + require Logger @mnesia %Spark.Dsl.Section{ name: :mnesia, @@ -28,13 +29,19 @@ defmodule Ash.DataLayer.Mnesia do Additionally, you will want to create your mnesia tables there. - This data layer is *unoptimized*, fetching all records from a table and filtering them - in memory. For that reason, it is not recommended to use it with large amounts of data. It can be - great for prototyping or light usage, though. + ## Performance + + This data layer uses Mnesia matchspecs to push filter operations down to the database + level when possible. Supported filters (equality, comparison, boolean logic, etc.) are + converted to matchspecs for efficient querying. Unsupported filters fall back to + runtime filtering in memory. Due to this limitation, unsupported filtering may + have an effect on performance as filtering occurs in memory. For more on the + supported options see `Ash.DataLayer.Mnesia.MatchSpec`. """ use Spark.Dsl.Extension, sections: [@mnesia], + persisters: [Ash.DataLayer.Mnesia.Transformers.DefineRecords], verifiers: [Ash.DataLayer.Verifiers.RequirePreCheckWith] alias Ash.Actions.Sort @@ -53,12 +60,12 @@ defmodule Ash.DataLayer.Mnesia do |> Ash.Domain.Info.resources() |> Enum.concat(resources) |> Enum.filter(&(__MODULE__ in Spark.extensions(&1))) - |> Enum.flat_map(fn resource -> - resource - |> Ash.DataLayer.Mnesia.Info.table() - |> List.wrap() + |> Enum.each(fn resource -> + table = Ash.DataLayer.Mnesia.Info.table(resource) + attributes = resource.mnesia_record_info() + # TODO: Implement configurable type + Mnesia.create_table(table, attributes: attributes, type: :ordered_set) end) - |> Enum.each(&Mnesia.create_table(&1, attributes: [:_pkey, :val])) end defmodule Query do @@ -154,6 +161,8 @@ defmodule Ash.DataLayer.Mnesia do @impl true def in_transaction?(_), do: Mnesia.is_transaction() + def in_transaction?, do: Mnesia.is_transaction() + @doc false @impl true def limit(query, offset, _), do: {:ok, %{query | limit: offset}} @@ -237,6 +246,9 @@ defmodule Ash.DataLayer.Mnesia do {:error, error} end |> case do + {:ok, acc} -> + {:ok, acc} + {:error, error} -> {:error, Ash.Error.to_ash_error(error)} @@ -274,17 +286,39 @@ defmodule Ash.DataLayer.Mnesia do }, _resource ) do + # Build matchspec from filter if possible. If this fails, we will fall back + # to runtime filtering. This will only happen for the small subset of + # filters we have not implemented. 
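+    #
+    # `build_matchspec/1` always returns a usable matchspec: either one derived
+    # from the filter (status `:ok`) or the catch-all `[{:_, [], [:"$_"]}]`
+    # (status `:runtime`), in which case the filter is re-applied in memory
+    # further down.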
+ {matchspec_status, matchspec} = build_matchspec(filter) + with {:atomic, records} <- Mnesia.transaction(fn -> - Mnesia.select(Ash.DataLayer.Mnesia.Info.table(resource), [{:_, [], [:"$_"]}]) + Mnesia.select(Ash.DataLayer.Mnesia.Info.table(resource), matchspec) end), + # Convert to Ash.Resource from Erlang Records {:ok, records} <- - records |> Enum.map(&elem(&1, 2)) |> Ash.DataLayer.Ets.cast_records(resource), + records + |> Enum.map(&resource.from_ex_record(&1)) + |> then(fn records -> {:ok, records} end), + # Do runtime filtering if our matchspec failed {:ok, filtered} <- - filter_matches(records, filter, domain, tenant, context[:private][:actor]), + filter_if_not_using_matchspec( + matchspec_status, + records, + filter, + domain, + tenant, + context[:private][:actor] + ), + # Runtime sort, offset, and limit + # + # TODO: I think we can implement this in Mnesia, but we need to figure + # out all of the details based on the table types (bag, set, + # ordered_set) offset_records <- filtered |> Sort.runtime_sort(sort, domain: domain) |> Enum.drop(offset || 0), limited_records <- do_limit(offset_records, limit), + # Add aggregates {:ok, records} <- Ash.DataLayer.Ets.do_add_aggregates( limited_records, @@ -292,6 +326,7 @@ defmodule Ash.DataLayer.Mnesia do resource, aggregates ), + # Add calculations {:ok, records} <- Ash.DataLayer.Ets.do_add_calculations( records, @@ -309,6 +344,26 @@ defmodule Ash.DataLayer.Mnesia do end end + defp filter_if_not_using_matchspec(matchspec_status, records, filter, domain, tenant, actor) do + if matchspec_status == :ok do + {:ok, records} + else + filter_matches(records, filter, domain, tenant, actor) + end + end + + defp build_matchspec(filter) do + case Ash.DataLayer.Mnesia.MatchSpec.to_matchspec(filter) do + {:ok, matchspec} -> + {:ok, matchspec} + + {:error, reason} -> + Logger.debug("Unable to convert filter to matchspec: #{reason}. Using runtime filtering.") + {:ok, matchspec} = Ash.DataLayer.Mnesia.MatchSpec.to_matchspec(nil) + {:runtime, matchspec} + end + end + defp do_limit(records, nil), do: records defp do_limit(records, limit), do: Enum.take(records, limit) @@ -401,7 +456,7 @@ defmodule Ash.DataLayer.Mnesia do Mnesia.transaction(fn -> Enum.reduce_while(stream, {:ok, []}, fn changeset, {:ok, results} -> # Sending in `false` prevents a transaction for every write - case create(resource, changeset, with_transaction: false) do + case create(resource, changeset) do {:ok, result} -> result = if options[:return_records?] 
do @@ -425,53 +480,34 @@ defmodule Ash.DataLayer.Mnesia do @doc false @impl true - def create(resource, changeset, opts \\ []) do + def create(resource, changeset) do {:ok, record} = Ash.Changeset.apply_attributes(changeset) - pkey = - resource - |> Ash.Resource.Info.primary_key() - |> Enum.map(fn attr -> - Map.get(record, attr) - end) + ex_record = resource.to_ex_record(record) - resource - |> Ash.Resource.Info.attributes() - |> Map.new(&{&1.name, Map.get(record, &1.name)}) - |> Ash.DataLayer.Ets.dump_to_native(Ash.Resource.Info.attributes(resource)) - |> case do - {:ok, values} -> - case do_write( - Ash.DataLayer.Mnesia.Info.table(resource), - pkey, - values, - Keyword.get(opts, :with_transaction, true) - ) do - # If with_transaction is false, we are in a transaction and will only return :ok - :ok -> - {:ok, %{record | __meta__: %Ecto.Schema.Metadata{state: :loaded, schema: resource}}} - - {:atomic, _} -> - {:ok, %{record | __meta__: %Ecto.Schema.Metadata{state: :loaded, schema: resource}}} + case do_write(fn -> + Mnesia.write(ex_record) + end) do + :ok -> + {:ok, %{record | __meta__: %Ecto.Schema.Metadata{state: :loaded, schema: resource}}} - {:aborted, error} -> - {:error, error} - end + {:atomic, :ok} -> + {:ok, %{record | __meta__: %Ecto.Schema.Metadata{state: :loaded, schema: resource}}} - {:error, error} -> - {:error, error} + {:aborted, reason} -> + {:error, reason} end end # This allows for writing to Mnesia without a transaction in case one was # started elsewhere. This was explicitly created for `bulk_create/3`. - defp do_write(table, pkey, values, with_transaction) do - if with_transaction && !Mnesia.is_transaction() do + defp do_write(write_fn) do + if in_transaction?() do + write_fn.() + else Mnesia.transaction(fn -> - Mnesia.write({table, pkey, values}) + write_fn.() end) - else - Mnesia.write({table, pkey, values}) end end @@ -482,6 +518,10 @@ defmodule Ash.DataLayer.Mnesia do resource |> Ash.Resource.Info.primary_key() |> Enum.map(&Map.get(record, &1)) + |> case do + [value] -> value + [first | rest] -> [first | rest] |> List.to_tuple() + end result = Mnesia.transaction(fn -> @@ -497,15 +537,17 @@ defmodule Ash.DataLayer.Mnesia do @doc false @impl true def update(resource, changeset) do - pkey = pkey_list(resource, changeset.data) + {:ok, pkey} = pkey_val(resource, changeset.data) result = Mnesia.transaction(fn -> with {:ok, record} <- Ash.Changeset.apply_attributes(%{changeset | action_type: :update}), {:ok, record} <- do_update(Ash.DataLayer.Mnesia.Info.table(resource), {pkey, record}, resource), - {:ok, record} <- Ash.DataLayer.Ets.cast_record(record, resource) do - new_pkey = pkey_list(resource, record) + {:ok, record} <- + resource.from_ex_record(record) + |> then(&{:ok, &1}) do + {:ok, new_pkey} = pkey_val(resource, record) if new_pkey != pkey do case destroy(resource, changeset) do @@ -554,29 +596,37 @@ defmodule Ash.DataLayer.Mnesia do end end - defp pkey_list(resource, data) do + # This will return primary keys in the way that Mnesia expects them. If there + # is a single primary key, it will be returned as a single value. If there are + # multiple primary keys, they will be returned as a tuple. This will always be + # stored as the first value in an Mnesia record. 
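+  #
+  # For example, a resource keyed on `[:id]` yields `{:ok, record.id}`, while
+  # a resource keyed on `[:org_id, :id]` yields `{:ok, {record.org_id, record.id}}`.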
+ defp pkey_val(resource, data) do resource |> Ash.Resource.Info.primary_key() |> Enum.map(&Map.get(data, &1)) + |> case do + [pkey] -> + {:ok, pkey} + + [_ | _] = pkeys -> + {:ok, List.to_tuple(pkeys)} + + _ -> + {:error, "Invalid primary key"} + end end defp do_update(table, {pkey, record}, resource) do - attributes = Ash.Resource.Info.attributes(resource) - - case Ash.DataLayer.Ets.dump_to_native(record, attributes) do - {:ok, casted} -> - case Mnesia.read({Ash.DataLayer.Mnesia.Info.table(resource), pkey}) do - [] -> - {:error, "Record not found matching: #{inspect(pkey)}"} - - [{_, _, record}] -> - Mnesia.write({table, pkey, Map.merge(record, casted)}) - [{_, _, record}] = Mnesia.read({table, pkey}) - {:ok, record} - end - - {:error, error} -> - {:error, error} + case Mnesia.read({table, pkey}) do + [] -> + {:error, "Record not found matching: #{inspect(pkey)}"} + + [ex_record] when is_tuple(ex_record) -> + old_record = resource.from_ex_record(ex_record) + new_record = Map.merge(old_record, record) + new_ex_record = resource.to_ex_record(new_record) + :ok = Mnesia.write(new_ex_record) + {:ok, new_ex_record} end end @@ -613,6 +663,8 @@ defmodule Ash.DataLayer.Mnesia do |> Map.put(:data, result) |> Ash.Changeset.force_change_attributes(to_set) + # TODO: we are fetching the record above, but `update/2` is going to + # fetch it again. This should be optimized to avoid redundant fetches. update(resource, changeset) {:ok, _} -> diff --git a/lib/ash/data_layer/mnesia/transformers/define_records.ex b/lib/ash/data_layer/mnesia/transformers/define_records.ex new file mode 100644 index 0000000000..f71e327f01 --- /dev/null +++ b/lib/ash/data_layer/mnesia/transformers/define_records.ex @@ -0,0 +1,258 @@ +defmodule Ash.DataLayer.Mnesia.Transformers.DefineRecords do + @moduledoc """ + Generates Erlang record definitions for Mnesia resources at compile time. + + This transformer reads the resource's attributes and creates an Erlang record + definition using Record.defrecord, making Mnesia operations more efficient and + enabling pattern matching on records. + + For compound primary keys, the first field in the record will be a tuple + containing all primary key values. It is recommended to specify all of the + primary keys first and then normal attributes. It is also recommended to leave + the ordering as static as possible. If you are rebuilding the Mnesia tables + every time, it doesn't matter. 
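+
+  For example, a resource with primary key `[:id]` and attributes `[:id, :name]`
+  is stored as `{table, id, name}`, while a resource keyed on `[:org_id, :id]`
+  with attributes `[:org_id, :id, :name]` is stored as `{table, {org_id, id}, name}`,
+  with the compound key in the generated `:_pkey` slot. The generated
+  `to_ex_record/2` builds these tuples from a map or keyword list; for the
+  simple-key resource above, `to_ex_record(%{name: "foo"})` would return
+  `{table, nil, "foo"}`, and `to_ex_record(%{}, :position)` would return the
+  positional match head `{table, :"$1", :"$2"}` used by
+  `Ash.DataLayer.Mnesia.MatchSpec`.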
+ """ + + use Spark.Dsl.Transformer + + alias Spark.Dsl.Transformer + + @doc false + def transform(dsl_state) do + resource = Transformer.get_persisted(dsl_state, :module) + attributes = Ash.Resource.Info.attributes(dsl_state) + primary_key = Ash.Resource.Info.primary_key(dsl_state) + + # Get table name from mnesia configuration or default to resource name + table = get_table_name(dsl_state, resource) + + # Build field list with defaults for the record + fields = + build_field_list(attributes, primary_key) + + fields = + if length(fields) == 1 do + fields ++ [{:__meta__, nil}] + else + fields + end + + # Inject the record definition and helper functions + dsl_state = inject_record_code(dsl_state, table, fields, attributes, primary_key) + + {:ok, dsl_state} + end + + defp get_table_name(dsl_state, resource) do + case Ash.DataLayer.Mnesia.Info.table(dsl_state) do + table when is_atom(table) -> + table + + _ -> + resource + end + end + + defp build_field_list(attributes, primary_key) do + # For compound primary keys (length > 1), we need to structure the record differently + # The first field will be a tuple of the PK values + if length(primary_key) > 1 do + pk_tuple = primary_key |> Enum.map(fn _ -> nil end) |> List.to_tuple() + + non_pk_attrs = + Enum.reject(attributes, & &1.primary_key?) + |> Enum.map(fn attr -> {attr.name, nil} end) + + [{:_pkey, pk_tuple} | non_pk_attrs] + else + Enum.map(attributes, &{&1.name, nil}) + end + end + + # This is a bit tricky of a function so let me explain... + # Since Elixir/Erlang Records are compile time contructs, it is not easy to + # dynamically define a subset of them at runtime. For instance, if you sepcify + # a `Record.defrecord(:mytable, [a: nil, b: nil])`, you can explicitly create + # the record with a subset of the attribs like `mytable(b: "foo")`. This + # doesn't work in macros very well though. This function is creating the AST + # for assigning records at runtime. So if you do a `resource.to_ex_record(%{b: + # "foo"})` it will fill in the missing fields correctly. This just adds + # defaults of `nil` because `Ash.Resource` is handling defaults, but we could + # extend this to handle other types of defaults as well. 
+ defp build_field_assignments(field_names, is_compound_key, attributes, primary_key) do + if is_compound_key do + # First field is the compound key tuple + pk_tuple_assignment = + quote do + { + unquote_splicing( + Enum.with_index(primary_key, 1) + |> Enum.map(fn {pk, idx} -> + expand_value(pk, idx) + end) + ) + } + end + + # Non-PK field assignments + non_pk_fields = Enum.reject(attributes, &(&1.name in primary_key)) + + non_pk_assignments = + Enum.with_index(non_pk_fields, length(primary_key) + 1) + |> Enum.map(fn {attr, idx} -> + {attr.name, expand_value(attr.name, idx)} + end) + + [{:_pkey, pk_tuple_assignment} | non_pk_assignments] + else + Enum.with_index(field_names, 1) + |> Enum.map(fn {field, idx} -> + # {field, quote(do: Map.get(attrs, unquote(field), default))} + {field, expand_value(field, idx)} + end) + end + end + + defp expand_value(field, idx) do + quote do + if default == :position do + Map.get(attrs, unquote(field), :"$#{unquote(idx)}") + else + Map.get(attrs, unquote(field), default) + end + end + end + + defp inject_record_code(dsl_state, table, fields, attributes, primary_key) do + is_compound_key = length(primary_key) > 1 + field_names = Enum.map(attributes, & &1.name) + + field_assignments = + build_field_assignments(field_names, is_compound_key, attributes, primary_key) + + Transformer.eval( + dsl_state, + [ + table: table, + fields: fields, + field_names: field_names, + primary_key: primary_key, + is_compound_key: is_compound_key, + field_assignments: field_assignments + ], + quote do + require Record + + Record.defrecordp(unquote(table), unquote(fields)) + + # Store field defaults as module attribute + @is_compound_key unquote(is_compound_key) + @primary_key_fields unquote(primary_key) + + @doc """ + Returns the field names for the Mnesia record, suitable for use with + `Mnesia.create_table/2` as the `:attributes` option. + + For compound primary keys, returns `[:_pkey | non_pk_fields]`. + For simple primary keys, returns all attribute names. + + ## Examples + + iex> #{inspect(__MODULE__)}.mnesia_record_info() + [:id, :name, :age, :email] + """ + def mnesia_record_info do + if @is_compound_key do + non_pk_fields = Enum.reject(unquote(field_names), &(&1 in @primary_key_fields)) + + case [:_pkey | non_pk_fields] do + [:_pkey] -> [:_pkey, :_meta] + fields -> fields + end + else + unquote(field_names) + end + end + + @doc """ + Creates a new #{unquote(table)} record from a map or keyword list. + + ## Examples + + iex> #{inspect(__MODULE__)}.to_ex_record(id: 1, name: "Alice") + {#{inspect(unquote(table))}, 1, "Alice", ...} + + iex> #{inspect(__MODULE__)}.to_ex_record(%{id: 1, name: "Alice"}) + {#{inspect(unquote(table))}, 1, "Alice", ...} + """ + + def to_ex_record(attrs, default \\ nil) + + def to_ex_record(attrs, default) when is_list(attrs) do + to_ex_record(Map.new(attrs), default) + end + + def to_ex_record(attrs, default) when is_map(attrs) do + unquote(table)(unquote(field_assignments)) + end + + @doc """ + Converts a #{unquote(table)} record to a map. + + ## Examples + + iex> record = #{inspect(__MODULE__)}.to_ex_record(id: 1, name: "Alice") + iex> #{inspect(__MODULE__)}.record_to_map(record) + %{id: 1, name: "Alice", ...} + """ + def record_to_map(record) when is_tuple(record) do + # Skip the first element (record tag, i.e. 
table name) + [_tag | values] = Tuple.to_list(record) + + if @is_compound_key do + # First value is the compound key tuple + [pk_tuple | rest_values] = values + + pk_values = Tuple.to_list(pk_tuple) + + pk_map = + @primary_key_fields + |> Enum.zip(pk_values) + |> Enum.into(%{}) + + non_pk_fields = Enum.reject(unquote(field_names), &(&1 in @primary_key_fields)) + + non_pk_map = + non_pk_fields + |> Enum.zip(rest_values) + |> Enum.into(%{}) + + Map.merge(pk_map, non_pk_map) + else + # Simple key - same as before + field_names = unquote(field_names) + + field_names + |> Enum.zip(values) + |> Enum.into(%{}) + end + end + + @doc """ + Converts a #{unquote(table)} record to an Ash Resource struct. + + ## Examples + + iex> record = #{inspect(__MODULE__)}.to_ex_record(id: 1, name: "Alice") + iex> #{inspect(__MODULE__)}.from_ex_record(record) + %#{inspect(__MODULE__)}{id: 1, name: "Alice", ...} + """ + def from_ex_record(record) when is_tuple(record) do + record + |> record_to_map() + |> then(&struct(__MODULE__, &1)) + end + end + ) + end +end diff --git a/test/ash/data_layer/mnesia/matchspec_test.exs b/test/ash/data_layer/mnesia/matchspec_test.exs new file mode 100644 index 0000000000..7e09945291 --- /dev/null +++ b/test/ash/data_layer/mnesia/matchspec_test.exs @@ -0,0 +1,388 @@ +defmodule Ash.DataLayer.Mnesia.MatchSpecTest do + @moduledoc false + use ExUnit.Case, async: true + + alias Ash.DataLayer.Mnesia.MatchSpec + alias Ash.Query.BooleanExpression + alias Ash.Query.Not + alias Ash.Query.Ref + + alias Ash.Query.Operator.{ + Eq, + GreaterThan, + GreaterThanOrEqual, + In, + IsNil, + LessThan, + LessThanOrEqual, + NotEq + } + + # Helper to create a simple field reference + defp ref(name) do + %Ref{ + attribute: name, + relationship_path: [], + resource: nil + } + end + + # Sample field map for testing + defp field_map do + %{ + id: :"$1", + name: :"$2", + age: :"$3", + email: :"$4" + } + end + + describe "parse/2 - equality operators" do + test "parses Eq operator" do + expr = %Eq{left: ref(:name), right: "John"} + + assert {:ok, {:==, :"$2", "John"}} = MatchSpec.parse(expr, field_map()) + end + + test "parses NotEq operator" do + expr = %NotEq{left: ref(:age), right: 25} + + assert {:ok, {:"=/=", :"$3", 25}} = MatchSpec.parse(expr, field_map()) + end + + test "parses IsNil operator" do + expr = %IsNil{left: ref(:email)} + + assert {:ok, {:==, :"$4", nil}} = MatchSpec.parse(expr, field_map()) + end + end + + describe "parse/2 - comparison operators" do + test "parses GreaterThan operator" do + expr = %GreaterThan{left: ref(:age), right: 18} + + assert {:ok, {:>, :"$3", 18}} = MatchSpec.parse(expr, field_map()) + end + + test "parses GreaterThanOrEqual operator" do + expr = %GreaterThanOrEqual{left: ref(:age), right: 21} + + assert {:ok, {:>=, :"$3", 21}} = MatchSpec.parse(expr, field_map()) + end + + test "parses LessThan operator" do + expr = %LessThan{left: ref(:age), right: 65} + + assert {:ok, {:<, :"$3", 65}} = MatchSpec.parse(expr, field_map()) + end + + test "parses LessThanOrEqual operator" do + expr = %LessThanOrEqual{left: ref(:age), right: 100} + + assert {:ok, {:<=, :"$3", 100}} = MatchSpec.parse(expr, field_map()) + end + end + + describe "parse/2 - boolean expressions" do + test "parses AND expression" do + left = %Eq{left: ref(:name), right: "John"} + right = %GreaterThan{left: ref(:age), right: 18} + expr = %BooleanExpression{op: :and, left: left, right: right} + + assert {:ok, {:andalso, {:==, :"$2", "John"}, {:>, :"$3", 18}}} = + MatchSpec.parse(expr, field_map()) + end + + test 
"parses OR expression" do + left = %Eq{left: ref(:name), right: "John"} + right = %Eq{left: ref(:name), right: "Jane"} + expr = %BooleanExpression{op: :or, left: left, right: right} + + assert {:ok, {:orelse, {:==, :"$2", "John"}, {:==, :"$2", "Jane"}}} = + MatchSpec.parse(expr, field_map()) + end + + test "parses nested AND/OR expressions" do + # (name == "John" AND age > 18) OR (name == "Jane" AND age > 21) + left_and = %BooleanExpression{ + op: :and, + left: %Eq{left: ref(:name), right: "John"}, + right: %GreaterThan{left: ref(:age), right: 18} + } + + right_and = %BooleanExpression{ + op: :and, + left: %Eq{left: ref(:name), right: "Jane"}, + right: %GreaterThan{left: ref(:age), right: 21} + } + + expr = %BooleanExpression{op: :or, left: left_and, right: right_and} + + assert {:ok, + {:orelse, {:andalso, {:==, :"$2", "John"}, {:>, :"$3", 18}}, + {:andalso, {:==, :"$2", "Jane"}, {:>, :"$3", 21}}}} = + MatchSpec.parse(expr, field_map()) + end + + test "parses NOT expression" do + inner = %Eq{left: ref(:name), right: "John"} + expr = %Not{expression: inner} + + assert {:ok, {:not, {:==, :"$2", "John"}}} = MatchSpec.parse(expr, field_map()) + end + + test "parses complex NOT with AND" do + # NOT (name == "John" AND age > 18) + inner = %BooleanExpression{ + op: :and, + left: %Eq{left: ref(:name), right: "John"}, + right: %GreaterThan{left: ref(:age), right: 18} + } + + expr = %Not{expression: inner} + + assert {:ok, {:not, {:andalso, {:==, :"$2", "John"}, {:>, :"$3", 18}}}} = + MatchSpec.parse(expr, field_map()) + end + end + + describe "parse/2 - In operator" do + test "parses In with single value" do + expr = %In{left: ref(:name), right: ["John"]} + + assert {:ok, {:==, :"$2", "John"}} = MatchSpec.parse(expr, field_map()) + end + + test "parses In with multiple values" do + expr = %In{left: ref(:name), right: ["John", "Jane", "Bob"]} + + assert {:ok, + {:orelse, {:orelse, {:==, :"$2", "John"}, {:==, :"$2", "Jane"}}, + {:==, :"$2", "Bob"}}} = MatchSpec.parse(expr, field_map()) + end + + test "parses In with empty list" do + expr = %In{left: ref(:name), right: []} + + assert {:ok, false} = MatchSpec.parse(expr, field_map()) + end + + test "parses In with numeric values" do + expr = %In{left: ref(:age), right: [25, 30, 35]} + + assert {:ok, {:orelse, {:orelse, {:==, :"$3", 25}, {:==, :"$3", 30}}, {:==, :"$3", 35}}} = + MatchSpec.parse(expr, field_map()) + end + end + + describe "parse/2 - error handling" do + test "returns error for unknown field" do + expr = %Eq{left: ref(:unknown_field), right: "value"} + + assert {:error, "Unknown field: unknown_field"} = MatchSpec.parse(expr, field_map()) + end + + test "returns error for relationship path" do + ref_with_path = %Ref{ + attribute: :name, + relationship_path: [:profile], + resource: nil + } + + expr = %Eq{left: ref_with_path, right: "value"} + + assert {:error, "Relationship traversal not supported in Mnesia matchspecs"} = + MatchSpec.parse(expr, field_map()) + end + + test "returns error for unsupported expression" do + unsupported = %{__struct__: :UnsupportedOperator} + + assert {:error, error_msg} = MatchSpec.parse(unsupported, field_map()) + assert error_msg =~ "Unsupported filter expression" + end + + test "propagates error through AND expression" do + left = %Eq{left: ref(:unknown), right: "John"} + right = %GreaterThan{left: ref(:age), right: 18} + expr = %BooleanExpression{op: :and, left: left, right: right} + + assert {:error, "Unknown field: unknown"} = MatchSpec.parse(expr, field_map()) + end + + test "propagates error through OR 
expression" do + left = %Eq{left: ref(:name), right: "John"} + right = %Eq{left: ref(:unknown), right: "Jane"} + expr = %BooleanExpression{op: :or, left: left, right: right} + + assert {:error, "Unknown field: unknown"} = MatchSpec.parse(expr, field_map()) + end + + test "propagates error through NOT expression" do + inner = %Eq{left: ref(:unknown), right: "John"} + expr = %Not{expression: inner} + + assert {:error, "Unknown field: unknown"} = MatchSpec.parse(expr, field_map()) + end + + test "propagates error through In expression" do + expr = %In{left: ref(:unknown), right: ["John", "Jane"]} + + assert {:error, "Unknown field: unknown"} = MatchSpec.parse(expr, field_map()) + end + end + + describe "parse/2 - edge cases" do + test "parses comparison with nil value" do + expr = %Eq{left: ref(:email), right: nil} + + assert {:ok, {:==, :"$4", nil}} = MatchSpec.parse(expr, field_map()) + end + + test "parses comparison with boolean value" do + expr = %Eq{left: ref(:name), right: true} + + assert {:ok, {:==, :"$2", true}} = MatchSpec.parse(expr, field_map()) + end + + test "parses comparison with negative number" do + expr = %GreaterThan{left: ref(:age), right: -5} + + assert {:ok, {:>, :"$3", -5}} = MatchSpec.parse(expr, field_map()) + end + + test "parses comparison with float" do + expr = %LessThan{left: ref(:age), right: 25.5} + + assert {:ok, {:<, :"$3", 25.5}} = MatchSpec.parse(expr, field_map()) + end + + test "parses comparison with string containing special characters" do + expr = %Eq{left: ref(:email), right: "test@example.com"} + + assert {:ok, {:==, :"$4", "test@example.com"}} = MatchSpec.parse(expr, field_map()) + end + end + + describe "complex real-world scenarios" do + test "parses age range filter" do + # age >= 18 AND age <= 65 + expr = %BooleanExpression{ + op: :and, + left: %GreaterThanOrEqual{left: ref(:age), right: 18}, + right: %LessThanOrEqual{left: ref(:age), right: 65} + } + + assert {:ok, {:andalso, {:>=, :"$3", 18}, {:<=, :"$3", 65}}} = + MatchSpec.parse(expr, field_map()) + end + + test "parses name search with multiple options" do + # name == "John" OR name == "Jane" OR name == "Bob" + expr1 = %BooleanExpression{ + op: :or, + left: %Eq{left: ref(:name), right: "John"}, + right: %Eq{left: ref(:name), right: "Jane"} + } + + expr = %BooleanExpression{ + op: :or, + left: expr1, + right: %Eq{left: ref(:name), right: "Bob"} + } + + assert {:ok, + {:orelse, {:orelse, {:==, :"$2", "John"}, {:==, :"$2", "Jane"}}, + {:==, :"$2", "Bob"}}} = MatchSpec.parse(expr, field_map()) + end + + test "parses email verification filter" do + # email != nil AND email != "" + expr = %BooleanExpression{ + op: :and, + left: %Not{expression: %IsNil{left: ref(:email)}}, + right: %NotEq{left: ref(:email), right: ""} + } + + assert {:ok, {:andalso, {:not, {:==, :"$4", nil}}, {:"=/=", :"$4", ""}}} = + MatchSpec.parse(expr, field_map()) + end + + test "parses exclusion filter" do + # NOT (name IN ["banned1", "banned2"]) + inner = %In{left: ref(:name), right: ["banned1", "banned2"]} + expr = %Not{expression: inner} + + assert {:ok, {:not, {:orelse, {:==, :"$2", "banned1"}, {:==, :"$2", "banned2"}}}} = + MatchSpec.parse(expr, field_map()) + end + + test "parses active users filter" do + # (age >= 18 AND email != nil) OR name IN ["admin", "moderator"] + left_expr = %BooleanExpression{ + op: :and, + left: %GreaterThanOrEqual{left: ref(:age), right: 18}, + right: %Not{expression: %IsNil{left: ref(:email)}} + } + + right_expr = %In{left: ref(:name), right: ["admin", "moderator"]} + expr = 
%BooleanExpression{op: :or, left: left_expr, right: right_expr} + + assert {:ok, + {:orelse, {:andalso, {:>=, :"$3", 18}, {:not, {:==, :"$4", nil}}}, + {:orelse, {:==, :"$2", "admin"}, {:==, :"$2", "moderator"}}}} = + MatchSpec.parse(expr, field_map()) + end + end + + describe "guard structure validation" do + test "AND expressions create andalso tuples" do + expr = %BooleanExpression{ + op: :and, + left: %Eq{left: ref(:name), right: "test"}, + right: %Eq{left: ref(:age), right: 25} + } + + {:ok, result} = MatchSpec.parse(expr, field_map()) + assert elem(result, 0) == :andalso + assert tuple_size(result) == 3 + end + + test "OR expressions create orelse tuples" do + expr = %BooleanExpression{ + op: :or, + left: %Eq{left: ref(:name), right: "test"}, + right: %Eq{left: ref(:age), right: 25} + } + + {:ok, result} = MatchSpec.parse(expr, field_map()) + assert elem(result, 0) == :orelse + assert tuple_size(result) == 3 + end + + test "comparison operators use correct mnesia operators" do + comparisons = [ + {%Eq{left: ref(:age), right: 25}, :==}, + {%NotEq{left: ref(:age), right: 25}, :"=/="}, + {%GreaterThan{left: ref(:age), right: 25}, :>}, + {%GreaterThanOrEqual{left: ref(:age), right: 25}, :>=}, + {%LessThan{left: ref(:age), right: 25}, :<}, + {%LessThanOrEqual{left: ref(:age), right: 25}, :<=} + ] + + for {expr, expected_op} <- comparisons do + {:ok, result} = MatchSpec.parse(expr, field_map()) + assert elem(result, 0) == expected_op + assert tuple_size(result) == 3 + end + end + + test "matchspec variables are atoms with dollar prefix" do + expr = %Eq{left: ref(:name), right: "test"} + + {:ok, {_op, var, _value}} = MatchSpec.parse(expr, field_map()) + assert is_atom(var) + assert String.starts_with?(Atom.to_string(var), "$") + end + end +end diff --git a/test/ash/data_layer/mnesia_test.exs b/test/ash/data_layer/mnesia_test.exs index 6e80a71949..3fa2421f7b 100644 --- a/test/ash/data_layer/mnesia_test.exs +++ b/test/ash/data_layer/mnesia_test.exs @@ -1,7 +1,6 @@ defmodule Ash.DataLayer.MnesiaTest do use ExUnit.Case, async: false - require IEx alias Ash.DataLayer.Mnesia, as: MnesiaDataLayer alias Ash.Test.Domain, as: Domain @@ -23,7 +22,7 @@ defmodule Ash.DataLayer.MnesiaTest do end # Create fresh table for each test - :mnesia.create_table(@default_test_table, attributes: [:id, :val]) + :mnesia.create_table(@default_test_table, attributes: [:id, :name, :age, :title, :roles]) :ok end