improvement: support uniq? for count/list aggregates

Zach Daniel 2023-02-09 15:40:35 -05:00
parent d1cc4db5d4
commit e4004de15e
4 changed files with 80 additions and 37 deletions
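At the DSL level, the commit adds a uniq? option to count and list aggregates. A minimal usage sketch of the count form, placed inside a resource's aggregates block; the relationship and field names (:comments, :author_id) are illustrative assumptions, not taken from the commit:

aggregates do
  # count distinct, non-nil author_id values across the related comments
  count :distinct_commenters, :comments do
    field :author_id
    uniq? true
  end
end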


@@ -191,6 +191,7 @@ spark_locals_without_parens = [
transaction: 3,
transaction?: 1,
type: 1,
uniq?: 1,
update: 1,
update: 2,
update: 3,


@@ -285,14 +285,15 @@ defmodule Ash.DataLayer.Ets do
case run_query(query, resource) do
{:ok, results} ->
Enum.reduce_while(aggregates, {:ok, %{}}, fn
%{kind: kind, name: name, query: query, field: field, resource: resource}, {:ok, acc} ->
%{kind: kind, name: name, query: query, field: field, resource: resource, uniq?: uniq?},
{:ok, acc} ->
results
|> filter_matches(Map.get(query || %{}, :filter), api)
|> case do
{:ok, matches} ->
field = field || Enum.at(Ash.Resource.Info.primary_key(resource), 0)
value = aggregate_value(matches, kind, field)
value = aggregate_value(matches, kind, field, uniq?)
{:cont, {:ok, Map.put(acc, name, value)}}
{:error, error} ->
@@ -421,7 +422,8 @@ defmodule Ash.DataLayer.Ets do
relationship_path: relationship_path,
query: query,
name: name,
load: load
load: load,
uniq?: uniq?
},
{:ok, record} ->
with {:ok, loaded_record} <-
@@ -431,7 +433,7 @@ defmodule Ash.DataLayer.Ets do
filter_matches(related, query.filter, api) do
field = field || Enum.at(Ash.Resource.Info.primary_key(query.resource), 0)
value = aggregate_value(filtered, kind, field)
value = aggregate_value(filtered, kind, field, uniq?)
{:cont, {:ok, Map.put(record, load || name, value)}}
else
@@ -465,49 +467,56 @@ defmodule Ash.DataLayer.Ets do
[{key, relationship_path_to_load(rest, leaf)}]
end
defp aggregate_value(records, kind, field) do
defp aggregate_value(records, kind, field, uniq?) do
case kind do
:count ->
Enum.count(records, &(not is_nil(Map.get(&1, field))))
:sum ->
if uniq? do
records
|> Enum.map(&Map.get(&1, field))
|> case do
[] ->
nil
items ->
Enum.sum(items)
|> Stream.map(&Map.get(&1, field))
|> Stream.uniq()
|> Stream.reject(&is_nil/1)
|> Enum.count()
else
Enum.count(records, &(not is_nil(Map.get(&1, field))))
end
:first ->
Enum.find_value(records, fn record ->
case Map.get(record, field) do
nil ->
case records do
[] ->
nil
value ->
{:ok, value}
end
end)
|> case do
nil ->
nil
{:ok, value} ->
value
[record | _rest] ->
Map.get(record, field)
end
:list ->
Enum.map(records, fn record ->
records
|> Enum.map(fn record ->
Map.get(record, field)
end)
|> then(fn values ->
if uniq? do
Enum.uniq(values)
else
values
end
end)
:avg ->
records
|> Enum.reduce({0, 0}, fn record, {sum, count} ->
case Map.get(record, field) do
|> then(fn records ->
if uniq? do
records
|> Stream.map(&Map.get(&1, field))
|> Stream.uniq()
else
records
|> Stream.map(&Map.get(&1, field))
end
end)
|> Enum.reduce({0, 0}, fn value, {sum, count} ->
case value do
nil ->
{sum, count}
@@ -531,6 +540,13 @@ defmodule Ash.DataLayer.Ets do
nil
items ->
items =
if uniq? do
items |> Stream.uniq() |> Stream.reject(&is_nil/1)
else
items |> Stream.reject(&is_nil/1)
end
case kind do
:sum ->
Enum.sum(items)
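Note that the count and list branches above treat nil differently under uniq?: the count path deduplicates and then drops nils, while the list path only deduplicates. A standalone sketch with plain maps (not code from the data layer) showing both:

records = [%{tag: "a"}, %{tag: "b"}, %{tag: "a"}, %{tag: nil}]

# count with uniq?: map, dedupe, drop nils, then count -> 2
records
|> Stream.map(& &1.tag)
|> Stream.uniq()
|> Stream.reject(&is_nil/1)
|> Enum.count()

# list with uniq?: map then dedupe, nils are kept -> ["a", "b", nil]
records
|> Enum.map(& &1.tag)
|> Enum.uniq()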


@@ -12,6 +12,7 @@ defmodule Ash.Query.Aggregate do
:constraints,
:implementation,
:load,
uniq?: false,
filterable?: true
]
@@ -51,6 +52,7 @@ defmodule Ash.Query.Aggregate do
- `type`: A type for the aggregate
- `constraints`: Type constraints for the aggregate's type
- `implementation`: The implementation module for custom aggregates
- `uniq?`: Whether or not the aggregate should be over unique values
"""
def new(resource, name, kind, opts \\ []) do
new(
@@ -64,10 +66,12 @@ defmodule Ash.Query.Aggregate do
opts[:filterable?],
opts[:type],
opts[:constraints],
opts[:implementation]
opts[:implementation],
opts[:uniq?]
)
end
@deprecated "Use `new/4` instead."
def new(
resource,
name,
@@ -79,7 +83,8 @@ defmodule Ash.Query.Aggregate do
filterable? \\ true,
type \\ nil,
constraints \\ [],
implementation \\ nil
implementation \\ nil,
uniq? \\ false
) do
if kind == :custom && !type do
raise ArgumentError, "Must supply type when building a `custom` aggregate"
@@ -111,7 +116,8 @@ defmodule Ash.Query.Aggregate do
default
end
with {:ok, attribute_type} <- attribute_type,
with :ok <- validate_uniq(uniq?, kind),
{:ok, attribute_type} <- attribute_type,
:ok <- validate_path(resource, List.wrap(relationship)),
{:ok, type} <- get_type(kind, type, attribute_type),
{:ok, query} <- validate_query(query) do
@@ -126,12 +132,22 @@ defmodule Ash.Query.Aggregate do
field: field,
kind: kind,
type: type,
uniq?: uniq?,
query: query,
filterable?: filterable?
}}
end
end
defp validate_uniq(true, kind) when kind in [:count, :list], do: :ok
defp validate_uniq(true, kind),
do:
{:error,
"#{kind} aggregates do not support the `uniq?` option. Only count and list are supported currently."}
defp validate_uniq(_, _), do: :ok
defp get_type(:custom, type, _), do: {:ok, type}
defp get_type(kind, _, attribute_type) do


@@ -855,7 +855,12 @@ defmodule Ash.Resource.Dsl do
],
target: Ash.Resource.Aggregate,
args: [:name, :relationship_path],
schema: Keyword.delete(Ash.Resource.Aggregate.schema(), :sort),
schema:
Keyword.put(Keyword.delete(Ash.Resource.Aggregate.schema(), :sort), :uniq?,
type: :boolean,
doc: "Wether or not to count unique values only",
default: false
),
auto_set_fields: [kind: :count]
}
@@ -1025,7 +1030,12 @@ defmodule Ash.Resource.Dsl do
],
target: Ash.Resource.Aggregate,
args: [:name, :relationship_path, :field],
schema: Ash.Resource.Aggregate.schema(),
schema:
Keyword.put(Ash.Resource.Aggregate.schema(), :uniq?,
type: :boolean,
doc: "Wether or not to count unique values only",
default: false
),
auto_set_fields: [kind: :list]
}
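And the list form, where the field is a positional argument. Again a sketch inside a resource's aggregates block, with illustrative names (a :tags relationship with a :name attribute) that are assumptions rather than part of the commit:

aggregates do
  # collect the related tags' names, with duplicates removed
  list :tag_names, :tags, :name do
    uniq? true
  end
end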