improvement: dep updates & new aggregate types avg/min/max/custom

Zach Daniel 2022-12-07 20:35:32 -05:00
parent 98a7485a32
commit 34c33c7247
19 changed files with 278 additions and 52 deletions
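Taken together, the new DSL entities added in this commit let a resource declare max, min, avg, and custom aggregates alongside the existing kinds. A minimal sketch of how that might look (the resource, relationship, and StringAgg implementation below are illustrative assumptions, not part of this commit):

defmodule MyApp.Representative do
  use Ash.Resource, data_layer: Ash.DataLayer.Ets

  attributes do
    uuid_primary_key :id
    attribute :name, :string
  end

  relationships do
    # Tickets are assumed to have :severity, :price, and :subject attributes.
    has_many :assigned_tickets, MyApp.Ticket
  end

  aggregates do
    # Existing kinds keep working.
    count :ticket_count, :assigned_tickets

    # New kinds: max/min/avg take a name, relationship path, and field.
    max :max_ticket_severity, :assigned_tickets, :severity
    min :min_ticket_severity, :assigned_tickets, :severity
    avg :avg_ticket_price, :assigned_tickets, :price

    # Custom aggregates take a type and a data-layer-specific implementation.
    custom :ticket_subjects, :assigned_tickets, :string do
      implementation {StringAgg, delimiter: ", "}
    end
  end
end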

View file

@ -24,6 +24,8 @@ spark_locals_without_parens = [
authorize_if: 2,
authorize_unless: 1,
authorize_unless: 2,
avg: 3,
avg: 4,
base_filter: 1,
before_action?: 1,
belongs_to: 2,
@ -50,6 +52,7 @@ spark_locals_without_parens = [
create_timestamp: 2,
custom: 2,
custom: 3,
custom: 4,
debug: 1,
debug: 2,
default: 1,
@ -97,6 +100,7 @@ spark_locals_without_parens = [
has_one: 3,
identity: 2,
identity: 3,
implementation: 1,
input: 1,
integer_primary_key: 1,
integer_primary_key: 2,
@ -113,10 +117,14 @@ spark_locals_without_parens = [
map: 2,
map: 3,
match_other_defaults?: 1,
max: 3,
max: 4,
max_page_size: 1,
message: 1,
metadata: 2,
metadata: 3,
min: 3,
min: 4,
modify_query: 1,
module: 1,
name: 1,

View file

@ -164,6 +164,9 @@ defmodule Ash.Actions.Sort do
{field, {:ok, aggregate.type}}
%{name: name, kind: :custom, type: type} ->
{name, type}
%Ash.Resource.Aggregate{} = agg ->
attribute_type =
if agg.field do

View file

@ -43,6 +43,10 @@ defmodule Ash.Api.Info.Diagram do
|> Enum.sort()
end
defp aggregate_type(_resource, %{kind: :custom, type: type}) do
short_type(type)
end
defp aggregate_type(resource, aggregate) do
attribute_type =
if aggregate.field do

View file

@ -2213,7 +2213,10 @@ defmodule Ash.Filter do
aggregate_query,
aggregate.field,
aggregate.default,
aggregate.filterable?
aggregate.filterable?,
aggregate.type,
aggregate.constraints,
aggregate.implementation
) do
case parse_predicates(nested_statement, query_aggregate, context) do
{:ok, nested_statement} ->
@ -2704,7 +2707,10 @@ defmodule Ash.Filter do
aggregate_query,
aggregate.field,
aggregate.default,
aggregate.filterable?
aggregate.filterable?,
aggregate.type,
aggregate.constraints,
aggregate.implementation
) do
{:ok, %{ref | attribute: query_aggregate, resource: related}}
else

View file

@ -9,6 +9,8 @@ defmodule Ash.Query.Aggregate do
:field,
:kind,
:type,
:constraints,
:implementation,
:authorization_filter,
:load,
filterable?: true
@ -16,7 +18,7 @@ defmodule Ash.Query.Aggregate do
@type t :: %__MODULE__{}
@kinds [:count, :first, :sum, :list]
@kinds [:count, :first, :sum, :list, :max, :min, :avg, :sum, :custom]
@type kind :: unquote(Enum.reduce(@kinds, &{:|, [], [&1, &2]}))
alias Ash.Actions.Load
@ -28,7 +30,27 @@ defmodule Ash.Query.Aggregate do
@doc false
def kinds, do: @kinds
def new(resource, name, kind, relationship, query, field, default \\ nil, filterable? \\ true) do
def new(
resource,
name,
kind,
relationship,
query,
field,
default \\ nil,
filterable? \\ true,
type \\ nil,
constraints \\ [],
implementation \\ nil
) do
if kind == :custom && !type do
raise ArgumentError, "Must supply type when building a `custom` aggregate"
end
if kind == :custom && !implementation do
raise ArgumentError, "Must supply implementation when building a `custom` aggregate"
end
attribute_type =
if field do
related = Ash.Resource.Info.related(resource, relationship)
@ -36,14 +58,16 @@ defmodule Ash.Query.Aggregate do
end
with :ok <- validate_path(resource, List.wrap(relationship)),
{:ok, type} <- kind_to_type(kind, attribute_type),
{:ok, type} <- get_type(kind, type, attribute_type),
{:ok, query} <- validate_query(query) do
{:ok,
%__MODULE__{
name: name,
resource: resource,
constraints: constraints,
default_value: default || default_value(kind),
relationship_path: List.wrap(relationship),
implementation: implementation,
field: field,
kind: kind,
type: type,
@ -53,6 +77,12 @@ defmodule Ash.Query.Aggregate do
end
end
defp get_type(:custom, type, _), do: {:ok, type}
defp get_type(kind, _, attribute_type) do
kind_to_type(kind, attribute_type)
end
defp validate_path(_, []), do: :ok
defp validate_path(resource, [relationship | rest]) do
@ -93,7 +123,11 @@ defmodule Ash.Query.Aggregate do
def default_value(:count), do: 0
def default_value(:first), do: nil
def default_value(:sum), do: nil
def default_value(:max), do: nil
def default_value(:min), do: nil
def default_value(:avg), do: nil
def default_value(:list), do: []
def default_value(:custom), do: nil
defp validate_query(nil), do: {:ok, nil}
@ -114,9 +148,14 @@ defmodule Ash.Query.Aggregate do
end
@doc false
def kind_to_type({:custom, type}, _attribute_type), do: {:ok, type}
def kind_to_type(:count, _attribute_type), do: {:ok, Ash.Type.Integer}
def kind_to_type(kind, nil), do: {:error, "Must provide field type for #{kind}"}
def kind_to_type(kind, attribute_type) when kind in [:first, :sum], do: {:ok, attribute_type}
def kind_to_type(:avg, _attribute_type), do: {:ok, :float}
def kind_to_type(kind, attribute_type) when kind in [:first, :sum, :max, :min],
do: {:ok, attribute_type}
def kind_to_type(:list, attribute_type), do: {:ok, {:array, attribute_type}}
def kind_to_type(kind, _attribute_type), do: {:error, "Invalid aggregate kind: #{kind}"}
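A rough summary of the resulting kind-to-type mapping (kind_to_type/2 is marked @doc false, so the calls below are illustrative only, and MyType is a placeholder):

Ash.Query.Aggregate.kind_to_type(:count, nil)             # => {:ok, Ash.Type.Integer}
Ash.Query.Aggregate.kind_to_type(:avg, Ash.Type.Integer)  # => {:ok, :float}
Ash.Query.Aggregate.kind_to_type(:max, Ash.Type.Integer)  # => {:ok, Ash.Type.Integer}
Ash.Query.Aggregate.kind_to_type(:min, :string)           # => {:ok, :string}
Ash.Query.Aggregate.kind_to_type(:list, :string)          # => {:ok, {:array, :string}}
Ash.Query.Aggregate.kind_to_type({:custom, MyType}, nil)  # => {:ok, MyType}
Ash.Query.Aggregate.kind_to_type(:median, :integer)       # => {:error, "Invalid aggregate kind: median"}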

View file

@ -929,7 +929,10 @@ defmodule Ash.Query do
aggregate_query,
aggregate.field,
aggregate.default,
aggregate.filterable?
aggregate.filterable?,
aggregate.type,
aggregate.constraints,
aggregate.implementation
) do
query_aggregate = %{query_aggregate | load: field}
new_aggregates = Map.put(query.aggregates, aggregate.name, query_aggregate)
@ -1418,18 +1421,21 @@ defmodule Ash.Query do
def aggregate(
query,
name,
type,
kind,
relationship,
agg_query \\ nil,
default \\ nil,
filterable? \\ true
filterable? \\ true,
type \\ nil,
constraints \\ [],
implementation \\ nil
) do
{field, agg_query} = Keyword.pop(agg_query || [], :field)
query = to_query(query)
relationship = List.wrap(relationship)
if Ash.DataLayer.data_layer_can?(query.resource, {:aggregate, type}) do
if Ash.DataLayer.data_layer_can?(query.resource, {:aggregate, kind}) do
agg_query =
case agg_query do
[] ->
@ -1442,12 +1448,15 @@ defmodule Ash.Query do
case Aggregate.new(
query.resource,
name,
type,
kind,
relationship,
agg_query,
field,
default,
filterable?
filterable?,
type,
constraints,
implementation
) do
{:ok, aggregate} ->
new_aggregates = Map.put(query.aggregates, aggregate.name, aggregate)
@ -1461,7 +1470,7 @@ defmodule Ash.Query do
add_error(
query,
:aggregate,
AggregatesNotSupported.exception(resource: query.resource, feature: "using")
AggregatesNotSupported.exception(resource: query.resource, feature: "using #{kind}")
)
end
end
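With this change, the third positional argument to Ash.Query.aggregate is the aggregate kind, while the type, constraints, and implementation ride along as trailing optional arguments (needed for :custom). A hedged usage sketch, assuming a MyApp.Representative resource with an :assigned_tickets relationship and a StringAgg custom implementation:

MyApp.Representative
|> Ash.Query.new()
# max only needs a field in the agg_query keyword list
|> Ash.Query.aggregate(:max_severity, :max, :assigned_tickets, field: :severity)
# :custom must also pass type, constraints, and implementation
|> Ash.Query.aggregate(
  :ticket_subjects,
  :custom,
  :assigned_tickets,
  [field: :subject],
  nil,
  true,
  :string,
  [],
  {StringAgg, delimiter: ", "}
)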

View file

@ -5,6 +5,9 @@ defmodule Ash.Resource.Aggregate do
:relationship_path,
:filter,
:kind,
:implementation,
:constraints,
:type,
:description,
:private?,
:field,
@ -27,7 +30,12 @@ defmodule Ash.Resource.Aggregate do
links: []
],
kind: [
type: {:in, [:count, :first, :sum, :list]},
type:
{:or,
[
{:in, [:count, :first, :sum, :list, :avg, :max, :min, :custom]},
{:tuple, [{:in, [:custom]}, Ash.OptionsHelpers.ash_type()]}
]},
doc: "The kind of the aggregate",
required: true,
links: []
@ -81,7 +89,7 @@ defmodule Ash.Resource.Aggregate do
relationship_path: list(atom()),
filter: Keyword.t(),
field: atom,
kind: :count | :first | :sum | :list,
kind: Ash.Query.Aggregate.kind(),
description: String.t() | nil,
private?: boolean,
default: term

View file

@ -0,0 +1,21 @@
defmodule Ash.Resource.Aggregate.CustomAggregate do
@moduledoc """
The root behavior for a custom aggregate.
See data layers for their implementation of custom aggregates.
"""
@type t :: {module(), Keyword.t()}
@callback describe(t()) :: String.t()
defmacro __using__(_) do
quote do
@behaviour Ash.Resource.Aggregate.CustomAggregate
def describe({module, opts}) do
inspect({module, opts})
end
defoverridable describe: 1
end
end
end
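A custom aggregate implementation can then adopt this behaviour. The callbacks it really needs are defined by each data layer, so the module below is only a sketch (StringAgg and its describe text are hypothetical):

defmodule StringAgg do
  use Ash.Resource.Aggregate.CustomAggregate

  # Overrides the default describe/1 injected by `use` above. A real
  # implementation would also define whatever callbacks its data layer requires.
  def describe({_module, opts}), do: "string_agg(delimiter: #{inspect(opts[:delimiter])})"
end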

View file

@ -956,6 +956,56 @@ defmodule Ash.Resource.Dsl do
auto_set_fields: [kind: :first]
}
@max %Spark.Dsl.Entity{
name: :max,
links: [
guides: [
"ash:guide:Aggregates"
]
],
describe: """
Declares a named `max` aggregate on the resource
Supports `filter`, but not `sort` (because that wouldn't affect the max)
""",
examples: [
"""
max :max_assigned_ticket_severity, :assigned_tickets, :severity do
filter [active: true]
end
"""
],
target: Ash.Resource.Aggregate,
args: [:name, :relationship_path, :field],
schema: Ash.Resource.Aggregate.schema() |> Keyword.delete(:sort),
auto_set_fields: [kind: :max]
}
@min %Spark.Dsl.Entity{
name: :min,
links: [
guides: [
"ash:guide:Aggregates"
]
],
describe: """
Declares a named `min` aggregate on the resource
Supports `filter`, but not `sort` (because that wouldn't affect the min)
""",
examples: [
"""
min :min_assigned_ticket_severity, :assigned_tickets, :severity do
filter [active: true]
end
"""
],
target: Ash.Resource.Aggregate,
args: [:name, :relationship_path, :field],
schema: Ash.Resource.Aggregate.schema() |> Keyword.delete(:sort),
auto_set_fields: [kind: :min]
}
@sum %Spark.Dsl.Entity{
name: :sum,
links: [
@ -981,6 +1031,71 @@ defmodule Ash.Resource.Dsl do
auto_set_fields: [kind: :sum]
}
@avg %Spark.Dsl.Entity{
name: :avg,
links: [
guides: [
"ash:guide:Aggregates"
]
],
describe: """
Declares a named `avg` aggregate on the resource
Supports `filter`, but not `sort` (because that wouldn't affect the avg)
""",
examples: [
"""
avg :avg_assigned_ticket_price, :assigned_tickets, :price do
filter [active: true]
end
"""
],
target: Ash.Resource.Aggregate,
args: [:name, :relationship_path, :field],
schema: Keyword.delete(Ash.Resource.Aggregate.schema(), :sort),
auto_set_fields: [kind: :avg]
}
@custom %Spark.Dsl.Entity{
name: :custom,
links: [
guides: [
"ash:guide:Aggregates"
]
],
describe: """
Declares a named `custom` aggregate on the resource
Supports `filter` and `sort`.
Custom aggregates provide an `implementation` which must implement data layer specific callbacks.
See the relevant data layer documentation for more.
""",
examples: [
"""
custom :author_names, :authors, :string do
implementation {StringAgg, delimiter: ","}
end
"""
],
target: Ash.Resource.Aggregate,
args: [:name, :relationship_path, :type],
schema:
Ash.Resource.Aggregate.schema()
|> Keyword.put(:type,
type: :module,
required: true,
doc: "The type of the value returned by the aggregate"
)
|> Keyword.put(:implementation,
type: {:spark_behaviour, Ash.Resource.Aggregate.CustomAggregate},
required: true,
doc: "The module that implements the relevant data layer callbacks"
),
auto_set_fields: [kind: :custom]
}
@list %Spark.Dsl.Entity{
name: :list,
links: [
@ -1034,7 +1149,11 @@ defmodule Ash.Resource.Dsl do
@count,
@first,
@sum,
@list
@list,
@max,
@min,
@avg,
@custom
]
}

View file

@ -69,7 +69,11 @@ defmodule Ash.Schema do
for aggregate <- Ash.Resource.Info.aggregates(__MODULE__),
aggregate.name not in Ash.Resource.reserved_names() do
{:ok, type} = Aggregate.kind_to_type(aggregate.kind, :string)
{:ok, type} =
if aggregate.kind == :custom do
{:ok, aggregate.type}
else
Aggregate.kind_to_type(aggregate.kind, :string)
end
field(aggregate.name, Ash.Type.ecto_type(type), virtual: true)
@ -161,7 +165,12 @@ defmodule Ash.Schema do
for aggregate <- Ash.Resource.Info.aggregates(__MODULE__),
aggregate.name not in Ash.Resource.reserved_names() do
{:ok, type} = Aggregate.kind_to_type(aggregate.kind, :string)
{:ok, type} =
if aggregate.kind == :custom do
{:ok, aggregate.type}
else
Aggregate.kind_to_type(aggregate.kind, :string)
end
field(aggregate.name, Ash.Type.ecto_type(type), virtual: true)

View file

@ -40,7 +40,7 @@ defmodule Ash.Test.Dsl.Resource.Actions.CreateTest do
test "it fails if `name` is not an atom" do
assert_raise(
Spark.Error.DslError,
"[Ash.Test.Dsl.Resource.Actions.CreateTest.Post]\n actions -> create -> default:\n expected :name to be an atom, got: \"default\"",
"[Ash.Test.Dsl.Resource.Actions.CreateTest.Post]\n actions -> create -> default:\n invalid value for :name option: expected atom, got: \"default\"",
fn ->
defposts do
actions do
@ -54,7 +54,7 @@ defmodule Ash.Test.Dsl.Resource.Actions.CreateTest do
test "it fails if `primary?` is not a boolean" do
assert_raise(
Spark.Error.DslError,
"[Ash.Test.Dsl.Resource.Actions.CreateTest.Post]\n actions -> create -> create:\n expected :primary? to be a boolean, got: 10",
"[Ash.Test.Dsl.Resource.Actions.CreateTest.Post]\n actions -> create -> create:\n invalid value for :primary? option: expected boolean, got: 10",
fn ->
defposts do
actions do

View file

@ -40,7 +40,7 @@ defmodule Ash.Test.Dsl.Resource.Actions.DestroyTest do
test "it fails if `name` is not an atom" do
assert_raise(
Spark.Error.DslError,
"[Ash.Test.Dsl.Resource.Actions.DestroyTest.Post]\n actions -> destroy -> default:\n expected :name to be an atom, got: \"default\"",
"[Ash.Test.Dsl.Resource.Actions.DestroyTest.Post]\n actions -> destroy -> default:\n invalid value for :name option: expected atom, got: \"default\"",
fn ->
defposts do
actions do
@ -54,7 +54,7 @@ defmodule Ash.Test.Dsl.Resource.Actions.DestroyTest do
test "it fails if `primary?` is not a boolean" do
assert_raise(
Spark.Error.DslError,
"[Ash.Test.Dsl.Resource.Actions.DestroyTest.Post]\n actions -> destroy -> destroy:\n expected :primary? to be a boolean, got: 10",
"[Ash.Test.Dsl.Resource.Actions.DestroyTest.Post]\n actions -> destroy -> destroy:\n invalid value for :primary? option: expected boolean, got: 10",
fn ->
defposts do
actions do

View file

@ -40,7 +40,7 @@ defmodule Ash.Test.Dsl.Resource.Actions.ReadTest do
test "it fails if `name` is not an atom" do
assert_raise(
Spark.Error.DslError,
"[Ash.Test.Dsl.Resource.Actions.ReadTest.Post]\n actions -> read -> default:\n expected :name to be an atom, got: \"default\"",
"[Ash.Test.Dsl.Resource.Actions.ReadTest.Post]\n actions -> read -> default:\n invalid value for :name option: expected atom, got: \"default\"",
fn ->
defposts do
actions do
@ -54,7 +54,7 @@ defmodule Ash.Test.Dsl.Resource.Actions.ReadTest do
test "it fails if `primary?` is not a boolean" do
assert_raise(
Spark.Error.DslError,
"[Ash.Test.Dsl.Resource.Actions.ReadTest.Post]\n actions -> read -> read:\n expected :primary? to be a boolean, got: 10",
"[Ash.Test.Dsl.Resource.Actions.ReadTest.Post]\n actions -> read -> read:\n invalid value for :primary? option: expected boolean, got: 10",
fn ->
defposts do
actions do

View file

@ -38,7 +38,7 @@ defmodule Ash.Test.Dsl.Resource.Actions.UpdateTest do
test "it fails if `name` is not an atom" do
assert_raise(
Spark.Error.DslError,
"[Ash.Test.Dsl.Resource.Actions.UpdateTest.Post]\n actions -> update -> default:\n expected :name to be an atom, got: \"default\"",
"[Ash.Test.Dsl.Resource.Actions.UpdateTest.Post]\n actions -> update -> default:\n invalid value for :name option: expected atom, got: \"default\"",
fn ->
defposts do
actions do
@ -52,7 +52,7 @@ defmodule Ash.Test.Dsl.Resource.Actions.UpdateTest do
test "it fails if `primary?` is not a boolean" do
assert_raise(
Spark.Error.DslError,
"[Ash.Test.Dsl.Resource.Actions.UpdateTest.Post]\n actions -> update -> update:\n expected :primary? to be a boolean, got: 10",
"[Ash.Test.Dsl.Resource.Actions.UpdateTest.Post]\n actions -> update -> update:\n invalid value for :primary? option: expected boolean, got: 10",
fn ->
defposts do
actions do

View file

@ -53,7 +53,7 @@ defmodule Ash.Test.Resource.AttributesTest do
test "raises if the attribute name is not an atom" do
assert_raise(
Spark.Error.DslError,
"[Ash.Test.Resource.AttributesTest.Post]\n attributes -> attribute -> 10:\n expected :name to be an atom, got: 10",
"[Ash.Test.Resource.AttributesTest.Post]\n attributes -> attribute -> 10:\n invalid value for :name option: expected atom, got: 10",
fn ->
defposts do
attributes do
@ -67,7 +67,7 @@ defmodule Ash.Test.Resource.AttributesTest do
test "raises if you pass an invalid value for `primary_key?`" do
assert_raise(
Spark.Error.DslError,
"[Ash.Test.Resource.AttributesTest.Post]\n attributes -> attribute -> foo:\n expected :primary_key? to be a boolean, got: 10",
"[Ash.Test.Resource.AttributesTest.Post]\n attributes -> attribute -> foo:\n invalid value for :primary_key? option: expected boolean, got: 10",
fn ->
defposts do
attributes do
@ -81,7 +81,7 @@ defmodule Ash.Test.Resource.AttributesTest do
test "raises if you pass an invalid value for `private?`" do
assert_raise(
Spark.Error.DslError,
"[Ash.Test.Resource.AttributesTest.Post]\n attributes -> attribute -> foo:\n expected :private? to be a boolean, got: \"an_invalid_value\"",
"[Ash.Test.Resource.AttributesTest.Post]\n attributes -> attribute -> foo:\n invalid value for :private? option: expected boolean, got: \"an_invalid_value\"",
fn ->
defposts do
attributes do

View file

@ -138,7 +138,7 @@ defmodule Ash.Test.Resource.Relationships.BelongsToTest do
test "fails if destination_attribute is not an atom" do
assert_raise(
Spark.Error.DslError,
"[Ash.Test.Resource.Relationships.BelongsToTest.Post]\n relationships -> belongs_to -> foobar:\n expected :destination_attribute to be an atom, got: \"foo\"",
"[Ash.Test.Resource.Relationships.BelongsToTest.Post]\n relationships -> belongs_to -> foobar:\n invalid value for :destination_attribute option: expected atom, got: \"foo\"",
fn ->
defposts do
relationships do
@ -152,7 +152,7 @@ defmodule Ash.Test.Resource.Relationships.BelongsToTest do
test "fails if source_attribute is not an atom" do
assert_raise(
Spark.Error.DslError,
"[Ash.Test.Resource.Relationships.BelongsToTest.Post]\n relationships -> belongs_to -> foobar:\n expected :source_attribute to be an atom, got: \"foo\"",
"[Ash.Test.Resource.Relationships.BelongsToTest.Post]\n relationships -> belongs_to -> foobar:\n invalid value for :source_attribute option: expected atom, got: \"foo\"",
fn ->
defposts do
relationships do
@ -166,7 +166,7 @@ defmodule Ash.Test.Resource.Relationships.BelongsToTest do
test "fails if the destination is not an atom" do
assert_raise(
Spark.Error.DslError,
"[Ash.Test.Resource.Relationships.BelongsToTest.Post]\n relationships -> belongs_to -> foobar:\n expected :destination to be an atom, got: \"foobar\"",
"[Ash.Test.Resource.Relationships.BelongsToTest.Post]\n relationships -> belongs_to -> foobar:\n invalid value for :destination option: expected atom, got: \"foobar\"",
fn ->
defposts do
relationships do
@ -180,7 +180,7 @@ defmodule Ash.Test.Resource.Relationships.BelongsToTest do
test "fails if the relationship name is not an atom" do
assert_raise(
Spark.Error.DslError,
"[Ash.Test.Resource.Relationships.BelongsToTest.Post]\n relationships -> belongs_to -> foobar:\n expected :name to be an atom, got: \"foobar\"",
"[Ash.Test.Resource.Relationships.BelongsToTest.Post]\n relationships -> belongs_to -> foobar:\n invalid value for :name option: expected atom, got: \"foobar\"",
fn ->
defposts do
relationships do
@ -194,7 +194,7 @@ defmodule Ash.Test.Resource.Relationships.BelongsToTest do
test "fails if `primary_key?` is not a boolean" do
assert_raise(
Spark.Error.DslError,
"[Ash.Test.Resource.Relationships.BelongsToTest.Post]\n relationships -> belongs_to -> foobar:\n expected :primary_key? to be a boolean, got: \"blah\"",
"[Ash.Test.Resource.Relationships.BelongsToTest.Post]\n relationships -> belongs_to -> foobar:\n invalid value for :primary_key? option: expected boolean, got: \"blah\"",
fn ->
defposts do
relationships do
@ -208,7 +208,7 @@ defmodule Ash.Test.Resource.Relationships.BelongsToTest do
test "fails if `private?` is not a boolean" do
assert_raise(
Spark.Error.DslError,
"[Ash.Test.Resource.Relationships.BelongsToTest.Post]\n relationships -> belongs_to -> foobar:\n expected :private? to be a boolean, got: \"blah\"",
"[Ash.Test.Resource.Relationships.BelongsToTest.Post]\n relationships -> belongs_to -> foobar:\n invalid value for :private? option: expected boolean, got: \"blah\"",
fn ->
defposts do
relationships do
@ -223,7 +223,7 @@ defmodule Ash.Test.Resource.Relationships.BelongsToTest do
test "fails if `define_attribute?` is not a boolean" do
assert_raise(
Spark.Error.DslError,
"[Ash.Test.Resource.Relationships.BelongsToTest.Post]\n relationships -> belongs_to -> foobar:\n expected :define_attribute? to be a boolean, got: \"blah\"",
"[Ash.Test.Resource.Relationships.BelongsToTest.Post]\n relationships -> belongs_to -> foobar:\n invalid value for :define_attribute? option: expected boolean, got: \"blah\"",
fn ->
defposts do
relationships do

View file

@ -63,7 +63,7 @@ defmodule Ash.Test.Resource.Relationships.HasManyTest do
test "fails if destination_attribute is not an atom" do
assert_raise(
Spark.Error.DslError,
"[Ash.Test.Resource.Relationships.HasManyTest.Post]\n relationships -> has_many -> foobar:\n expected :destination_attribute to be an atom, got: \"foo\"",
"[Ash.Test.Resource.Relationships.HasManyTest.Post]\n relationships -> has_many -> foobar:\n invalid value for :destination_attribute option: expected atom, got: \"foo\"",
fn ->
defposts do
relationships do
@ -77,7 +77,7 @@ defmodule Ash.Test.Resource.Relationships.HasManyTest do
test "fails if source_attribute is not an atom" do
assert_raise(
Spark.Error.DslError,
"[Ash.Test.Resource.Relationships.HasManyTest.Post]\n relationships -> has_many -> foobar:\n expected :source_attribute to be an atom, got: \"foo\"",
"[Ash.Test.Resource.Relationships.HasManyTest.Post]\n relationships -> has_many -> foobar:\n invalid value for :source_attribute option: expected atom, got: \"foo\"",
fn ->
defposts do
relationships do
@ -91,7 +91,7 @@ defmodule Ash.Test.Resource.Relationships.HasManyTest do
test "fails if the destination is not an atom" do
assert_raise(
Spark.Error.DslError,
"[Ash.Test.Resource.Relationships.HasManyTest.Post]\n relationships -> has_many -> foobar:\n expected :destination to be an atom, got: \"foobar\"",
"[Ash.Test.Resource.Relationships.HasManyTest.Post]\n relationships -> has_many -> foobar:\n invalid value for :destination option: expected atom, got: \"foobar\"",
fn ->
defposts do
relationships do
@ -105,7 +105,7 @@ defmodule Ash.Test.Resource.Relationships.HasManyTest do
test "fails if the relationship name is not an atom" do
assert_raise(
Spark.Error.DslError,
"[Ash.Test.Resource.Relationships.HasManyTest.Post]\n relationships -> has_many -> foobar:\n expected :name to be an atom, got: \"foobar\"",
"[Ash.Test.Resource.Relationships.HasManyTest.Post]\n relationships -> has_many -> foobar:\n invalid value for :name option: expected atom, got: \"foobar\"",
fn ->
defposts do
relationships do
@ -119,7 +119,7 @@ defmodule Ash.Test.Resource.Relationships.HasManyTest do
test "fails if private? is not an boolean" do
assert_raise(
Spark.Error.DslError,
"[Ash.Test.Resource.Relationships.HasManyTest.Post]\n relationships -> has_many -> foobar:\n expected :private? to be a boolean, got: \"foo\"",
"[Ash.Test.Resource.Relationships.HasManyTest.Post]\n relationships -> has_many -> foobar:\n invalid value for :private? option: expected boolean, got: \"foo\"",
fn ->
defposts do
relationships do

View file

@ -63,7 +63,7 @@ defmodule Ash.Test.Resource.Relationships.HasOneTest do
test "fails if destination_attribute is not an atom" do
assert_raise(
Spark.Error.DslError,
"[Ash.Test.Resource.Relationships.HasOneTest.Post]\n relationships -> has_one -> foobar:\n expected :destination_attribute to be an atom, got: \"foo\"",
"[Ash.Test.Resource.Relationships.HasOneTest.Post]\n relationships -> has_one -> foobar:\n invalid value for :destination_attribute option: expected atom, got: \"foo\"",
fn ->
defposts do
relationships do
@ -77,7 +77,7 @@ defmodule Ash.Test.Resource.Relationships.HasOneTest do
test "fails if source_attribute is not an atom" do
assert_raise(
Spark.Error.DslError,
"[Ash.Test.Resource.Relationships.HasOneTest.Post]\n relationships -> has_one -> foobar:\n expected :source_attribute to be an atom, got: \"foo\"",
"[Ash.Test.Resource.Relationships.HasOneTest.Post]\n relationships -> has_one -> foobar:\n invalid value for :source_attribute option: expected atom, got: \"foo\"",
fn ->
defposts do
relationships do
@ -91,7 +91,7 @@ defmodule Ash.Test.Resource.Relationships.HasOneTest do
test "fails if the destination is not an atom" do
assert_raise(
Spark.Error.DslError,
"[Ash.Test.Resource.Relationships.HasOneTest.Post]\n relationships -> has_one -> foobar:\n expected :destination to be an atom, got: \"foobar\"",
"[Ash.Test.Resource.Relationships.HasOneTest.Post]\n relationships -> has_one -> foobar:\n invalid value for :destination option: expected atom, got: \"foobar\"",
fn ->
defposts do
relationships do
@ -105,7 +105,7 @@ defmodule Ash.Test.Resource.Relationships.HasOneTest do
test "fails if the relationship name is not an atom" do
assert_raise(
Spark.Error.DslError,
"[Ash.Test.Resource.Relationships.HasOneTest.Post]\n relationships -> has_one -> foobar:\n expected :name to be an atom, got: \"foobar\"",
"[Ash.Test.Resource.Relationships.HasOneTest.Post]\n relationships -> has_one -> foobar:\n invalid value for :name option: expected atom, got: \"foobar\"",
fn ->
defposts do
relationships do
@ -119,7 +119,7 @@ defmodule Ash.Test.Resource.Relationships.HasOneTest do
test "fails if private? is not an boolean" do
assert_raise(
Spark.Error.DslError,
"[Ash.Test.Resource.Relationships.HasOneTest.Post]\n relationships -> has_one -> foobar:\n expected :private? to be a boolean, got: \"foo\"",
"[Ash.Test.Resource.Relationships.HasOneTest.Post]\n relationships -> has_one -> foobar:\n invalid value for :private? option: expected boolean, got: \"foo\"",
fn ->
defposts do
relationships do

View file

@ -102,7 +102,7 @@ defmodule Ash.Test.Resource.Relationships.ManyToManyTest do
test "it fails if you pass a string to `through`" do
assert_raise(
Spark.Error.DslError,
"[Ash.Test.Resource.Relationships.ManyToManyTest.Post]\n relationships -> many_to_many -> foobars:\n expected :through to be an atom, got: \"some_table\"",
"[Ash.Test.Resource.Relationships.ManyToManyTest.Post]\n relationships -> many_to_many -> foobars:\n invalid value for :through option: expected atom, got: \"some_table\"",
fn ->
defposts do
relationships do
@ -130,7 +130,7 @@ defmodule Ash.Test.Resource.Relationships.ManyToManyTest do
test "it fails if you dont pass an atom for `source_attribute_on_join_resource`" do
assert_raise(
Spark.Error.DslError,
"[Ash.Test.Resource.Relationships.ManyToManyTest.Post]\n relationships -> many_to_many -> foobars:\n expected :source_attribute_on_join_resource to be an atom, got: \"what\"",
"[Ash.Test.Resource.Relationships.ManyToManyTest.Post]\n relationships -> many_to_many -> foobars:\n invalid value for :source_attribute_on_join_resource option: expected atom, got: \"what\"",
fn ->
defposts do
relationships do
@ -147,7 +147,7 @@ defmodule Ash.Test.Resource.Relationships.ManyToManyTest do
test "it fails if you dont pass an atom for `destination_attribute_on_join_resource`" do
assert_raise(
Spark.Error.DslError,
"[Ash.Test.Resource.Relationships.ManyToManyTest.Post]\n relationships -> many_to_many -> foobars:\n expected :destination_attribute_on_join_resource to be an atom, got: \"what\"",
"[Ash.Test.Resource.Relationships.ManyToManyTest.Post]\n relationships -> many_to_many -> foobars:\n invalid value for :destination_attribute_on_join_resource option: expected atom, got: \"what\"",
fn ->
defposts do
relationships do
@ -164,7 +164,7 @@ defmodule Ash.Test.Resource.Relationships.ManyToManyTest do
test "it fails if you dont pass an atom for `source_attribute`" do
assert_raise(
Spark.Error.DslError,
"[Ash.Test.Resource.Relationships.ManyToManyTest.Post]\n relationships -> many_to_many -> foobars:\n expected :source_attribute to be an atom, got: \"what\"",
"[Ash.Test.Resource.Relationships.ManyToManyTest.Post]\n relationships -> many_to_many -> foobars:\n invalid value for :source_attribute option: expected atom, got: \"what\"",
fn ->
defposts do
relationships do
@ -182,7 +182,7 @@ defmodule Ash.Test.Resource.Relationships.ManyToManyTest do
test "it fails if you dont pass an atom for `destination_attribute`" do
assert_raise(
Spark.Error.DslError,
"[Ash.Test.Resource.Relationships.ManyToManyTest.Post]\n relationships -> many_to_many -> foobars:\n expected :destination_attribute to be an atom, got: \"what\"",
"[Ash.Test.Resource.Relationships.ManyToManyTest.Post]\n relationships -> many_to_many -> foobars:\n invalid value for :destination_attribute option: expected atom, got: \"what\"",
fn ->
defposts do
relationships do
@ -200,7 +200,7 @@ defmodule Ash.Test.Resource.Relationships.ManyToManyTest do
test "fails if private? is not an boolean" do
assert_raise(
Spark.Error.DslError,
"[Ash.Test.Resource.Relationships.ManyToManyTest.Post]\n relationships -> many_to_many -> foobars:\n expected :private? to be a boolean, got: \"an_invalid_field\"",
"[Ash.Test.Resource.Relationships.ManyToManyTest.Post]\n relationships -> many_to_many -> foobars:\n invalid value for :private? option: expected boolean, got: \"an_invalid_field\"",
fn ->
defposts do
relationships do