refactor: Move opt schema definitions for built-in changes and validations to their specific modules (#1036)

* refactor: Move Spark opts definitions for built-in changes to the relevant change modules

This allows the change modules to be read and grokked standalone, without
needing to refer back to the `Builtins` module just for the opts

* refactor: Move Spark opts definitions for built-in validations to the relevant validation modules

This allows the validation modules to be read and grokked standalone, without
needing to refer back to the `Builtins` module just for the opts
This commit is contained in:
Rebecca Le 2024-04-19 22:43:35 +08:00 committed by GitHub
parent 94f319a5f4
commit 7c75e5c102
No known key found for this signature in database
GPG key ID: B5690EEEBB952194
7 changed files with 127 additions and 148 deletions

View file

@@ -5,63 +5,6 @@ defmodule Ash.Resource.Change.Builtins do
The functions in this module are imported by default in the actions section.
"""
@relate_actor_opts [
relationship: [
doc: "The relationship to set the actor to.",
required: true,
type: :atom
],
allow_nil?: [
doc: "Whether or not to allow the actor to be nil, in which case nothing will happen.",
type: :boolean,
default: false
],
field: [
doc: "The field of the actor to set the relationship to",
type: :atom
]
]
@set_attribute_opts [
attribute: [
doc: "The attribute to change.",
required: true,
type: :atom
],
value: [
doc:
"The value to set the attribute to; may be a fn/0 which will be called to produce the value.",
required: true,
type: {:custom, Ash.Resource.Change.SetAttribute, :validate_value, []}
],
set_when_nil?: [
doc: "When false, decline setting the attribute if it is nil.",
type: :boolean,
default: true
],
new?: [
doc:
"When true, sets the attribute to the value provided if the attribute is not already being changed.",
type: :boolean,
default: false
]
]
@doc """
Relates the actor to the data being changed, as the provided relationship.
## Options
#{Spark.Options.docs(@relate_actor_opts)}
## Examples
change relate_actor(:owner, allow_nil?: true)
"""
def relate_actor_opts do
@relate_actor_opts
end
@doc """
Applies a filter to the changeset. Has no effect for create actions.
@@ -72,6 +15,17 @@ defmodule Ash.Resource.Change.Builtins do
{Ash.Resource.Change.Filter, filter: filter}
end
@doc """
Relates the actor to the data being changed, as the provided relationship.
## Options
#{Spark.Options.docs(Ash.Resource.Change.RelateActor.opt_schema())}
## Examples
change relate_actor(:owner, allow_nil?: true)
"""
@spec relate_actor(relationship :: atom, opts :: Keyword.t()) :: Ash.Resource.Change.ref()
def relate_actor(relationship, opts \\ []) do
opts =
@@ -136,11 +90,6 @@ defmodule Ash.Resource.Change.Builtins do
{Ash.Resource.Change.GetAndLock, [lock: lock]}
end
@doc false
def set_attribute_opts do
@set_attribute_opts
end
@doc """
Updates an existing attribute change by applying a function to it.
@@ -186,7 +135,7 @@ defmodule Ash.Resource.Change.Builtins do
## Options
#{Spark.Options.docs(Keyword.drop(@set_attribute_opts, [:attribute, :value]))}
#{Spark.Options.docs(Keyword.drop(Ash.Resource.Change.SetAttribute.opt_schema(), [:attribute, :value]))}
## Examples
@@ -196,7 +145,7 @@ defmodule Ash.Resource.Change.Builtins do
change set_attribute(:encrypted_data, arg(:data), set_when_nil?: false)
"""
@spec set_attribute(
relationship :: atom,
attribute :: atom,
(-> term) | {:_arg, :status} | term(),
opts :: Keyword.t()
) ::

View file

@@ -4,9 +4,28 @@ defmodule Ash.Resource.Change.RelateActor do
alias Ash.Changeset
alias Ash.Error.Changes.InvalidRelationship
@opt_schema [
relationship: [
doc: "The relationship to set the actor to.",
required: true,
type: :atom
],
allow_nil?: [
doc: "Whether or not to allow the actor to be nil, in which case nothing will happen.",
type: :boolean,
default: false
],
field: [
doc: "The field of the actor to set the relationship to",
type: :atom
]
]
def opt_schema, do: @opt_schema
@impl true
def init(opts) do
case Spark.Options.validate(opts, Ash.Resource.Change.Builtins.relate_actor_opts()) do
case Spark.Options.validate(opts, opt_schema()) do
{:ok, opts} ->
{:ok, opts}

View file

@@ -3,9 +3,36 @@ defmodule Ash.Resource.Change.SetAttribute do
use Ash.Resource.Change
alias Ash.Changeset
@opt_schema [
attribute: [
doc: "The attribute to change.",
required: true,
type: :atom
],
value: [
doc:
"The value to set the attribute to; may be a fn/0 which will be called to produce the value.",
required: true,
type: {:custom, __MODULE__, :validate_value, []}
],
set_when_nil?: [
doc: "When false, decline setting the attribute if it is nil.",
type: :boolean,
default: true
],
new?: [
doc:
"When true, sets the attribute to the value provided if the attribute is not already being changed.",
type: :boolean,
default: false
]
]
def opt_schema, do: @opt_schema
@impl true
def init(opts) do
case Spark.Options.validate(opts, Ash.Resource.Change.Builtins.set_attribute_opts()) do
case Spark.Options.validate(opts, opt_schema()) do
{:ok, opts} ->
{:ok, opts}

View file

@@ -115,30 +115,12 @@ defmodule Ash.Resource.Validation.Builtins do
{Validation.AttributeIn, attribute: attribute, list: list}
end
@string_length_opts [
min: [
type: :non_neg_integer,
doc: "String must be this length at least"
],
max: [
type: :non_neg_integer,
doc: "String must be this length at most"
],
exact: [
type: :non_neg_integer,
doc: "String must be this length exactly"
]
]
@doc false
def string_length_opts, do: @string_length_opts
@doc """
Validates that an attribute on the original record meets the given length criteria
## Options
#{Spark.Options.docs(@string_length_opts)}
#{Spark.Options.docs(Ash.Resource.Validation.StringLength.opt_schema())}
## Examples
@@ -151,41 +133,13 @@ defmodule Ash.Resource.Validation.Builtins do
{Validation.StringLength, Keyword.merge(opts, attribute: attribute)}
end
@compare_opts [
greater_than: [
type: {:or, [:integer, :atom, :float, {:struct, Decimal}, {:fun, 0}]},
required: false,
doc: "The value that the attribute should be greater than."
],
greater_than_or_equal_to: [
type: {:or, [:integer, :atom, :float, {:struct, Decimal}, {:fun, 0}]},
required: false,
doc: "The value that the attribute should be greater than or equal to"
],
less_than: [
type: {:or, [:integer, :atom, :float, {:struct, Decimal}, {:fun, 0}]},
required: false,
doc: "The value that the attribute should be less than"
],
less_than_or_equal_to: [
type: {:or, [:integer, :atom, :float, {:struct, Decimal}, {:fun, 0}]},
required: false,
doc: "The value that the attribute should be less than or equal to"
]
]
@doc false
def compare_opts do
@compare_opts
end
@numericality_docs """
Validates that an attribute or argument meets the given comparison criteria.
The values provided for each option may be a literal value, attribute, argument, or a zero argument function.
## Options
#{Spark.Options.docs(@compare_opts)}
#{Spark.Options.docs(Ash.Resource.Validation.Compare.opt_schema())}
## Examples
@@ -222,26 +176,6 @@ defmodule Ash.Resource.Validation.Builtins do
attribute: attribute, match: match, message: "must match #{inspect(match)}"}
end
@present_opts [
at_least: [
type: :non_neg_integer,
doc: "At least this many must be present. Defaults to the number of attributes provided"
],
at_most: [
type: :non_neg_integer,
doc: "At most this many must be present. Defaults to the number of attributes provided"
],
exactly: [
type: :non_neg_integer,
doc: "Exactly this many must be present"
]
]
@doc false
def present_opts do
@present_opts
end
@doc """
Validates the presence of a list of attributes or arguments.
@@ -249,7 +183,7 @@ defmodule Ash.Resource.Validation.Builtins do
## Options
#{Spark.Options.docs(@present_opts)}
#{Spark.Options.docs(Ash.Resource.Validation.Present.opt_schema())}
"""
@spec present(attributes_or_arguments :: atom | list(atom), opts :: Keyword.t()) ::
Validation.ref()
@@ -272,7 +206,7 @@ defmodule Ash.Resource.Validation.Builtins do
## Options
#{String.replace(Spark.Options.docs(@present_opts), "present", "absent")}
#{String.replace(Spark.Options.docs(Ash.Resource.Validation.Present.opt_schema()), "present", "absent")}
"""
@spec absent(attributes_or_arguments :: atom | list(atom), opts :: Keyword.t()) ::
Validation.ref()

View file

@@ -6,14 +6,36 @@ defmodule Ash.Resource.Validation.Compare do
alias Ash.Error.Changes.InvalidAttribute
import Ash.Expr
@opt_schema [
greater_than: [
type: {:or, [:integer, :atom, :float, {:struct, Decimal}, {:fun, 0}]},
required: false,
doc: "The value that the attribute should be greater than."
],
greater_than_or_equal_to: [
type: {:or, [:integer, :atom, :float, {:struct, Decimal}, {:fun, 0}]},
required: false,
doc: "The value that the attribute should be greater than or equal to"
],
less_than: [
type: {:or, [:integer, :atom, :float, {:struct, Decimal}, {:fun, 0}]},
required: false,
doc: "The value that the attribute should be less than"
],
less_than_or_equal_to: [
type: {:or, [:integer, :atom, :float, {:struct, Decimal}, {:fun, 0}]},
required: false,
doc: "The value that the attribute should be less than or equal to"
]
]
def opt_schema, do: @opt_schema
@impl true
def init(opts) do
case Spark.Options.validate(
opts,
Keyword.put(Ash.Resource.Validation.Builtins.compare_opts(), :attribute,
type: :atom,
required: true
)
Keyword.put(opt_schema(), :attribute, type: :atom, required: true)
) do
{:ok, opts} ->
{:ok, opts}

View file

@@ -5,14 +5,28 @@ defmodule Ash.Resource.Validation.Present do
alias Ash.Error.Changes.{InvalidAttribute, InvalidChanges}
import Ash.Expr
@opt_schema [
at_least: [
type: :non_neg_integer,
doc: "At least this many must be present. Defaults to the number of attributes provided"
],
at_most: [
type: :non_neg_integer,
doc: "At most this many must be present. Defaults to the number of attributes provided"
],
exactly: [
type: :non_neg_integer,
doc: "Exactly this many must be present"
]
]
def opt_schema, do: @opt_schema
@impl true
def init(opts) do
case Spark.Options.validate(
opts,
Keyword.put(Ash.Resource.Validation.Builtins.present_opts(), :attributes,
type: {:wrap_list, :atom},
required: true
)
Keyword.put(opt_schema(), :attributes, type: {:wrap_list, :atom}, required: true)
) do
{:ok, opts} ->
{:ok, opts}

View file

@@ -5,14 +5,28 @@ defmodule Ash.Resource.Validation.StringLength do
alias Ash.Error.Changes.InvalidAttribute
import Ash.Expr
@opt_schema [
min: [
type: :non_neg_integer,
doc: "String must be this length at least"
],
max: [
type: :non_neg_integer,
doc: "String must be this length at most"
],
exact: [
type: :non_neg_integer,
doc: "String must be this length exactly"
]
]
def opt_schema, do: @opt_schema
@impl true
def init(opts) do
case Spark.Options.validate(
opts,
Keyword.put(Ash.Resource.Validation.Builtins.string_length_opts(), :attribute,
type: :atom,
required: true
)
Keyword.put(opt_schema(), :attribute, type: :atom, required: true)
) do
{:ok, opts} ->
{:ok, opts}