improvement: revamp data loading to be a single load statement

Zach Daniel 2023-06-09 00:45:39 -04:00
parent 09ca364162
commit 05be948204
13 changed files with 879 additions and 533 deletions
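
In rough terms, this commit replaces per-field Dataloader batching with a single load statement: the resolver walks the Absinthe selection set once and hands the resulting nested load to Ash before the query runs. A minimal sketch of the idea (resource, API, and field names here are illustrative, not taken from this diff):

# The GraphQL selection set is translated into one nested load applied up front,
# instead of resolving each relationship/calculation lazily through Dataloader.
query =
  MyApp.Post
  |> Ash.Query.new()
  |> Ash.Query.load(
    # relationships become nested loads, optionally carrying their own query
    comments: [:author],
    # calculations keep their arguments
    full_title: %{format: :long}
  )

MyApp.Api.read!(query)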

View file

@@ -117,7 +117,7 @@
#
{Credo.Check.Refactor.CondStatements, []},
{Credo.Check.Refactor.CyclomaticComplexity, false},
{Credo.Check.Refactor.FunctionArity, []},
{Credo.Check.Refactor.FunctionArity, [max_arity: 11]},
{Credo.Check.Refactor.LongQuoteBlocks, []},
{Credo.Check.Refactor.MapInto, false},
{Credo.Check.Refactor.MatchInCondition, []},

View file

@@ -571,28 +571,8 @@ defmodule AshGraphql do
end)
end
def add_context(ctx, apis, options \\ []) do
options = Keyword.put(options, :get_policy, :tuples)
empty_dataloader = Dataloader.new(options)
dataloader =
apis
|> List.wrap()
|> Enum.map(fn
{api, _registry} ->
api
api ->
api
end)
|> Enum.reduce(empty_dataloader, fn api, dataloader ->
Dataloader.add_source(
dataloader,
api,
AshGraphql.Dataloader.new(api)
)
end)
Map.put(ctx, :loader, dataloader)
@deprecated "add_context is no longer necessary"
def add_context(ctx, _apis, _options \\ []) do
ctx
end
end
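
Because the query itself now carries the load, the Dataloader plumbing that schemas previously wired up is no longer needed, which is why add_context/3 is deprecated above and simply returns the context. A hedged before/after sketch (schema and API module names are hypothetical; the commented-out lines reflect the typical Absinthe Dataloader wiring):

defmodule MyAppWeb.Schema do
  use Absinthe.Schema
  use AshGraphql, apis: [MyApp.Api]

  query do
  end

  # Before this commit, the schema had to set up a Dataloader source and plugin:
  #
  #   def context(ctx), do: AshGraphql.add_context(ctx, [MyApp.Api])
  #   def plugins, do: [Absinthe.Middleware.Dataloader | Absinthe.Plugin.defaults()]
  #
  # After it, add_context/3 is a no-op, so both overrides can simply be removed.
end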

View file

@@ -1,380 +0,0 @@
defmodule AshGraphql.Dataloader do
@moduledoc "The dataloader in charge of resolving "
defstruct [
:api,
batches: %{},
results: %{},
default_params: %{}
]
@type t :: %__MODULE__{
api: Ash.Api.t(),
batches: map,
results: map,
default_params: map
}
@type api_opts :: Keyword.t()
@type batch_fun :: (Ash.Resource.t(), Ash.Query.t(), any, [any], api_opts -> [any])
import AshGraphql.TraceHelpers
@doc """
Create an Ash Dataloader source.
This module handles retrieving data from Ash for dataloader. It requires a
valid Ash API.
"""
@spec new(Ash.Api.t()) :: t
def new(api) do
%__MODULE__{api: api}
end
defimpl Dataloader.Source do
def run(source) do
results = Dataloader.async_safely(__MODULE__, :run_batches, [source])
results =
Map.merge(source.results, results, fn _, {:ok, v1}, {:ok, v2} ->
{:ok, Map.merge(v1, v2)}
end)
%{source | results: results, batches: %{}}
end
def fetch(source, batch_key, item) do
{batch_key, item_key, _item} =
batch_key
|> normalize_key(source.default_params)
|> get_keys(item)
case Map.fetch(source.results, batch_key) do
{:ok, batch} ->
fetch_item_from_batch(batch, item_key)
:error ->
{:error, "Unable to find batch #{inspect(batch_key)}"}
end
end
defp fetch_item_from_batch({:error, _reason} = tried_and_failed, _item_key),
do: tried_and_failed
defp fetch_item_from_batch({:ok, batch}, item_key) do
case Map.fetch(batch, item_key) do
:error -> {:error, "Unable to find item #{inspect(item_key)} in batch"}
result -> result
end
end
def put(source, _batch, _item, %Ash.NotLoaded{type: :relationship}) do
source
end
def put(source, batch, item, result) do
batch = normalize_key(batch, source.default_params)
{batch_key, item_key, _item} = get_keys(batch, item)
results =
Map.update(
source.results,
batch_key,
{:ok, %{item_key => result}},
fn {:ok, map} -> {:ok, Map.put(map, item_key, result)} end
)
%{source | results: results}
end
def load(source, batch, item) do
{batch_key, item_key, item} =
batch
|> normalize_key(source.default_params)
|> get_keys(item)
if fetched?(source.results, batch_key, item_key) do
source
else
entry = {item_key, item}
update_in(source.batches, fn batches ->
Map.update(batches, batch_key, MapSet.new([entry]), &MapSet.put(&1, entry))
end)
end
end
defp fetched?(results, batch_key, item_key) do
case results do
%{^batch_key => {:ok, %{^item_key => _}}} -> true
_ -> false
end
end
def pending_batches?(%{batches: batches}) do
batches != %{}
end
def timeout(_) do
Dataloader.default_timeout()
end
defp related(path, resource) do
Ash.Resource.Info.related(resource, path) ||
raise """
Valid relationship for path #{inspect(path)} not found on resource #{inspect(resource)}
"""
end
defp get_keys({assoc_field, %{type: :relationship} = opts}, %resource{} = record)
when is_atom(assoc_field) do
validate_resource(resource)
pkey = Ash.Resource.Info.primary_key(resource)
id = Enum.map(pkey, &Map.get(record, &1))
queryable = related([assoc_field], resource)
{{:assoc, resource, self(), assoc_field, queryable, opts}, id, record}
end
defp get_keys({calc, %{type: :calculation} = opts}, %resource{} = record) do
validate_resource(resource)
pkey = Ash.Resource.Info.primary_key(resource)
id = Enum.map(pkey, &Map.get(record, &1))
{{:calc, resource, self(), calc, opts}, id, record}
end
defp get_keys(key, item) do
raise """
Invalid batch key: #{inspect(key)}
#{inspect(item)}
"""
end
defp validate_resource(resource) do
unless Ash.Resource.Info.resource?(resource) do
raise "The given module - #{resource} - is not an Ash resouce."
end
end
defp normalize_key({key, params}, default_params) do
{key, Enum.into(params, default_params)}
end
defp normalize_key(key, default_params) do
{key, default_params}
end
def run_batches(source) do
options = [
timeout: Dataloader.default_timeout(),
on_timeout: :kill_task
]
results =
source.batches
|> Task.async_stream(
fn batch ->
id = :erlang.unique_integer()
system_time = System.system_time()
start_time_mono = System.monotonic_time()
emit_start_event(id, system_time, batch)
batch_result = run_batch(batch, source)
emit_stop_event(id, start_time_mono, batch)
batch_result
end,
options
)
|> Enum.map(fn
{:ok, {_key, result}} -> {:ok, result}
{:exit, reason} -> {:error, reason}
end)
source.batches
|> Enum.map(fn {key, _set} -> key end)
|> Enum.zip(results)
|> Map.new()
end
defp run_batch(
{{:assoc, source_resource, _pid, field, _resource, opts} = key, records},
source
) do
tracer = AshGraphql.Api.Info.tracer(source.api)
if tracer && opts[:span_context] do
tracer.set_span_context(opts[:span_context])
end
resource_short_name = Ash.Resource.Info.short_name(source_resource)
metadata = %{
api: source.api,
resource: source_resource,
resource_short_name: resource_short_name,
actor: opts[:api_opts][:actor],
tenant: opts[:api_opts][:tenant],
relationship: field,
source: :graphql,
authorize?: AshGraphql.Api.Info.authorize?(source.api)
}
trace source.api,
source_resource,
:gql_relationship_batch,
"#{resource_short_name}.#{field}",
metadata do
{ids, records} = Enum.unzip(records)
query = opts[:query]
api_opts = opts[:api_opts]
tenant = opts[:tenant] || tenant_from_records(records)
empty = source_resource |> struct |> Map.fetch!(field)
records = records |> Enum.map(&Map.put(&1, field, empty))
relationship = Ash.Resource.Info.relationship(source_resource, field)
cardinality = relationship.cardinality
loads =
if Map.has_key?(relationship, :manual) && relationship.manual do
field
else
query =
query
|> Ash.Query.new()
|> Ash.Query.set_tenant(tenant)
|> Ash.Query.for_read(
relationship.read_action ||
Ash.Resource.Info.primary_action!(relationship.destination, :read).name,
opts[:args],
api_opts
)
{field, query}
end
loaded = source.api.load!(records, [loads], api_opts || [])
loaded =
case loaded do
%struct{results: results} when struct in [Ash.Page.Offset, Ash.Page.Keyset] ->
results
loaded ->
loaded
end
results =
case cardinality do
:many ->
Enum.map(loaded, fn record ->
List.wrap(Map.get(record, field))
end)
:one ->
Enum.map(loaded, fn record ->
Map.get(record, field)
end)
end
{key, Map.new(Enum.zip(ids, results))}
end
end
defp run_batch(
{{:calc, _, _pid, calc,
%{resource: resource, args: args, api_opts: api_opts, span_context: span_context}} =
key, records},
source
) do
tracer = AshGraphql.Api.Info.tracer(source.api)
if tracer && span_context do
tracer.set_span_context(span_context)
end
resource_short_name = Ash.Resource.Info.short_name(resource)
metadata = %{
api: source.api,
resource: resource,
resource_short_name: resource_short_name,
actor: api_opts[:actor],
tenant: api_opts[:tenant],
calculation: calc,
source: :graphql,
authorize?: AshGraphql.Api.Info.authorize?(source.api)
}
trace source.api,
resource,
:gql_calculation_batch,
"#{resource_short_name}.#{calc}.batch",
metadata do
{ids, records} = Enum.unzip(records)
calculation = Ash.Resource.Info.calculation(resource, calc)
results =
records
|> source.api.load!([{calc, args}], api_opts)
|> Enum.map(&Map.get(&1, calc))
results =
if Ash.Type.NewType.new_type?(calculation.type) &&
Ash.Type.NewType.subtype_of(Ash.Type.Union) &&
function_exported?(calculation.type, :graphql_unnested_unions, 1) do
unnested_types = calculation.type.graphql_unnested_unions(calculation.constraints)
constraints = Ash.Type.NewType.constraints(calculation.type, calculation.constraints)
Enum.map(results, fn
nil ->
nil
%Ash.Union{type: type, value: value} = result ->
if type in unnested_types do
if value do
type =
AshGraphql.Resource.field_type(
constraints[:types][type][:type],
calculation,
resource
)
Map.put(value, :__union_type__, type)
end
else
result
end
end)
else
results
end
{key, Map.new(Enum.zip(ids, results))}
end
end
defp tenant_from_records([%{__metadata__: %{tenant: tenant}}]) when not is_nil(tenant) do
tenant
end
defp tenant_from_records(_), do: nil
defp emit_start_event(id, system_time, batch) do
:telemetry.execute(
[:dataloader, :source, :batch, :run, :start],
%{system_time: system_time},
%{id: id, batch: batch}
)
end
defp emit_stop_event(id, start_time_mono, batch) do
:telemetry.execute(
[:dataloader, :source, :batch, :run, :stop],
%{duration: System.monotonic_time() - start_time_mono},
%{id: id, batch: batch}
)
end
end
end

View file

@@ -61,7 +61,17 @@ defmodule AshGraphql.Graphql.Resolver do
query =
query
|> Ash.Query.do_filter(filter)
|> load_fields(resource, resolution)
|> load_fields(
[
api: api,
tenant: Map.get(context, :tenant),
authorize?: AshGraphql.Api.Info.authorize?(api),
actor: Map.get(context, :actor)
],
resource,
resolution,
[]
)
result =
query
@@ -81,7 +91,17 @@ defmodule AshGraphql.Graphql.Resolver do
|> Ash.Query.set_context(get_context(context))
|> set_query_arguments(action, arguments)
|> select_fields(resource, resolution)
|> load_fields(resource, resolution)
|> load_fields(
[
api: api,
tenant: Map.get(context, :tenant),
authorize?: AshGraphql.Api.Info.authorize?(api),
actor: Map.get(context, :actor)
],
resource,
resolution,
[]
)
{{:error, error}, [query, {:error, error}]}
end
@@ -172,7 +192,17 @@ defmodule AshGraphql.Graphql.Resolver do
|> Ash.Query.set_context(get_context(context))
|> set_query_arguments(action, args)
|> select_fields(resource, resolution)
|> load_fields(resource, resolution)
|> load_fields(
[
api: api,
tenant: Map.get(context, :tenant),
authorize?: AshGraphql.Api.Info.authorize?(api),
actor: Map.get(context, :actor)
],
resource,
resolution,
[]
)
result =
query
@@ -257,7 +287,19 @@ defmodule AshGraphql.Graphql.Resolver do
|> set_query_arguments(action, args)
|> select_fields(resource, resolution, result_fields),
query <-
load_fields(initial_query, resource, resolution, result_fields),
load_fields(
initial_query,
[
api: api,
tenant: Map.get(context, :tenant),
authorize?: AshGraphql.Api.Info.authorize?(api),
actor: Map.get(context, :actor)
],
resource,
resolution,
[],
result_fields
),
{:ok, page} <-
query
|> Ash.Query.for_read(action, %{},
@@ -850,7 +892,18 @@ defmodule AshGraphql.Graphql.Resolver do
authorize?: AshGraphql.Api.Info.authorize?(api)
)
|> select_fields(resource, resolution, ["result"])
|> load_fields(resource, resolution, ["result"])
|> load_fields(
[
api: api,
tenant: Map.get(context, :tenant),
authorize?: AshGraphql.Api.Info.authorize?(api),
actor: Map.get(context, :actor)
],
resource,
resolution,
[],
["result"]
)
{result, modify_args} =
changeset
@@ -980,7 +1033,18 @@ defmodule AshGraphql.Graphql.Resolver do
authorize?: AshGraphql.Api.Info.authorize?(api)
)
|> select_fields(resource, resolution, ["result"])
|> load_fields(resource, resolution, ["result"])
|> load_fields(
[
api: api,
tenant: Map.get(context, :tenant),
authorize?: AshGraphql.Api.Info.authorize?(api),
actor: Map.get(context, :actor)
],
resource,
resolution,
[],
["result"]
)
{result, modify_args} =
changeset
@@ -1181,7 +1245,9 @@ defmodule AshGraphql.Graphql.Resolver do
Logger.error("""
#{uuid}: Exception raised while resolving query.
#{Exception.format(:error, e, stacktrace)}
#{String.slice(Exception.format(:error, e), 0, 2000)}
#{Exception.format_stacktrace(stacktrace)}
""")
uuid
@@ -1321,18 +1387,78 @@ defmodule AshGraphql.Graphql.Resolver do
end)
end
defp load_fields(query_or_changeset, resource, resolution, nested \\ []) do
fields =
case resolution do
%Absinthe.Resolution{} ->
fields(resolution, nested)
%Absinthe.Blueprint.Document.Field{selections: selections} ->
selections
end
defp load_fields(query_or_changeset, load_opts, resource, resolution, path, nested \\ []) do
{fields, path} = nested_fields_and_path(resolution, path, nested)
fields
|> Enum.flat_map(fn selection ->
|> resource_loads(resource, resolution, load_opts, path)
|> then(fn load ->
case query_or_changeset do
%Ash.Query{} = query ->
Ash.Query.load(query, load)
%Ash.Changeset{} = changeset ->
Ash.Changeset.load(changeset, load)
end
end)
end
defp nested_fields_and_path(resolution, path, []) do
base = List.last(path) || resolution
selections =
case base do
%Absinthe.Resolution{} ->
Absinthe.Resolution.project(resolution)
%Absinthe.Blueprint.Document.Field{selections: selections} ->
{fields, _} =
selections
|> Absinthe.Resolution.Projector.project(
Absinthe.Schema.lookup_type(resolution.schema, base.schema_node.type),
path,
%{},
resolution
)
fields
end
{selections, path}
end
defp nested_fields_and_path(resolution, path, [nested | rest]) do
base = List.last(path) || resolution
selections =
case base do
%Absinthe.Resolution{} ->
Absinthe.Resolution.project(resolution)
%Absinthe.Blueprint.Document.Field{selections: selections} ->
{fields, _} =
selections
|> Absinthe.Resolution.Projector.project(
Absinthe.Schema.lookup_type(resolution.schema, base.schema_node.type),
path,
%{},
resolution
)
fields
end
selection = Enum.find(selections, &(&1.name == nested))
if selection do
nested_fields_and_path(resolution, path ++ [selection], rest)
else
{[], path}
end
end
defp resource_loads(fields, resource, resolution, load_opts, path) do
Enum.flat_map(fields, fn selection ->
cond do
aggregate = Ash.Resource.Info.aggregate(resource, selection.schema_node.identifier) ->
[aggregate.name]
@@ -1351,21 +1477,381 @@ defmodule AshGraphql.Graphql.Resolver do
end
end)
[{calculation.name, arguments}]
if Ash.Type.can_load?(calculation.type) do
loads =
type_loads(
selection.selections,
calculation.type,
calculation.constraints,
load_opts,
resource,
calculation.name,
resolution,
resolution.path ++ [selection],
selection,
AshGraphql.Resource.Info.type(resource)
)
case loads do
[] ->
[{calculation.name, arguments}]
loads ->
[{calculation.name, {arguments, loads}}]
end
else
[{calculation.name, arguments}]
end
attribute = Ash.Resource.Info.attribute(resource, selection.schema_node.identifier) ->
if Ash.Type.can_load?(attribute.type) do
loads =
type_loads(
selection.selections,
attribute.type,
attribute.constraints,
load_opts,
resource,
attribute.name,
resolution,
resolution.path ++ [selection],
selection,
AshGraphql.Resource.Info.type(resource)
)
case loads do
[] ->
if selection.alias do
{:ok, calc} =
Ash.Query.Calculation.new(
{:__ash_graphql_attribute__, selection.alias},
Ash.Resource.Calculation.LoadAttribute,
Keyword.put(load_opts, :attribute, attribute.name),
{attribute.type, attribute.constraints}
)
[
calc
]
else
[attribute.name]
end
loads ->
if selection.alias do
{:ok, calc} =
Ash.Query.Calculation.new(
{:__ash_graphql_attribute__, selection.alias},
Ash.Resource.Calculation.LoadAttribute,
Keyword.merge(load_opts, load: loads, attribute: attribute.name),
{attribute.type, attribute.constraints}
)
[
calc
]
else
[{attribute.name, loads}]
end
end
else
[attribute.name]
end
relationship = Ash.Resource.Info.relationship(resource, selection.schema_node.identifier) ->
related_query =
selection.arguments
|> Map.new(fn argument ->
{argument.schema_node.identifier, argument.input_value.data}
end)
|> apply_load_arguments(Ash.Query.new(relationship.destination))
|> select_fields(relationship.destination, selection)
|> load_fields(load_opts, relationship.destination, resolution, path ++ [selection])
if selection.alias do
{type, constraints} =
case relationship.cardinality do
:many ->
{{:array, :struct}, items: [instance_of: relationship.destination]}
:one ->
{:struct, instance_of: relationship.destination}
end
{:ok, calc} =
Ash.Query.Calculation.new(
{:__ash_graphql_relationship__, selection.alias},
Ash.Resource.Calculation.LoadRelationship,
Keyword.merge(load_opts, relationship: relationship.name, query: related_query),
{type, constraints}
)
[
calc
]
else
[{relationship.name, related_query}]
end
true ->
[]
end
end)
|> then(fn load ->
case query_or_changeset do
%Ash.Query{} = query ->
Ash.Query.load(query, load)
end
%Ash.Changeset{} = changeset ->
Ash.Changeset.load(changeset, load)
end
end)
defp type_loads(
selections,
type,
constraints,
load_opts,
resource,
field_name,
resolution,
path,
selection,
parent_type_name,
original_type \\ nil
)
defp type_loads(
selections,
{:array, type},
constraints,
load_opts,
resource,
field_name,
resolution,
path,
selection,
parent_type_name,
original_type
) do
type_loads(
selections,
type,
constraints[:items] || [],
load_opts,
resource,
field_name,
resolution,
path,
selection,
parent_type_name,
original_type
)
end
defp type_loads(
selections,
type,
constraints,
load_opts,
resource,
field_name,
resolution,
path,
selection,
parent_type_name,
original_type
) do
cond do
Ash.Type.NewType.new_type?(type) ->
subtype_constraints = Ash.Type.NewType.constraints(type, constraints)
subtype_of = Ash.Type.NewType.subtype_of(type)
type_loads(
selections,
subtype_of,
subtype_constraints,
load_opts,
resource,
field_name,
resolution,
path,
selection,
parent_type_name,
{type, constraints}
)
Ash.Type.embedded_type?(type) || Ash.Resource.Info.resource?(type) ->
selections
|> resource_loads(type, resolution, load_opts, path)
type == Ash.Type.Union ->
{global_selections, fragments} =
Enum.split_with(selections, fn
%Absinthe.Blueprint.Document.Field{} ->
true
_ ->
false
end)
loads =
case global_selections do
[] ->
[]
global_selections ->
first_type_config =
constraints[:types]
|> Enum.at(0)
|> elem(1)
first_type = first_type_config[:type]
first_constraints = first_type_config[:constraints]
type_loads(
global_selections,
first_type,
first_constraints,
load_opts,
resource,
field_name,
resolution,
path,
selection,
parent_type_name
)
end
{graphql_unnested_unions, configured_type_name} =
case original_type do
{type, constraints} ->
configured_type_name =
cond do
function_exported?(type, :graphql_type, 0) ->
type.graphql_type()
function_exported?(type, :graphql_type, 1) ->
type.graphql_type(constraints)
true ->
nil
end
unnested_unions =
if function_exported?(type, :graphql_unnested_unions, 1) do
type.graphql_unnested_unions(constraints)
else
[]
end
{unnested_unions, configured_type_name}
_ ->
{[], nil}
end
constraints[:types]
|> Enum.filter(fn {_, config} ->
Ash.Type.can_load?(config[:type])
end)
|> Enum.reduce(loads, fn {type_name, config}, acc ->
{gql_type_name, nested?} =
if type_name in graphql_unnested_unions do
{AshGraphql.Resource.field_type(
config[:type],
%Ash.Resource.Attribute{
name:
configured_type_name ||
AshGraphql.Resource.atom_enum_type(resource, field_name),
type: config[:type],
constraints: config[:constraints]
},
resource
), false}
else
{AshGraphql.Resource.nested_union_type_name(
%{name: configured_type_name || "#{parent_type_name}_#{field_name}"},
type_name,
true
), true}
end
gql_type = Absinthe.Schema.lookup_type(resolution.schema, gql_type_name)
if !gql_type do
raise Ash.Error.Framework.AssumptionFailed,
message: "Could not find a corresponding graphql type for #{inspect(gql_type_name)}"
end
if nested? do
{fields, _} =
fragments
|> Absinthe.Resolution.Projector.project(
gql_type,
path,
%{},
resolution
)
if selection = Enum.find(fields, &(&1.schema_node.identifier == :value)) do
new_path = path ++ [selection]
value_type =
Absinthe.Schema.lookup_type(resolution.schema, selection.schema_node.type)
{fields, _} =
Absinthe.Resolution.Projector.project(
selection.selections,
value_type,
path ++ [selection],
%{},
resolution
)
Keyword.put(
acc,
type_name,
type_loads(
fields,
config[:type],
config[:constraints],
load_opts,
resource,
gql_type_name,
resolution,
new_path,
selection,
gql_type_name
)
)
else
acc
end
else
{fields, _} =
Absinthe.Resolution.Projector.project(
fragments,
gql_type,
path,
%{},
resolution
)
Keyword.put(
acc,
type_name,
type_loads(
fields,
config[:type],
config[:constraints],
load_opts,
resource,
gql_type_name,
resolution,
path,
selection,
gql_type_name
)
)
end
end)
true ->
[]
end
end
defp select_fields(query_or_changeset, resource, resolution, nested \\ []) do
@@ -1408,23 +1894,29 @@ defmodule AshGraphql.Graphql.Resolver do
end
defp fields(resolution, names) do
project =
resolution
|> Absinthe.Resolution.project()
{project, cache} =
case resolution do
%Absinthe.Blueprint.Document.Field{selections: selections} ->
{selections, %{}}
Enum.reduce(names, {project, resolution.fields_cache}, fn name, {fields, cache} ->
resolution ->
{resolution
|> Absinthe.Resolution.project(), resolution.fields_cache}
end
Enum.reduce(names, {project, cache}, fn name, {fields, cache} ->
case fields |> Enum.find(&(&1.name == name)) do
nil ->
{fields, cache}
path ->
type = Absinthe.Schema.lookup_type(resolution.schema, path.schema_node.type)
selection ->
type = Absinthe.Schema.lookup_type(resolution.schema, selection.schema_node.type)
path
selection
|> Map.get(:selections)
|> Absinthe.Resolution.Projector.project(
type,
resolution.path ++ [path],
resolution.path ++ [selection],
cache,
resolution
)
@@ -1602,7 +2094,7 @@ defmodule AshGraphql.Graphql.Resolver do
source: parent,
context: context
} = resolution,
{api, _resource, calculation}
{api, resource, calculation}
) do
result =
if resolution.definition.alias do
@@ -1611,6 +2103,20 @@ defmodule AshGraphql.Graphql.Resolver do
Map.get(parent, calculation.name)
end
result =
if Ash.Type.NewType.new_type?(calculation.type) &&
Ash.Type.NewType.subtype_of(calculation.type) == Ash.Type.Union &&
function_exported?(calculation.type, :graphql_unnested_unions, 1) do
unnested_types = calculation.type.graphql_unnested_unions(calculation.constraints)
resolve_union_result(
result,
{calculation.name, calculation.type, calculation, resource, unnested_types}
)
else
result
end
Absinthe.Resolution.put_result(resolution, to_resolution({:ok, result}, context, api))
end
@@ -1618,35 +2124,17 @@ defmodule AshGraphql.Graphql.Resolver do
do: resolution
def resolve_assoc(
%{source: parent, arguments: args, context: %{loader: loader} = context} = resolution,
{api, relationship}
%{source: parent} = resolution,
{_api, relationship}
) do
api_opts = [
actor: Map.get(context, :actor),
authorize?: AshGraphql.Api.Info.authorize?(api),
verbose?: AshGraphql.Api.Info.debug?(api)
]
value =
if resolution.definition.alias do
Map.get(parent.calculations, {:__ash_graphql_relationship__, resolution.definition.alias})
else
Map.get(parent, relationship.name)
end
related_query =
args
|> apply_load_arguments(Ash.Query.new(relationship.destination))
|> select_fields(relationship.destination, resolution)
|> load_fields(relationship.destination, resolution)
tracer = AshGraphql.Api.Info.tracer(api)
opts = [
query: related_query,
api_opts: api_opts,
type: :relationship,
args: args,
resource: relationship.source,
tenant: Map.get(context, :tenant),
span_context: tracer && tracer.get_span_context()
]
batch_key = {relationship.name, opts}
do_dataloader(resolution, loader, api, batch_key, args, parent)
Absinthe.Resolution.put_result(resolution, {:ok, value})
end
def resolve_id(%Absinthe.Resolution{state: :resolved} = resolution, _),
@@ -1664,35 +2152,59 @@ defmodule AshGraphql.Graphql.Resolver do
def resolve_union(
%{source: parent} = resolution,
{name, field_type, field, resource, unnested_types}
{name, _field_type, _field, _resource, _unnested_types} = data
) do
result =
case Map.get(parent, name) do
%Ash.Union{type: type, value: value} = union ->
constraints = Ash.Type.NewType.constraints(field_type, field.constraints)
if type in unnested_types do
if value do
type =
AshGraphql.Resource.field_type(
constraints[:types][type][:type],
field,
resource
)
Map.put(value, :__union_type__, type)
end
else
union
end
other ->
other
value =
if resolution.definition.alias do
Map.get(parent.calculations, {:__ash_graphql_attribute__, resolution.definition.alias})
else
Map.get(parent, name)
end
result = resolve_union_result(value, data)
Absinthe.Resolution.put_result(resolution, {:ok, result})
end
def resolve_attribute(%{source: parent} = resolution, name) do
value =
if resolution.definition.alias do
Map.get(parent.calculations, {:__ash_graphql_attribute__, resolution.definition.alias})
else
Map.get(parent, name)
end
Absinthe.Resolution.put_result(resolution, {:ok, value})
end
defp resolve_union_result(
value,
{_name, field_type, field, resource, unnested_types}
) do
case value do
%Ash.Union{type: type, value: value} = union ->
constraints = Ash.Type.NewType.constraints(field_type, field.constraints)
if type in unnested_types do
if value do
type =
AshGraphql.Resource.field_type(
constraints[:types][type][:type],
field,
resource
)
Map.put(value, :__union_type__, type)
end
else
union
end
other ->
other
end
end
def resolve_keyset(%Absinthe.Resolution{state: :resolved} = resolution, _),
do: resolution
@@ -1736,30 +2248,6 @@ defmodule AshGraphql.Graphql.Resolver do
child_complexity + 1
end
def fetch_dataloader(loader, api, batch_key, context, parent) do
to_resolution(Dataloader.get(loader, api, batch_key, parent), context, api)
end
defp do_dataloader(
resolution,
loader,
api,
batch_key,
_args,
parent
) do
loader = Dataloader.load(loader, api, batch_key, parent)
fun = fn loader ->
fetch_dataloader(loader, api, batch_key, resolution.context, parent)
end
Absinthe.Resolution.put_result(
resolution,
{:middleware, Absinthe.Middleware.Dataloader, {loader, fun}}
)
end
defp apply_load_arguments(arguments, query) do
Enum.reduce(arguments, query, fn
{:limit, limit}, query ->

View file

@@ -1863,7 +1863,8 @@ defmodule AshGraphql.Resource do
aggregate_type
end
defp filter_type(attribute_or_aggregate, resource, schema) do
@doc false
def filter_type(attribute_or_aggregate, resource, schema) do
type = attribute_or_aggregate_type(attribute_or_aggregate, resource)
array_type? = match?({:array, _}, type)
@@ -1981,7 +1982,7 @@ defmodule AshGraphql.Resource do
end
end
rescue
_ ->
_e ->
[]
end
@@ -2678,7 +2679,7 @@ defmodule AshGraphql.Resource do
{name,
field_type(
config[:type],
%{attribute | name: String.to_atom("#{attribute.name}_#{name}")},
%{attribute | name: nested_union_type_name(attribute, name)},
resource
)}
end)
@@ -2724,7 +2725,7 @@ defmodule AshGraphql.Resource do
object_type_definitions =
constraints[:types]
|> Enum.reject(fn name ->
|> Enum.reject(fn {name, _} ->
name in grapqhl_unnested_unions
end)
|> Enum.map(fn {name, _} ->
@@ -2772,6 +2773,18 @@ defmodule AshGraphql.Resource do
object_type_definitions
end
@doc false
# sobelow_skip ["DOS.StringToAtom"]
def nested_union_type_name(attribute, name, existing_only? \\ false) do
str = "#{attribute.name}_#{name}"
if existing_only? do
String.to_existing_atom(str)
else
String.to_atom(str)
end
end
@doc false
def get_auto_maps(resource) do
resource
@@ -3461,16 +3474,19 @@ defmodule AshGraphql.Resource do
end
defp middleware_for_field(resource, field, name, type, constraints) do
if Ash.Type.NewType.new_type?(type) && Ash.Type.NewType.subtype_of(Ash.Type.Union) &&
if Ash.Type.NewType.new_type?(type) &&
Ash.Type.NewType.subtype_of(type) == Ash.Type.Union &&
function_exported?(type, :graphql_unnested_unions, 1) do
unnested_types = type.graphql_unnested_unions(constraints)
[
{AshGraphql.Graphql.Resolver, :resolve_union},
{name, type, field, resource, unnested_types}
{{AshGraphql.Graphql.Resolver, :resolve_union},
{name, type, field, resource, unnested_types}}
]
else
[]
[
{{AshGraphql.Graphql.Resolver, :resolve_attribute}, name}
]
end
end
@@ -3528,6 +3544,7 @@ defmodule AshGraphql.Resource do
end)
end
@doc false
def field_type(type, field, resource, input? \\ false) do
case field do
%Ash.Resource.Attribute{name: name} ->

View file

@@ -140,10 +140,9 @@ defmodule AshGraphql.MixProject do
# Run "mix help deps" to learn about dependencies.
defp deps do
[
{:ash, ash_version("~> 2.9 and >= 2.9.20")},
{:ash, ash_version("~> 2.9 and >= 2.9.22")},
{:absinthe_plug, "~> 1.4"},
{:absinthe, "~> 1.7"},
{:dataloader, "~> 1.0"},
{:jason, "~> 1.2"},
{:ex_doc, "~> 0.22", only: [:dev, :test], runtime: false},
{:ex_check, "~> 0.12.0", only: [:dev, :test]},

View file

@@ -1,16 +1,15 @@
%{
"absinthe": {:hex, :absinthe, "1.7.1", "aca6f64994f0914628429ddbdfbf24212747b51780dae189dd98909da911757b", [:mix], [{:dataloader, "~> 1.0.0", [hex: :dataloader, repo: "hexpm", optional: true]}, {:decimal, "~> 1.0 or ~> 2.0", [hex: :decimal, repo: "hexpm", optional: true]}, {:nimble_parsec, "~> 1.2.2 or ~> 1.3.0", [hex: :nimble_parsec, repo: "hexpm", optional: false]}, {:telemetry, "~> 0.4 or ~> 1.0", [hex: :telemetry, repo: "hexpm", optional: false]}], "hexpm", "c0c4dbd93881fa3bfbad255608234b104b877c2a901850c1fe8c53b408a72a57"},
"absinthe_plug": {:hex, :absinthe_plug, "1.5.8", "38d230641ba9dca8f72f1fed2dfc8abd53b3907d1996363da32434ab6ee5d6ab", [:mix], [{:absinthe, "~> 1.5", [hex: :absinthe, repo: "hexpm", optional: false]}, {:plug, "~> 1.4", [hex: :plug, repo: "hexpm", optional: false]}], "hexpm", "bbb04176647b735828861e7b2705465e53e2cf54ccf5a73ddd1ebd855f996e5a"},
"ash": {:hex, :ash, "2.9.20", "abf83b253803cdd34ec74cc30130dea791b7a4df0afeb30acc1c1b8dbd5d0b16", [:mix], [{:comparable, "~> 1.0", [hex: :comparable, repo: "hexpm", optional: false]}, {:decimal, "~> 2.0", [hex: :decimal, repo: "hexpm", optional: false]}, {:earmark, "~> 1.4", [hex: :earmark, repo: "hexpm", optional: true]}, {:ecto, "~> 3.7", [hex: :ecto, repo: "hexpm", optional: false]}, {:ets, "~> 0.8.0", [hex: :ets, repo: "hexpm", optional: false]}, {:jason, ">= 1.0.0", [hex: :jason, repo: "hexpm", optional: false]}, {:picosat_elixir, "~> 0.2", [hex: :picosat_elixir, repo: "hexpm", optional: false]}, {:plug, ">= 0.0.0", [hex: :plug, repo: "hexpm", optional: true]}, {:spark, "~> 1.0", [hex: :spark, repo: "hexpm", optional: false]}, {:stream_data, "~> 0.5.0", [hex: :stream_data, repo: "hexpm", optional: false]}, {:telemetry, "~> 1.1", [hex: :telemetry, repo: "hexpm", optional: false]}], "hexpm", "fb50dcc977843f9536b60add850668bc7dd552bb64a1212f149aa25ab0fd0483"},
"ash": {:hex, :ash, "2.9.22", "8735377ae65956df5f596aacba1ce0b15ffc4905d6d312a9c76da56c724ce800", [:mix], [{:comparable, "~> 1.0", [hex: :comparable, repo: "hexpm", optional: false]}, {:decimal, "~> 2.0", [hex: :decimal, repo: "hexpm", optional: false]}, {:earmark, "~> 1.4", [hex: :earmark, repo: "hexpm", optional: true]}, {:ecto, "~> 3.7", [hex: :ecto, repo: "hexpm", optional: false]}, {:ets, "~> 0.8.0", [hex: :ets, repo: "hexpm", optional: false]}, {:jason, ">= 1.0.0", [hex: :jason, repo: "hexpm", optional: false]}, {:picosat_elixir, "~> 0.2", [hex: :picosat_elixir, repo: "hexpm", optional: false]}, {:plug, ">= 0.0.0", [hex: :plug, repo: "hexpm", optional: true]}, {:spark, "~> 1.0", [hex: :spark, repo: "hexpm", optional: false]}, {:stream_data, "~> 0.5.0", [hex: :stream_data, repo: "hexpm", optional: false]}, {:telemetry, "~> 1.1", [hex: :telemetry, repo: "hexpm", optional: false]}], "hexpm", "b9cfc8d83ae665bd8f5c4a3977fe3bebaf8295a0f536929a3526be9f616839a0"},
"bunt": {:hex, :bunt, "0.2.1", "e2d4792f7bc0ced7583ab54922808919518d0e57ee162901a16a1b6664ef3b14", [:mix], [], "hexpm", "a330bfb4245239787b15005e66ae6845c9cd524a288f0d141c148b02603777a5"},
"certifi": {:hex, :certifi, "2.9.0", "6f2a475689dd47f19fb74334859d460a2dc4e3252a3324bd2111b8f0429e7e21", [:rebar3], [], "hexpm", "266da46bdb06d6c6d35fde799bcb28d36d985d424ad7c08b5bb48f5b5cdd4641"},
"comparable": {:hex, :comparable, "1.0.0", "bb669e91cedd14ae9937053e5bcbc3c52bb2f22422611f43b6e38367d94a495f", [:mix], [{:typable, "~> 0.1", [hex: :typable, repo: "hexpm", optional: false]}], "hexpm", "277c11eeb1cd726e7cd41c6c199e7e52fa16ee6830b45ad4cdc62e51f62eb60c"},
"credo": {:hex, :credo, "1.7.0", "6119bee47272e85995598ee04f2ebbed3e947678dee048d10b5feca139435f75", [:mix], [{:bunt, "~> 0.2.1", [hex: :bunt, repo: "hexpm", optional: false]}, {:file_system, "~> 0.2.8", [hex: :file_system, repo: "hexpm", optional: false]}, {:jason, "~> 1.0", [hex: :jason, repo: "hexpm", optional: false]}], "hexpm", "6839fcf63d1f0d1c0f450abc8564a57c43d644077ab96f2934563e68b8a769d7"},
"dataloader": {:hex, :dataloader, "1.0.10", "a42f07641b1a0572e0b21a2a5ae1be11da486a6790f3d0d14512d96ff3e3bbe9", [:mix], [{:ecto, ">= 3.4.3 and < 4.0.0", [hex: :ecto, repo: "hexpm", optional: true]}, {:telemetry, "~> 0.4 or ~> 1.0", [hex: :telemetry, repo: "hexpm", optional: false]}], "hexpm", "54cd70cec09addf4b2ace14cc186a283a149fd4d3ec5475b155951bf33cd963f"},
"decimal": {:hex, :decimal, "2.1.1", "5611dca5d4b2c3dd497dec8f68751f1f1a54755e8ed2a966c2633cf885973ad6", [:mix], [], "hexpm", "53cfe5f497ed0e7771ae1a475575603d77425099ba5faef9394932b35020ffcc"},
"dialyxir": {:hex, :dialyxir, "1.2.0", "58344b3e87c2e7095304c81a9ae65cb68b613e28340690dfe1a5597fd08dec37", [:mix], [{:erlex, ">= 0.2.6", [hex: :erlex, repo: "hexpm", optional: false]}], "hexpm", "61072136427a851674cab81762be4dbeae7679f85b1272b6d25c3a839aff8463"},
"earmark_parser": {:hex, :earmark_parser, "1.4.31", "a93921cdc6b9b869f519213d5bc79d9e218ba768d7270d46fdcf1c01bacff9e2", [:mix], [], "hexpm", "317d367ee0335ef037a87e46c91a2269fef6306413f731e8ec11fc45a7efd059"},
"ecto": {:hex, :ecto, "3.10.1", "c6757101880e90acc6125b095853176a02da8f1afe056f91f1f90b80c9389822", [:mix], [{:decimal, "~> 1.6 or ~> 2.0", [hex: :decimal, repo: "hexpm", optional: false]}, {:jason, "~> 1.0", [hex: :jason, repo: "hexpm", optional: true]}, {:telemetry, "~> 0.4 or ~> 1.0", [hex: :telemetry, repo: "hexpm", optional: false]}], "hexpm", "d2ac4255f1601bdf7ac74c0ed971102c6829dc158719b94bd30041bbad77f87a"},
"ecto": {:hex, :ecto, "3.10.2", "6b887160281a61aa16843e47735b8a266caa437f80588c3ab80a8a960e6abe37", [:mix], [{:decimal, "~> 1.6 or ~> 2.0", [hex: :decimal, repo: "hexpm", optional: false]}, {:jason, "~> 1.0", [hex: :jason, repo: "hexpm", optional: true]}, {:telemetry, "~> 0.4 or ~> 1.0", [hex: :telemetry, repo: "hexpm", optional: false]}], "hexpm", "6a895778f0d7648a4b34b486af59a1c8009041fbdf2b17f1ac215eb829c60235"},
"elixir_make": {:hex, :elixir_make, "0.7.7", "7128c60c2476019ed978210c245badf08b03dbec4f24d05790ef791da11aa17c", [:mix], [{:castore, "~> 0.1 or ~> 1.0", [hex: :castore, repo: "hexpm", optional: true]}], "hexpm", "5bc19fff950fad52bbe5f211b12db9ec82c6b34a9647da0c2224b8b8464c7e6c"},
"erlex": {:hex, :erlex, "0.2.6", "c7987d15e899c7a2f34f5420d2a2ea0d659682c06ac607572df55a43753aa12e", [:mix], [], "hexpm", "2ed2e25711feb44d52b17d2780eabf998452f6efda104877a3881c2f8c0c0c75"},
"ets": {:hex, :ets, "0.8.1", "8ff9bcda5682b98493f8878fc9dbd990e48d566cba8cce59f7c2a78130da29ea", [:mix], [], "hexpm", "6be41b50adb5bc5c43626f25ea2d0af1f4a242fb3fad8d53f0c67c20b78915cc"},
@@ -38,7 +37,7 @@
"plug_crypto": {:hex, :plug_crypto, "1.2.5", "918772575e48e81e455818229bf719d4ab4181fcbf7f85b68a35620f78d89ced", [:mix], [], "hexpm", "26549a1d6345e2172eb1c233866756ae44a9609bd33ee6f99147ab3fd87fd842"},
"sobelow": {:hex, :sobelow, "0.12.2", "45f4d500e09f95fdb5a7b94c2838d6b26625828751d9f1127174055a78542cf5", [:mix], [{:jason, "~> 1.0", [hex: :jason, repo: "hexpm", optional: false]}], "hexpm", "2f0b617dce551db651145662b84c8da4f158e7abe049a76daaaae2282df01c5d"},
"sourceror": {:hex, :sourceror, "0.12.3", "a2ad3a1a4554b486d8a113ae7adad5646f938cad99bf8bfcef26dc0c88e8fade", [:mix], [], "hexpm", "4d4e78010ca046524e8194ffc4683422f34a96f6b82901abbb45acc79ace0316"},
"spark": {:hex, :spark, "1.1.13", "61e9bb75fd7c1a7cfc18b1d36b6f1e06844b3f1d8f492dc8f1edd2e90916b10e", [:mix], [{:nimble_options, "~> 0.5 or ~> 1.0", [hex: :nimble_options, repo: "hexpm", optional: false]}, {:sourceror, "~> 0.1", [hex: :sourceror, repo: "hexpm", optional: false]}], "hexpm", "4efe3385877f16f62e4ac6ecfa2d78ff16944207a1dbe42afb557d00a72ac901"},
"spark": {:hex, :spark, "1.1.15", "c0db345f030c928d2c9cf8dbf7574c635664d54b3afaf64ec9c1481d20c48b66", [:mix], [{:nimble_options, "~> 0.5 or ~> 1.0", [hex: :nimble_options, repo: "hexpm", optional: false]}, {:sourceror, "~> 0.1", [hex: :sourceror, repo: "hexpm", optional: false]}], "hexpm", "bd7da17b8af5acd39e49b9dbdc98a21132cade2ff70e6283e09f37a4657362b8"},
"ssl_verify_fun": {:hex, :ssl_verify_fun, "1.1.6", "cf344f5692c82d2cd7554f5ec8fd961548d4fd09e7d22f5b62482e5aeaebd4b0", [:make, :mix, :rebar3], [], "hexpm", "bdb0d2471f453c88ff3908e7686f86f9be327d065cc1ec16fa4540197ea04680"},
"stream_data": {:hex, :stream_data, "0.5.0", "b27641e58941685c75b353577dc602c9d2c12292dd84babf506c2033cd97893e", [:mix], [], "hexpm", "012bd2eec069ada4db3411f9115ccafa38540a3c78c4c0349f151fc761b9e271"},
"telemetry": {:hex, :telemetry, "1.2.1", "68fdfe8d8f05a8428483a97d7aab2f268aaff24b49e0f599faa091f1d4e7f61c", [:rebar3], [], "hexpm", "dad9ce9d8effc621708f99eac538ef1cbe05d6a874dd741de2e689c47feafed5"},

View file

@@ -254,11 +254,7 @@ defmodule AshGraphql.ErrorsTest do
assert %{
data: %{
"getPost" => %{
"published" => true,
"text" => "foo",
"multitenantTags" => nil
}
"getPost" => nil
},
errors: [%{message: message}]
} = result

View file

@@ -666,4 +666,214 @@ defmodule AshGraphql.ReadTest do
}} ==
Absinthe.run(doc, AshGraphql.Test.Schema, variables: %{"id" => post_1.id})
end
describe "loading through types" do
test "loading through an embed works" do
AshGraphql.Test.Post
|> Ash.Changeset.new(embed_foo: %{type: "foo", foo: "fred"}, published: true)
|> AshGraphql.Test.Api.create!()
resp =
"""
query postLibrary {
postLibrary {
embedFoo{
alwaysTrue
}
}
}
"""
|> Absinthe.run(AshGraphql.Test.Schema)
assert {:ok, result} = resp
refute Map.has_key?(result, :errors)
assert %{
data: %{
"postLibrary" => [
%{
"embedFoo" => %{
"alwaysTrue" => true
}
}
]
}
} = result
end
test "loading through a union works" do
AshGraphql.Test.Post
|> Ash.Changeset.new(text: "a", embed_union: %{type: :foo, foo: "fred"}, published: true)
|> AshGraphql.Test.Api.create!()
AshGraphql.Test.Post
|> Ash.Changeset.new(text: "b", embed_union: %{type: :bar, bar: "george"}, published: true)
|> AshGraphql.Test.Api.create!()
resp =
"""
query postLibrary {
postLibrary(sort: {field: TEXT}) {
embedUnion{
...on PostEmbedUnionFoo {
value {
alwaysNil
}
}
...on PostEmbedUnionBar {
value {
alwaysFalse
}
}
}
}
}
"""
|> Absinthe.run(AshGraphql.Test.Schema)
assert {:ok, result} = resp
refute Map.has_key?(result, :errors)
assert %{
data: %{
"postLibrary" => [
%{
"embedUnion" => %{
"value" => %{
"alwaysNil" => nil
}
}
},
%{
"embedUnion" => %{
"value" => %{
"alwaysFalse" => false
}
}
}
]
}
} = result
end
test "loading through an unnested union works" do
AshGraphql.Test.Post
|> Ash.Changeset.new(
text: "a",
embed_union_unnested: %{type: :foo, foo: "fred"},
published: true
)
|> AshGraphql.Test.Api.create!()
AshGraphql.Test.Post
|> Ash.Changeset.new(
text: "b",
embed_union_unnested: %{type: :bar, bar: "george"},
published: true
)
|> AshGraphql.Test.Api.create!()
resp =
"""
query postLibrary {
postLibrary(sort: {field: TEXT}) {
embedUnionUnnested{
...on FooEmbed {
alwaysNil
}
...on BarEmbed {
alwaysFalse
}
}
}
}
"""
|> Absinthe.run(AshGraphql.Test.Schema)
assert {:ok, result} = resp
refute Map.has_key?(result, :errors)
assert %{
data: %{
"postLibrary" => [
%{
"embedUnionUnnested" => %{
"alwaysNil" => nil
}
},
%{
"embedUnionUnnested" => %{
"alwaysFalse" => false
}
}
]
}
} = result
end
test "loading through an unnested union with aliases works" do
AshGraphql.Test.Post
|> Ash.Changeset.new(
text: "a",
embed_union_unnested: %{type: :foo, foo: "fred"},
published: true
)
|> AshGraphql.Test.Api.create!()
AshGraphql.Test.Post
|> Ash.Changeset.new(
text: "b",
embed_union_unnested: %{type: :bar, bar: "george"},
published: true
)
|> AshGraphql.Test.Api.create!()
resp =
"""
query postLibrary {
postLibrary(sort: {field: TEXT}) {
foo: embedUnionUnnested{
...on FooEmbed {
alwaysNil
}
...on BarEmbed {
alwaysFalse
}
}
bar: embedUnionUnnested{
...on FooEmbed {
alwaysTrue
}
...on BarEmbed {
alwaysTrue
}
}
}
}
"""
|> Absinthe.run(AshGraphql.Test.Schema)
assert {:ok, result} = resp
refute Map.has_key?(result, :errors)
assert %{
data: %{
"postLibrary" => [
%{
"bar" => %{"alwaysTrue" => true},
"foo" => %{"alwaysNil" => nil}
},
%{
"bar" => %{"alwaysTrue" => true},
"foo" => %{"alwaysFalse" => false}
}
]
}
} = result
end
end
end

View file

@@ -20,6 +20,11 @@ defmodule Foo do
allow_nil? false
end
end
calculations do
calculate(:always_true, :boolean, expr(true))
calculate(:always_nil, :boolean, expr(nil))
end
end
defmodule Bar do
@@ -44,4 +49,9 @@ defmodule Bar do
allow_nil? false
end
end
calculations do
calculate(:always_true, :boolean, expr(true))
calculate(:always_false, :boolean, expr(false))
end
end

View file

@@ -298,6 +298,8 @@ defmodule AshGraphql.Test.Post do
]
)
attribute(:embed_foo, Foo)
attribute(:embed_union, :union,
constraints: [
types: [
@@ -316,6 +318,7 @@ defmodule AshGraphql.Test.Post do
)
attribute(:embed_union_new_type, AshGraphql.Types.EmbedUnionNewType)
attribute(:embed_union_unnested, AshGraphql.Types.EmbedUnionNewTypeUnnested)
attribute(:enum_new_type, AshGraphql.Types.EnumNewType)
attribute(:string_new_type, AshGraphql.Types.StringNewType)
@@ -360,6 +363,7 @@ defmodule AshGraphql.Test.Post do
has_many :related_posts, AshGraphql.Test.Post do
manual(RelatedPosts)
no_attributes?(true)
end
end
end

View file

@@ -59,7 +59,7 @@ defmodule AshGraphql.Test.User do
policies do
policy action_type(:create) do
actor_attribute_equals(:name, "My Name")
authorize_if(changing_attributes(name: [to: "My Name"]))
end
policy action_type(:read) do

View file

@@ -0,0 +1,23 @@
defmodule AshGraphql.Types.EmbedUnionNewTypeUnnested do
@moduledoc false
use Ash.Type.NewType,
subtype_of: :union,
constraints: [
types: [
foo: [
type: Foo,
tag: :type,
tag_value: :foo
],
bar: [
type: Bar,
tag: :type,
tag_value: :bar
]
]
]
def graphql_type, do: :foo_bar_unnested
def graphql_unnested_unions(_), do: [:foo, :bar]
end