mirror of
https://github.com/ash-project/ash_graphql.git
synced 2024-09-20 05:13:33 +12:00
513c1ac68f
Step 1: update Ash Step 2: mass rename Api to Domain Step 3: Ash.Query.expr -> Ash.Expr.expr Also change ref interpolation Step 4: remove all warnings Step 5: remove registries from tests Step 6: fix filter Step 7: private? -> !public? Step 8: Ash.Calculation -> Ash.Resource.Calculation Step 9: use depend_on_resources/1 -> resources/1 Step 10: add Domain to all resources Step 11: use Ash module for all actions Step 12: add public? true all around Step 13: remove verbose? from options passed during Domain calls Step 14: add simple_sat Step 15: Ash.ErrorKind is no more, so remove code from errors Step 16: sprinkle default_accept :* around tests Step 17: replace Ash.Changeset.new/2 with Ash.Changeset.for_* Step 18: calculation fixups - Context is now a struct and arguments go under the arguments key - Function based calculations receive a list of records - Add a select to query-based loads - select -> load Step 19: pass the correct name to pass the policy in tests Step 20: Ash.Query.new/2 is no more Step 21: add AshGraphql.Resource.embedded? utility function Use that instead of Ash.Type.embedded_type?(resource_or_type) since resources are not types anymore Step 22: handle struct + instance_of: Resource in unions Resources are not type anymore so they need to be passed this way in unions Step 23: ensure we only check GraphQL actions for pagination All reads are now paginated by default, so this triggered a compilation error Step 24: swap arguments for sort on calculations Step 25: remove unused debug? option
66 lines
1.2 KiB
Elixir
# Locals defined by the AshGraphql Spark DSL that `mix format` should render
# without parentheses (e.g. `create :create_user, ...` instead of
# `create(:create_user, ...)`). Each entry is `name: arity`; a name appears
# once per supported arity (e.g. `action: 2` and `action: 3`).
spark_locals_without_parens = [
  action: 2,
  action: 3,
  allow_nil?: 1,
  argument_names: 1,
  as_mutation?: 1,
  attribute_input_types: 1,
  attribute_types: 1,
  authorize?: 1,
  auto?: 1,
  create: 2,
  create: 3,
  depth_limit: 1,
  derive_filter?: 1,
  derive_sort?: 1,
  destroy: 2,
  destroy: 3,
  encode_primary_key?: 1,
  error_handler: 1,
  field_names: 1,
  filterable_fields: 1,
  generate_object?: 1,
  get: 2,
  get: 3,
  hide_fields: 1,
  hide_inputs: 1,
  identity: 1,
  keyset_field: 1,
  list: 2,
  list: 3,
  lookup_identities: 1,
  lookup_with_primary_key?: 1,
  managed_relationship: 2,
  managed_relationship: 3,
  metadata_names: 1,
  metadata_types: 1,
  modify_resolution: 1,
  paginate_with: 1,
  primary_key_delimiter: 1,
  read_action: 1,
  read_one: 2,
  read_one: 3,
  relationships: 1,
  relay?: 1,
  relay_id_translations: 1,
  root_level_errors?: 1,
  show_fields: 1,
  show_metadata: 1,
  show_raised_errors?: 1,
  tracer: 1,
  type: 1,
  type_name: 1,
  types: 1,
  update: 2,
  update: 3,
  upsert?: 1,
  upsert_identity: 1
]
|
|
|
|
# Formatter configuration returned by this `.formatter.exs` file
# (evaluated by `mix format`). The last expression is the config keyword list.
[
  # Files the formatter operates on: mix.exs, this formatter file, and all
  # Elixir sources under config/, lib/, and test/.
  inputs: ["{mix,.formatter}.exs", "{config,lib,test}/**/*.{ex,exs}"],
  # Render the AshGraphql DSL locals (bound above) without parentheses.
  locals_without_parens: spark_locals_without_parens,
  # Export the same no-parens list so projects depending on this library
  # inherit the DSL formatting via `import_deps: [:ash_graphql]`.
  export: [
    locals_without_parens: spark_locals_without_parens
  ]
]
|