2020-06-14 19:04:18 +12:00
defmodule AshPostgres.DataLayer do
2022-08-19 06:56:36 +12:00
# DSL section: configures whether creating/updating a record of this resource
# also creates/renames a tenant (postgres schema) derived from `template`.
# Fix: "udpated" -> "updated" in the `update?` option doc.
@manage_tenant %Spark.Dsl.Section{
  name: :manage_tenant,
  describe: """
  Configuration for the behavior of a resource that manages a tenant
  """,
  examples: [
    """
    manage_tenant do
      template ["organization_", :id]
      create? true
      update? false
    end
    """
  ],
  schema: [
    template: [
      # A single value or list of strings/atoms; atoms are substituted with
      # the record's attribute values when building the schema name.
      type: {:wrap_list, {:or, [:string, :atom]}},
      required: true,
      doc: """
      A template that will cause the resource to create/manage the specified schema.
      """
    ],
    create?: [
      type: :boolean,
      default: true,
      doc: "Whether or not to automatically create a tenant when a record is created"
    ],
    update?: [
      type: :boolean,
      default: true,
      doc: "Whether or not to automatically update the tenant name if the record is updated"
    ]
  ]
}
2020-10-29 16:53:28 +13:00
2022-08-19 06:56:36 +12:00
# DSL entity describing one custom index; used inside the `custom_indexes`
# section below. Option schema and post-parse normalization live on
# `AshPostgres.CustomIndex`.
@index %Spark.Dsl.Entity{
  name: :index,
  describe: """
  Add an index to be managed by the migration generator.
  """,
  examples: [
    "index [\"column\", \"column2\"], unique: true, where: \"thing = TRUE\""
  ],
  target: AshPostgres.CustomIndex,
  schema: AshPostgres.CustomIndex.schema(),
  # Validates/normalizes the built struct after the DSL is parsed.
  transform: {AshPostgres.CustomIndex, :transform, []},
  # `fields` is the positional argument in `index [...]`.
  args: [:fields]
}
2022-08-19 06:56:36 +12:00
# DSL section grouping `index` entities under `postgres do ... end`.
@custom_indexes %Spark.Dsl.Section{
  name: :custom_indexes,
  describe: """
  A section for configuring indexes to be created by the migration generator.

  In general, prefer to use `identities` for simple unique constraints. This is a tool to allow
  for declaring more complex indexes.
  """,
  examples: [
    """
    custom_indexes do
      index [:column1, :column2], unique: true, where: "thing = TRUE"
    end
    """
  ],
  entities: [
    @index
  ]
}
2022-08-19 06:56:36 +12:00
# DSL entity for a raw SQL statement pair (`up`/`down`) to include in
# generated migrations; used inside the `custom_statements` section below.
@statement %Spark.Dsl.Entity{
  name: :statement,
  describe: """
  Add a custom statement for migrations.
  """,
  examples: [
    """
    statement :pgweb_idx do
      up "CREATE INDEX pgweb_idx ON pgweb USING GIN (to_tsvector('english', title || ' ' || body));"
      down "DROP INDEX pgweb_idx;"
    end
    """
  ],
  target: AshPostgres.Statement,
  schema: AshPostgres.Statement.schema(),
  # `name` is positional and is used to detect statement changes/removals.
  args: [:name]
}
2022-08-19 06:56:36 +12:00
# DSL section grouping `statement` entities under `postgres do ... end`.
@custom_statements %Spark.Dsl.Section{
  name: :custom_statements,
  describe: """
  A section for configuring custom statements to be added to migrations.

  Changing custom statements may require manual intervention, because Ash can't determine what order they should run
  in (i.e if they depend on table structure that you've added, or vice versa). As such, any `down` statements we run
  for custom statements happen first, and any `up` statements happen last.

  Additionally, when changing a custom statement, we must make some assumptions, i.e that we should migrate
  the old structure down using the previously configured `down` and recreate it.

  This may not be desired, and so what you may end up doing is simply modifying the old migration and deleting whatever was
  generated by the migration generator. As always: read your migrations after generating them!
  """,
  examples: [
    """
    custom_statements do
      # the name is used to detect if you remove or modify the statement
      statement :pgweb_idx do
        up "CREATE INDEX pgweb_idx ON pgweb USING GIN (to_tsvector('english', title || ' ' || body));"
        down "DROP INDEX pgweb_idx;"
      end
    end
    """
  ],
  entities: [
    @statement
  ]
}
2022-08-19 06:56:36 +12:00
# DSL entity configuring the generated foreign key for one relationship; used
# inside the `references` section. Option schema lives on `AshPostgres.Reference`.
@reference %Spark.Dsl.Entity{
  name: :reference,
  describe: """
  Configures the reference for a relationship in resource migrations.

  Keep in mind that multiple relationships can theoretically involve the same destination and foreign keys.
  In those cases, you only need to configure the `reference` behavior for one of them. Any conflicts will result
  in an error, across this resource and any other resources that share a table with this one. For this reason,
  instead of adding a reference configuration for `:nothing`, its best to just leave the configuration out, as that
  is the default behavior if *no* relationship anywhere has configured the behavior of that reference.
  """,
  examples: [
    "reference :post, on_delete: :delete, on_update: :update, name: \"comments_to_posts_fkey\""
  ],
  # `relationship` is the positional argument in `reference :rel_name`.
  args: [:relationship],
  target: AshPostgres.Reference,
  schema: AshPostgres.Reference.schema()
}
2022-08-19 06:56:36 +12:00
# NOTE(review): `@references` is assigned a second, nearly identical value
# further down in this module. Elixir module attributes take the most recent
# value at the point of use, so that later definition is the one `@postgres`
# actually picks up — this one is dead code. Consider removing one of the two.
@references %Spark.Dsl.Section{
  name: :references,
  describe: """
  A section for configuring the references (foreign keys) in resource migrations.

  This section is only relevant if you are using the migration generator with this resource.
  Otherwise, it has no effect.
  """,
  examples: [
    """
    references do
      reference :post, on_delete: :delete, on_update: :update, name: "comments_to_posts_fkey"
    end
    """
  ],
  entities: [@reference],
  schema: [
    polymorphic_on_delete: [
      type: {:one_of, [:delete, :nilify, :nothing, :restrict]},
      doc:
        "For polymorphic resources, configures the on_delete behavior of the automatically generated foreign keys to source tables."
    ],
    polymorphic_on_update: [
      type: {:one_of, [:update, :nilify, :nothing, :restrict]},
      doc:
        "For polymorphic resources, configures the on_update behavior of the automatically generated foreign keys to source tables."
    ],
    polymorphic_name: [
      # NOTE(review): type and doc here appear copy-pasted from
      # `polymorphic_on_update` — a *name* option would normally be a
      # `:string`. Confirm against the AshPostgres references documentation.
      type: {:one_of, [:update, :nilify, :nothing, :restrict]},
      doc:
        "For polymorphic resources, configures the on_update behavior of the automatically generated foreign keys to source tables."
    ]
  ]
}
2022-08-19 06:56:36 +12:00
# DSL entity for a single check constraint; used inside the `check_constraints`
# section below. Option schema lives on `AshPostgres.CheckConstraint`.
@check_constraint %Spark.Dsl.Entity{
  name: :check_constraint,
  describe: """
  Add a check constraint to be validated.

  If a check constraint exists on the table but not in this section, and it produces an error, a runtime error will be raised.

  Provide a list of attributes instead of a single attribute to add the message to multiple attributes.

  By adding the `check` option, the migration generator will include it when generating migrations.
  """,
  examples: [
    """
    check_constraint :price, "price_must_be_positive", check: "price > 0", message: "price must be positive"
    """
  ],
  # Positional args: the attribute(s) to attach errors to, and the constraint name.
  args: [:attribute, :name],
  target: AshPostgres.CheckConstraint,
  schema: AshPostgres.CheckConstraint.schema()
}
2022-08-19 06:56:36 +12:00
# DSL section grouping `check_constraint` entities under `postgres do ... end`.
@check_constraints %Spark.Dsl.Section{
  name: :check_constraints,
  describe: """
  A section for configuring the check constraints for a given table.

  This can be used to automatically create those check constraints, or just to provide message when they are raised
  """,
  examples: [
    """
    check_constraints do
      check_constraint :price, "price_must_be_positive", check: "price > 0", message: "price must be positive"
    end
    """
  ],
  entities: [@check_constraint]
}
2022-08-19 06:56:36 +12:00
# DSL section for configuring generated foreign keys (this is the effective
# `@references` definition — it shadows an identical earlier assignment).
#
# Fix: `polymorphic_name` previously had its type/doc copy-pasted from
# `polymorphic_on_update` (`{:one_of, [:update, ...]}` + an on_update doc).
# A name option is a string naming the generated foreign key.
@references %Spark.Dsl.Section{
  name: :references,
  describe: """
  A section for configuring the references (foreign keys) in resource migrations.

  This section is only relevant if you are using the migration generator with this resource.
  Otherwise, it has no effect.
  """,
  examples: [
    """
    references do
      reference :post, on_delete: :delete, on_update: :update, name: "comments_to_posts_fkey"
    end
    """
  ],
  entities: [@reference],
  schema: [
    polymorphic_on_delete: [
      type: {:one_of, [:delete, :nilify, :nothing, :restrict]},
      doc:
        "For polymorphic resources, configures the on_delete behavior of the automatically generated foreign keys to source tables."
    ],
    polymorphic_on_update: [
      type: {:one_of, [:update, :nilify, :nothing, :restrict]},
      doc:
        "For polymorphic resources, configures the on_update behavior of the automatically generated foreign keys to source tables."
    ],
    polymorphic_name: [
      type: :string,
      doc:
        "For polymorphic resources, configures the name of the automatically generated foreign keys to source tables."
    ]
  ]
}
2022-08-19 06:56:36 +12:00
# The top-level `postgres do ... end` DSL section added to resources using
# this data layer. Aggregates the sub-sections defined above and declares all
# resource-level postgres options.
@postgres %Spark.Dsl.Section{
  name: :postgres,
  describe: """
  Postgres data layer configuration
  """,
  sections: [
    @custom_indexes,
    @custom_statements,
    @manage_tenant,
    @references,
    @check_constraints
  ],
  # Options whose values are module names (lets Spark handle them correctly
  # at compile time).
  modules: [
    :repo
  ],
  examples: [
    """
    postgres do
      repo MyApp.Repo
      table "organizations"
    end
    """
  ],
  schema: [
    repo: [
      # Either an `Ecto.Repo` module or a 2-arity function of
      # (resource, :read | :mutate) -> repo.
      type: {:or, [{:behaviour, Ecto.Repo}, {:fun, 2}]},
      required: true,
      doc:
        "The repo that will be used to fetch your data. See the `AshPostgres.Repo` documentation for more. Can also be a function that takes a resource and a type `:read | :mutate` and returns the repo"
    ],
    migrate?: [
      type: :boolean,
      default: true,
      doc:
        "Whether or not to include this resource in the generated migrations with `mix ash.generate_migrations`"
    ],
    migration_types: [
      type: :keyword_list,
      default: [],
      doc:
        "A keyword list of attribute names to the ecto migration type that should be used for that attribute. Only necessary if you need to override the defaults."
    ],
    migration_defaults: [
      type: :keyword_list,
      default: [],
      doc: """
      A keyword list of attribute names to the ecto migration default that should be used for that attribute. The string you use will be placed verbatim in the migration. Use fragments like `fragment(\\\\"now()\\\\")`, or for `nil`, use `\\\\"nil\\\\"`.
      """
    ],
    base_filter_sql: [
      type: :string,
      doc:
        "A raw sql version of the base_filter, e.g `representative = true`. Required if trying to create a unique constraint on a resource with a base_filter"
    ],
    simple_join_first_aggregates: [
      type: {:list, :atom},
      default: [],
      doc: """
      A list of `:first` type aggregate names that can be joined to using a simple join. Use when you have a `:first` aggregate that uses a to-many relationship, but your `filter` statement ensures that there is only one result. Optimizes the generated query.
      """
    ],
    skip_unique_indexes: [
      # Accepts `true`/`false` or a (list of) identity name(s) to skip.
      type: {:wrap_list, :atom},
      default: false,
      doc: "Skip generating unique indexes when generating migrations"
    ],
    unique_index_names: [
      type:
        {:list,
         {:or,
          [{:tuple, [{:list, :atom}, :string]}, {:tuple, [{:list, :atom}, :string, :string]}]}},
      default: [],
      doc: """
      A list of unique index names that could raise errors that are not configured in identities, or an mfa to a function that takes a changeset and returns the list. In the format `{[:affected, :keys], "name_of_constraint"}` or `{[:affected, :keys], "name_of_constraint", "custom error message"}`
      """
    ],
    exclusion_constraint_names: [
      type: :any,
      default: [],
      doc: """
      A list of exclusion constraint names that could raise errors. Must be in the format `{:affected_key, "name_of_constraint"}` or `{:affected_key, "name_of_constraint", "custom error message"}`
      """
    ],
    identity_index_names: [
      type: :any,
      default: [],
      doc: """
      A keyword list of identity names to the unique index name that they should use when being managed by the migration generator.
      """
    ],
    foreign_key_names: [
      type:
        {:list,
         {:or,
          [
            {:tuple, [{:or, [:atom, :string]}, :string]},
            {:tuple, [{:or, [:atom, :string]}, :string, :string]}
          ]}},
      default: [],
      doc: """
      A list of foreign keys that could raise errors, or an mfa to a function that takes a changeset and returns a list. In the format: `{:key, "name_of_constraint"}` or `{:key, "name_of_constraint", "custom error message"}`
      """
    ],
    migration_ignore_attributes: [
      type: {:list, :atom},
      default: [],
      doc: """
      A list of attributes that will be ignored when generating migrations.
      """
    ],
    table: [
      type: :string,
      doc: """
      The table to store and read the resource from. If this is changed, the migration generator will not remove the old table.
      """
    ],
    schema: [
      type: :string,
      doc: """
      The schema that the table is located in. Schema-based multitenancy will supercede this option. If this is changed, the migration generator will not remove the old schema.
      """
    ],
    polymorphic?: [
      type: :boolean,
      default: false,
      doc: """
      Declares this resource as polymorphic. See the [polymorphic resources guide](/documentation/topics/polymorphic_resources.md) for more.
      """
    ]
  ]
}
2020-06-19 15:04:41 +12:00
alias Ash.Filter
2023-07-19 06:48:35 +12:00
alias Ash.Query . { BooleanExpression , Not , Ref }
2020-06-14 19:04:18 +12:00
@behaviour Ash.DataLayer

# The top-level DSL sections this extension exposes (just `postgres`).
@sections [@postgres]

@moduledoc """
A postgres data layer that leverages Ecto's postgres capabilities.
"""

# Registers this module as a Spark DSL extension; transformers run at compile
# time to validate/augment the resource's DSL state.
use Spark.Dsl.Extension,
  sections: @sections,
  transformers: [
    AshPostgres.Transformers.ValidateReferences,
    AshPostgres.Transformers.EnsureTableOrPolymorphic,
    AshPostgres.Transformers.PreventMultidimensionalArrayAggregates
  ]
2020-06-14 19:04:18 +12:00
2023-09-05 05:08:14 +12:00
# Codegen hook: runs pending migrations via `mix ash_postgres.migrate`.
def migrate(args) do
  # TODO: take args that we care about
  Mix.Task.run("ash_postgres.migrate", args)
end
# Codegen hook: generates migrations via `mix ash_postgres.generate_migrations`.
def codegen(args) do
  # TODO: take args that we care about
  Mix.Task.run("ash_postgres.generate_migrations", args)
end
# Setup hook: creates the database, then runs migrations — once for the
# default migration path and once for tenant (multitenancy) migrations.
def setup(args) do
  # TODO: take args that we care about
  Mix.Task.run("ash_postgres.create", args)
  Mix.Task.run("ash_postgres.migrate", args)
  Mix.Task.run("ash_postgres.migrate", ["--tenant" | args])
end
# Teardown hook: drops the database via `mix ash_postgres.drop`.
def tear_down(args) do
  # TODO: take args that we care about
  Mix.Task.run("ash_postgres.drop", args)
end
2020-07-23 17:13:47 +12:00
import Ecto.Query , only : [ from : 2 , subquery : 1 ]
2020-06-14 19:04:18 +12:00
@impl true
# Capability flags consumed by `Ash.DataLayer`: each clause answers whether
# this data layer supports a given feature (for the given resource).
def can?(_, :async_engine), do: true
def can?(_, :bulk_create), do: true

def can?(_, {:lock, :for_update}), do: true

# Custom lock strings are supported as long as they are one of the standard
# postgres row-locking clauses, optionally suffixed with " NOWAIT".
def can?(_, {:lock, string}) do
  string = String.trim_trailing(string, " NOWAIT")

  String.upcase(string) in [
    "FOR UPDATE",
    "FOR NO KEY UPDATE",
    "FOR SHARE",
    "FOR KEY SHARE"
  ]
end

def can?(_, :transact), do: true
def can?(_, :composite_primary_key), do: true
def can?(_, {:atomic, :update}), do: true
def can?(_, {:atomic, :upsert}), do: true
def can?(_, :upsert), do: true
def can?(_, :changeset_filter), do: true

# Joining is only possible between resources on the same data layer that read
# from the same repo.
def can?(resource, {:join, other_resource}) do
  data_layer = Ash.DataLayer.data_layer(resource)
  other_data_layer = Ash.DataLayer.data_layer(other_resource)

  data_layer == other_data_layer and
    AshPostgres.DataLayer.Info.repo(resource, :read) ==
      AshPostgres.DataLayer.Info.repo(other_resource, :read)
end

# Lateral joins additionally require *every* involved resource to share this
# data layer and the same read repo.
def can?(resource, {:lateral_join, resources}) do
  repo = AshPostgres.DataLayer.Info.repo(resource, :read)
  data_layer = Ash.DataLayer.data_layer(resource)

  data_layer == __MODULE__ &&
    Enum.all?(resources, fn resource ->
      Ash.DataLayer.data_layer(resource) == data_layer &&
        AshPostgres.DataLayer.Info.repo(resource, :read) == repo
    end)
end

def can?(_, :boolean_filter), do: true

def can?(_, {:aggregate, type})
    when type in [:count, :sum, :first, :list, :avg, :max, :min, :exists, :custom],
    do: true

def can?(_, :aggregate_filter), do: true
def can?(_, :aggregate_sort), do: true
def can?(_, :expression_calculation), do: true
def can?(_, :expression_calculation_sort), do: true
def can?(_, :create), do: true
def can?(_, :select), do: true
def can?(_, :read), do: true

# Updates and destroys target individual rows, so the resource must actually
# have a primary key.
def can?(resource, action) when action in ~w[update destroy]a do
  resource
  |> Ash.Resource.Info.primary_key()
  |> Enum.any?()
end

def can?(_, :filter), do: true
def can?(_, :limit), do: true
def can?(_, :offset), do: true
def can?(_, :multitenancy), do: true

# Manual relationships can be filtered over only when the manual module also
# implements `AshPostgres.ManualRelationship`.
def can?(_, {:filter_relationship, %{manual: {module, _}}}) do
  Spark.implements_behaviour?(module, AshPostgres.ManualRelationship)
end

def can?(_, {:filter_relationship, _}), do: true

# Same requirement for aggregating over a manual relationship.
def can?(_, {:aggregate_relationship, %{manual: {module, _}}}) do
  Spark.implements_behaviour?(module, AshPostgres.ManualRelationship)
end

def can?(_, {:aggregate_relationship, _}), do: true

def can?(_, :timeout), do: true
def can?(_, {:filter_expr, _}), do: true
def can?(_, :nested_expressions), do: true
def can?(_, {:query_aggregate, _}), do: true
def can?(_, :sort), do: true
def can?(_, :distinct_sort), do: true
def can?(_, :distinct), do: true
def can?(_, {:sort, _}), do: true

# Everything not matched above is unsupported.
def can?(_, _), do: false
2020-06-14 19:04:18 +12:00
2020-06-30 16:16:17 +12:00
@impl true
# True when the resource's mutation repo is currently inside a transaction
# (delegates to Ecto's `Repo.in_transaction?/0`).
def in_transaction?(resource) do
  AshPostgres.DataLayer.Info.repo(resource, :mutate).in_transaction?()
end
2020-06-14 19:04:18 +12:00
@impl true
# Applies a LIMIT to the data layer query; a `nil` limit is a no-op.
def limit(query, nil, _), do: {:ok, query}

def limit(query, limit, _resource) do
  {:ok, from(record in query, limit: ^limit)}
end
2020-07-08 12:01:01 +12:00
@impl true
# Returns the table backing this resource. Falls back to an empty string when
# none is configured (e.g. a not-yet-specialized polymorphic resource).
def source(resource) do
  case AshPostgres.DataLayer.Info.table(resource) do
    nil -> ""
    table -> table
  end
end
@impl true
def set_context(resource, data_layer_query, context) do
  # Bindings are numbered from this offset so that nested queries (e.g. the
  # inner side of a lateral join) don't clash with the outer query's bindings.
  start_bindings = context[:data_layer][:start_bindings_at] || 0

  data_layer_query = from(row in data_layer_query, as: ^start_bindings)

  # A table override in the context (used by polymorphic resources) replaces
  # the query's FROM source while keeping the resource module.
  data_layer_query =
    if context[:data_layer][:table] do
      %{
        data_layer_query
        | from: %{data_layer_query.from | source: {context[:data_layer][:table], resource}}
      }
    else
      data_layer_query
    end

  # A schema override becomes the Ecto query prefix.
  data_layer_query =
    if context[:data_layer][:schema] do
      Ecto.Query.put_query_prefix(data_layer_query, to_string(context[:data_layer][:schema]))
    else
      data_layer_query
    end

  data_layer_query =
    data_layer_query
    |> default_bindings(resource, context)

  # When this query is the inner side of a lateral join, expose the outer
  # (parent) query's bindings so `parent_as/1` references can resolve.
  case context[:data_layer][:lateral_join_source] do
    {_, [{%{resource: resource}, _, _, _} | rest]} ->
      parent =
        resource
        |> resource_to_query(nil)
        |> default_bindings(resource, context)

      # Multi-step paths go through a join relationship; register a binding
      # for the join source on the parent query as well.
      parent =
        case rest do
          [{resource, _, _, %{name: join_relationship_name}} | _] ->
            binding_data = %{type: :inner, path: [join_relationship_name], source: resource}
            add_binding(parent, binding_data)

          _ ->
            parent
        end

      ash_bindings =
        data_layer_query.__ash_bindings__
        |> Map.put(:parent_bindings, Map.put(parent.__ash_bindings__, :parent?, true))
        |> Map.put(:parent_resources, [
          parent.__ash_bindings__.resource | parent.__ash_bindings__[:parent_resources] || []
        ])

      {:ok, %{data_layer_query | __ash_bindings__: ash_bindings}}

    _ ->
      {:ok, data_layer_query}
  end
end
2020-06-14 19:04:18 +12:00
@impl true
# Applies an OFFSET to the data layer query; a `nil` offset is a no-op.
#
# Fix: the `nil` clause previously returned the bare `query`, while every
# sibling clause (and the parallel `limit/3` above) returns `{:ok, query}` —
# an inconsistent callback return shape. It now returns `{:ok, query}`.
def offset(query, nil, _), do: {:ok, query}

# Offset 0 on a query that has no effective offset is also a no-op.
def offset(%{offset: old_offset} = query, 0, _resource) when old_offset in [0, nil] do
  {:ok, query}
end

def offset(query, offset, _resource) do
  {:ok, from(row in query, offset: ^offset)}
end
@impl true
def run_query(query, resource) do
  query = default_bindings(query, resource)

  # The sort may already have been applied (tracked in `__ash_bindings__`);
  # otherwise apply the sort stashed on the bindings now.
  with_sort_applied =
    if query.__ash_bindings__[:sort_applied?] do
      {:ok, query}
    else
      apply_sort(query, query.__ash_bindings__[:sort], resource)
    end

  case with_sort_applied do
    {:error, error} ->
      {:error, error}

    {:ok, query} ->
      # When ordering was captured as a window named `:order`, it must be
      # turned back into a real ORDER BY before execution.
      query =
        if query.__ash_bindings__[:__order__?] && query.windows[:order] do
          if query.distinct do
            # With DISTINCT: compute a row number over the order window,
            # strip limit/offset from the inner query, wrap it in a subquery
            # ordered by that row number, and re-apply limit/offset outside.
            query_with_order =
              from(row in query, select_merge: %{__order__: over(row_number(), :order)})

            query_without_limit_and_offset =
              query_with_order
              |> Ecto.Query.exclude(:limit)
              |> Ecto.Query.exclude(:offset)

            from(row in subquery(query_without_limit_and_offset),
              select: row,
              order_by: row.__order__
            )
            |> Map.put(:limit, query.limit)
            |> Map.put(:offset, query.offset)
          else
            # Without DISTINCT: promote the window's order_by expression to
            # the query's order_bys and drop the window.
            order_by = %{query.windows[:order] | expr: query.windows[:order].expr[:order_by]}

            %{
              query
              | windows: Keyword.delete(query.windows, :order),
                order_bys: [order_by]
            }
          end
        else
          %{query | windows: Keyword.delete(query.windows, :order)}
        end

      # A polymorphic resource with no concrete table configured cannot be
      # queried directly — raise a descriptive error instead.
      if AshPostgres.DataLayer.Info.polymorphic?(resource) && no_table?(query) do
        raise_table_error!(resource, :read)
      else
        {:ok, dynamic_repo(resource, query).all(query, repo_opts(nil, nil, resource))}
      end
  end
rescue
  # Translate raised postgres/Ecto errors into Ash error returns.
  e ->
    handle_raised_error(e, __STACKTRACE__, query, resource)
end
2021-03-22 10:58:47 +13:00
# A resource with no configured table ends up with an empty-string FROM
# source (see `source/1`); detect that case so a helpful error can be raised.
defp no_table?(%{from: %{source: {source, _schema}}}), do: source == ""
defp no_table?(_query), do: false
2022-05-23 10:30:20 +12:00
# Builds the options passed to repo calls (`all/2`, `one/2`, etc.).
#
# No tenant: use the statically configured schema (if any) as the prefix.
defp repo_opts(timeout, nil, resource) do
  if schema = AshPostgres.DataLayer.Info.schema(resource) do
    [prefix: schema]
  else
    []
  end
  |> add_timeout(timeout)
end

# With a tenant: context-based multitenancy uses the tenant as the schema
# prefix; any other strategy falls back to the configured schema.
defp repo_opts(timeout, tenant, resource) do
  if Ash.Resource.Info.multitenancy_strategy(resource) == :context do
    [prefix: tenant]
  else
    if schema = AshPostgres.DataLayer.Info.schema(resource) do
      [prefix: schema]
    else
      []
    end
  end
  |> add_timeout(timeout)
end
2022-05-23 10:30:20 +12:00
# Adds a `:timeout` to the repo options when one was given; a nil timeout
# leaves the options untouched.
defp add_timeout(opts, nil), do: opts
defp add_timeout(opts, timeout), do: Keyword.put(opts, :timeout, timeout)
2020-10-29 15:26:45 +13:00
2020-10-06 18:39:47 +13:00
@impl true
# Returns the custom expression functions available for this resource.
# The base set is always available; extension-backed functions are exposed
# only when the corresponding postgres extension appears in the repo's
# `installed_extensions` config.
def functions(resource) do
  config = AshPostgres.DataLayer.Info.repo(resource, :mutate).config()
  installed = config[:installed_extensions] || []

  base_functions = [
    AshPostgres.Functions.Fragment,
    AshPostgres.Functions.Like,
    AshPostgres.Functions.ILike
  ]

  extension_functions =
    for {extension, function} <- [
          {"pg_trgm", AshPostgres.Functions.TrigramSimilarity},
          {"vector", AshPostgres.Functions.VectorCosineDistance}
        ],
        extension in installed,
        do: function

  base_functions ++ extension_functions
end
2020-10-18 12:13:51 +13:00
@impl true
def run_aggregate_query(query, aggregates, resource) do
  # `:exists` aggregates are executed as separate `exists?` queries below.
  {exists, aggregates} = Enum.split_with(aggregates, &(&1.kind == :exists))
  query = default_bindings(query, resource)

  # Distinct/limited queries must be wrapped in a subquery so aggregates are
  # computed over the restricted row set. Either way, select/order/windows
  # are cleared since only the aggregate values will be selected.
  query =
    if query.distinct || query.limit do
      query =
        query
        |> Ecto.Query.exclude(:select)
        |> Ecto.Query.exclude(:order_by)
        |> Map.put(:windows, [])

      from(row in subquery(query), as: ^0, select: %{})
    else
      query
      |> Ecto.Query.exclude(:select)
      |> Ecto.Query.exclude(:order_by)
      |> Map.put(:windows, [])
      |> Ecto.Query.select(%{})
    end

  # Keep the pre-aggregate query around: the `exists` queries are built from it.
  query_before_select = query

  # Merge each aggregate's value into the select of the query.
  query =
    Enum.reduce(
      aggregates,
      query,
      fn agg, query ->
        first_relationship =
          Ash.Resource.Info.relationship(resource, agg.relationship_path |> Enum.at(0))

        AshPostgres.Aggregate.add_subquery_aggregate_select(
          query,
          agg.relationship_path |> Enum.drop(1),
          agg,
          resource,
          true,
          first_relationship
        )
      end
    )

  # Skip hitting the database entirely when only `exists` aggregates were
  # requested.
  result =
    case aggregates do
      [] ->
        %{}

      _ ->
        dynamic_repo(resource, query).one(query, repo_opts(nil, nil, resource))
    end

  {:ok, add_exists_aggs(result, resource, query_before_select, exists)}
end
# Runs each `:exists` aggregate as its own `Repo.exists?/2` query (applying
# the aggregate's filter when present) and merges the boolean results into
# `result` under the aggregate's name.
defp add_exists_aggs(result, resource, query, exists) do
  repo = dynamic_repo(resource, query)
  repo_opts = repo_opts(nil, nil, resource)

  Enum.reduce(exists, result, fn agg, result ->
    {:ok, filtered} =
      case agg do
        %{query: %{filter: filter}} when not is_nil(filter) ->
          filter(query, filter, resource)

        _ ->
          {:ok, query}
      end

    Map.put(
      # `result` may be nil when the aggregate query itself was skipped.
      result || %{},
      agg.name,
      repo.exists?(filtered, repo_opts)
    )
  end)
end
@impl true
# Records the tenant on the query: sets the Ecto query prefix (the postgres
# schema) and stashes the raw tenant under `:__tenant__` for later use.
def set_tenant(_resource, query, tenant) do
  tenanted =
    query
    |> Ecto.Query.put_query_prefix(to_string(tenant))
    |> Map.put(:__tenant__, tenant)

  {:ok, tenanted}
end
@impl true
def run_aggregate_query_with_lateral_join(
      query,
      aggregates,
      root_data,
      destination_resource,
      path
    ) do
  # `:exists` aggregates run as separate queries, as in run_aggregate_query/3.
  {exists, aggregates} = Enum.split_with(aggregates, &(&1.kind == :exists))

  case lateral_join_query(
         query,
         root_data,
         path
       ) do
    {:ok, lateral_join_query} ->
      # The first step of the relationship path carries the source query,
      # from which we take the source resource.
      source_resource =
        path
        |> Enum.at(0)
        |> elem(0)
        |> Map.get(:resource)

      # Aggregate over the lateral-join result set.
      subquery = from(row in subquery(lateral_join_query), as: ^0, select: %{})
      subquery = default_bindings(subquery, source_resource)

      query =
        Enum.reduce(
          aggregates,
          subquery,
          fn agg, subquery ->
            # Aggregates whose filter contains an `exists` expression need
            # special treatment in the generated select.
            has_exists? =
              Ash.Filter.find(agg.query && agg.query.filter, fn
                %Ash.Query.Exists{} -> true
                _ -> false
              end)

            first_relationship =
              Ash.Resource.Info.relationship(
                source_resource,
                agg.relationship_path |> Enum.at(0)
              )

            AshPostgres.Aggregate.add_subquery_aggregate_select(
              subquery,
              agg.relationship_path |> Enum.drop(1),
              agg,
              destination_resource,
              has_exists?,
              first_relationship
            )
          end
        )

      # Skip the aggregate query entirely when only `exists` aggregates
      # were requested.
      result =
        case aggregates do
          [] ->
            %{}

          _ ->
            dynamic_repo(source_resource, query).one(
              query,
              repo_opts(nil, nil, source_resource)
            )
        end

      {:ok, add_exists_aggs(result, source_resource, subquery, exists)}

    {:error, error} ->
      {:error, error}
  end
end
2020-08-26 16:28:55 +12:00
@impl true
def run_query_with_lateral_join(
      query,
      root_data,
      _destination_resource,
      path
    ) do
  # Make sure any pending sort is applied before the lateral join is built.
  with_sort_applied =
    if query.__ash_bindings__[:sort_applied?] do
      {:ok, query}
    else
      apply_sort(query, query.__ash_bindings__[:sort], query.__ash_bindings__.resource)
    end

  case with_sort_applied do
    {:error, error} ->
      {:error, error}

    {:ok, query} ->
      case lateral_join_query(
             query,
             root_data,
             path
           ) do
        {:ok, query} ->
          # The first step of the relationship path carries the source
          # query, from which we take the source resource (its repo runs
          # the final query).
          source_resource =
            path
            |> Enum.at(0)
            |> elem(0)
            |> Map.get(:resource)

          {:ok,
           dynamic_repo(source_resource, query).all(
             query,
             repo_opts(nil, nil, source_resource)
           )}

        {:error, error} ->
          {:error, error}
      end
  end
end
# Builds the lateral-join query for a single-hop relationship
# (no join/through resource). `root_data` supplies the source values
# that constrain the outer query.
defp lateral_join_query(
       query,
       root_data,
       [{source_query, source_attribute, destination_attribute, relationship}]
     ) do
  source_query = Ash.Query.new(source_query)

  # When ordering matters, tag each row with its row number so the
  # outer query can re-sort after the lateral join.
  base_query =
    if query.__ash_bindings__[:__order__?] do
      from(row in query,
        select_merge: %{__order__: over(row_number(), :order)}
      )
    else
      query
    end

  base_query =
    cond do
      Map.get(relationship, :manual) ->
        # Manual relationships delegate subquery construction to the
        # implementing module.
        {module, opts} = relationship.manual

        module.ash_postgres_subquery(
          opts,
          0,
          0,
          base_query
        )

      Map.get(relationship, :no_attributes?) ->
        base_query

      true ->
        # Standard relationship: correlate the destination with the
        # parent (binding 0) on the relationship's attributes.
        from(destination in base_query,
          where:
            field(destination, ^destination_attribute) ==
              field(parent_as(^0), ^source_attribute)
        )
    end

  subquery =
    base_query
    |> set_subquery_prefix(source_query, relationship.destination)
    |> subquery()

  source_query.resource
  |> Ash.Query.set_context(%{:data_layer => source_query.context[:data_layer]})
  |> Ash.Query.set_tenant(source_query.tenant)
  |> set_lateral_join_prefix(query)
  |> case do
    %{valid?: true} = query ->
      Ash.Query.data_layer_query(query)

    query ->
      {:error, query}
  end
  |> case do
    {:ok, data_layer_query} ->
      source_values = Enum.map(root_data, &Map.get(&1, source_attribute))

      data_layer_query =
        from(source in data_layer_query,
          where: field(source, ^source_attribute) in ^source_values
        )

      if query.__ash_bindings__[:__order__?] do
        {:ok,
         from(source in data_layer_query,
           inner_lateral_join: destination in ^subquery,
           on: true,
           order_by: destination.__order__,
           select: destination,
           select_merge: %{__lateral_join_source__: field(source, ^source_attribute)},
           distinct: true
         )}
      else
        {:ok,
         from(source in data_layer_query,
           inner_lateral_join: destination in ^subquery,
           on: true,
           select: destination,
           select_merge: %{__lateral_join_source__: field(source, ^source_attribute)},
           distinct: true
         )}
      end

    {:error, error} ->
      {:error, error}
  end
end
# Builds the lateral-join query for a two-hop (many-to-many style) path:
# source -> through (join resource) -> destination. The through query is
# built first so its binding offsets can be threaded into the destination
# query's context.
defp lateral_join_query(
       query,
       root_data,
       [
         {source_query, source_attribute, source_attribute_on_join_resource, relationship},
         {through_resource, destination_attribute_on_join_resource, destination_attribute,
          through_relationship}
       ]
     ) do
  source_query = Ash.Query.new(source_query)
  source_values = Enum.map(root_data, &Map.get(&1, source_attribute))

  through_resource
  |> Ash.Query.new()
  |> Ash.Query.set_context(through_relationship.context)
  |> Ash.Query.do_filter(through_relationship.filter)
  |> Ash.Query.sort(through_relationship.sort, prepend?: true)
  |> Ash.Query.set_tenant(source_query.tenant)
  |> Ash.Query.put_context(:data_layer, %{
    start_bindings_at: query.__ash_bindings__.current
  })
  |> set_lateral_join_prefix(query)
  |> case do
    %{valid?: true} = through_query ->
      through_query
      |> Ash.Query.data_layer_query()

    query ->
      {:error, query}
  end
  |> case do
    {:ok, through_query} ->
      source_query.resource
      |> Ash.Query.new()
      |> Ash.Query.set_context(relationship.context)
      |> Ash.Query.set_context(%{:data_layer => source_query.context[:data_layer]})
      |> Ash.Query.put_context(:data_layer, %{
        start_bindings_at: through_query.__ash_bindings__.current
      })
      |> set_lateral_join_prefix(query)
      |> Ash.Query.do_filter(relationship.filter)
      |> case do
        %{valid?: true} = query ->
          query
          |> Ash.Query.data_layer_query()

        query ->
          {:error, query}
      end
      |> case do
        {:ok, data_layer_query} ->
          if query.__ash_bindings__[:__order__?] do
            # Ordered variant: tag rows with a row number inside the
            # subquery and re-order by it in the outer query.
            subquery =
              subquery(
                from(
                  destination in query,
                  select_merge: %{__order__: over(row_number(), :order)},
                  join:
                    through in ^set_subquery_prefix(
                      through_query,
                      source_query,
                      relationship.through
                    ),
                  as: ^query.__ash_bindings__.current,
                  on:
                    field(through, ^destination_attribute_on_join_resource) ==
                      field(destination, ^destination_attribute),
                  where:
                    field(through, ^source_attribute_on_join_resource) ==
                      field(
                        parent_as(^through_query.__ash_bindings__.current),
                        ^source_attribute
                      ),
                  select_merge: %{
                    __lateral_join_source__: field(through, ^source_attribute_on_join_resource)
                  }
                )
                |> set_subquery_prefix(
                  source_query,
                  relationship.destination
                )
              )

            {:ok,
             from(source in data_layer_query,
               where: field(source, ^source_attribute) in ^source_values,
               inner_lateral_join: destination in ^subquery,
               on: true,
               select: destination,
               order_by: destination.__order__,
               distinct: true
             )}
          else
            subquery =
              subquery(
                from(
                  destination in query,
                  join:
                    through in ^set_subquery_prefix(
                      through_query,
                      source_query,
                      relationship.through
                    ),
                  as: ^query.__ash_bindings__.current,
                  on:
                    field(through, ^destination_attribute_on_join_resource) ==
                      field(destination, ^destination_attribute),
                  where:
                    field(through, ^source_attribute_on_join_resource) ==
                      field(
                        parent_as(^through_query.__ash_bindings__.current),
                        ^source_attribute
                      ),
                  select_merge: %{
                    __lateral_join_source__: field(through, ^source_attribute_on_join_resource)
                  }
                )
                |> set_subquery_prefix(
                  source_query,
                  relationship.destination
                )
              )

            {:ok,
             from(source in data_layer_query,
               where: field(source, ^source_attribute) in ^source_values,
               inner_lateral_join: destination in ^subquery,
               on: true,
               select: destination,
               distinct: true
             )}
          end

        {:error, error} ->
          {:error, error}
      end

    {:error, error} ->
      {:error, error}
  end
end
@doc false
# Sets the Postgres schema (Ecto "prefix") on `data_layer_query` for
# `resource`. With context-based multitenancy the tenant wins; otherwise
# falls back to the resource's configured schema, the repo's
# `:default_prefix`, and finally "public".
def set_subquery_prefix(data_layer_query, source_query, resource) do
  config = AshPostgres.DataLayer.Info.repo(resource, :mutate).config()

  if Ash.Resource.Info.multitenancy_strategy(resource) == :context do
    %{
      data_layer_query
      | prefix:
          to_string(
            source_query.tenant || AshPostgres.DataLayer.Info.schema(resource) ||
              config[:default_prefix] ||
              "public"
          )
    }
  else
    %{
      data_layer_query
      | prefix:
          to_string(
            AshPostgres.DataLayer.Info.schema(resource) || config[:default_prefix] ||
              "public"
          )
    }
  end
end
# Propagates the outer query's schema prefix onto `ash_query` as its
# tenant, but only when the resource uses context-based multitenancy.
defp set_lateral_join_prefix(ash_query, query) do
  if Ash.Resource.Info.multitenancy_strategy(ash_query.resource) == :context do
    Ash.Query.set_tenant(ash_query, query.prefix)
  else
    ash_query
  end
end
@impl true
# Builds the base Ecto query for `resource`, sourcing from its configured
# table (empty string when no table is configured, e.g. polymorphic
# resources whose table is supplied via context).
def resource_to_query(resource, _) do
  from(row in {AshPostgres.DataLayer.Info.table(resource) || "", resource}, [])
end
@impl true
# Inserts all changesets in `stream` in a single `insert_all`, optionally
# as an upsert. Returns `:ok`, `{:ok, results}`, or `{:error, error}`.
def bulk_create(resource, stream, options) do
  opts = repo_opts(nil, options[:tenant], resource)

  opts =
    if options.return_records? do
      Keyword.put(opts, :returning, true)
    else
      opts
    end

  changesets = Enum.to_list(stream)

  opts =
    if options[:upsert?] do
      # Ash groups changesets by atomics before dispatching them to the data layer
      # this means that all changesets have the same atomics
      %{atomics: atomics, filters: filters} = Enum.at(changesets, 0)

      query = from(row in resource, as: ^0)

      query =
        query
        |> default_bindings(resource)

      upsert_set =
        upsert_set(resource, changesets, options)

      on_conflict =
        case query_with_atomics(
               resource,
               query,
               filters,
               atomics,
               %{},
               upsert_set
             ) do
          :empty ->
            :nothing

          {:ok, query} ->
            query

          {:error, error} ->
            raise Ash.Error.to_ash_error(error)
        end

      opts
      |> Keyword.put(:on_conflict, on_conflict)
      |> Keyword.put(
        :conflict_target,
        conflict_target(
          resource,
          options[:upsert_keys] || Ash.Resource.Info.primary_key(resource)
        )
      )
    else
      opts
    end

  ecto_changesets = Enum.map(changesets, & &1.attributes)

  source =
    if table = Enum.at(changesets, 0).context[:data_layer][:table] do
      {table, resource}
    else
      resource
    end

  repo = dynamic_repo(resource, Enum.at(changesets, 0))

  source
  |> repo.insert_all(ecto_changesets, opts)
  |> case do
    {_, nil} ->
      :ok

    {_, results} ->
      if options[:single?] do
        Enum.each(results, &maybe_create_tenant!(resource, &1))

        {:ok, results}
      else
        {:ok,
         Stream.zip_with(results, changesets, fn result, changeset ->
           # NOTE(review): this reads `:upsert?` from the repo opts keyword
           # list, not from `options` — confirm this is intended, as the
           # repo opts don't obviously carry that key.
           if !opts[:upsert?] do
             maybe_create_tenant!(resource, result)
           end

           Ash.Resource.put_metadata(
             result,
             :bulk_create_index,
             changeset.context.bulk_create.index
           )
         end)}
      end
  end
rescue
  e ->
    # Surface constraint violations etc. as Ash errors rather than raising.
    changeset = Ash.Changeset.new(resource)

    handle_raised_error(
      e,
      __STACKTRACE__,
      {:bulk_create, ecto_changeset(changeset.data, changeset, :create, false)},
      resource
    )
end
# Builds the `on_conflict` update set for a bulk upsert: for each field
# being upserted, reference the incoming EXCLUDED value (coalesced with
# the attribute's update default when one exists).
defp upsert_set(resource, changesets, options) do
  attributes_changing_anywhere =
    changesets |> Enum.flat_map(&Map.keys(&1.attributes)) |> Enum.uniq()

  update_defaults = update_defaults(resource)
  # We can't reference EXCLUDED if at least one of the changesets in the stream is not
  # changing the value (and we wouldn't want to even if we could as it would be unnecessary)
  upsert_fields =
    (options[:upsert_fields] || []) |> Enum.filter(&(&1 in attributes_changing_anywhere))

  fields_to_upsert =
    upsert_fields --
      Keyword.keys(Enum.at(changesets, 0).atomics)

  Enum.map(fields_to_upsert, fn upsert_field ->
    # for safety, we check once more at the end that all values in
    # upsert_fields are names of attributes. This is because
    # below we use `literal/1` to bring them into the query
    if is_nil(resource.__schema__(:type, upsert_field)) do
      raise "Only attribute names can be used in upsert_fields"
    end

    case Keyword.fetch(update_defaults, upsert_field) do
      {:ok, default} ->
        if upsert_field in upsert_fields do
          {upsert_field,
           Ecto.Query.dynamic(
             [],
             fragment(
               "COALESCE(EXCLUDED.?, ?)",
               literal(^to_string(upsert_field)),
               ^default
             )
           )}
        else
          {upsert_field, default}
        end

      :error ->
        {upsert_field,
         Ecto.Query.dynamic(
           [],
           fragment("EXCLUDED.?", literal(^to_string(upsert_field)))
         )}
    end
  end)
end
@impl true
# Creates a single record by delegating to `bulk_create/3` with a
# one-element list and `single?: true`.
def create(resource, changeset) do
  # Point the record's Ecto metadata at the resolved table (important
  # for polymorphic resources whose table comes from context).
  changeset = %{
    changeset
    | data:
        Map.update!(
          changeset.data,
          :__meta__,
          &Map.put(&1, :source, table(resource, changeset))
        )
  }

  case bulk_create(resource, [changeset], %{
         single?: true,
         tenant: changeset.tenant,
         return_records?: true
       }) do
    {:ok, [result]} ->
      {:ok, result}

    {:error, error} ->
      {:error, error}
  end
end
# Creates the tenant schema for a newly created record, when the resource
# is configured (via `manage_tenant`) to create tenants. Raises on failure.
defp maybe_create_tenant!(resource, result) do
  if AshPostgres.DataLayer.Info.manage_tenant_create?(resource) do
    tenant_name = tenant_name(resource, result)

    AshPostgres.MultiTenancy.create_tenant!(
      tenant_name,
      AshPostgres.DataLayer.Info.repo(resource, :read)
    )
  else
    :ok
  end
end
# Renames the tenant schema when an update changes any attribute that
# participates in the resource's tenant-name template. Always returns :ok.
defp maybe_update_tenant(resource, changeset, result) do
  if AshPostgres.DataLayer.Info.manage_tenant_update?(resource) do
    # Only atoms in the template refer to attributes; strings are literals.
    changing_tenant_name? =
      resource
      |> AshPostgres.DataLayer.Info.manage_tenant_template()
      |> Enum.filter(&is_atom/1)
      |> Enum.any?(&Ash.Changeset.changing_attribute?(changeset, &1))

    if changing_tenant_name? do
      old_tenant_name = tenant_name(resource, changeset.data)
      new_tenant_name = tenant_name(resource, result)

      AshPostgres.MultiTenancy.rename_tenant(
        AshPostgres.DataLayer.Info.repo(resource, :read),
        old_tenant_name,
        new_tenant_name
      )
    end
  end

  :ok
end
# Renders the resource's `manage_tenant` template into a tenant name:
# binary segments are used verbatim, atom segments are read off `result`
# and stringified.
defp tenant_name(resource, result) do
  resource
  |> AshPostgres.DataLayer.Info.manage_tenant_template()
  |> Enum.map_join(fn item ->
    if is_binary(item) do
      item
    else
      result
      |> Map.get(item)
      |> to_string()
    end
  end)
end
# Converts an Ecto changeset error result into a list of Ash errors.
defp handle_errors({:error, %Ecto.Changeset{errors: errors}}) do
  {:error, Enum.map(errors, &to_ash_error/1)}
end

# Converts a single Ecto changeset error tuple into an Ash
# InvalidAttribute exception, preserving the error vars.
defp to_ash_error({field, {message, vars}}) do
  Ash.Error.Changes.InvalidAttribute.exception(
    field: field,
    message: message,
    private_vars: vars
  )
end
# Builds an Ecto changeset from an Ash changeset, attaching filters,
# the resolved table, and all constraint handlers (FK, unique, check,
# exclusion) appropriate to the `type` of operation
# (:create | :upsert | :update | :delete).
defp ecto_changeset(record, changeset, type, table_error? \\ true) do
  filters =
    if changeset.action_type == :create do
      %{}
    else
      Map.get(changeset, :filters, %{})
    end

  # For non-creates, the primary key of the existing record is added to
  # the filters so the statement targets exactly that row.
  filters =
    if changeset.action_type == :create do
      filters
    else
      changeset.resource
      |> Ash.Resource.Info.primary_key()
      |> Enum.reduce(filters, fn key, filters ->
        Map.put(filters, key, Map.get(record, key))
      end)
    end

  attributes =
    changeset.resource
    |> Ash.Resource.Info.attributes()
    |> Enum.map(& &1.name)

  # Attributes set atomically are applied via the query, not the changeset.
  attributes_to_change =
    Enum.reject(attributes, fn attribute ->
      Keyword.has_key?(changeset.atomics, attribute)
    end)

  ecto_changeset =
    record
    |> to_ecto()
    |> set_table(changeset, type, table_error?)
    |> Ecto.Changeset.change(Map.take(changeset.attributes, attributes_to_change))
    |> Map.update!(:filters, &Map.merge(&1, filters))
    |> add_configured_foreign_key_constraints(record.__struct__)
    |> add_unique_indexes(record.__struct__, changeset)
    |> add_check_constraints(record.__struct__)
    |> add_exclusion_constraints(record.__struct__)

  case type do
    :create ->
      ecto_changeset
      |> add_my_foreign_key_constraints(record.__struct__)

    type when type in [:upsert, :update] ->
      ecto_changeset
      |> add_my_foreign_key_constraints(record.__struct__)
      |> add_related_foreign_key_constraints(record.__struct__)

    :delete ->
      ecto_changeset
      |> add_related_foreign_key_constraints(record.__struct__)
  end
end
# Stale-entry errors become Ash StaleRecord errors. Note that `resource`
# is bound both in the struct pattern and the fourth argument, so the
# clause only matches when they are the same module.
defp handle_raised_error(
       %Ecto.StaleEntryError{changeset: %{data: %resource{}, filters: filters}},
       stacktrace,
       context,
       resource
     ) do
  handle_raised_error(
    Ash.Error.Changes.StaleRecord.exception(resource: resource, filters: filters),
    stacktrace,
    context,
    resource
  )
end

# Lock-not-available (e.g. NOWAIT) becomes an Ash Unavailable error.
defp handle_raised_error(
       %Postgrex.Error{
         postgres: %{
           code: :lock_not_available,
           message: message
         }
       },
       stacktrace,
       context,
       resource
     ) do
  handle_raised_error(
    Ash.Error.Invalid.Unavailable.exception(
      resource: resource,
      source: inspect(context, pretty: true),
      reason: message
    ),
    stacktrace,
    context,
    resource
  )
end

# During bulk creates there is no per-record changeset, so constraint
# violations are mapped against a synthesized "fake" changeset.
defp handle_raised_error(
       %Postgrex.Error{} = error,
       stacktrace,
       {:bulk_create, fake_changeset},
       _resource
     ) do
  case Ecto.Adapters.Postgres.Connection.to_constraints(error, []) do
    [] ->
      {:error, Ash.Error.to_ash_error(error, stacktrace)}

    constraints ->
      {:error,
       fake_changeset
       |> constraints_to_errors(:insert, constraints)
       |> Ash.Error.to_ash_error()}
  end
end

# Cast failures in queries become InvalidFilterValue errors.
defp handle_raised_error(%Ecto.Query.CastError{} = e, stacktrace, context, resource) do
  handle_raised_error(
    Ash.Error.Query.InvalidFilterValue.exception(value: e.value, context: context),
    stacktrace,
    context,
    resource
  )
end

# A Postgrex error with exactly one constraint violation is matched
# against the user-declared constraints on the changeset; unmatched
# errors are re-raised with the original stacktrace.
defp handle_raised_error(
       %Postgrex.Error{} = error,
       stacktrace,
       %{constraints: user_constraints},
       _resource
     ) do
  case Ecto.Adapters.Postgres.Connection.to_constraints(error, []) do
    [{type, constraint}] ->
      user_constraint =
        Enum.find(user_constraints, fn c ->
          case {c.type, c.constraint, c.match} do
            {^type, ^constraint, :exact} -> true
            {^type, cc, :suffix} -> String.ends_with?(constraint, cc)
            {^type, cc, :prefix} -> String.starts_with?(constraint, cc)
            {^type, %Regex{} = r, _match} -> Regex.match?(r, constraint)
            _ -> false
          end
        end)

      case user_constraint do
        %{field: field, error_message: error_message, error_type: error_type} ->
          {:error,
           to_ash_error(
             {field, {error_message, [constraint: error_type, constraint_name: constraint]}}
           )}

        nil ->
          reraise error, stacktrace
      end

    _ ->
      reraise error, stacktrace
  end
end

# Fallback: wrap anything else as an Ash error with the stacktrace.
defp handle_raised_error(error, stacktrace, _ecto_changeset, _resource) do
  {:error, Ash.Error.to_ash_error(error, stacktrace)}
end
# Maps a list of `{type, constraint}` violations to errors: a matching
# user-declared constraint yields an Ash InvalidAttribute; otherwise an
# Ecto.ConstraintError is built (mirroring what Ecto would raise).
defp constraints_to_errors(%{constraints: user_constraints} = changeset, action, constraints) do
  Enum.map(constraints, fn {type, constraint} ->
    user_constraint =
      Enum.find(user_constraints, fn c ->
        case {c.type, c.constraint, c.match} do
          {^type, ^constraint, :exact} -> true
          {^type, cc, :suffix} -> String.ends_with?(constraint, cc)
          {^type, cc, :prefix} -> String.starts_with?(constraint, cc)
          {^type, %Regex{} = r, _match} -> Regex.match?(r, constraint)
          _ -> false
        end
      end)

    case user_constraint do
      %{field: field, error_message: error_message, type: type, constraint: constraint} ->
        Ash.Error.Changes.InvalidAttribute.exception(
          field: field,
          message: error_message,
          private_vars: [
            constraint: constraint,
            constraint_type: type
          ]
        )

      nil ->
        Ecto.ConstraintError.exception(
          action: action,
          type: type,
          constraint: constraint,
          changeset: changeset
        )
    end
  end)
end
# For polymorphic resources, points the record's Ecto metadata at the
# table (from context or resource config) and schema. Non-polymorphic
# records are returned unchanged. When no table can be resolved, raises
# unless `table_error?` is false.
defp set_table(record, changeset, operation, table_error?) do
  if AshPostgres.DataLayer.Info.polymorphic?(record.__struct__) do
    table =
      changeset.context[:data_layer][:table] ||
        AshPostgres.DataLayer.Info.table(record.__struct__)

    record =
      if table do
        Ecto.put_meta(record, source: table)
      else
        if table_error? do
          raise_table_error!(changeset.resource, operation)
        else
          record
        end
      end

    prefix =
      changeset.context[:data_layer][:schema] ||
        AshPostgres.DataLayer.Info.schema(record.__struct__)

    if prefix do
      # NOTE(review): this sets the *table* as the prefix — `prefix: prefix`
      # looks intended. Behavior preserved here; confirm before changing.
      Ecto.put_meta(record, prefix: table)
    else
      record
    end
  else
    record
  end
end
# Recursively converts Ecto-shaped data back into Ash-shaped data:
# `Ecto.Association.NotLoaded` relationship values are replaced with the
# resource struct's default (Ash's not-loaded marker); everything else is
# converted recursively. Non-resource values pass through untouched.
def from_ecto({:ok, result}), do: {:ok, from_ecto(result)}
def from_ecto({:error, _} = other), do: other

def from_ecto(nil), do: nil

def from_ecto(value) when is_list(value) do
  Enum.map(value, &from_ecto/1)
end

def from_ecto(%resource{} = record) do
  if Spark.Dsl.is?(resource, Ash.Resource) do
    # `struct/1` supplies the default (not-loaded) relationship values.
    empty = struct(resource)

    resource
    |> Ash.Resource.Info.relationships()
    |> Enum.reduce(record, fn relationship, record ->
      case Map.get(record, relationship.name) do
        %Ecto.Association.NotLoaded{} ->
          Map.put(record, relationship.name, Map.get(empty, relationship.name))

        value ->
          Map.put(record, relationship.name, from_ecto(value))
      end
    end)
  else
    record
  end
end

def from_ecto(other), do: other
# Recursively converts Ash-shaped data into Ecto-shaped data:
# `Ash.NotLoaded` relationship values become `Ecto.Association.NotLoaded`
# markers; everything else is converted recursively. Non-resource values
# pass through untouched.
def to_ecto(nil), do: nil

def to_ecto(value) when is_list(value) do
  Enum.map(value, &to_ecto/1)
end

def to_ecto(%resource{} = record) do
  if Spark.Dsl.is?(resource, Ash.Resource) do
    resource
    |> Ash.Resource.Info.relationships()
    |> Enum.reduce(record, fn relationship, record ->
      value =
        case Map.get(record, relationship.name) do
          %Ash.NotLoaded{} ->
            %Ecto.Association.NotLoaded{
              __field__: relationship.name,
              __cardinality__: relationship.cardinality
            }

          value ->
            to_ecto(value)
        end

      Map.put(record, relationship.name, value)
    end)
  else
    record
  end
end

def to_ecto(other), do: other
# Registers every configured check constraint on the changeset so that
# violations surface as changeset errors rather than raised exceptions.
defp add_check_constraints(changeset, resource) do
  resource
  |> AshPostgres.DataLayer.Info.check_constraints()
  |> Enum.reduce(changeset, fn constraint, changeset ->
    constraint.attribute
    |> List.wrap()
    |> Enum.reduce(changeset, fn attribute, changeset ->
      Ecto.Changeset.check_constraint(changeset, attribute,
        name: constraint.name,
        message: constraint.message || "is invalid"
      )
    end)
  end)
end
# Registers configured exclusion constraints on the changeset. Entries
# are `{key, name}` or `{key, name, message}` tuples.
defp add_exclusion_constraints(changeset, resource) do
  resource
  |> AshPostgres.DataLayer.Info.exclusion_constraint_names()
  |> Enum.reduce(changeset, fn constraint, changeset ->
    case constraint do
      {key, name} ->
        Ecto.Changeset.exclusion_constraint(changeset, key, name: name)

      {key, name, message} ->
        Ecto.Changeset.exclusion_constraint(changeset, key, name: name, message: message)
    end
  end)
end
# Registers foreign-key constraints for relationships *pointing at* this
# resource, so deletes/updates that would orphan related rows surface as
# changeset errors ("would leave records behind").
defp add_related_foreign_key_constraints(changeset, resource) do
  # TODO: this doesn't guarantee us to get all of them, because if something is related to this
  # schema and there is no back-relation, then this won't catch its foreign key constraints
  resource
  |> Ash.Resource.Info.relationships()
  |> Enum.map(& &1.destination)
  |> Enum.uniq()
  |> Enum.flat_map(fn related ->
    related
    |> Ash.Resource.Info.relationships()
    |> Enum.filter(&(&1.destination == resource))
    |> Enum.map(&Map.take(&1, [:source, :source_attribute, :destination_attribute, :name]))
  end)
  |> Enum.reduce(changeset, fn %{
                                 source: source,
                                 source_attribute: source_attribute,
                                 destination_attribute: destination_attribute,
                                 name: relationship_name
                               },
                               changeset ->
    # Prefer an explicitly configured reference name; otherwise fall back
    # to Postgres' default "<table>_<column>_fkey" naming.
    case AshPostgres.DataLayer.Info.reference(resource, relationship_name) do
      %{name: name} when not is_nil(name) ->
        Ecto.Changeset.foreign_key_constraint(changeset, destination_attribute,
          name: name,
          message: "would leave records behind"
        )

      _ ->
        Ecto.Changeset.foreign_key_constraint(changeset, destination_attribute,
          name: "#{AshPostgres.DataLayer.Info.table(source)}_#{source_attribute}_fkey",
          message: "would leave records behind"
        )
    end
  end)
end
# Registers a foreign-key constraint for each of this resource's own
# relationship source attributes, using Ecto's default constraint naming.
defp add_my_foreign_key_constraints(changeset, resource) do
  resource
  |> Ash.Resource.Info.relationships()
  |> Enum.reduce(changeset, &Ecto.Changeset.foreign_key_constraint(&2, &1.source_attribute))
end
# Registers explicitly configured foreign-key constraint names. The
# configuration may be a static list or an `{m, f, a}` that is applied
# with the changeset prepended to produce the list.
defp add_configured_foreign_key_constraints(changeset, resource) do
  resource
  |> AshPostgres.DataLayer.Info.foreign_key_names()
  |> case do
    {m, f, a} -> List.wrap(apply(m, f, [changeset | a]))
    value -> List.wrap(value)
  end
  |> Enum.reduce(changeset, fn
    {key, name}, changeset ->
      Ecto.Changeset.foreign_key_constraint(changeset, key, name: name)

    {key, name, message}, changeset ->
      Ecto.Changeset.foreign_key_constraint(changeset, key, name: name, message: message)
  end)
end
# Registers unique constraints on the changeset from four sources:
# resource identities, custom indexes, explicitly configured unique index
# names, and the primary-key index ("<table>_pkey").
defp add_unique_indexes(changeset, resource, ash_changeset) do
  changeset =
    resource
    |> Ash.Resource.Info.identities()
    |> Enum.reduce(changeset, fn identity, changeset ->
      name =
        AshPostgres.DataLayer.Info.identity_index_names(resource)[identity.name] ||
          "#{table(resource, ash_changeset)}_#{identity.name}_index"

      opts =
        if Map.get(identity, :message) do
          [name: name, message: identity.message]
        else
          [name: name]
        end

      Ecto.Changeset.unique_constraint(changeset, identity.keys, opts)
    end)

  changeset =
    resource
    |> AshPostgres.DataLayer.Info.custom_indexes()
    |> Enum.reduce(changeset, fn index, changeset ->
      opts =
        if index.message do
          [name: index.name, message: index.message]
        else
          [name: index.name]
        end

      Ecto.Changeset.unique_constraint(changeset, index.fields, opts)
    end)

  names =
    resource
    |> AshPostgres.DataLayer.Info.unique_index_names()
    |> case do
      {m, f, a} -> List.wrap(apply(m, f, [changeset | a]))
      value -> List.wrap(value)
    end

  names =
    case Ash.Resource.Info.primary_key(resource) do
      [] ->
        names

      fields ->
        if table = table(resource, ash_changeset) do
          [{fields, table <> "_pkey"} | names]
        else
          # NOTE(review): with a primary key but no resolvable table, the
          # configured unique index names are discarded entirely — confirm
          # this is intended rather than `names`.
          []
        end
    end

  Enum.reduce(names, changeset, fn
    {keys, name}, changeset ->
      Ecto.Changeset.unique_constraint(changeset, List.wrap(keys), name: name)

    {keys, name, message}, changeset ->
      Ecto.Changeset.unique_constraint(changeset, List.wrap(keys), name: name, message: message)
  end)
end
@impl true
# Upserts a single record by delegating to `bulk_create/3` with
# `upsert?: true`. Not supported for resources that manage a tenant.
def upsert(resource, changeset, keys \\ nil) do
  if AshPostgres.DataLayer.Info.manage_tenant_update?(resource) do
    {:error, "Cannot currently upsert a resource that owns a tenant"}
  else
    # BUG FIX: was `Ash.Resource.Info.primary_key(keys)` — the primary key
    # must be looked up on the resource; `keys` is nil on this branch.
    keys = keys || Ash.Resource.Info.primary_key(resource)

    update_defaults = update_defaults(resource)

    # Attributes to write on conflict: everything explicitly set plus
    # update defaults, minus defaulted attributes and the conflict keys.
    explicitly_changing_attributes =
      changeset.attributes
      |> Map.keys()
      |> Enum.concat(Keyword.keys(update_defaults))
      |> Kernel.--(Map.get(changeset, :defaults, []))
      |> Kernel.--(keys)

    upsert_fields =
      changeset.context[:private][:upsert_fields] || explicitly_changing_attributes

    case bulk_create(resource, [changeset], %{
           single?: true,
           upsert?: true,
           tenant: changeset.tenant,
           upsert_keys: keys,
           upsert_fields: upsert_fields,
           return_records?: true
         }) do
      {:ok, [result]} ->
        {:ok, result}

      {:error, error} ->
        {:error, error}
    end
  end
end
# Computes the `conflict_target` for an upsert. Resources with a
# base_filter use a partial unique index, so the target must be an
# unsafe fragment including the `WHERE` clause of that index.
defp conflict_target(resource, keys) do
  if Ash.Resource.Info.base_filter(resource) do
    base_filter_sql =
      AshPostgres.DataLayer.Info.base_filter_sql(resource) ||
        raise """
        Cannot use upserts with resources that have a base_filter without also adding `base_filter_sql` in the postgres section.
        """

    # Quote each key's source column name.
    sources =
      Enum.map(keys, fn key ->
        ~s("#{Ash.Resource.Info.attribute(resource, key).source || key}")
      end)

    {:unsafe_fragment, "(" <> Enum.join(sources, ", ") <> ") WHERE (#{base_filter_sql})"}
  else
    keys
  end
end
# Computes `{name, value}` update defaults for every attribute that has
# an `update_default`, resolving lazy (function / MFA) defaults once.
defp update_defaults(resource) do
  attributes =
    resource
    |> Ash.Resource.Info.attributes()
    |> Enum.reject(&is_nil(&1.update_default))

  attributes
  |> static_defaults()
  |> Enum.concat(lazy_matching_defaults(attributes))
  |> Enum.concat(lazy_non_matching_defaults(attributes))
end

# Defaults given as plain values (not functions or MFAs).
defp static_defaults(attributes) do
  attributes
  |> Enum.reject(&get_default_fun(&1))
  |> Enum.map(&{&1.name, &1.update_default})
end

# Lazy defaults for attributes that must NOT share a value with other
# attributes: each default is evaluated independently.
defp lazy_non_matching_defaults(attributes) do
  attributes
  |> Enum.filter(&(!&1.match_other_defaults? && get_default_fun(&1)))
  |> Enum.map(fn attribute ->
    default_value =
      case attribute.update_default do
        function when is_function(function) ->
          function.()

        {m, f, a} when is_atom(m) and is_atom(f) and is_list(a) ->
          apply(m, f, a)
      end

    {attribute.name, default_value}
  end)
end

# Lazy defaults for attributes that should share values: attributes with
# the same default function are grouped and the function is called once
# per group, so e.g. all timestamps get an identical value.
defp lazy_matching_defaults(attributes) do
  attributes
  |> Enum.filter(&(&1.match_other_defaults? && get_default_fun(&1)))
  |> Enum.group_by(& &1.update_default)
  |> Enum.flat_map(fn {default_fun, attributes} ->
    default_value =
      case default_fun do
        function when is_function(function) ->
          function.()

        {m, f, a} when is_atom(m) and is_atom(f) and is_list(a) ->
          apply(m, f, a)
      end

    Enum.map(attributes, &{&1.name, default_value})
  end)
end

# Returns the update_default when it is lazy (a function or MFA tuple),
# otherwise nil.
defp get_default_fun(attribute) do
  if is_function(attribute.update_default) or match?({_, _, _}, attribute.update_default) do
    attribute.update_default
  end
end
@impl true
def update(resource, changeset) do
  # Point the record's ecto metadata at the correct table (polymorphic
  # resources may override the table via changeset context).
  ecto_changeset =
    changeset.data
    |> Map.update!(:__meta__, &Map.put(&1, :source, table(resource, changeset)))
    |> ecto_changeset(changeset, :update)

  try do
    query = from(row in resource, as: ^0)

    # Select back the atomic fields (their values are computed in the
    # database) along with the primary key.
    select = Keyword.keys(changeset.atomics) ++ Ash.Resource.Info.primary_key(resource)

    query =
      query
      |> default_bindings(resource, changeset.context)
      |> Ecto.Query.select(^select)

    case query_with_atomics(
           resource,
           query,
           ecto_changeset.filters,
           changeset.atomics,
           ecto_changeset.changes,
           []
         ) do
      :empty ->
        # Nothing to set: the stored record is already up to date.
        {:ok, changeset.data}

      {:ok, query} ->
        repo_opts = repo_opts(changeset.timeout, changeset.tenant, changeset.resource)

        repo_opts =
          Keyword.put(repo_opts, :returning, Keyword.keys(changeset.atomics))

        result =
          dynamic_repo(resource, changeset).update_all(
            query,
            [],
            repo_opts
          )

        case result do
          {0, []} ->
            # The filters matched no rows — the record was modified or
            # deleted concurrently.
            {:error,
             Ash.Error.Changes.StaleRecord.exception(
               resource: resource,
               filters: ecto_changeset.filters
             )}

          {1, [result]} ->
            # Merge plain attribute changes and the database-computed atomic
            # values back into the in-memory record.
            record =
              changeset.data
              |> Map.merge(changeset.attributes)
              |> Map.merge(Map.take(result, Keyword.keys(changeset.atomics)))

            maybe_update_tenant(resource, changeset, record)

            {:ok, record}
        end

      {:error, error} ->
        {:error, error}
    end
  rescue
    e ->
      handle_raised_error(e, __STACKTRACE__, ecto_changeset, resource)
  end
end
# Builds an `update_all`-ready query from the changeset's filters, atomic
# expressions, and plain attribute changes.
#
# Returns `:empty` when there is nothing to set, `{:ok, query}` with
# `:updates` populated, or `{:error, error}` when an atomic expression
# cannot be translated.
defp query_with_atomics(
       resource,
       query,
       filters,
       atomics,
       updating_one_changes,
       existing_set
     ) do
  # Narrow the query to the target record(s) via the changeset's filters.
  query =
    Enum.reduce(filters, query, fn {key, value}, query ->
      from(row in query,
        where: field(row, ^key) == ^value
      )
    end)

  # Translate each atomic `{field, expr}` into an Ecto dynamic, joining any
  # relationships and adding any aggregates the expression references.
  atomics_result =
    Enum.reduce_while(atomics, {:ok, query, []}, fn {field, expr}, {:ok, query, set} ->
      used_calculations =
        Ash.Filter.used_calculations(
          expr,
          resource
        )

      used_aggregates =
        expr
        |> AshPostgres.Aggregate.used_aggregates(
          resource,
          used_calculations,
          []
        )
        |> Enum.map(fn aggregate ->
          %{aggregate | load: aggregate.name}
        end)

      with {:ok, query} <-
             AshPostgres.Join.join_all_relationships(
               query,
               %Ash.Filter{
                 resource: resource,
                 expression: expr
               },
               left_only?: true
             ),
           {:ok, query} <-
             AshPostgres.Aggregate.add_aggregates(query, used_aggregates, resource, false, 0),
           dynamic <-
             AshPostgres.Expr.dynamic_expr(query, expr, query.__ash_bindings__) do
        {:cont, {:ok, query, Keyword.put(set, field, dynamic)}}
      else
        other ->
          {:halt, other}
      end
    end)

  case atomics_result do
    {:ok, query, dynamics} ->
      # Plain attribute changes become positional `^` parameters.
      {params, set, count} =
        updating_one_changes
        |> Map.to_list()
        |> Enum.reduce({[], [], 0}, fn {key, value}, {params, set, count} ->
          {[{value, {0, key}} | params], [{key, {:^, [], [count]}} | set], count + 1}
        end)

      # Expand the atomic dynamics into the same params/set accumulators so
      # their parameter indices continue from `count`.
      {params, set, _} =
        Enum.reduce(
          dynamics ++ existing_set,
          {params, set, count},
          fn {key, value}, {params, set, count} ->
            case AshPostgres.Expr.dynamic_expr(query, value, query.__ash_bindings__) do
              %Ecto.Query.DynamicExpr{} = dynamic ->
                result =
                  Ecto.Query.Builder.Dynamic.partially_expand(
                    :select,
                    query,
                    dynamic,
                    params,
                    count
                  )

                expr = elem(result, 0)
                new_params = elem(result, 1)

                # NOTE(review): the arity of partially_expand's result tuple
                # appears to vary, so the running count is taken as the last
                # element rather than a fixed position.
                new_count =
                  result |> Tuple.to_list() |> List.last()

                {new_params, [{key, expr} | set], new_count}

              other ->
                # Not a dynamic: treat the value as a literal parameter.
                {[{other, {0, key}} | params], [{key, {:^, [], [count]}} | set], count + 1}
            end
          end
        )

      case set do
        [] ->
          :empty

        set ->
          {:ok,
           Map.put(query, :updates, [
             %Ecto.Query.QueryExpr{
               # set/params were accumulated by prepending, so both must be
               # reversed to keep the `^` parameter indices aligned.
               expr: [set: Enum.reverse(set)],
               params: Enum.reverse(params)
             }
           ])}
      end

    {:error, error} ->
      {:error, error}
  end
end
@impl true
def destroy(resource, %{data: record} = changeset) do
  ecto_changeset = ecto_changeset(record, changeset, :delete)

  try do
    delete_result =
      ecto_changeset
      |> dynamic_repo(resource, changeset).delete(
        repo_opts(changeset.timeout, changeset.tenant, changeset.resource)
      )
      |> from_ecto()

    # Callers only need to know the record is gone, not its final state.
    case delete_result do
      {:ok, _record} -> :ok
      {:error, error} -> handle_errors({:error, error})
    end
  rescue
    e ->
      handle_raised_error(e, __STACKTRACE__, ecto_changeset, resource)
  end
end
2023-04-28 13:49:13 +12:00
@impl true
def lock(query, :for_update, _) do
  if query.distinct do
    # A DISTINCT query cannot carry the row lock directly, so apply the lock
    # inside a subquery and re-attach the distinct on the outer query.
    new_query =
      Ecto.Query.lock(%{query | distinct: nil}, [{^0, a}], fragment("FOR UPDATE OF ?", a))

    q = from(row in subquery(new_query), [])

    {:ok, %{q | distinct: query.distinct}}
  else
    {:ok, Ecto.Query.lock(query, [{^0, a}], fragment("FOR UPDATE OF ?", a))}
  end
end
2023-05-17 07:14:45 +12:00
# Lock statements accepted verbatim; for each one a pair of `lock/3` clauses
# is generated at compile time below — one plain and one NOWAIT variant.
@locks [
  "FOR UPDATE",
  "FOR NO KEY UPDATE",
  "FOR SHARE",
  "FOR KEY SHARE"
]

for lock <- @locks do
  frag = "#{lock} OF ?"

  def lock(query, unquote(lock), _) do
    {:ok, Ecto.Query.lock(query, [{^0, a}], fragment(unquote(frag), a))}
  end

  frag = "#{lock} OF ? NOWAIT"
  lock = "#{lock} NOWAIT"

  def lock(query, unquote(lock), _) do
    {:ok, Ecto.Query.lock(query, [{^0, a}], fragment(unquote(frag), a))}
  end
end
2020-06-14 19:04:18 +12:00
@impl true
def sort(query, sort, _resource) do
  # Sorting is applied lazily: stash it on the bindings so later stages
  # (e.g. distinct handling) can decide how and when to apply it.
  updated_bindings = Map.put(query.__ash_bindings__, :sort, sort)

  {:ok, %{query | __ash_bindings__: updated_bindings}}
end
2021-04-09 16:53:50 +12:00
@impl true
def select(query, select, resource) do
  # De-duplicate the requested fields before building the struct select.
  fields = Enum.uniq(select)

  query = default_bindings(query, resource)

  {:ok,
   from(row in query,
     select: struct(row, ^fields)
   )}
end
2023-07-20 04:50:05 +12:00
@impl true
def distinct_sort(query, sort, _) when sort in [nil, []] do
  {:ok, query}
end

def distinct_sort(query, sort, _) do
  # Stash the distinct sort on the bindings; it is consumed when a distinct
  # statement is built (see get_distinct_statement/distinct).
  updated_bindings = Map.put(query.__ash_bindings__, :distinct_sort, sort)

  {:ok, %{query | __ash_bindings__: updated_bindings}}
end
2022-05-11 14:47:21 +12:00
# If the order by does not match the initial sort clause, then we use a subquery
# to limit to only distinct rows. This may not perform that well, so we may need
# to come up with alternatives here.
@impl true
def distinct(query, empty, resource) when empty in [nil, []] do
  query |> apply_sort(query.__ash_bindings__[:sort], resource)
end

def distinct(query, distinct_on, resource) do
  case get_distinct_statement(query, distinct_on) do
    {:ok, distinct_statement} ->
      # The distinct fields line up with the sort: a plain DISTINCT ON can
      # be attached to this query directly.
      %{query | distinct: distinct_statement}
      |> apply_sort(query.__ash_bindings__[:sort], resource)

    {:error, distinct_statement} ->
      # Otherwise: compute the distinct rows in a subquery (ordered by the
      # distinct sort), then join back to the source and re-apply the
      # requested sort on the outside.
      query
      |> Ecto.Query.exclude(:order_by)
      |> default_bindings(resource)
      |> Map.put(:distinct, distinct_statement)
      |> apply_sort(
        query.__ash_bindings__[:distinct_sort] || query.__ash_bindings__[:sort],
        resource,
        :direct
      )
      |> case do
        {:ok, distinct_query} ->
          # Join condition: match outer row to distinct row on the full
          # primary key.
          on =
            Enum.reduce(Ash.Resource.Info.primary_key(resource), nil, fn key, dynamic ->
              if dynamic do
                Ecto.Query.dynamic(
                  [row, distinct],
                  ^dynamic and field(row, ^key) == field(distinct, ^key)
                )
              else
                Ecto.Query.dynamic([row, distinct], field(row, ^key) == field(distinct, ^key))
              end
            end)

          # Strip every clause from the outer query; the subquery carries
          # the real constraints.
          joined_query_source =
            Enum.reduce(
              [
                :join,
                :order_by,
                :group_by,
                :having,
                :distinct,
                :select,
                :combinations,
                :with_ctes,
                :limit,
                :offset,
                :lock,
                :preload,
                :update,
                :where
              ],
              query,
              &Ecto.Query.exclude(&2, &1)
            )

          joined_query =
            from(row in joined_query_source,
              join: distinct in subquery(distinct_query),
              on: ^on
            )

          from([row, distinct] in joined_query,
            select: distinct
          )
          |> default_bindings(resource)
          |> apply_sort(query.__ash_bindings__[:sort], resource)
          |> case do
            {:ok, joined_query} ->
              # Preserve the original query's order flag on the new query.
              {:ok,
               Map.update!(
                 joined_query,
                 :__ash_bindings__,
                 &Map.put(&1, :__order__?, query.__ash_bindings__[:__order__?] || false)
               )}

            {:error, error} ->
              {:error, error}
          end

        {:error, error} ->
          {:error, error}
      end
  end
end
2023-07-19 06:48:35 +12:00
2023-11-17 11:48:39 +13:00
defp apply_sort(query, sort, resource, type \\ :window)

# Nothing to sort: simply mark the sort as applied.
defp apply_sort(query, sort, _resource, _type) when sort in [nil, []] do
  {:ok, set_sort_applied(query)}
end

defp apply_sort(query, sort, resource, type) do
  AshPostgres.Sort.sort(query, sort, resource, [], 0, type)
end
2023-07-20 04:50:05 +12:00
# Records on the query that its sort has been handled, so later stages do
# not apply it again.
defp set_sort_applied(query) do
  %{query | __ash_bindings__: Map.put(query.__ash_bindings__, :sort_applied?, true)}
end
2022-05-11 14:47:21 +12:00
2023-07-20 04:50:05 +12:00
# Determines whether `distinct_on` can be expressed as a plain DISTINCT ON
# over the query's existing sort.
#
# Returns `{:ok, statement}` when the distinct fields match the leading
# order_by entries (or there is no sort at all), and `{:error, statement}`
# when a subquery-based rewrite is required (a distinct_sort is present, or
# the sort diverges from the distinct fields).
defp get_distinct_statement(query, distinct_on) do
  has_distinct_sort? = match?(%{__ash_bindings__: %{distinct_sort: _}}, query)

  if has_distinct_sort? do
    {:error, default_distinct_statement(query, distinct_on)}
  else
    sort = query.__ash_bindings__[:sort] || []

    distinct =
      query.distinct ||
        %Ecto.Query.QueryExpr{
          expr: [],
          params: []
        }

    if sort == [] do
      {:ok, default_distinct_statement(query, distinct_on)}
    else
      # Walk distinct_on and the sort in lockstep: each distinct field must
      # match the next sort entry (same field AND same order) or we bail.
      distinct_on
      |> Enum.reduce_while({sort, [], [], Enum.count(distinct.params)}, fn
        _, {[], _distinct_statement, _, _count} ->
          # More distinct fields than sort entries.
          {:halt, :error}

        distinct_on, {[order_by | rest_order_by], distinct_statement, params, count} ->
          case order_by do
            {^distinct_on, order} ->
              {distinct_expr, params, count} =
                distinct_on_expr(query, distinct_on, params, count)

              {:cont,
               {rest_order_by, [{order, distinct_expr} | distinct_statement], params, count}}

            _ ->
              {:halt, :error}
          end
      end)
      |> case do
        :error ->
          {:error, default_distinct_statement(query, distinct_on)}

        {_, result, params, _} ->
          # Accumulated by prepending — reverse to restore the field order.
          {:ok,
           %{
             distinct
             | expr: distinct.expr ++ Enum.reverse(result),
               params: distinct.params ++ Enum.reverse(params)
           }}
      end
    end
  end
end
2022-05-11 14:47:21 +12:00
# Builds a DISTINCT ON statement directly from `distinct_on`, defaulting a
# bare field to ascending order. Appends to any distinct already on the
# query, keeping parameter indices consistent.
defp default_distinct_statement(query, distinct_on) do
  distinct =
    query.distinct ||
      %Ecto.Query.QueryExpr{
        expr: []
      }

  {expr, params, _} =
    Enum.reduce(distinct_on, {[], [], Enum.count(distinct.params)}, fn
      {distinct_on_field, order}, {expr, params, count} ->
        {distinct_expr, params, count} =
          distinct_on_expr(query, distinct_on_field, params, count)

        {[{order, distinct_expr} | expr], params, count}

      distinct_on_field, {expr, params, count} ->
        # No explicit order given: default to :asc.
        {distinct_expr, params, count} =
          distinct_on_expr(query, distinct_on_field, params, count)

        {[{:asc, distinct_expr} | expr], params, count}
    end)

  %{
    distinct
    | expr: distinct.expr ++ Enum.reverse(expr),
      params: distinct.params ++ Enum.reverse(params)
  }
end
# Converts one distinct field (an attribute name or a calculation) into a
# partially-expanded Ecto expression plus the updated params/count, ready to
# be spliced into a distinct QueryExpr.
defp distinct_on_expr(query, field, params, count) do
  resource = query.__ash_bindings__.resource

  ref =
    case field do
      %Ash.Query.Calculation{} = calc ->
        %Ref{attribute: calc, relationship_path: [], resource: resource}

      field ->
        %Ref{
          attribute: Ash.Resource.Info.field(resource, field),
          relationship_path: [],
          resource: resource
        }
    end

  dynamic = AshPostgres.Expr.dynamic_expr(query, ref, query.__ash_bindings__)

  result =
    Ecto.Query.Builder.Dynamic.partially_expand(
      :distinct,
      query,
      dynamic,
      params,
      count
    )

  expr = elem(result, 0)
  new_params = elem(result, 1)
  # NOTE(review): the result tuple's arity appears to vary, so the running
  # count is taken as the last element rather than a fixed position.
  new_count = result |> Tuple.to_list() |> List.last()

  {expr, new_params, new_count}
end
2020-06-14 19:04:18 +12:00
@impl true
def filter(query, filter, resource, opts \\ []) do
  query = default_bindings(query, resource)

  used_calculations =
    Ash.Filter.used_calculations(
      filter,
      resource
    )

  # Any aggregates referenced by the filter must be joined in before the
  # filter expression can be built.
  used_aggregates =
    filter
    |> AshPostgres.Aggregate.used_aggregates(resource, used_calculations, [])
    |> Enum.map(fn aggregate -> %{aggregate | load: aggregate.name} end)

  # Error tuples from either step fall straight through to the caller.
  with {:ok, query} <- AshPostgres.Join.join_all_relationships(query, filter, opts),
       {:ok, query} <-
         AshPostgres.Aggregate.add_aggregates(query, used_aggregates, resource, false, 0) do
    {:ok, add_filter_expression(query, filter)}
  end
end
2021-12-21 16:19:24 +13:00
@doc false
def default_bindings(query, resource, context \\ %{})

# Already initialized: leave the query untouched.
def default_bindings(%{__ash_bindings__: _} = query, _resource, _context), do: query

def default_bindings(query, resource, context) do
  offset = context[:data_layer][:start_bindings_at] || 0

  initial_bindings = %{
    resource: resource,
    current: length(query.joins) + 1 + offset,
    in_group?: false,
    calculations: %{},
    parent_resources: [],
    aggregate_defs: %{},
    current_aggregate_name: :aggregate_0,
    aggregate_names: %{},
    context: context,
    bindings: %{offset => %{path: [], type: :root, source: resource}}
  }

  Map.put_new(query, :__ash_bindings__, initial_bindings)
end
@impl true
def add_aggregates(query, aggregates, resource) do
  # NOTE(review): the trailing `true`/`0` args mirror the other call site in
  # filter/4 (which passes `false`); presumably select?/binding-offset —
  # confirm against AshPostgres.Aggregate.add_aggregates/5.
  AshPostgres.Aggregate.add_aggregates(query, aggregates, resource, true, 0)
end
2021-06-04 17:48:35 +12:00
@impl true
def add_calculations(query, calculations, resource) do
  # Delegates to the calculation builder starting at binding offset 0.
  AshPostgres.Calculation.add_calculations(query, calculations, resource, 0)
end
2021-12-21 16:19:24 +13:00
@doc false
def get_binding(resource, path, query, type, name_match \\ nil)

def get_binding(resource, path, %{__ash_bindings__: _} = query, type, name_match) do
  accepted_types = List.wrap(type)

  # Find the first binding whose type is accepted, whose (optional) name
  # matches, and whose relationship path is synonymous with `path`.
  Enum.find_value(query.__ash_bindings__.bindings, fn
    {binding, %{path: candidate_path, type: candidate_type} = data} ->
      cond do
        candidate_type not in accepted_types ->
          nil

        name_match && data[:name] != name_match ->
          nil

        Ash.SatSolver.synonymous_relationship_paths?(resource, candidate_path, path) ->
          binding

        true ->
          nil
      end

    _ ->
      nil
  end)
end

def get_binding(_, _, _, _, _), do: nil
2021-06-04 17:48:35 +12:00
2021-12-21 16:19:24 +13:00
# Applies a filter by splitting it into its top-level AND-ed components and
# adding each one as a separate `where` clause.
defp add_filter_expression(query, filter) do
  filter
  |> split_and_statements()
  |> Enum.reduce(query, fn condition, acc ->
    dynamic = AshPostgres.Expr.dynamic_expr(acc, condition, acc.__ash_bindings__)

    Ecto.Query.where(acc, ^dynamic)
  end)
end
2021-12-21 16:19:24 +13:00
# Splits a filter into its top-level AND-ed components, simplifying
# negations along the way so each component can become its own `where`.
defp split_and_statements(%Filter{expression: expression}) do
  split_and_statements(expression)
end

defp split_and_statements(%BooleanExpression{op: :and, left: left, right: right}) do
  Enum.concat(split_and_statements(left), split_and_statements(right))
end

# Double negation: not(not(x)) == x
defp split_and_statements(%Not{expression: %Not{expression: expression}}) do
  split_and_statements(expression)
end

# De Morgan: not(a or b) == (not a) and (not b)
defp split_and_statements(%Not{
       expression: %BooleanExpression{op: :or, left: left, right: right}
     }) do
  split_and_statements(%BooleanExpression{
    op: :and,
    left: %Not{expression: left},
    right: %Not{expression: right}
  })
end

defp split_and_statements(other), do: [other]
2020-07-23 17:13:47 +12:00
2021-12-21 16:19:24 +13:00
@doc false
def add_binding(query, data, additional_bindings \\ 0) do
  %{current: index, bindings: bound} = ash_bindings = query.__ash_bindings__

  # Register `data` at the current index and advance past it (plus any
  # extra bindings the caller is about to consume).
  updated =
    ash_bindings
    |> Map.put(:bindings, Map.put(bound, index, data))
    |> Map.put(:current, index + 1 + additional_bindings)

  %{query | __ash_bindings__: updated}
end
2023-02-09 08:46:29 +13:00
# Registers `data` at a specific, caller-chosen binding index without
# advancing the `current` counter.
def add_known_binding(query, data, known_binding) do
  updated_bindings =
    Map.update!(
      query.__ash_bindings__,
      :bindings,
      &Map.put(&1, known_binding, data)
    )

  %{query | __ash_bindings__: updated_bindings}
end
2021-12-21 16:19:24 +13:00
@impl true
def transaction(resource, func, timeout \\ nil, reason \\ %{type: :custom, metadata: %{}}) do
  # Prefer a repo explicitly carried in the reason's data layer context;
  # otherwise fall back to the resource's configured read repo.
  repo =
    case reason[:data_layer_context] do
      %{repo: repo} when not is_nil(repo) ->
        repo

      _ ->
        AshPostgres.DataLayer.Info.repo(resource, :read)
    end

  # Notify the repo that a transaction is starting before running the work.
  wrapped = fn ->
    repo.on_transaction_begin(reason)

    func.()
  end

  if timeout do
    repo.transaction(wrapped, timeout: timeout)
  else
    repo.transaction(wrapped)
  end
end
@impl true
def rollback(resource, term) do
  repo = AshPostgres.DataLayer.Info.repo(resource, :mutate)

  repo.rollback(term)
end
2021-02-06 12:59:33 +13:00
# Resolves the table name, preferring a changeset-context override (used by
# polymorphic resources) over the statically configured table.
defp table(resource, changeset) do
  override = changeset.context[:data_layer][:table]

  override || AshPostgres.DataLayer.Info.table(resource)
end
2021-03-22 10:58:47 +13:00
# Raises a descriptive error when no table could be resolved for the given
# operation, with extra guidance for polymorphic resources.
defp raise_table_error!(resource, operation) do
  message =
    if AshPostgres.DataLayer.Info.polymorphic?(resource) do
      """
      Could not determine table for #{operation} on #{inspect(resource)}.
      Polymorphic resources require that the `data_layer[:table]` context is provided.
      See the guide on polymorphic resources for more information.
      """
    else
      """
      Could not determine table for #{operation} on #{inspect(resource)}.
      """
    end

  raise message
end
2023-01-20 03:33:19 +13:00
# Chooses the repo for a query/changeset: an explicit repo placed in the
# data_layer context wins; otherwise the resource's configured repo for the
# kind of operation (read vs. mutate) implied by the struct.
defp dynamic_repo(resource, %{__ash_bindings__: %{context: %{data_layer: %{repo: repo}}}}) do
  repo || AshPostgres.DataLayer.Info.repo(resource, :read)
end

defp dynamic_repo(resource, %struct{context: %{data_layer: %{repo: repo}}}) do
  repo || AshPostgres.DataLayer.Info.repo(resource, struct_to_repo_type(struct))
end

defp dynamic_repo(resource, %struct{}) do
  AshPostgres.DataLayer.Info.repo(resource, struct_to_repo_type(struct))
end
2023-11-15 04:56:22 +13:00
# Maps a query/changeset struct module to the repo kind it implies:
# changesets mutate, queries read.
defp struct_to_repo_type(struct_module) do
  case struct_module do
    Ash.Changeset -> :mutate
    Ecto.Changeset -> :mutate
    Ash.Query -> :read
    Ecto.Query -> :read
  end
end
2020-06-14 19:04:18 +12:00
end