diff --git a/.github/ISSUE_TEMPLATE/feature_request.md b/.github/ISSUE_TEMPLATE/feature_request.md index f347dcb..181e583 100644 --- a/.github/ISSUE_TEMPLATE/feature_request.md +++ b/.github/ISSUE_TEMPLATE/feature_request.md @@ -1,10 +1,9 @@ --- name: Proposal about: Suggest an idea for this project -title: '' +title: "" labels: enhancement, needs review -assignees: '' - +assignees: "" --- **Is your feature request related to a problem? Please describe.** @@ -29,7 +28,7 @@ For example Or ```elixir - Api.read(:resource, bar: 10) # <- Adding `bar` here would cause + Ash.read(:resource, bar: 10) # <- Adding `bar` here would cause ``` **Additional context** diff --git a/README.md b/README.md index 289eab0..75119e3 100644 --- a/README.md +++ b/README.md @@ -27,7 +27,7 @@ Then, configure each of your `Ash.Resource` resources by adding `use Ash.Resourc ```elixir defmodule MyApp.SomeResource do - use Ash.Resource, data_layer: AshSqlite.DataLayer + use Ash.Resource, domain: MyDomain, data_layer: AshSqlite.DataLayer sqlite do repo MyApp.Repo diff --git a/config/config.exs b/config/config.exs index 6991908..9765e4f 100644 --- a/config/config.exs +++ b/config/config.exs @@ -15,8 +15,8 @@ if Mix.env() == :dev do end if Mix.env() == :test do - config :ash, :validate_api_resource_inclusion?, false - config :ash, :validate_api_config_inclusion?, false + config :ash, :validate_domain_resource_inclusion?, false + config :ash, :validate_domain_config_inclusion?, false config :ash_sqlite, AshSqlite.TestRepo, database: Path.join(__DIR__, "../test/test.db"), @@ -27,8 +27,8 @@ if Mix.env() == :test do config :ash_sqlite, ecto_repos: [AshSqlite.TestRepo], - ash_apis: [ - AshSqlite.Test.Api + ash_domains: [ + AshSqlite.Test.Domain ] config :logger, level: :warning diff --git a/documentation/dsls/DSL:-AshSqlite.DataLayer.cheatmd b/documentation/dsls/DSL:-AshSqlite.DataLayer.cheatmd deleted file mode 100644 index 6c75a5f..0000000 --- a/documentation/dsls/DSL:-AshSqlite.DataLayer.cheatmd +++ /dev/null @@ -1,1004 +0,0 @@ - -# DSL: AshSqlite.DataLayer - -A sqlite data layer that leverages Ecto's sqlite capabilities. - - -## sqlite -Sqlite data layer configuration - - -### Nested DSLs - * [custom_indexes](#sqlite-custom_indexes) - * index - * [custom_statements](#sqlite-custom_statements) - * statement - * [references](#sqlite-references) - * reference - - -### Examples -``` -sqlite do - repo MyApp.Repo - table "organizations" -end - -``` - - - - -### Options - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
NameTypeDefaultDocs
- - - repo - - - * - - - atom - - - - The repo that will be used to fetch your data. See the `AshSqlite.Repo` documentation for more -
- - - migrate? - - - - - boolean - - true - - Whether or not to include this resource in the generated migrations with `mix ash.generate_migrations` -
- - - migration_types - - - - - Keyword.t - - [] - - A keyword list of attribute names to the ecto migration type that should be used for that attribute. Only necessary if you need to override the defaults. -
- - - migration_defaults - - - - - Keyword.t - - [] - - A keyword list of attribute names to the ecto migration default that should be used for that attribute. The string you use will be placed verbatim in the migration. Use fragments like `fragment(\\"now()\\")`, or for `nil`, use `\\"nil\\"`. - -
- - - base_filter_sql - - - - - String.t - - - - A raw sql version of the base_filter, e.g `representative = true`. Required if trying to create a unique constraint on a resource with a base_filter -
- - - skip_unique_indexes - - - - - list(atom) | atom - - false - - Skip generating unique indexes when generating migrations -
- - - unique_index_names - - - - - list({list(atom), String.t} | {list(atom), String.t, String.t}) - - [] - - A list of unique index names that could raise errors that are not configured in identities, or an mfa to a function that takes a changeset and returns the list. In the format `{[:affected, :keys], "name_of_constraint"}` or `{[:affected, :keys], "name_of_constraint", "custom error message"}` - -
- - - exclusion_constraint_names - - - - - `any` - - [] - - A list of exclusion constraint names that could raise errors. Must be in the format `{:affected_key, "name_of_constraint"}` or `{:affected_key, "name_of_constraint", "custom error message"}` - -
- - - identity_index_names - - - - - `any` - - [] - - A keyword list of identity names to the unique index name that they should use when being managed by the migration generator. - -
- - - foreign_key_names - - - - - list({atom, String.t} | {String.t, String.t}) - - [] - - A list of foreign keys that could raise errors, or an mfa to a function that takes a changeset and returns a list. In the format: `{:key, "name_of_constraint"}` or `{:key, "name_of_constraint", "custom error message"}` - -
- - - migration_ignore_attributes - - - - - list(atom) - - [] - - A list of attributes that will be ignored when generating migrations. - -
- - - table - - - - - String.t - - - - The table to store and read the resource from. If this is changed, the migration generator will not remove the old table. - -
- - - polymorphic? - - - - - boolean - - false - - Declares this resource as polymorphic. See the [polymorphic resources guide](/documentation/topics/polymorphic_resources.md) for more. - -
- - -## sqlite.custom_indexes -A section for configuring indexes to be created by the migration generator. - -In general, prefer to use `identities` for simple unique constraints. This is a tool to allow -for declaring more complex indexes. - - -### Nested DSLs - * [index](#sqlite-custom_indexes-index) - - -### Examples -``` -custom_indexes do - index [:column1, :column2], unique: true, where: "thing = TRUE" -end - -``` - - - - -## sqlite.custom_indexes.index -```elixir -index fields -``` - - -Add an index to be managed by the migration generator. - - - - -### Examples -``` -index ["column", "column2"], unique: true, where: "thing = TRUE" -``` - - - -### Arguments - - - - - - - - - - - - - - - - - - - -
NameTypeDefaultDocs
- - - fields - - - - - list(atom | String.t) | atom | String.t - - - - The fields to include in the index. -
-### Options - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
NameTypeDefaultDocs
- - - name - - - - - String.t - - - - the name of the index. Defaults to "#{table}_#{column}_index". -
- - - unique - - - - - boolean - - false - - indicates whether the index should be unique. -
- - - using - - - - - String.t - - - - configures the index type. -
- - - where - - - - - String.t - - - - specify conditions for a partial index. -
- - - message - - - - - String.t - - - - A custom message to use for unique indexes that have been violated -
- - - include - - - - - list(String.t) - - - - specify fields for a covering index. This is not supported by all databases. For more information on SQLite support, please read the official docs. -
- - - - - -### Introspection - -Target: `AshSqlite.CustomIndex` - - -## sqlite.custom_statements -A section for configuring custom statements to be added to migrations. - -Changing custom statements may require manual intervention, because Ash can't determine what order they should run -in (i.e if they depend on table structure that you've added, or vice versa). As such, any `down` statements we run -for custom statements happen first, and any `up` statements happen last. - -Additionally, when changing a custom statement, we must make some assumptions, i.e that we should migrate -the old structure down using the previously configured `down` and recreate it. - -This may not be desired, and so what you may end up doing is simply modifying the old migration and deleting whatever was -generated by the migration generator. As always: read your migrations after generating them! - - -### Nested DSLs - * [statement](#sqlite-custom_statements-statement) - - -### Examples -``` -custom_statements do - # the name is used to detect if you remove or modify the statement - statement :pgweb_idx do - up "CREATE INDEX pgweb_idx ON pgweb USING GIN (to_tsvector('english', title || ' ' || body));" - down "DROP INDEX pgweb_idx;" - end -end - -``` - - - - -## sqlite.custom_statements.statement -```elixir -statement name -``` - - -Add a custom statement for migrations. - - - - -### Examples -``` -statement :pgweb_idx do - up "CREATE INDEX pgweb_idx ON pgweb USING GIN (to_tsvector('english', title || ' ' || body));" - down "DROP INDEX pgweb_idx;" -end - -``` - - - -### Arguments - - - - - - - - - - - - - - - - - - - -
NameTypeDefaultDocs
- - - name - - - * - - - atom - - - - The name of the statement, must be unique within the resource - -
-### Options - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
NameTypeDefaultDocs
- - - up - - - * - - - String.t - - - - How to create the structure of the statement - -
- - - down - - - * - - - String.t - - - - How to tear down the structure of the statement -
- - - code? - - - - - boolean - - false - - By default, we place the strings inside of ecto migration's `execute/1` function and assume they are sql. Use this option if you want to provide custom elixir code to be placed directly in the migrations - -
- - - - - -### Introspection - -Target: `AshSqlite.Statement` - - -## sqlite.references -A section for configuring the references (foreign keys) in resource migrations. - -This section is only relevant if you are using the migration generator with this resource. -Otherwise, it has no effect. - - -### Nested DSLs - * [reference](#sqlite-references-reference) - - -### Examples -``` -references do - reference :post, on_delete: :delete, on_update: :update, name: "comments_to_posts_fkey" -end - -``` - - - - -### Options - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
NameTypeDefaultDocs
- - - polymorphic_on_delete - - - - - :delete | :nilify | :nothing | :restrict - - - - For polymorphic resources, configures the on_delete behavior of the automatically generated foreign keys to source tables. -
- - - polymorphic_on_update - - - - - :update | :nilify | :nothing | :restrict - - - - For polymorphic resources, configures the on_update behavior of the automatically generated foreign keys to source tables. -
- - - polymorphic_name - - - - - :update | :nilify | :nothing | :restrict - - - - For polymorphic resources, configures the on_update behavior of the automatically generated foreign keys to source tables. -
- - - -## sqlite.references.reference -```elixir -reference relationship -``` - - -Configures the reference for a relationship in resource migrations. - -Keep in mind that multiple relationships can theoretically involve the same destination and foreign keys. -In those cases, you only need to configure the `reference` behavior for one of them. Any conflicts will result -in an error, across this resource and any other resources that share a table with this one. For this reason, -instead of adding a reference configuration for `:nothing`, its best to just leave the configuration out, as that -is the default behavior if *no* relationship anywhere has configured the behavior of that reference. - - - - -### Examples -``` -reference :post, on_delete: :delete, on_update: :update, name: "comments_to_posts_fkey" -``` - - - -### Arguments - - - - - - - - - - - - - - - - - - - -
NameTypeDefaultDocs
- - - relationship - - - * - - - atom - - - - The relationship to be configured -
-### Options - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
NameTypeDefaultDocs
- - - ignore? - - - - - boolean - - - - If set to true, no reference is created for the given relationship. This is useful if you need to define it in some custom way -
- - - on_delete - - - - - :delete | :nilify | :nothing | :restrict - - - - What should happen to records of this resource when the referenced record of the *destination* resource is deleted. - -
- - - on_update - - - - - :update | :nilify | :nothing | :restrict - - - - What should happen to records of this resource when the referenced destination_attribute of the *destination* record is update. - -
- - - deferrable - - - - - false | true | :initially - - false - - Wether or not the constraint is deferrable. This only affects the migration generator. - -
- - - name - - - - - String.t - - - - The name of the foreign key to generate in the database. Defaults to __fkey - - - - -
- - - - - -### Introspection - -Target: `AshSqlite.Reference` - - - - - - diff --git a/documentation/dsls/DSL:-AshSqlite.DataLayer.md b/documentation/dsls/DSL:-AshSqlite.DataLayer.md new file mode 100644 index 0000000..8236e7c --- /dev/null +++ b/documentation/dsls/DSL:-AshSqlite.DataLayer.md @@ -0,0 +1,280 @@ + +# DSL: AshSqlite.DataLayer + +A sqlite data layer that leverages Ecto's sqlite capabilities. + + +## sqlite +Sqlite data layer configuration + + +### Nested DSLs + * [custom_indexes](#sqlite-custom_indexes) + * index + * [custom_statements](#sqlite-custom_statements) + * statement + * [references](#sqlite-references) + * reference + + +### Examples +``` +sqlite do + repo MyApp.Repo + table "organizations" +end + +``` + + + + +### Options + +| Name | Type | Default | Docs | +|------|------|---------|------| +| [`repo`](#sqlite-repo){: #sqlite-repo .spark-required} | `atom` | | The repo that will be used to fetch your data. See the `AshSqlite.Repo` documentation for more | +| [`migrate?`](#sqlite-migrate?){: #sqlite-migrate? } | `boolean` | `true` | Whether or not to include this resource in the generated migrations with `mix ash.generate_migrations` | +| [`migration_types`](#sqlite-migration_types){: #sqlite-migration_types } | `keyword` | `[]` | A keyword list of attribute names to the ecto migration type that should be used for that attribute. Only necessary if you need to override the defaults. | +| [`migration_defaults`](#sqlite-migration_defaults){: #sqlite-migration_defaults } | `keyword` | `[]` | A keyword list of attribute names to the ecto migration default that should be used for that attribute. The string you use will be placed verbatim in the migration. Use fragments like `fragment(\\"now()\\")`, or for `nil`, use `\\"nil\\"`. | +| [`base_filter_sql`](#sqlite-base_filter_sql){: #sqlite-base_filter_sql } | `String.t` | | A raw sql version of the base_filter, e.g `representative = true`. Required if trying to create a unique constraint on a resource with a base_filter | +| [`skip_unique_indexes`](#sqlite-skip_unique_indexes){: #sqlite-skip_unique_indexes } | `atom \| list(atom)` | `false` | Skip generating unique indexes when generating migrations | +| [`unique_index_names`](#sqlite-unique_index_names){: #sqlite-unique_index_names } | `list({list(atom), String.t} \| {list(atom), String.t, String.t})` | `[]` | A list of unique index names that could raise errors that are not configured in identities, or an mfa to a function that takes a changeset and returns the list. In the format `{[:affected, :keys], "name_of_constraint"}` or `{[:affected, :keys], "name_of_constraint", "custom error message"}` | +| [`exclusion_constraint_names`](#sqlite-exclusion_constraint_names){: #sqlite-exclusion_constraint_names } | `any` | `[]` | A list of exclusion constraint names that could raise errors. Must be in the format `{:affected_key, "name_of_constraint"}` or `{:affected_key, "name_of_constraint", "custom error message"}` | +| [`identity_index_names`](#sqlite-identity_index_names){: #sqlite-identity_index_names } | `any` | `[]` | A keyword list of identity names to the unique index name that they should use when being managed by the migration generator. | +| [`foreign_key_names`](#sqlite-foreign_key_names){: #sqlite-foreign_key_names } | `list({atom, String.t} \| {String.t, String.t})` | `[]` | A list of foreign keys that could raise errors, or an mfa to a function that takes a changeset and returns a list. 
In the format: `{:key, "name_of_constraint"}` or `{:key, "name_of_constraint", "custom error message"}` | +| [`migration_ignore_attributes`](#sqlite-migration_ignore_attributes){: #sqlite-migration_ignore_attributes } | `list(atom)` | `[]` | A list of attributes that will be ignored when generating migrations. | +| [`table`](#sqlite-table){: #sqlite-table } | `String.t` | | The table to store and read the resource from. If this is changed, the migration generator will not remove the old table. | +| [`polymorphic?`](#sqlite-polymorphic?){: #sqlite-polymorphic? } | `boolean` | `false` | Declares this resource as polymorphic. See the [polymorphic resources guide](/documentation/topics/polymorphic_resources.md) for more. | + + +## sqlite.custom_indexes +A section for configuring indexes to be created by the migration generator. + +In general, prefer to use `identities` for simple unique constraints. This is a tool to allow +for declaring more complex indexes. + + +### Nested DSLs + * [index](#sqlite-custom_indexes-index) + + +### Examples +``` +custom_indexes do + index [:column1, :column2], unique: true, where: "thing = TRUE" +end + +``` + + + + +## sqlite.custom_indexes.index +```elixir +index fields +``` + + +Add an index to be managed by the migration generator. + + + + +### Examples +``` +index ["column", "column2"], unique: true, where: "thing = TRUE" +``` + + + +### Arguments + +| Name | Type | Default | Docs | +|------|------|---------|------| +| [`fields`](#sqlite-custom_indexes-index-fields){: #sqlite-custom_indexes-index-fields } | `atom \| String.t \| list(atom \| String.t)` | | The fields to include in the index. | +### Options + +| Name | Type | Default | Docs | +|------|------|---------|------| +| [`name`](#sqlite-custom_indexes-index-name){: #sqlite-custom_indexes-index-name } | `String.t` | | the name of the index. Defaults to "#{table}_#{column}_index". | +| [`unique`](#sqlite-custom_indexes-index-unique){: #sqlite-custom_indexes-index-unique } | `boolean` | `false` | indicates whether the index should be unique. | +| [`using`](#sqlite-custom_indexes-index-using){: #sqlite-custom_indexes-index-using } | `String.t` | | configures the index type. | +| [`where`](#sqlite-custom_indexes-index-where){: #sqlite-custom_indexes-index-where } | `String.t` | | specify conditions for a partial index. | +| [`message`](#sqlite-custom_indexes-index-message){: #sqlite-custom_indexes-index-message } | `String.t` | | A custom message to use for unique indexes that have been violated | +| [`include`](#sqlite-custom_indexes-index-include){: #sqlite-custom_indexes-index-include } | `list(String.t)` | | specify fields for a covering index. This is not supported by all databases. For more information on SQLite support, please read the official docs. | + + + + + +### Introspection + +Target: `AshSqlite.CustomIndex` + + +## sqlite.custom_statements +A section for configuring custom statements to be added to migrations. + +Changing custom statements may require manual intervention, because Ash can't determine what order they should run +in (i.e if they depend on table structure that you've added, or vice versa). As such, any `down` statements we run +for custom statements happen first, and any `up` statements happen last. + +Additionally, when changing a custom statement, we must make some assumptions, i.e that we should migrate +the old structure down using the previously configured `down` and recreate it. 
+ +This may not be desired, and so what you may end up doing is simply modifying the old migration and deleting whatever was +generated by the migration generator. As always: read your migrations after generating them! + + +### Nested DSLs + * [statement](#sqlite-custom_statements-statement) + + +### Examples +``` +custom_statements do + # the name is used to detect if you remove or modify the statement + statement :pgweb_idx do + up "CREATE INDEX pgweb_idx ON pgweb USING GIN (to_tsvector('english', title || ' ' || body));" + down "DROP INDEX pgweb_idx;" + end +end + +``` + + + + +## sqlite.custom_statements.statement +```elixir +statement name +``` + + +Add a custom statement for migrations. + + + + +### Examples +``` +statement :pgweb_idx do + up "CREATE INDEX pgweb_idx ON pgweb USING GIN (to_tsvector('english', title || ' ' || body));" + down "DROP INDEX pgweb_idx;" +end + +``` + + + +### Arguments + +| Name | Type | Default | Docs | +|------|------|---------|------| +| [`name`](#sqlite-custom_statements-statement-name){: #sqlite-custom_statements-statement-name .spark-required} | `atom` | | The name of the statement, must be unique within the resource | +### Options + +| Name | Type | Default | Docs | +|------|------|---------|------| +| [`up`](#sqlite-custom_statements-statement-up){: #sqlite-custom_statements-statement-up .spark-required} | `String.t` | | How to create the structure of the statement | +| [`down`](#sqlite-custom_statements-statement-down){: #sqlite-custom_statements-statement-down .spark-required} | `String.t` | | How to tear down the structure of the statement | +| [`code?`](#sqlite-custom_statements-statement-code?){: #sqlite-custom_statements-statement-code? } | `boolean` | `false` | By default, we place the strings inside of ecto migration's `execute/1` function and assume they are sql. Use this option if you want to provide custom elixir code to be placed directly in the migrations | + + + + + +### Introspection + +Target: `AshSqlite.Statement` + + +## sqlite.references +A section for configuring the references (foreign keys) in resource migrations. + +This section is only relevant if you are using the migration generator with this resource. +Otherwise, it has no effect. + + +### Nested DSLs + * [reference](#sqlite-references-reference) + + +### Examples +``` +references do + reference :post, on_delete: :delete, on_update: :update, name: "comments_to_posts_fkey" +end + +``` + + + + +### Options + +| Name | Type | Default | Docs | +|------|------|---------|------| +| [`polymorphic_on_delete`](#sqlite-references-polymorphic_on_delete){: #sqlite-references-polymorphic_on_delete } | `:delete \| :nilify \| :nothing \| :restrict` | | For polymorphic resources, configures the on_delete behavior of the automatically generated foreign keys to source tables. | +| [`polymorphic_on_update`](#sqlite-references-polymorphic_on_update){: #sqlite-references-polymorphic_on_update } | `:update \| :nilify \| :nothing \| :restrict` | | For polymorphic resources, configures the on_update behavior of the automatically generated foreign keys to source tables. | +| [`polymorphic_name`](#sqlite-references-polymorphic_name){: #sqlite-references-polymorphic_name } | `:update \| :nilify \| :nothing \| :restrict` | | For polymorphic resources, configures the on_update behavior of the automatically generated foreign keys to source tables. | + + + +## sqlite.references.reference +```elixir +reference relationship +``` + + +Configures the reference for a relationship in resource migrations. 
+ +Keep in mind that multiple relationships can theoretically involve the same destination and foreign keys. +In those cases, you only need to configure the `reference` behavior for one of them. Any conflicts will result +in an error, across this resource and any other resources that share a table with this one. For this reason, +instead of adding a reference configuration for `:nothing`, its best to just leave the configuration out, as that +is the default behavior if *no* relationship anywhere has configured the behavior of that reference. + + + + +### Examples +``` +reference :post, on_delete: :delete, on_update: :update, name: "comments_to_posts_fkey" +``` + + + +### Arguments + +| Name | Type | Default | Docs | +|------|------|---------|------| +| [`relationship`](#sqlite-references-reference-relationship){: #sqlite-references-reference-relationship .spark-required} | `atom` | | The relationship to be configured | +### Options + +| Name | Type | Default | Docs | +|------|------|---------|------| +| [`ignore?`](#sqlite-references-reference-ignore?){: #sqlite-references-reference-ignore? } | `boolean` | | If set to true, no reference is created for the given relationship. This is useful if you need to define it in some custom way | +| [`on_delete`](#sqlite-references-reference-on_delete){: #sqlite-references-reference-on_delete } | `:delete \| :nilify \| :nothing \| :restrict` | | What should happen to records of this resource when the referenced record of the *destination* resource is deleted. | +| [`on_update`](#sqlite-references-reference-on_update){: #sqlite-references-reference-on_update } | `:update \| :nilify \| :nothing \| :restrict` | | What should happen to records of this resource when the referenced destination_attribute of the *destination* record is update. | +| [`deferrable`](#sqlite-references-reference-deferrable){: #sqlite-references-reference-deferrable } | `false \| true \| :initially` | `false` | Wether or not the constraint is deferrable. This only affects the migration generator. | +| [`name`](#sqlite-references-reference-name){: #sqlite-references-reference-name } | `String.t` | | The name of the foreign key to generate in the database. 
Defaults to __fkey | + + + + + +### Introspection + +Target: `AshSqlite.Reference` + + + + + + + + diff --git a/documentation/topics/migrations_and_tasks.md b/documentation/topics/migrations_and_tasks.md index 0f89637..c5986b7 100644 --- a/documentation/topics/migrations_and_tasks.md +++ b/documentation/topics/migrations_and_tasks.md @@ -86,17 +86,17 @@ defmodule MyApp.Release do end defp repos do - apis() - |> Enum.flat_map(fn api -> - api - |> Ash.Api.Info.resources() + domains() + |> Enum.flat_map(fn domain -> + domain + |> Ash.Domain.Info.resources() |> Enum.map(&AshSqlite.repo/1) end) |> Enum.uniq() end - defp apis do - Application.fetch_env!(:my_app, :ash_apis) + defp domains do + Application.fetch_env!(:my_app, :ash_domains) end defp load_app do diff --git a/documentation/topics/polymorphic_resources.md b/documentation/topics/polymorphic_resources.md index 54c28b2..2616fe9 100644 --- a/documentation/topics/polymorphic_resources.md +++ b/documentation/topics/polymorphic_resources.md @@ -5,6 +5,7 @@ To support leveraging the same resource backed by multiple tables (useful for th ```elixir defmodule MyApp.Reaction do use Ash.Resource, + domain: MyApp.Domain, data_layer: AshSqlite.DataLayer sqlite do @@ -24,6 +25,7 @@ Then, in your related resources, you set the table context like so: ```elixir defmodule MyApp.Post do use Ash.Resource, + domain: MyApp.Domain, data_layer: AshSqlite.DataLayer ... @@ -37,6 +39,7 @@ end defmodule MyApp.Comment do use Ash.Resource, + domain: MyApp.Domain, data_layer: AshSqlite.DataLayer ... diff --git a/documentation/tutorials/get-started-with-sqlite.md b/documentation/tutorials/get-started-with-sqlite.md index fe779ee..9d4fa76 100644 --- a/documentation/tutorials/get-started-with-sqlite.md +++ b/documentation/tutorials/get-started-with-sqlite.md @@ -136,6 +136,7 @@ Now we can add the data layer to our resources. The basic configuration for a re # in lib/helpdesk/support/resources/ticket.ex use Ash.Resource, + domain: MyApp.Domain, data_layer: AshSqlite.DataLayer sqlite do @@ -148,6 +149,7 @@ Now we can add the data layer to our resources. The basic configuration for a re # in lib/helpdesk/support/resources/representative.ex use Ash.Resource, + domain: MyApp.Domain, data_layer: AshSqlite.DataLayer sqlite do diff --git a/lib/aggregate.ex b/lib/aggregate.ex deleted file mode 100644 index f6ca44c..0000000 --- a/lib/aggregate.ex +++ /dev/null @@ -1,336 +0,0 @@ -defmodule AshSqlite.Aggregate do - @moduledoc false - - require Ecto.Query - - def add_subquery_aggregate_select( - query, - relationship_path, - %{kind: :first} = aggregate, - resource, - is_single? - ) do - query = AshSqlite.DataLayer.default_bindings(query, aggregate.resource) - - ref = %Ash.Query.Ref{ - attribute: aggregate_field(aggregate, resource, relationship_path, query), - relationship_path: relationship_path, - resource: query.__ash_bindings__.resource - } - - type = AshSqlite.Types.parameterized_type(aggregate.type, aggregate.constraints) - - binding = - AshSqlite.DataLayer.get_binding( - query.__ash_bindings__.resource, - relationship_path, - query, - [:left, :inner, :root] - ) - - field = AshSqlite.Expr.dynamic_expr(query, ref, query.__ash_bindings__, false) - - sorted = - if has_sort?(aggregate.query) do - {:ok, sort_expr} = - AshSqlite.Sort.sort( - query, - aggregate.query.sort, - Ash.Resource.Info.related( - query.__ash_bindings__.resource, - relationship_path - ), - relationship_path, - binding, - true - ) - - question_marks = Enum.map(sort_expr, fn _ -> " ? 
" end) - - {:ok, expr} = - AshSqlite.Functions.Fragment.casted_new( - ["array_agg(? ORDER BY #{question_marks})", field] ++ sort_expr - ) - - AshSqlite.Expr.dynamic_expr(query, expr, query.__ash_bindings__, false) - else - Ecto.Query.dynamic( - [row], - fragment("array_agg(?)", ^field) - ) - end - - filtered = filter_field(sorted, query, aggregate, relationship_path, is_single?) - - value = Ecto.Query.dynamic(fragment("(?)[1]", ^filtered)) - - with_default = - if aggregate.default_value do - if type do - Ecto.Query.dynamic(coalesce(^value, type(^aggregate.default_value, ^type))) - else - Ecto.Query.dynamic(coalesce(^value, ^aggregate.default_value)) - end - else - value - end - - casted = - if type do - Ecto.Query.dynamic(type(^with_default, ^type)) - else - with_default - end - - select_or_merge(query, aggregate.name, casted) - end - - def add_subquery_aggregate_select( - query, - relationship_path, - %{kind: :list} = aggregate, - resource, - is_single? - ) do - query = AshSqlite.DataLayer.default_bindings(query, aggregate.resource) - type = AshSqlite.Types.parameterized_type(aggregate.type, aggregate.constraints) - - binding = - AshSqlite.DataLayer.get_binding( - query.__ash_bindings__.resource, - relationship_path, - query, - [:left, :inner, :root] - ) - - ref = %Ash.Query.Ref{ - attribute: aggregate_field(aggregate, resource, relationship_path, query), - relationship_path: relationship_path, - resource: query.__ash_bindings__.resource - } - - field = AshSqlite.Expr.dynamic_expr(query, ref, query.__ash_bindings__, false) - - sorted = - if has_sort?(aggregate.query) do - {:ok, sort_expr} = - AshSqlite.Sort.sort( - query, - aggregate.query.sort, - Ash.Resource.Info.related( - query.__ash_bindings__.resource, - relationship_path - ), - relationship_path, - binding, - true - ) - - question_marks = Enum.map(sort_expr, fn _ -> " ? " end) - - distinct = - if Map.get(aggregate, :uniq?) do - "DISTINCT " - else - "" - end - - {:ok, expr} = - AshSqlite.Functions.Fragment.casted_new( - ["array_agg(#{distinct}? ORDER BY #{question_marks})", field] ++ sort_expr - ) - - AshSqlite.Expr.dynamic_expr(query, expr, query.__ash_bindings__, false) - else - if Map.get(aggregate, :uniq?) do - Ecto.Query.dynamic( - [row], - fragment("array_agg(DISTINCT ?)", ^field) - ) - else - Ecto.Query.dynamic( - [row], - fragment("array_agg(?)", ^field) - ) - end - end - - filtered = filter_field(sorted, query, aggregate, relationship_path, is_single?) - - with_default = - if aggregate.default_value do - if type do - Ecto.Query.dynamic(coalesce(^filtered, type(^aggregate.default_value, ^type))) - else - Ecto.Query.dynamic(coalesce(^filtered, ^aggregate.default_value)) - end - else - filtered - end - - cast = - if type do - Ecto.Query.dynamic(type(^with_default, ^type)) - else - with_default - end - - select_or_merge(query, aggregate.name, cast) - end - - def add_subquery_aggregate_select( - query, - relationship_path, - %{kind: kind} = aggregate, - resource, - is_single? 
- ) - when kind in [:count, :sum, :avg, :max, :min, :custom] do - query = AshSqlite.DataLayer.default_bindings(query, aggregate.resource) - - ref = %Ash.Query.Ref{ - attribute: aggregate_field(aggregate, resource, relationship_path, query), - relationship_path: relationship_path, - resource: resource - } - - field = - if kind == :custom do - # we won't use this if its custom so don't try to make one - nil - else - AshSqlite.Expr.dynamic_expr(query, ref, query.__ash_bindings__, false) - end - - type = AshSqlite.Types.parameterized_type(aggregate.type, aggregate.constraints) - - binding = - AshSqlite.DataLayer.get_binding( - query.__ash_bindings__.resource, - relationship_path, - query, - [:left, :inner, :root] - ) - - field = - case kind do - :count -> - if Map.get(aggregate, :uniq?) do - Ecto.Query.dynamic([row], count(^field, :distinct)) - else - Ecto.Query.dynamic([row], count(^field)) - end - - :sum -> - Ecto.Query.dynamic([row], sum(^field)) - - :avg -> - Ecto.Query.dynamic([row], avg(^field)) - - :max -> - Ecto.Query.dynamic([row], max(^field)) - - :min -> - Ecto.Query.dynamic([row], min(^field)) - - :custom -> - {module, opts} = aggregate.implementation - - module.dynamic(opts, binding) - end - - filtered = filter_field(field, query, aggregate, relationship_path, is_single?) - - with_default = - if aggregate.default_value do - if type do - Ecto.Query.dynamic(coalesce(^filtered, type(^aggregate.default_value, ^type))) - else - Ecto.Query.dynamic(coalesce(^filtered, ^aggregate.default_value)) - end - else - filtered - end - - cast = - if type do - Ecto.Query.dynamic(type(^with_default, ^type)) - else - with_default - end - - select_or_merge(query, aggregate.name, cast) - end - - defp filter_field(field, _query, _aggregate, _relationship_path, true) do - field - end - - defp filter_field(field, query, aggregate, relationship_path, _is_single?) 
do - if has_filter?(aggregate.query) do - filter = - Ash.Filter.move_to_relationship_path( - aggregate.query.filter, - relationship_path - ) - - expr = - AshSqlite.Expr.dynamic_expr( - query, - filter, - query.__ash_bindings__, - false, - AshSqlite.Types.parameterized_type(aggregate.type, aggregate.constraints) - ) - - Ecto.Query.dynamic(filter(^field, ^expr)) - else - field - end - end - - defp has_filter?(nil), do: false - defp has_filter?(%{filter: nil}), do: false - defp has_filter?(%{filter: %Ash.Filter{expression: nil}}), do: false - defp has_filter?(%{filter: %Ash.Filter{}}), do: true - defp has_filter?(_), do: false - - defp has_sort?(nil), do: false - defp has_sort?(%{sort: nil}), do: false - defp has_sort?(%{sort: []}), do: false - defp has_sort?(%{sort: _}), do: true - defp has_sort?(_), do: false - - defp select_or_merge(query, aggregate_name, casted) do - query = - if query.select do - query - else - Ecto.Query.select(query, %{}) - end - - Ecto.Query.select_merge(query, ^%{aggregate_name => casted}) - end - - defp aggregate_field(aggregate, resource, _relationship_path, _query) do - case Ash.Resource.Info.field( - resource, - aggregate.field || List.first(Ash.Resource.Info.primary_key(resource)) - ) do - %Ash.Resource.Calculation{calculation: {module, opts}} = calculation -> - {:ok, query_calc} = - Ash.Query.Calculation.new( - calculation.name, - module, - opts, - calculation.type, - Map.get(aggregate, :context, %{}) - ) - - query_calc - - other -> - other - end - end -end diff --git a/lib/calculation.ex b/lib/calculation.ex deleted file mode 100644 index afcf79f..0000000 --- a/lib/calculation.ex +++ /dev/null @@ -1,83 +0,0 @@ -defmodule AshSqlite.Calculation do - @moduledoc false - - require Ecto.Query - - def add_calculations(query, [], _, _), do: {:ok, query} - - def add_calculations(query, calculations, resource, _source_binding) do - query = AshSqlite.DataLayer.default_bindings(query, resource) - - {:ok, query} = - AshSqlite.Join.join_all_relationships( - query, - %Ash.Filter{ - resource: resource, - expression: Enum.map(calculations, &elem(&1, 1)) - }, - left_only?: true - ) - - query = - if query.select do - query - else - Ecto.Query.select_merge(query, %{}) - end - - dynamics = - Enum.map(calculations, fn {calculation, expression} -> - type = - AshSqlite.Types.parameterized_type( - calculation.type, - Map.get(calculation, :constraints, []) - ) - - expr = - AshSqlite.Expr.dynamic_expr( - query, - expression, - query.__ash_bindings__, - false, - type - ) - - expr = - if type do - Ecto.Query.dynamic(type(^expr, ^type)) - else - expr - end - - {calculation.load, calculation.name, expr} - end) - - {:ok, add_calculation_selects(query, dynamics)} - end - - defp add_calculation_selects(query, dynamics) do - {in_calculations, in_body} = - Enum.split_with(dynamics, fn {load, _name, _dynamic} -> is_nil(load) end) - - calcs = - in_body - |> Map.new(fn {load, _, dynamic} -> - {load, dynamic} - end) - - calcs = - if Enum.empty?(in_calculations) do - calcs - else - Map.put( - calcs, - :calculations, - Map.new(in_calculations, fn {_, name, dynamic} -> - {name, dynamic} - end) - ) - end - - Ecto.Query.select_merge(query, ^calcs) - end -end diff --git a/lib/data_layer.ex b/lib/data_layer.ex index 5733fc3..8e832f8 100644 --- a/lib/data_layer.ex +++ b/lib/data_layer.ex @@ -282,9 +282,6 @@ defmodule AshSqlite.DataLayer do ] } - alias Ash.Filter - alias Ash.Query.{BooleanExpression, Not} - @behaviour Ash.DataLayer @sections [@sqlite] @@ -380,6 +377,8 @@ defmodule AshSqlite.DataLayer do def 
can?(_, {:aggregate_relationship, _}), do: false def can?(_, :timeout), do: true + def can?(_, {:filter_expr, %Ash.Query.Function.StringJoin{}}), do: false + def can?(_, {:filter_expr, %Ash.Query.Function.Contains{}}), do: false def can?(_, {:filter_expr, _}), do: true def can?(_, :nested_expressions), do: true def can?(_, {:query_aggregate, _}), do: true @@ -416,7 +415,13 @@ defmodule AshSqlite.DataLayer do data_layer_query end - {:ok, default_bindings(data_layer_query, resource, context)} + {:ok, + AshSql.Bindings.default_bindings( + data_layer_query, + resource, + AshSqlite.SqlImplementation, + context + )} end @impl true @@ -433,10 +438,10 @@ defmodule AshSqlite.DataLayer do @impl true def run_aggregate_query(query, aggregates, resource) do {exists, aggregates} = Enum.split_with(aggregates, &(&1.kind == :exists)) - query = default_bindings(query, resource) + query = AshSql.Bindings.default_bindings(query, resource, AshSqlite.SqlImplementation) query = - if query.distinct || query.limit do + if query.limit do query = query |> Ecto.Query.exclude(:select) @@ -459,12 +464,13 @@ defmodule AshSqlite.DataLayer do aggregates, query, fn agg, query -> - AshSqlite.Aggregate.add_subquery_aggregate_select( + AshSql.Aggregate.add_subquery_aggregate_select( query, agg.relationship_path |> Enum.drop(1), agg, resource, - true + true, + Ash.Resource.Info.relationship(resource, agg.relationship_path |> Enum.at(1)) ) end ) @@ -505,13 +511,11 @@ defmodule AshSqlite.DataLayer do @impl true def run_query(query, resource) do - query = default_bindings(query, resource) - with_sort_applied = if query.__ash_bindings__[:sort_applied?] do {:ok, query} else - apply_sort(query, query.__ash_bindings__[:sort], resource) + AshSql.Sort.apply_sort(query, query.__ash_bindings__[:sort], resource) end case with_sort_applied do @@ -568,7 +572,6 @@ defmodule AshSqlite.DataLayer do @impl true def functions(_resource) do [ - AshSqlite.Functions.Fragment, AshSqlite.Functions.Like, AshSqlite.Functions.ILike ] @@ -601,22 +604,22 @@ defmodule AshSqlite.DataLayer do if options[:upsert?] 
do # Ash groups changesets by atomics before dispatching them to the data layer # this means that all changesets have the same atomics - %{atomics: atomics, filters: filters} = Enum.at(changesets, 0) + %{atomics: atomics, filter: filter} = Enum.at(changesets, 0) query = from(row in resource, as: ^0) query = query - |> default_bindings(resource) + |> AshSql.Bindings.default_bindings(resource, AshSqlite.SqlImplementation) upsert_set = upsert_set(resource, changesets, options) on_conflict = - case query_with_atomics( + case AshSql.Atomics.query_with_atomics( resource, query, - filters, + filter, atomics, %{}, upsert_set @@ -1292,13 +1295,17 @@ defmodule AshSqlite.DataLayer do query = query - |> default_bindings(resource, changeset.context) + |> AshSql.Bindings.default_bindings( + resource, + AshSqlite.SqlImplementation, + changeset.context + ) |> Ecto.Query.select(^select) - case query_with_atomics( + case AshSql.Atomics.query_with_atomics( resource, query, - ecto_changeset.filters, + changeset.filter, changeset.atomics, ecto_changeset.changes, [] @@ -1324,7 +1331,7 @@ defmodule AshSqlite.DataLayer do {:error, Ash.Error.Changes.StaleRecord.exception( resource: resource, - filters: ecto_changeset.filters + filters: changeset.filter )} {1, [result]} -> @@ -1345,101 +1352,6 @@ defmodule AshSqlite.DataLayer do end end - defp query_with_atomics( - resource, - query, - filters, - atomics, - updating_one_changes, - existing_set - ) do - query = - Enum.reduce(filters, query, fn {key, value}, query -> - from(row in query, - where: field(row, ^key) == ^value - ) - end) - - atomics_result = - Enum.reduce_while(atomics, {:ok, query, []}, fn {field, expr}, {:ok, query, set} -> - with {:ok, query} <- - AshSqlite.Join.join_all_relationships( - query, - %Ash.Filter{ - resource: resource, - expression: expr - }, - left_only?: true - ), - dynamic <- - AshSqlite.Expr.dynamic_expr(query, expr, query.__ash_bindings__) do - {:cont, {:ok, query, Keyword.put(set, field, dynamic)}} - else - other -> - {:halt, other} - end - end) - - case atomics_result do - {:ok, query, dynamics} -> - {params, set, count} = - updating_one_changes - |> Map.to_list() - |> Enum.reduce({[], [], 0}, fn {key, value}, {params, set, count} -> - {[{value, {0, key}} | params], [{key, {:^, [], [count]}} | set], count + 1} - end) - - {params, set, _} = - Enum.reduce( - dynamics ++ existing_set, - {params, set, count}, - fn {key, value}, {params, set, count} -> - case AshSqlite.Expr.dynamic_expr(query, value, query.__ash_bindings__) do - %Ecto.Query.DynamicExpr{} = dynamic -> - result = - Ecto.Query.Builder.Dynamic.partially_expand( - :select, - query, - dynamic, - params, - count - ) - - expr = elem(result, 0) - new_params = elem(result, 1) - - new_count = - result |> Tuple.to_list() |> List.last() - - {new_params, [{key, expr} | set], new_count} - - other -> - {[{other, {0, key}} | params], [{key, {:^, [], [count]}} | set], count + 1} - end - end - ) - - case set do - [] -> - :empty - - set -> - {:ok, - Map.put(query, :updates, [ - %Ecto.Query.QueryExpr{ - # why do I have to reverse the `set`??? 
- # it breaks if I don't - expr: [set: Enum.reverse(set)], - params: Enum.reverse(params) - } - ])} - end - - {:error, error} -> - {:error, error} - end - end - @impl true def destroy(resource, %{data: record} = changeset) do ecto_changeset = ecto_changeset(record, changeset, :delete) @@ -1470,7 +1382,7 @@ defmodule AshSqlite.DataLayer do @impl true def select(query, select, resource) do - query = default_bindings(query, resource) + query = AshSql.Bindings.default_bindings(query, resource, AshSqlite.SqlImplementation) {:ok, from(row in query, @@ -1478,64 +1390,27 @@ defmodule AshSqlite.DataLayer do )} end - defp apply_sort(query, sort, _resource) when sort in [nil, []] do - {:ok, query |> set_sort_applied()} - end - - defp apply_sort(query, sort, resource) do - query - |> AshSqlite.Sort.sort(sort, resource, [], 0) - |> case do - {:ok, query} -> - {:ok, query |> set_sort_applied()} - - {:error, error} -> - {:error, error} - end - end - @doc false def unwrap_one([thing]), do: thing def unwrap_one([]), do: nil def unwrap_one(other), do: other - defp set_sort_applied(query) do - Map.update!(query, :__ash_bindings__, &Map.put(&1, :sort_applied?, true)) - end - @impl true - def filter(query, filter, resource, opts \\ []) do - query = default_bindings(query, resource) - + def filter(query, filter, _resource, opts \\ []) do query - |> AshSqlite.Join.join_all_relationships(filter, opts) + |> AshSql.Join.join_all_relationships(filter, opts) |> case do {:ok, query} -> - {:ok, add_filter_expression(query, filter)} + {:ok, AshSql.Filter.add_filter_expression(query, filter)} {:error, error} -> {:error, error} end end - @doc false - def default_bindings(query, resource, context \\ %{}) do - start_bindings = context[:data_layer][:start_bindings_at] || 0 - - Map.put_new(query, :__ash_bindings__, %{ - resource: resource, - current: Enum.count(query.joins) + 1 + start_bindings, - in_group?: false, - calculations: %{}, - parent_resources: [], - context: context, - bindings: %{start_bindings => %{path: [], type: :root, source: resource}} - }) - end - @impl true def add_calculations(query, calculations, resource) do - AshSqlite.Calculation.add_calculations(query, calculations, resource, 0) + AshSql.Calculation.add_calculations(query, calculations, resource, 0, true) end @doc false @@ -1569,40 +1444,6 @@ defmodule AshSqlite.DataLayer do def get_binding(_, _, _, _, _), do: nil - defp add_filter_expression(query, filter) do - filter - |> split_and_statements() - |> Enum.reduce(query, fn filter, query -> - dynamic = AshSqlite.Expr.dynamic_expr(query, filter, query.__ash_bindings__) - - Ecto.Query.where(query, ^dynamic) - end) - end - - defp split_and_statements(%Filter{expression: expression}) do - split_and_statements(expression) - end - - defp split_and_statements(%BooleanExpression{op: :and, left: left, right: right}) do - split_and_statements(left) ++ split_and_statements(right) - end - - defp split_and_statements(%Not{expression: %Not{expression: expression}}) do - split_and_statements(expression) - end - - defp split_and_statements(%Not{ - expression: %BooleanExpression{op: :or, left: left, right: right} - }) do - split_and_statements(%BooleanExpression{ - op: :and, - left: %Not{expression: left}, - right: %Not{expression: right} - }) - end - - defp split_and_statements(other), do: [other] - @doc false def add_binding(query, data, additional_bindings \\ 0) do current = query.__ash_bindings__.current diff --git a/lib/expr.ex b/lib/expr.ex deleted file mode 100644 index 916bdcd..0000000 --- a/lib/expr.ex +++ 
/dev/null @@ -1,1366 +0,0 @@ -defmodule AshSqlite.Expr do - @moduledoc false - - alias Ash.Filter - alias Ash.Query.{BooleanExpression, Exists, Not, Ref} - alias Ash.Query.Operator.IsNil - - alias Ash.Query.Function.{ - Ago, - DateAdd, - DateTimeAdd, - FromNow, - GetPath, - If, - Now, - Today, - Type - } - - alias AshSqlite.Functions.{Fragment, ILike, Like} - - require Ecto.Query - - def dynamic_expr(query, expr, bindings, embedded? \\ false, type \\ nil) - - def dynamic_expr(query, %Filter{expression: expression}, bindings, embedded?, type) do - dynamic_expr(query, expression, bindings, embedded?, type) - end - - # A nil filter means "everything" - def dynamic_expr(_, nil, _, _, _), do: true - # A true filter means "everything" - def dynamic_expr(_, true, _, _, _), do: true - # A false filter means "nothing" - def dynamic_expr(_, false, _, _, _), do: false - - def dynamic_expr(query, expression, bindings, embedded?, type) do - do_dynamic_expr(query, expression, bindings, embedded?, type) - end - - defp do_dynamic_expr(query, expr, bindings, embedded?, type \\ nil) - - defp do_dynamic_expr(_, {:embed, other}, _bindings, _true, _type) do - other - end - - defp do_dynamic_expr(query, %Not{expression: expression}, bindings, embedded?, _type) do - new_expression = do_dynamic_expr(query, expression, bindings, embedded?, :boolean) - Ecto.Query.dynamic(not (^new_expression)) - end - - defp do_dynamic_expr( - query, - %Like{arguments: [arg1, arg2], embedded?: pred_embedded?}, - bindings, - embedded?, - _type - ) do - arg1 = do_dynamic_expr(query, arg1, bindings, pred_embedded? || embedded?, :string) - arg2 = do_dynamic_expr(query, arg2, bindings, pred_embedded? || embedded?, :string) - - Ecto.Query.dynamic(like(^arg1, ^arg2)) - end - - defp do_dynamic_expr( - query, - %ILike{arguments: [arg1, arg2], embedded?: pred_embedded?}, - bindings, - embedded?, - _type - ) do - arg1 = do_dynamic_expr(query, arg1, bindings, pred_embedded? || embedded?, :ci_string) - arg2 = do_dynamic_expr(query, arg2, bindings, pred_embedded? || embedded?, :string) - - # Not ideal, but better than not having it. - Ecto.Query.dynamic(like(fragment("LOWER(?)", ^arg1), fragment("LOWER(?)", ^arg2))) - end - - defp do_dynamic_expr( - query, - %IsNil{left: left, right: right, embedded?: pred_embedded?}, - bindings, - embedded?, - _type - ) do - left_expr = do_dynamic_expr(query, left, bindings, pred_embedded? || embedded?) - right_expr = do_dynamic_expr(query, right, bindings, pred_embedded? || embedded?, :boolean) - Ecto.Query.dynamic(is_nil(^left_expr) == ^right_expr) - end - - defp do_dynamic_expr( - query, - %Ago{arguments: [left, right], embedded?: pred_embedded?}, - bindings, - embedded?, - _type - ) - when is_binary(right) or is_atom(right) do - left = do_dynamic_expr(query, left, bindings, pred_embedded? || embedded?, :integer) - - Ecto.Query.dynamic( - fragment("(?)", datetime_add(^DateTime.utc_now(), ^left * -1, ^to_string(right))) - ) - end - - defp do_dynamic_expr( - query, - %FromNow{arguments: [left, right], embedded?: pred_embedded?}, - bindings, - embedded?, - _type - ) - when is_binary(right) or is_atom(right) do - left = do_dynamic_expr(query, left, bindings, pred_embedded? 
|| embedded?, :integer) - - Ecto.Query.dynamic( - fragment("(?)", datetime_add(^DateTime.utc_now(), ^left, ^to_string(right))) - ) - end - - defp do_dynamic_expr( - query, - %DateTimeAdd{arguments: [datetime, amount, interval], embedded?: pred_embedded?}, - bindings, - embedded?, - _type - ) - when is_binary(interval) or is_atom(interval) do - datetime = do_dynamic_expr(query, datetime, bindings, pred_embedded? || embedded?) - amount = do_dynamic_expr(query, amount, bindings, pred_embedded? || embedded?, :integer) - Ecto.Query.dynamic(fragment("(?)", datetime_add(^datetime, ^amount, ^to_string(interval)))) - end - - defp do_dynamic_expr( - query, - %DateAdd{arguments: [date, amount, interval], embedded?: pred_embedded?}, - bindings, - embedded?, - _type - ) - when is_binary(interval) or is_atom(interval) do - date = do_dynamic_expr(query, date, bindings, pred_embedded? || embedded?) - amount = do_dynamic_expr(query, amount, bindings, pred_embedded? || embedded?, :integer) - Ecto.Query.dynamic(fragment("(?)", datetime_add(^date, ^amount, ^to_string(interval)))) - end - - defp do_dynamic_expr( - query, - %GetPath{ - arguments: [%Ref{attribute: %{type: type}}, right] - } = get_path, - bindings, - embedded?, - nil - ) - when is_atom(type) and is_list(right) do - if Ash.Type.embedded_type?(type) do - type = determine_type_at_path(type, right) - - do_get_path(query, get_path, bindings, embedded?, type) - else - do_get_path(query, get_path, bindings, embedded?) - end - end - - defp do_dynamic_expr( - query, - %GetPath{ - arguments: [%Ref{attribute: %{type: {:array, type}}}, right] - } = get_path, - bindings, - embedded?, - nil - ) - when is_atom(type) and is_list(right) do - if Ash.Type.embedded_type?(type) do - type = determine_type_at_path(type, right) - do_get_path(query, get_path, bindings, embedded?, type) - else - do_get_path(query, get_path, bindings, embedded?) 
- end - end - - defp do_dynamic_expr( - query, - %GetPath{} = get_path, - bindings, - embedded?, - type - ) do - do_get_path(query, get_path, bindings, embedded?, type) - end - - # Can't support contains without also supporting case insensitive - # strings - - # defp do_dynamic_expr( - # query, - # %Contains{arguments: [left, %Ash.CiString{} = right], embedded?: pred_embedded?}, - # bindings, - # embedded?, - # type - # ) do - # do_dynamic_expr( - # query, - # %Fragment{ - # embedded?: pred_embedded?, - # arguments: [ - # raw: "(instr((", - # expr: left, - # raw: " COLLATE NOCASE), (", - # expr: right, - # raw: ")) > 0)" - # ] - # }, - # bindings, - # embedded?, - # type - # ) - # end - - # defp do_dynamic_expr( - # query, - # %Contains{arguments: [left, right], embedded?: pred_embedded?}, - # bindings, - # embedded?, - # type - # ) do - # do_dynamic_expr( - # query, - # %Fragment{ - # embedded?: pred_embedded?, - # arguments: [ - # raw: "(instr((", - # expr: left, - # raw: "), (", - # expr: right, - # raw: ")) > 0)" - # ] - # }, - # bindings, - # embedded?, - # type - # ) - # end - - defp do_dynamic_expr( - query, - %If{arguments: [condition, when_true, when_false], embedded?: pred_embedded?}, - bindings, - embedded?, - type - ) do - [condition_type, when_true_type, when_false_type] = - case AshSqlite.Types.determine_types(If, [condition, when_true, when_false]) do - [condition_type, when_true] -> - [condition_type, when_true, nil] - - [condition_type, when_true, when_false] -> - [condition_type, when_true, when_false] - end - |> case do - [condition_type, nil, nil] -> - [condition_type, type, type] - - [condition_type, when_true, nil] -> - [condition_type, when_true, type] - - [condition_type, nil, when_false] -> - [condition_type, type, when_false] - - [condition_type, when_true, when_false] -> - [condition_type, when_true, when_false] - end - - condition = - do_dynamic_expr(query, condition, bindings, pred_embedded? || embedded?, condition_type) - - when_true = - do_dynamic_expr(query, when_true, bindings, pred_embedded? || embedded?, when_true_type) - - when_false = - do_dynamic_expr( - query, - when_false, - bindings, - pred_embedded? || embedded?, - when_false_type - ) - - do_dynamic_expr( - query, - %Fragment{ - embedded?: pred_embedded?, - arguments: [ - raw: "(CASE WHEN ", - casted_expr: condition, - raw: " THEN ", - casted_expr: when_true, - raw: " ELSE ", - casted_expr: when_false, - raw: " END)" - ] - }, - bindings, - embedded?, - type - ) - end - - # Wow, even this doesn't work, because of course it doesn't. - # Doing string joining properly requires a recursive "if not empty" check - # that honestly I don't have the energy to do right now. - # There are commented out tests for this in the calculation tests, make sure those pass, - # whoever feels like fixing this. - # defp do_dynamic_expr( - # _query, - # %StringJoin{arguments: [values | _], embedded?: _pred_embedded?} = string_join, - # _bindings, - # _embedded?, - # _type - # ) - # when not is_list(values) do - # raise "SQLite can only join literal lists, not dynamic values. i.e `string_join([foo, bar])`, - # but not `string_join(something)`. 
Got #{inspect(string_join)}" - # end - - # defp do_dynamic_expr( - # query, - # %StringJoin{arguments: [values, joiner], embedded?: pred_embedded?}, - # bindings, - # embedded?, - # type - # ) do - # # Not optimal, but it works - # last_value = :lists.last(values) - - # values = - # values - # |> :lists.droplast() - # |> Enum.map(&{:not_last, &1}) - # |> Enum.concat([{:last, last_value}]) - - # do_dynamic_expr( - # query, - # %Fragment{ - # embedded?: pred_embedded?, - # arguments: - # Enum.reduce(values, [raw: "("], fn - # {:last, value}, acc -> - # acc ++ - # [ - # raw: "COALESCE(", - # expr: value, - # raw: ", '')" - # ] - - # {:not_last, value}, acc -> - # acc ++ - # [ - # raw: "(CASE ", - # expr: value, - # raw: " WHEN NULL THEN '' ELSE ", - # expr: value, - # raw: " || ", - # expr: joiner, - # raw: " END) || " - # ] - # end) - # |> Enum.concat(raw: ")") - # }, - # bindings, - # embedded?, - # type - # ) - # end - - # defp do_dynamic_expr( - # query, - # %StringJoin{arguments: [values], embedded?: pred_embedded?}, - # bindings, - # embedded?, - # type - # ) do - # do_dynamic_expr( - # query, - # %Fragment{ - # embedded?: pred_embedded?, - # arguments: - # Enum.reduce(values, {[raw: "("], true}, fn value, {acc, first?} -> - # add = - # if first? do - # [expr: value] - # else - # [raw: " || COALESCE(", expr: value, raw: ", '')"] - # end - - # {acc ++ add, false} - # end) - # |> elem(0) - # |> Enum.concat(raw: ")") - # }, - # bindings, - # embedded?, - # type - # ) - # end - - # Sorry :( - # This is bad to do, but is the only reasonable way I could find. - defp do_dynamic_expr( - query, - %Fragment{arguments: arguments, embedded?: pred_embedded?}, - bindings, - embedded?, - _type - ) do - arguments = - case arguments do - [{:raw, _} | _] -> - arguments - - arguments -> - [{:raw, ""} | arguments] - end - - arguments = - case List.last(arguments) do - nil -> - arguments - - {:raw, _} -> - arguments - - _ -> - arguments ++ [{:raw, ""}] - end - - {params, fragment_data, _} = - Enum.reduce(arguments, {[], [], 0}, fn - {:raw, str}, {params, fragment_data, count} -> - {params, [{:raw, str} | fragment_data], count} - - {:casted_expr, dynamic}, {params, fragment_data, count} -> - {item, params, count} = - {{:^, [], [count]}, [{dynamic, :any} | params], count + 1} - - {params, [{:expr, item} | fragment_data], count} - - {:expr, expr}, {params, fragment_data, count} -> - dynamic = do_dynamic_expr(query, expr, bindings, pred_embedded? || embedded?) 
- - type = - if is_binary(expr) do - :string - else - :any - end - - {item, params, count} = - {{:^, [], [count]}, [{dynamic, type} | params], count + 1} - - {params, [{:expr, item} | fragment_data], count} - end) - - %Ecto.Query.DynamicExpr{ - fun: fn _query -> - {{:fragment, [], Enum.reverse(fragment_data)}, Enum.reverse(params), [], %{}} - end, - binding: [], - file: __ENV__.file, - line: __ENV__.line - } - end - - defp do_dynamic_expr( - query, - %BooleanExpression{op: op, left: left, right: right}, - bindings, - embedded?, - _type - ) do - left_expr = do_dynamic_expr(query, left, bindings, embedded?, :boolean) - right_expr = do_dynamic_expr(query, right, bindings, embedded?, :boolean) - - case op do - :and -> - Ecto.Query.dynamic(^left_expr and ^right_expr) - - :or -> - Ecto.Query.dynamic(^left_expr or ^right_expr) - end - end - - defp do_dynamic_expr( - query, - %Ash.Query.Function.Minus{arguments: [arg], embedded?: pred_embedded?}, - bindings, - embedded?, - type - ) do - [determined_type] = AshSqlite.Types.determine_types(Ash.Query.Function.Minus, [arg]) - - expr = - do_dynamic_expr(query, arg, bindings, pred_embedded? || embedded?, determined_type || type) - - Ecto.Query.dynamic(-(^expr)) - end - - # Honestly we need to either 1. not type cast or 2. build in type compatibility concepts - # instead of `:same` we need an `ANY COMPATIBLE` equivalent. - @cast_operands_for [:<>] - - defp do_dynamic_expr( - query, - %mod{ - __predicate__?: _, - left: left, - right: right, - embedded?: pred_embedded?, - operator: operator - }, - bindings, - embedded?, - type - ) do - [left_type, right_type] = - mod - |> AshSqlite.Types.determine_types([left, right]) - - left_expr = - if left_type && operator in @cast_operands_for do - left_expr = do_dynamic_expr(query, left, bindings, pred_embedded? || embedded?) - - type_expr(left_expr, left_type) - else - do_dynamic_expr(query, left, bindings, pred_embedded? || embedded?, left_type) - end - - right_expr = - if right_type && operator in @cast_operands_for do - right_expr = do_dynamic_expr(query, right, bindings, pred_embedded? || embedded?) - type_expr(right_expr, right_type) - else - do_dynamic_expr(query, right, bindings, pred_embedded? 
|| embedded?, right_type) - end - - case operator do - :== -> - Ecto.Query.dynamic(^left_expr == ^right_expr) - - :!= -> - Ecto.Query.dynamic(^left_expr != ^right_expr) - - :> -> - Ecto.Query.dynamic(^left_expr > ^right_expr) - - :< -> - Ecto.Query.dynamic(^left_expr < ^right_expr) - - :>= -> - Ecto.Query.dynamic(^left_expr >= ^right_expr) - - :<= -> - Ecto.Query.dynamic(^left_expr <= ^right_expr) - - :in -> - Ecto.Query.dynamic(^left_expr in ^right_expr) - - :+ -> - Ecto.Query.dynamic(^left_expr + ^right_expr) - - :- -> - Ecto.Query.dynamic(^left_expr - ^right_expr) - - :/ -> - Ecto.Query.dynamic(type(^left_expr, :decimal) / type(^right_expr, :decimal)) - - :* -> - Ecto.Query.dynamic(^left_expr * ^right_expr) - - :<> -> - do_dynamic_expr( - query, - %Fragment{ - embedded?: pred_embedded?, - arguments: [ - raw: "(", - casted_expr: left_expr, - raw: " || ", - casted_expr: right_expr, - raw: ")" - ] - }, - bindings, - embedded?, - type - ) - - :|| -> - do_dynamic_expr( - query, - %Fragment{ - embedded?: pred_embedded?, - arguments: [ - raw: "(CASE WHEN (", - casted_expr: left_expr, - raw: " == FALSE OR ", - casted_expr: left_expr, - raw: " IS NULL) THEN ", - casted_expr: right_expr, - raw: " ELSE ", - casted_expr: left_expr, - raw: "END)" - ] - }, - bindings, - embedded?, - type - ) - - :&& -> - do_dynamic_expr( - query, - %Fragment{ - embedded?: pred_embedded?, - arguments: [ - raw: "(CASE WHEN (", - casted_expr: left_expr, - raw: " == FALSE OR ", - casted_expr: left_expr, - raw: " IS NULL) THEN ", - casted_expr: left_expr, - raw: " ELSE ", - casted_expr: right_expr, - raw: "END)" - ] - }, - bindings, - embedded?, - type - ) - - other -> - raise "Operator not implemented #{other}" - end - end - - defp do_dynamic_expr(query, %MapSet{} = mapset, bindings, embedded?, type) do - do_dynamic_expr(query, Enum.to_list(mapset), bindings, embedded?, type) - end - - defp do_dynamic_expr( - query, - %Ash.CiString{string: string}, - bindings, - embedded?, - type - ) do - string = do_dynamic_expr(query, string, bindings, embedded?) - - do_dynamic_expr( - query, - %Fragment{ - embedded?: embedded?, - arguments: [ - raw: "(", - casted_expr: string, - raw: "collate nocase)" - ] - }, - bindings, - embedded?, - type - ) - end - - defp do_dynamic_expr( - query, - %Ref{ - attribute: %Ash.Query.Calculation{} = calculation, - relationship_path: [], - resource: resource - }, - bindings, - embedded?, - _type - ) do - calculation = %{calculation | load: calculation.name} - - type = - AshSqlite.Types.parameterized_type( - calculation.type, - Map.get(calculation, :constraints, []) - ) - - case Ash.Filter.hydrate_refs( - calculation.module.expression(calculation.opts, calculation.context), - %{ - resource: resource, - calculations: %{}, - public?: false - } - ) do - {:ok, expression} -> - do_dynamic_expr( - query, - expression, - bindings, - embedded?, - type - ) - - {:error, error} -> - raise """ - Failed to hydrate references in #{inspect(calculation.module.expression(calculation.opts, calculation.context))} - - #{inspect(error)} - """ - end - end - - defp do_dynamic_expr( - _query, - %Ref{ - attribute: %Ash.Resource.Calculation{} = calculation - }, - _bindings, - _embedded?, - _type - ) do - raise "cannot build expression from resource calculation! 
#{calculation.name}" - end - - defp do_dynamic_expr( - query, - %Ref{ - attribute: %Ash.Query.Calculation{} = calculation, - relationship_path: relationship_path - } = ref, - bindings, - embedded?, - _type - ) do - binding_to_replace = - Enum.find_value(bindings.bindings, fn {i, binding} -> - if binding.path == relationship_path do - i - end - end) - - if is_nil(binding_to_replace) do - raise """ - Error building calculation reference: #{inspect(relationship_path)} is not available in bindings. - - In reference: #{ref} - """ - end - - temp_bindings = - bindings.bindings - |> Map.delete(0) - |> Map.update!(binding_to_replace, &Map.merge(&1, %{path: [], type: :root})) - - type = - AshSqlite.Types.parameterized_type( - calculation.type, - Map.get(calculation, :constraints, []) - ) - - case Ash.Filter.hydrate_refs( - calculation.module.expression(calculation.opts, calculation.context), - %{ - resource: ref.resource, - calculations: %{}, - public?: false - } - ) do - {:ok, hydrated} -> - expr = - do_dynamic_expr( - query, - hydrated, - %{bindings | bindings: temp_bindings}, - embedded?, - type - ) - - type_expr(expr, type) - - _ -> - raise "Failed to hydrate references in #{inspect(calculation.module.expression(calculation.opts, calculation.context))}" - end - end - - defp do_dynamic_expr( - query, - %Type{arguments: [arg1, arg2, constraints]}, - bindings, - embedded?, - _type - ) do - arg2 = Ash.Type.get_type(arg2) - arg1 = maybe_uuid_to_binary(arg2, arg1, arg1) - type = AshSqlite.Types.parameterized_type(arg2, constraints) - - query - |> do_dynamic_expr(arg1, bindings, embedded?, type) - |> type_expr(type) - end - - defp do_dynamic_expr( - query, - %Now{embedded?: pred_embedded?}, - bindings, - embedded?, - type - ) do - do_dynamic_expr( - query, - DateTime.utc_now(), - bindings, - embedded? || pred_embedded?, - type - ) - end - - defp do_dynamic_expr( - query, - %Today{embedded?: pred_embedded?}, - bindings, - embedded?, - type - ) do - do_dynamic_expr( - query, - Date.utc_today(), - bindings, - embedded? || pred_embedded?, - type - ) - end - - defp do_dynamic_expr( - query, - %Ash.Query.Parent{expr: expr}, - bindings, - embedded?, - type - ) do - if !bindings[:parent_bindings] do - raise "Used `parent/1` without parent context. AshSqlite is not capable of supporting `parent/1` in relationship where clauses yet." - end - - parent? = Map.get(bindings.parent_bindings, :parent_is_parent_as?, true) - - do_dynamic_expr( - %{ - query - | __ash_bindings__: Map.put(bindings.parent_bindings, :parent?, parent?) 
- }, - expr, - bindings, - embedded?, - type - ) - end - - defp do_dynamic_expr( - query, - %Exists{at_path: at_path, path: [first | rest], expr: expr}, - bindings, - _embedded?, - _type - ) do - resource = Ash.Resource.Info.related(query.__ash_bindings__.resource, at_path) - first_relationship = Ash.Resource.Info.relationship(resource, first) - - last_relationship = - Enum.reduce(rest, first_relationship, fn name, relationship -> - Ash.Resource.Info.relationship(relationship.destination, name) - end) - - {:ok, expr} = - Ash.Filter.hydrate_refs(expr, %{ - resource: last_relationship.destination, - parent_stack: [ - query.__ash_bindings__.resource - | query.__ash_bindings__[:parent_resources] || [] - ], - calculations: %{}, - public?: false - }) - - filter = - %Ash.Filter{expression: expr, resource: first_relationship.destination} - |> nest_expression(rest) - - {:ok, source} = - AshSqlite.Join.maybe_get_resource_query( - first_relationship.destination, - first_relationship, - query, - [first_relationship.name] - ) - - {:ok, filtered} = - source - |> set_parent_path(query) - |> AshSqlite.DataLayer.filter( - filter, - first_relationship.destination, - no_this?: true - ) - - free_binding = filtered.__ash_bindings__.current - - exists_query = - cond do - Map.get(first_relationship, :manual) -> - {module, opts} = first_relationship.manual - - [pkey_attr | _] = Ash.Resource.Info.primary_key(first_relationship.destination) - - pkey_attr = Ash.Resource.Info.attribute(first_relationship.destination, pkey_attr) - - source_ref = - ref_binding( - %Ref{ - attribute: pkey_attr, - relationship_path: at_path, - resource: resource - }, - bindings - ) - - {:ok, subquery} = - module.ash_sqlite_subquery( - opts, - source_ref, - 0, - filtered - ) - - subquery - - first_relationship.type == :many_to_many -> - source_ref = - ref_binding( - %Ref{ - attribute: - Ash.Resource.Info.attribute(resource, first_relationship.source_attribute), - relationship_path: at_path, - resource: resource - }, - bindings - ) - - through_relationship = - Ash.Resource.Info.relationship(resource, first_relationship.join_relationship) - - through_bindings = - query - |> Map.delete(:__ash_bindings__) - |> AshSqlite.DataLayer.default_bindings( - query.__ash_bindings__.resource, - query.__ash_bindings__.context - ) - |> Map.get(:__ash_bindings__) - |> Map.put(:bindings, %{ - free_binding => %{path: [], source: first_relationship.through, type: :root} - }) - - {:ok, through} = - AshSqlite.Join.maybe_get_resource_query( - first_relationship.through, - through_relationship, - query, - [first_relationship.join_relationship], - through_bindings, - nil, - false - ) - - Ecto.Query.from(destination in filtered, - join: through in ^through, - as: ^free_binding, - on: - field(through, ^first_relationship.destination_attribute_on_join_resource) == - field(destination, ^first_relationship.destination_attribute), - on: - field(parent_as(^source_ref), ^first_relationship.source_attribute) == - field(through, ^first_relationship.source_attribute_on_join_resource) - ) - - Map.get(first_relationship, :no_attributes?) 
-> - filtered - - true -> - source_ref = - ref_binding( - %Ref{ - attribute: - Ash.Resource.Info.attribute(resource, first_relationship.source_attribute), - relationship_path: at_path, - resource: resource - }, - bindings - ) - - Ecto.Query.from(destination in filtered, - where: - field(parent_as(^source_ref), ^first_relationship.source_attribute) == - field(destination, ^first_relationship.destination_attribute) - ) - end - - exists_query = - exists_query - |> Ecto.Query.exclude(:select) - |> Ecto.Query.select(1) - - Ecto.Query.dynamic(exists(Ecto.Query.subquery(exists_query))) - end - - defp do_dynamic_expr( - query, - %Ref{ - attribute: %Ash.Resource.Attribute{ - name: name, - type: attr_type, - constraints: constraints - } - } = ref, - bindings, - _embedded?, - expr_type - ) do - ref_binding = ref_binding(ref, bindings) - - if is_nil(ref_binding) do - raise "Error while building reference: #{inspect(ref)}" - end - - constraints = - if attr_type do - constraints - end - - case AshSqlite.Types.parameterized_type(attr_type || expr_type, constraints) do - nil -> - if query.__ash_bindings__[:parent?] do - Ecto.Query.dynamic(field(parent_as(^ref_binding), ^name)) - else - Ecto.Query.dynamic(field(as(^ref_binding), ^name)) - end - - type -> - dynamic = - if query.__ash_bindings__[:parent?] do - Ecto.Query.dynamic(field(parent_as(^ref_binding), ^name)) - else - Ecto.Query.dynamic(field(as(^ref_binding), ^name)) - end - - type_expr(dynamic, type) - end - end - - defp do_dynamic_expr(query, value, bindings, embedded?, _type) - when is_map(value) and not is_struct(value) do - Map.new(value, fn {key, value} -> - {key, do_dynamic_expr(query, value, bindings, embedded?)} - end) - end - - defp do_dynamic_expr(query, other, bindings, true, type) do - if other && is_atom(other) && !is_boolean(other) do - to_string(other) - else - if Ash.Filter.TemplateHelpers.expr?(other) do - if is_list(other) do - list_expr(query, other, bindings, true, type) - else - raise "Unsupported expression in AshSqlite query: #{inspect(other)}" - end - else - maybe_sanitize_list(query, other, bindings, true, type) - end - end - end - - defp do_dynamic_expr(query, value, bindings, embedded?, {:in, type}) when is_list(value) do - list_expr(query, value, bindings, embedded?, {:array, type}) - end - - defp do_dynamic_expr(query, value, bindings, embedded?, type) - when not is_nil(value) and is_atom(value) and not is_boolean(value) do - do_dynamic_expr(query, to_string(value), bindings, embedded?, type) - end - - defp do_dynamic_expr(query, value, bindings, false, type) when type == nil or type == :any do - if is_list(value) do - list_expr(query, value, bindings, false, type) - else - maybe_sanitize_list(query, value, bindings, true, type) - end - end - - defp do_dynamic_expr(query, value, bindings, false, type) do - if Ash.Filter.TemplateHelpers.expr?(value) do - if is_list(value) do - list_expr(query, value, bindings, false, type) - else - raise "Unsupported expression in AshSqlite query: #{inspect(value)}" - end - else - case maybe_sanitize_list(query, value, bindings, true, type) do - ^value -> - type_expr(value, type) - - value -> - value - end - end - end - - defp type_expr(expr, type) do - case type do - {:parameterized, inner_type, constraints} -> - if inner_type.type(constraints) == :ci_string do - Ecto.Query.dynamic(fragment("(? 
COLLATE NOCASE)", ^expr)) - else - Ecto.Query.dynamic(type(^expr, ^type)) - end - - nil -> - expr - - type -> - Ecto.Query.dynamic(type(^expr, ^type)) - end - end - - defp list_expr(query, value, bindings, embedded?, type) do - type = - case type do - {:array, type} -> type - {:in, type} -> type - _ -> nil - end - - {params, exprs, _} = - Enum.reduce(value, {[], [], 0}, fn value, {params, data, count} -> - case do_dynamic_expr(query, value, bindings, embedded?, type) do - %Ecto.Query.DynamicExpr{} = dynamic -> - result = - Ecto.Query.Builder.Dynamic.partially_expand( - :select, - query, - dynamic, - params, - count - ) - - expr = elem(result, 0) - new_params = elem(result, 1) - new_count = result |> Tuple.to_list() |> List.last() - - {new_params, [expr | data], new_count} - - other -> - {params, [other | data], count} - end - end) - - %Ecto.Query.DynamicExpr{ - fun: fn _query -> - {Enum.reverse(exprs), Enum.reverse(params), [], []} - end, - binding: [], - file: __ENV__.file, - line: __ENV__.line - } - end - - defp maybe_uuid_to_binary({:array, type}, value, _original_value) when is_list(value) do - Enum.map(value, &maybe_uuid_to_binary(type, &1, &1)) - end - - defp maybe_uuid_to_binary(type, value, original_value) - when type in [ - Ash.Type.UUID.EctoType, - :uuid - ] and is_binary(value) do - case Ecto.UUID.dump(value) do - {:ok, encoded} -> encoded - _ -> original_value - end - end - - defp maybe_uuid_to_binary(_type, _value, original_value), do: original_value - - defp maybe_sanitize_list(query, value, bindings, embedded?, type) do - if is_list(value) do - Enum.map(value, &do_dynamic_expr(query, &1, bindings, embedded?, type)) - else - value - end - end - - defp ref_binding(%{attribute: %Ash.Resource.Attribute{}} = ref, bindings) do - Enum.find_value(bindings.bindings, fn {binding, data} -> - data.path == ref.relationship_path && data.type in [:inner, :left, :root] && binding - end) - end - - defp do_get_path( - query, - %GetPath{arguments: [left, right], embedded?: pred_embedded?}, - bindings, - embedded?, - type \\ nil - ) do - path = "$." <> Enum.join(right, ".") - - expr = - do_dynamic_expr( - query, - %Fragment{ - embedded?: pred_embedded?, - arguments: [ - raw: "json_extract(", - expr: left, - raw: ", ", - expr: path, - raw: ")" - ] - }, - bindings, - embedded? - ) - - if type do - type_expr(expr, type) - else - expr - end - end - - defp determine_type_at_path(type, path) do - path - |> Enum.reject(&is_integer/1) - |> do_determine_type_at_path(type) - |> case do - nil -> - nil - - {type, constraints} -> - AshSqlite.Types.parameterized_type(type, constraints) - end - end - - defp do_determine_type_at_path([], _), do: nil - - defp do_determine_type_at_path([item], type) do - case Ash.Resource.Info.attribute(type, item) do - nil -> - nil - - %{type: {:array, type}, constraints: constraints} -> - constraints = constraints[:items] || [] - - {type, constraints} - - %{type: type, constraints: constraints} -> - {type, constraints} - end - end - - defp do_determine_type_at_path([item | rest], type) do - case Ash.Resource.Info.attribute(type, item) do - nil -> - nil - - %{type: {:array, type}} -> - if Ash.Type.embedded_type?(type) do - type - else - nil - end - - %{type: type} -> - if Ash.Type.embedded_type?(type) do - type - else - nil - end - end - |> case do - nil -> - nil - - type -> - do_determine_type_at_path(rest, type) - end - end - - defp set_parent_path(query, parent) do - # This is a stupid name. Its actually the path we *remove* when stepping up a level. 
I.e the child's path - Map.update!(query, :__ash_bindings__, fn ash_bindings -> - ash_bindings - |> Map.put(:parent_bindings, parent.__ash_bindings__) - |> Map.put(:parent_resources, [ - parent.__ash_bindings__.resource | parent.__ash_bindings__[:parent_resources] || [] - ]) - end) - end - - defp nest_expression(expression, relationship_path) do - case expression do - {key, value} when is_atom(key) -> - {key, nest_expression(value, relationship_path)} - - %Not{expression: expression} = not_expr -> - %{not_expr | expression: nest_expression(expression, relationship_path)} - - %BooleanExpression{left: left, right: right} = expression -> - %{ - expression - | left: nest_expression(left, relationship_path), - right: nest_expression(right, relationship_path) - } - - %{__operator__?: true, left: left, right: right} = op -> - left = nest_expression(left, relationship_path) - right = nest_expression(right, relationship_path) - %{op | left: left, right: right} - - %Ref{} = ref -> - add_to_ref_path(ref, relationship_path) - - %{__function__?: true, arguments: args} = func -> - %{func | arguments: Enum.map(args, &nest_expression(&1, relationship_path))} - - %Ash.Query.Exists{} = exists -> - %{exists | at_path: relationship_path ++ exists.at_path} - - %Ash.Query.Parent{} = parent -> - parent - - %Ash.Query.Call{args: args} = call -> - %{call | args: Enum.map(args, &nest_expression(&1, relationship_path))} - - %Ash.Filter{expression: expression} = filter -> - %{filter | expression: nest_expression(expression, relationship_path)} - - other -> - other - end - end - - defp add_to_ref_path(%Ref{relationship_path: relationship_path} = ref, to_add) do - %{ref | relationship_path: to_add ++ relationship_path} - end -end diff --git a/lib/functions/fragment.ex b/lib/functions/fragment.ex deleted file mode 100644 index 1b0130e..0000000 --- a/lib/functions/fragment.ex +++ /dev/null @@ -1,72 +0,0 @@ -defmodule AshSqlite.Functions.Fragment do - @moduledoc """ - A function that maps to ecto's `fragment` function - - https://hexdocs.pm/ecto/Ecto.Query.API.html#fragment/1 - """ - - use Ash.Query.Function, name: :fragment - - def private?, do: true - - # Varargs is special, and should only be used in rare circumstances (like this one) - # no type casting or help can be provided for these functions. - def args, do: :var_args - - def new([fragment | _]) when not is_binary(fragment) do - {:error, "First argument to `fragment` must be a string."} - end - - def new([fragment | rest]) do - split = split_fragment(fragment) - - if Enum.count(split, &(&1 == :slot)) != length(rest) do - {:error, - "fragment(...) expects extra arguments in the same amount of question marks in string. " <> - "It received #{Enum.count(split, &(&1 == :slot))} extra argument(s) but expected #{length(rest)}"} - else - {:ok, %__MODULE__{arguments: merge_fragment(split, rest)}} - end - end - - def casted_new([fragment | _]) when not is_binary(fragment) do - {:error, "First argument to `fragment` must be a string."} - end - - def casted_new([fragment | rest]) do - split = split_fragment(fragment) - - if Enum.count(split, &(&1 == :slot)) != length(rest) do - {:error, - "fragment(...) expects extra arguments in the same amount of question marks in string. 
" <> - "It received #{Enum.count(split, &(&1 == :slot))} extra argument(s) but expected #{length(rest)}"} - else - {:ok, %__MODULE__{arguments: merge_fragment(split, rest, :casted_expr)}} - end - end - - defp merge_fragment(expr, args, tag \\ :expr) - defp merge_fragment([], [], _tag), do: [] - - defp merge_fragment([:slot | rest], [arg | rest_args], tag) do - [{tag, arg} | merge_fragment(rest, rest_args, tag)] - end - - defp merge_fragment([val | rest], rest_args, tag) do - [{:raw, val} | merge_fragment(rest, rest_args, tag)] - end - - defp split_fragment(frag, consumed \\ "") - - defp split_fragment(<<>>, consumed), - do: [consumed] - - defp split_fragment(<>, consumed), - do: [consumed, :slot | split_fragment(rest, "")] - - defp split_fragment(<>, consumed), - do: split_fragment(rest, consumed <> <>) - - defp split_fragment(<>, consumed), - do: split_fragment(rest, consumed <> <>) -end diff --git a/lib/join.ex b/lib/join.ex deleted file mode 100644 index dafdb86..0000000 --- a/lib/join.ex +++ /dev/null @@ -1,734 +0,0 @@ -defmodule AshSqlite.Join do - @moduledoc false - import Ecto.Query, only: [from: 2] - - alias Ash.Query.{BooleanExpression, Not, Ref} - - @known_inner_join_operators [ - Eq, - GreaterThan, - GreaterThanOrEqual, - In, - LessThanOrEqual, - LessThan, - NotEq - ] - |> Enum.map(&Module.concat(Ash.Query.Operator, &1)) - - @known_inner_join_functions [ - Ago, - Contains - ] - |> Enum.map(&Module.concat(Ash.Query.Function, &1)) - - @known_inner_join_predicates @known_inner_join_functions ++ @known_inner_join_operators - - def join_all_relationships( - query, - filter, - opts \\ [], - relationship_paths \\ nil, - path \\ [], - source \\ nil - ) do - relationship_paths = - cond do - relationship_paths -> - relationship_paths - - opts[:no_this?] 
-> - filter - |> Ash.Filter.map(fn - %Ash.Query.Parent{} -> - nil - - other -> - other - end) - |> Ash.Filter.relationship_paths() - |> to_joins(filter) - - true -> - filter - |> Ash.Filter.relationship_paths() - |> to_joins(filter) - end - - Enum.reduce_while(relationship_paths, {:ok, query}, fn - {_join_type, []}, {:ok, query} -> - {:cont, {:ok, query}} - - {join_type, [relationship | rest_rels]}, {:ok, query} -> - source = source || relationship.source - - current_path = path ++ [relationship] - - current_join_type = join_type - - look_for_join_types = - case join_type do - :left -> - [:left, :inner] - - :inner -> - [:left, :inner] - - other -> - [other] - end - - case get_binding(source, Enum.map(current_path, & &1.name), query, look_for_join_types) do - binding when is_integer(binding) -> - case join_all_relationships( - query, - filter, - opts, - [{join_type, rest_rels}], - current_path, - source - ) do - {:ok, query} -> - {:cont, {:ok, query}} - - {:error, error} -> - {:halt, {:error, error}} - end - - nil -> - case join_relationship( - query, - relationship, - Enum.map(path, & &1.name), - current_join_type, - source, - filter - ) do - {:ok, joined_query} -> - joined_query_with_distinct = add_distinct(relationship, join_type, joined_query) - - case join_all_relationships( - joined_query_with_distinct, - filter, - opts, - [{join_type, rest_rels}], - current_path, - source - ) do - {:ok, query} -> - {:cont, {:ok, query}} - - {:error, error} -> - {:halt, {:error, error}} - end - - {:error, error} -> - {:halt, {:error, error}} - end - end - end) - end - - defp to_joins(paths, filter) do - paths - |> Enum.map(fn path -> - if can_inner_join?(path, filter) do - {:inner, - AshSqlite.Join.relationship_path_to_relationships( - filter.resource, - path - )} - else - {:left, - AshSqlite.Join.relationship_path_to_relationships( - filter.resource, - path - )} - end - end) - end - - def relationship_path_to_relationships(resource, path, acc \\ []) - def relationship_path_to_relationships(_resource, [], acc), do: Enum.reverse(acc) - - def relationship_path_to_relationships(resource, [relationship | rest], acc) do - relationship = Ash.Resource.Info.relationship(resource, relationship) - - relationship_path_to_relationships(relationship.destination, rest, [relationship | acc]) - end - - def maybe_get_resource_query( - resource, - relationship, - root_query, - path \\ [], - bindings \\ nil, - start_binding \\ nil, - is_subquery? \\ true - ) do - resource - |> Ash.Query.new(nil, base_filter?: false) - |> Ash.Query.set_context(%{data_layer: %{start_bindings_at: start_binding}}) - |> Ash.Query.set_context((bindings || root_query.__ash_bindings__).context) - |> Ash.Query.set_context(relationship.context) - |> case do - %{valid?: true} = query -> - ash_query = query - - initial_query = AshSqlite.DataLayer.resource_to_query(resource, nil) - - case Ash.Query.data_layer_query(query, - initial_query: initial_query - ) do - {:ok, query} -> - query = - query - |> do_base_filter( - root_query, - ash_query, - resource, - path, - bindings - ) - |> do_relationship_filter( - relationship, - root_query, - ash_query, - resource, - path, - bindings, - is_subquery? - ) - - {:ok, query} - - {:error, error} -> - {:error, error} - end - - query -> - {:error, query} - end - end - - defp do_relationship_filter(query, %{filter: nil}, _, _, _, _, _, _), do: query - - defp do_relationship_filter( - query, - relationship, - root_query, - ash_query, - resource, - path, - bindings, - is_subquery? 
- ) do - context = - ash_query.context - |> Map.update( - :parent_stack, - [relationship.source], - &[&1 | relationship.source] - ) - |> Map.put(:resource, relationship.destination) - - filter = - resource - |> Ash.Filter.parse!( - relationship.filter, - %{}, - context - ) - - {:ok, filter} = Ash.Filter.hydrate_refs(filter, context) - - base_bindings = bindings || query.__ash_bindings__ - - parent_binding = - case :lists.droplast(path) do - [] -> - base_bindings.bindings - |> Enum.find_value(fn {key, %{type: type}} -> - if type == :root do - key - end - end) - - path -> - get_binding( - root_query.__ash_bindings__.resource, - path, - %{query | __ash_bindings__: base_bindings}, - [ - :inner, - :left - ] - ) - end - - parent_bindings = %{ - base_bindings - | resource: relationship.source, - calculations: %{}, - parent_resources: [], - context: relationship.context, - current: parent_binding + 1 - } - - parent_bindings = - if bindings do - Map.put(parent_bindings, :parent_is_parent_as?, !is_subquery?) - else - parent_bindings - |> Map.update!(:bindings, &Map.take(&1, [parent_binding])) - end - - has_bindings? = not is_nil(bindings) - - bindings = - base_bindings - |> Map.put(:parent_bindings, parent_bindings) - |> Map.put(:parent_resources, [ - relationship.source | parent_bindings[:parent_resources] || [] - ]) - - dynamic = - if has_bindings? do - filter = - if is_subquery? do - Ash.Filter.move_to_relationship_path(filter, path) - else - filter - end - - AshSqlite.Expr.dynamic_expr(root_query, filter, bindings, true) - else - AshSqlite.Expr.dynamic_expr(query, filter, bindings, true) - end - - {:ok, query} = join_all_relationships(query, filter) - from(row in query, where: ^dynamic) - end - - defp do_base_filter(query, root_query, ash_query, resource, path, bindings) do - case Ash.Resource.Info.base_filter(resource) do - nil -> - query - - filter -> - filter = - resource - |> Ash.Filter.parse!( - filter, - ash_query.calculations, - ash_query.context - ) - - dynamic = - if bindings do - filter = Ash.Filter.move_to_relationship_path(filter, path) - - AshSqlite.Expr.dynamic_expr(root_query, filter, bindings, true) - else - AshSqlite.Expr.dynamic_expr(query, filter, query.__ash_bindings__, true) - end - - from(row in query, where: ^dynamic) - end - end - - defp can_inner_join?(path, expr, seen_an_or? \\ false) - - defp can_inner_join?(path, %{expression: expr}, seen_an_or?), - do: can_inner_join?(path, expr, seen_an_or?) - - defp can_inner_join?(_path, expr, _seen_an_or?) when expr in [nil, true, false], do: true - - defp can_inner_join?(path, %BooleanExpression{op: :and, left: left, right: right}, seen_an_or?) do - can_inner_join?(path, left, seen_an_or?) || can_inner_join?(path, right, seen_an_or?) - end - - defp can_inner_join?(path, %BooleanExpression{op: :or, left: left, right: right}, _) do - can_inner_join?(path, left, true) && can_inner_join?(path, right, true) - end - - defp can_inner_join?( - _, - %Not{}, - _ - ) do - false - end - - defp can_inner_join?( - search_path, - %struct{__operator__?: true, left: %Ref{relationship_path: relationship_path}}, - seen_an_or? - ) - when search_path == relationship_path and struct in @known_inner_join_predicates do - not seen_an_or? - end - - defp can_inner_join?( - search_path, - %struct{__operator__?: true, right: %Ref{relationship_path: relationship_path}}, - seen_an_or? - ) - when search_path == relationship_path and struct in @known_inner_join_predicates do - not seen_an_or? 
- end - - defp can_inner_join?( - search_path, - %struct{__function__?: true, arguments: arguments}, - seen_an_or? - ) - when struct in @known_inner_join_predicates do - if Enum.any?(arguments, &match?(%Ref{relationship_path: ^search_path}, &1)) do - not seen_an_or? - else - true - end - end - - defp can_inner_join?(_, _, _), do: false - - @doc false - def get_binding(resource, candidate_path, %{__ash_bindings__: _} = query, types) do - types = List.wrap(types) - - Enum.find_value(query.__ash_bindings__.bindings, fn - {binding, %{path: path, source: source, type: type}} -> - if type in types && - Ash.SatSolver.synonymous_relationship_paths?(resource, path, candidate_path, source) do - binding - end - - _ -> - nil - end) - end - - def get_binding(_, _, _, _), do: nil - - defp add_distinct(_relationship, _join_type, joined_query) do - # We can't do the same distincting that we do in ash_postgres - # This means that all filters that reference `has_many` relationships need - # to be rewritten to use `exists`, which will allow us to not need to do any distincting. - # in fact, we probably want to do that in `ash_postgres` automatically too? - # if !joined_query.__ash_bindings__.in_group? && - # (relationship.cardinality == :many || Map.get(relationship, :from_many?)) && - # !joined_query.distinct do - # from(row in joined_query, - # distinct: - # ^AshSqlite.DataLayer.unwrap_one( - # Ash.Resource.Info.primary_key(joined_query.__ash_bindings__.resource) - # ) - # ) - # else - joined_query - # end - end - - defp join_relationship( - query, - relationship, - path, - join_type, - source, - filter - ) do - case Map.get(query.__ash_bindings__.bindings, path) do - %{type: existing_join_type} when join_type != existing_join_type -> - raise "unreachable?" - - nil -> - do_join_relationship( - query, - relationship, - path, - join_type, - source, - filter - ) - - _ -> - {:ok, query} - end - end - - defp do_join_relationship( - query, - %{manual: {module, opts}} = relationship, - path, - kind, - source, - _filter - ) do - full_path = path ++ [relationship.name] - initial_ash_bindings = query.__ash_bindings__ - - binding_data = %{type: kind, path: full_path, source: source} - - query = AshSqlite.DataLayer.add_binding(query, binding_data) - - root_bindings = query.__ash_bindings__ - - case maybe_get_resource_query( - relationship.destination, - relationship, - query, - full_path, - root_bindings - ) do - {:error, error} -> - {:error, error} - - {:ok, relationship_destination} -> - relationship_destination = - relationship_destination - |> Ecto.Queryable.to_query() - - binding_kinds = - case kind do - :left -> - [:left, :inner] - - :inner -> - [:left, :inner] - - other -> - [other] - end - - current_binding = - Enum.find_value(initial_ash_bindings.bindings, 0, fn {binding, data} -> - if data.type in binding_kinds && data.path == path do - binding - end - end) - - module.ash_sqlite_join( - query, - opts, - current_binding, - initial_ash_bindings.current, - kind, - relationship_destination - ) - end - rescue - e in UndefinedFunctionError -> - if e.function == :ash_sqlite_join do - reraise """ - AshSqlite cannot join to a manual relationship #{inspect(module)} that does not implement the `AshSqlite.ManualRelationship` behaviour. 
- """, - __STACKTRACE__ - else - reraise e, __STACKTRACE__ - end - end - - defp do_join_relationship( - query, - %{type: :many_to_many} = relationship, - path, - kind, - source, - _filter - ) do - join_relationship = - Ash.Resource.Info.relationship(relationship.source, relationship.join_relationship) - - join_path = path ++ [join_relationship.name] - - full_path = path ++ [relationship.name] - - initial_ash_bindings = query.__ash_bindings__ - - binding_data = %{type: kind, path: full_path, source: source} - - query = - query - |> AshSqlite.DataLayer.add_binding(%{ - path: join_path, - type: :left, - source: source - }) - |> AshSqlite.DataLayer.add_binding(binding_data) - - root_bindings = query.__ash_bindings__ - - with {:ok, relationship_through} <- - maybe_get_resource_query( - relationship.through, - join_relationship, - query, - join_path, - root_bindings - ), - {:ok, relationship_destination} <- - maybe_get_resource_query( - relationship.destination, - relationship, - query, - path, - root_bindings - ) do - relationship_through = - relationship_through - |> Ecto.Queryable.to_query() - - relationship_destination = - relationship_destination - |> Ecto.Queryable.to_query() - - binding_kinds = - case kind do - :left -> - [:left, :inner] - - :inner -> - [:left, :inner] - - other -> - [other] - end - - current_binding = - Enum.find_value(initial_ash_bindings.bindings, 0, fn {binding, data} -> - if data.type in binding_kinds && data.path == path do - binding - end - end) - - query = - case kind do - :inner -> - from([{row, current_binding}] in query, - join: through in ^relationship_through, - as: ^initial_ash_bindings.current, - on: - field(row, ^relationship.source_attribute) == - field(through, ^relationship.source_attribute_on_join_resource), - join: destination in ^relationship_destination, - as: ^(initial_ash_bindings.current + 1), - on: - field(destination, ^relationship.destination_attribute) == - field(through, ^relationship.destination_attribute_on_join_resource) - ) - - _ -> - from([{row, current_binding}] in query, - left_join: through in ^relationship_through, - as: ^initial_ash_bindings.current, - on: - field(row, ^relationship.source_attribute) == - field(through, ^relationship.source_attribute_on_join_resource), - left_join: destination in ^relationship_destination, - as: ^(initial_ash_bindings.current + 1), - on: - field(destination, ^relationship.destination_attribute) == - field(through, ^relationship.destination_attribute_on_join_resource) - ) - end - - {:ok, query} - end - end - - defp do_join_relationship( - query, - relationship, - path, - kind, - source, - _filter - ) do - full_path = path ++ [relationship.name] - initial_ash_bindings = query.__ash_bindings__ - - binding_data = %{type: kind, path: full_path, source: source} - - query = AshSqlite.DataLayer.add_binding(query, binding_data) - - root_bindings = query.__ash_bindings__ - - case maybe_get_resource_query( - relationship.destination, - relationship, - query, - full_path, - root_bindings - ) do - {:error, error} -> - {:error, error} - - {:ok, relationship_destination} -> - relationship_destination = - relationship_destination - |> Ecto.Queryable.to_query() - - binding_kinds = - case kind do - :left -> - [:left, :inner] - - :inner -> - [:left, :inner] - - other -> - [other] - end - - current_binding = - Enum.find_value(initial_ash_bindings.bindings, 0, fn {binding, data} -> - if data.type in binding_kinds && data.path == path do - binding - end - end) - - query = - case {kind, Map.get(relationship, 
:no_attributes?)} do - {:inner, true} -> - from([{row, current_binding}] in query, - join: destination in ^relationship_destination, - as: ^initial_ash_bindings.current, - on: true - ) - - {_, true} -> - from([{row, current_binding}] in query, - left_join: destination in ^relationship_destination, - as: ^initial_ash_bindings.current, - on: true - ) - - {:inner, _} -> - from([{row, current_binding}] in query, - join: destination in ^relationship_destination, - as: ^initial_ash_bindings.current, - on: - field(row, ^relationship.source_attribute) == - field( - destination, - ^relationship.destination_attribute - ) - ) - - _ -> - from([{row, current_binding}] in query, - left_join: destination in ^relationship_destination, - as: ^initial_ash_bindings.current, - on: - field(row, ^relationship.source_attribute) == - field( - destination, - ^relationship.destination_attribute - ) - ) - end - - {:ok, query} - end - end -end diff --git a/lib/migration_generator/migration_generator.ex b/lib/migration_generator/migration_generator.ex index e184a69..8179048 100644 --- a/lib/migration_generator/migration_generator.ex +++ b/lib/migration_generator/migration_generator.ex @@ -19,11 +19,11 @@ defmodule AshSqlite.MigrationGenerator do check: false, drop_columns: false - def generate(apis, opts \\ []) do - apis = List.wrap(apis) + def generate(domains, opts \\ []) do + domains = List.wrap(domains) opts = opts(opts) - all_resources = Enum.uniq(Enum.flat_map(apis, &Ash.Api.Info.resources/1)) + all_resources = Enum.uniq(Enum.flat_map(domains, &Ash.Domain.Info.resources/1)) snapshots = all_resources @@ -49,8 +49,8 @@ defmodule AshSqlite.MigrationGenerator do Does not support everything supported by the migration generator. """ - def take_snapshots(api, repo, only_resources \\ nil) do - all_resources = api |> Ash.Api.Info.resources() |> Enum.uniq() + def take_snapshots(domain, repo, only_resources \\ nil) do + all_resources = domain |> Ash.Domain.Info.resources() |> Enum.uniq() all_resources |> Enum.filter(fn resource -> @@ -408,10 +408,7 @@ defmodule AshSqlite.MigrationGenerator do attributes = Enum.flat_map(snapshots, & &1.attributes) - count_with_create = - snapshots - |> Enum.filter(& &1.has_create_action) - |> Enum.count() + count_with_create = Enum.count(snapshots, & &1.has_create_action) new_snapshot = %{ snapshot @@ -2035,7 +2032,7 @@ defmodule AshSqlite.MigrationGenerator do defp has_create_action?(resource) do resource |> Ash.Resource.Info.actions() - |> Enum.any?(&(&1.type == :create)) + |> Enum.any?(&(&1.type == :create && !&1.manual)) end defp custom_indexes(resource) do diff --git a/lib/mix/helpers.ex b/lib/mix/helpers.ex index 6a5a2f0..7852109 100644 --- a/lib/mix/helpers.ex +++ b/lib/mix/helpers.ex @@ -1,6 +1,6 @@ defmodule AshSqlite.MixHelpers do @moduledoc false - def apis!(opts, args) do + def domains!(opts, args) do apps = if apps_paths = Mix.Project.apps_paths() do apps_paths |> Map.keys() |> Enum.sort() @@ -8,46 +8,46 @@ defmodule AshSqlite.MixHelpers do [Mix.Project.config()[:app]] end - configured_apis = Enum.flat_map(apps, &Application.get_env(&1, :ash_apis, [])) + configured_domains = Enum.flat_map(apps, &Application.get_env(&1, :ash_domains, [])) - apis = - if opts[:apis] && opts[:apis] != "" do - opts[:apis] + domains = + if opts[:domains] && opts[:domains] != "" do + opts[:domains] |> Kernel.||("") |> String.split(",") |> Enum.flat_map(fn "" -> [] - api -> - [Module.concat([api])] + domain -> + [Module.concat([domain])] end) else - configured_apis + configured_domains end - apis + 
domains |> Enum.map(&ensure_compiled(&1, args)) |> case do [] -> - raise "must supply the --apis argument, or set `config :my_app, ash_apis: [...]` in config" + raise "must supply the --domains argument, or set `config :my_app, ash_domains: [...]` in config" - apis -> - apis + domains -> + domains end end def repos!(opts, args) do - apis = apis!(opts, args) + domains = domains!(opts, args) resources = - apis - |> Enum.flat_map(&Ash.Api.Info.resources/1) + domains + |> Enum.flat_map(&Ash.Domain.Info.resources/1) |> Enum.filter(&(Ash.DataLayer.data_layer(&1) == AshSqlite.DataLayer)) |> case do [] -> raise """ - No resources with `data_layer: AshSqlite.DataLayer` found in the apis #{Enum.map_join(apis, ",", &inspect/1)}. + No resources with `data_layer: AshSqlite.DataLayer` found in the domains #{Enum.map_join(domains, ",", &inspect/1)}. Must be able to find at least one resource with `data_layer: AshSqlite.DataLayer`. """ @@ -62,7 +62,7 @@ defmodule AshSqlite.MixHelpers do |> case do [] -> raise """ - No repos could be found configured on the resources in the apis: #{Enum.map_join(apis, ",", &inspect/1)} + No repos could be found configured on the resources in the domains: #{Enum.map_join(domains, ",", &inspect/1)} At least one resource must have a repo configured. @@ -96,7 +96,7 @@ defmodule AshSqlite.MixHelpers do end end - defp ensure_compiled(api, args) do + defp ensure_compiled(domain, args) do if Code.ensure_loaded?(Mix.Tasks.App.Config) do Mix.Task.run("app.config", args) else @@ -104,18 +104,18 @@ defmodule AshSqlite.MixHelpers do "--no-compile" not in args && Mix.Task.run("compile", args) end - case Code.ensure_compiled(api) do + case Code.ensure_compiled(domain) do {:module, _} -> - api - |> Ash.Api.Info.resources() + domain + |> Ash.Domain.Info.resources() |> Enum.each(&Code.ensure_compiled/1) # TODO: We shouldn't need to make sure that the resources are compiled - api + domain {:error, error} -> - Mix.raise("Could not load #{inspect(api)}, error: #{inspect(error)}. ") + Mix.raise("Could not load #{inspect(domain)}, error: #{inspect(error)}. ") end end diff --git a/lib/mix/tasks/ash_sqlite.create.ex b/lib/mix/tasks/ash_sqlite.create.ex index 03bef97..cf9d499 100644 --- a/lib/mix/tasks/ash_sqlite.create.ex +++ b/lib/mix/tasks/ash_sqlite.create.ex @@ -5,7 +5,7 @@ defmodule Mix.Tasks.AshSqlite.Create do @switches [ quiet: :boolean, - apis: :string, + domains: :string, no_compile: :boolean, no_deps_check: :boolean ] @@ -15,16 +15,16 @@ defmodule Mix.Tasks.AshSqlite.Create do ] @moduledoc """ - Create the storage for repos in all resources for the given (or configured) apis. + Create the storage for repos in all resources for the given (or configured) domains. ## Examples mix ash_sqlite.create - mix ash_sqlite.create --apis MyApp.Api1,MyApp.Api2 + mix ash_sqlite.create --domains MyApp.Domain1,MyApp.Domain2 ## Command line options - * `--apis` - the apis who's repos you want to migrate. + * `--domains` - the domains who's repos you want to migrate. 
* `--quiet` - do not log output * `--no-compile` - do not compile before creating * `--no-deps-check` - do not compile before creating @@ -41,7 +41,7 @@ defmodule Mix.Tasks.AshSqlite.Create do ["-r", to_string(repo)] end) - rest_opts = AshSqlite.MixHelpers.delete_arg(args, "--apis") + rest_opts = AshSqlite.MixHelpers.delete_arg(args, "--domains") Mix.Task.reenable("ecto.create") diff --git a/lib/mix/tasks/ash_sqlite.drop.ex b/lib/mix/tasks/ash_sqlite.drop.ex index 392a6c1..9c9fb59 100644 --- a/lib/mix/tasks/ash_sqlite.drop.ex +++ b/lib/mix/tasks/ash_sqlite.drop.ex @@ -1,7 +1,7 @@ defmodule Mix.Tasks.AshSqlite.Drop do use Mix.Task - @shortdoc "Drops the repository storage for the repos in the specified (or configured) apis" + @shortdoc "Drops the repository storage for the repos in the specified (or configured) domains" @default_opts [force: false, force_drop: false] @aliases [ @@ -13,7 +13,7 @@ defmodule Mix.Tasks.AshSqlite.Drop do force: :boolean, force_drop: :boolean, quiet: :boolean, - apis: :string, + domains: :string, no_compile: :boolean, no_deps_check: :boolean ] @@ -24,11 +24,11 @@ defmodule Mix.Tasks.AshSqlite.Drop do ## Examples mix ash_sqlite.drop - mix ash_sqlite.drop -r MyApp.Api1,MyApp.Api2 + mix ash_sqlite.drop -r MyApp.Domain1,MyApp.Domain2 ## Command line options - * `--apis` - the apis who's repos should be dropped + * `--doains` - the domains who's repos should be dropped * `-q`, `--quiet` - run the command quietly * `-f`, `--force` - do not ask for confirmation when dropping the database. Configuration is asked only when `:start_permanent` is set to true @@ -49,7 +49,7 @@ defmodule Mix.Tasks.AshSqlite.Drop do ["-r", to_string(repo)] end) - rest_opts = AshSqlite.MixHelpers.delete_arg(args, "--apis") + rest_opts = AshSqlite.MixHelpers.delete_arg(args, "--domains") Mix.Task.reenable("ecto.drop") diff --git a/lib/mix/tasks/ash_sqlite.generate_migrations.ex b/lib/mix/tasks/ash_sqlite.generate_migrations.ex index 0c3de9e..9cd1332 100644 --- a/lib/mix/tasks/ash_sqlite.generate_migrations.ex +++ b/lib/mix/tasks/ash_sqlite.generate_migrations.ex @@ -4,7 +4,7 @@ defmodule Mix.Tasks.AshSqlite.GenerateMigrations do Options: - * `apis` - a comma separated list of API modules, for which migrations will be generated + * `domains` - a comma separated list of domain modules, for which migrations will be generated * `snapshot-path` - a custom path to store the snapshots, defaults to "priv/resource_snapshots" * `migration-path` - a custom path to store the migrations, defaults to "priv". 
Migrations are stored in a folder for each repo, so `priv/repo_name/migrations` @@ -71,7 +71,7 @@ defmodule Mix.Tasks.AshSqlite.GenerateMigrations do {opts, _} = OptionParser.parse!(args, strict: [ - apis: :string, + domains: :string, snapshot_path: :string, migration_path: :string, quiet: :boolean, @@ -83,13 +83,13 @@ defmodule Mix.Tasks.AshSqlite.GenerateMigrations do ] ) - apis = AshSqlite.MixHelpers.apis!(opts, args) + domains = AshSqlite.MixHelpers.domains!(opts, args) opts = opts |> Keyword.put(:format, !opts[:no_format]) |> Keyword.delete(:no_format) - AshSqlite.MigrationGenerator.generate(apis, opts) + AshSqlite.MigrationGenerator.generate(domains, opts) end end diff --git a/lib/mix/tasks/ash_sqlite.migrate.ex b/lib/mix/tasks/ash_sqlite.migrate.ex index d3d8638..000643b 100644 --- a/lib/mix/tasks/ash_sqlite.migrate.ex +++ b/lib/mix/tasks/ash_sqlite.migrate.ex @@ -4,7 +4,7 @@ defmodule Mix.Tasks.AshSqlite.Migrate do import AshSqlite.MixHelpers, only: [migrations_path: 2] - @shortdoc "Runs the repository migrations for all repositories in the provided (or congigured) apis" + @shortdoc "Runs the repository migrations for all repositories in the provided (or congigured) domains" @aliases [ n: :step @@ -18,7 +18,7 @@ defmodule Mix.Tasks.AshSqlite.Migrate do pool_size: :integer, log_sql: :boolean, strict_version_order: :boolean, - apis: :string, + domains: :string, no_compile: :boolean, no_deps_check: :boolean, migrations_path: :keep @@ -37,7 +37,7 @@ defmodule Mix.Tasks.AshSqlite.Migrate do specific version number, supply `--to version_number`. To migrate a specific number of times, use `--step n`. - This is only really useful if your api or apis only use a single repo. + This is only really useful if your domain or domains only use a single repo. If you have multiple repos and you want to run a single migration and/or migrate/roll them back to different points, you will need to use the ecto specific task, `mix ecto.migrate` and provide your repo name. @@ -48,7 +48,7 @@ defmodule Mix.Tasks.AshSqlite.Migrate do ## Examples mix ash_sqlite.migrate - mix ash_sqlite.migrate --apis MyApp.Api1,MyApp.Api2 + mix ash_sqlite.migrate --domains MyApp.Domain1,MyApp.Domain2 mix ash_sqlite.migrate -n 3 mix ash_sqlite.migrate --step 3 @@ -57,7 +57,7 @@ defmodule Mix.Tasks.AshSqlite.Migrate do ## Command line options - * `--apis` - the apis who's repos should be migrated + * `--domains` - the domains who's repos should be migrated * `--all` - run all pending migrations @@ -99,7 +99,7 @@ defmodule Mix.Tasks.AshSqlite.Migrate do rest_opts = args - |> AshSqlite.MixHelpers.delete_arg("--apis") + |> AshSqlite.MixHelpers.delete_arg("--domains") |> AshSqlite.MixHelpers.delete_arg("--migrations-path") Mix.Task.reenable("ecto.migrate") diff --git a/lib/mix/tasks/ash_sqlite.rollback.ex b/lib/mix/tasks/ash_sqlite.rollback.ex index f1758af..756e59c 100644 --- a/lib/mix/tasks/ash_sqlite.rollback.ex +++ b/lib/mix/tasks/ash_sqlite.rollback.ex @@ -4,7 +4,7 @@ defmodule Mix.Tasks.AshSqlite.Rollback do import AshSqlite.MixHelpers, only: [migrations_path: 2] - @shortdoc "Rolls back the repository migrations for all repositories in the provided (or configured) apis" + @shortdoc "Rolls back the repository migrations for all repositories in the provided (or configured) domains" @moduledoc """ Reverts applied migrations in the given repository. @@ -16,7 +16,7 @@ defmodule Mix.Tasks.AshSqlite.Rollback do specific number of times, use `--step n`. To undo all applied migrations, provide `--all`. 
- This is only really useful if your api or apis only use a single repo. + This is only really useful if your domain or domains only use a single repo. If you have multiple repos and you want to run a single migration and/or migrate/roll them back to different points, you will need to use the ecto specific task, `mix ecto.migrate` and provide your repo name. @@ -30,7 +30,7 @@ defmodule Mix.Tasks.AshSqlite.Rollback do mix ash_sqlite.rollback --to 20080906120000 ## Command line options - * `--apis` - the apis who's repos should be rolledback + * `--domains` - the domains who's repos should be rolledback * `--all` - revert all applied migrations * `--step` / `-n` - revert n number of applied migrations * `--to` / `-v` - revert all migrations down to and including version @@ -64,7 +64,7 @@ defmodule Mix.Tasks.AshSqlite.Rollback do rest_opts = args - |> AshSqlite.MixHelpers.delete_arg("--apis") + |> AshSqlite.MixHelpers.delete_arg("--domains") |> AshSqlite.MixHelpers.delete_arg("--migrations-path") Mix.Task.reenable("ecto.rollback") diff --git a/lib/sort.ex b/lib/sort.ex deleted file mode 100644 index ec69940..0000000 --- a/lib/sort.ex +++ /dev/null @@ -1,161 +0,0 @@ -defmodule AshSqlite.Sort do - @moduledoc false - require Ecto.Query - - def sort( - query, - sort, - resource, - relationship_path \\ [], - binding \\ 0, - return_order_by? \\ false - ) do - query = AshSqlite.DataLayer.default_bindings(query, resource) - - calcs = - Enum.flat_map(sort, fn - {%Ash.Query.Calculation{} = calculation, _} -> - [calculation] - - _ -> - [] - end) - - {:ok, query} = - AshSqlite.Join.join_all_relationships( - query, - %Ash.Filter{ - resource: resource, - expression: calcs - }, - left_only?: true - ) - - sort - |> sanitize_sort() - |> Enum.reduce_while({:ok, []}, fn - {order, %Ash.Query.Calculation{} = calc}, {:ok, query_expr} -> - type = - if calc.type do - AshSqlite.Types.parameterized_type(calc.type, calc.constraints) - else - nil - end - - calc.opts - |> calc.module.expression(calc.context) - |> Ash.Filter.hydrate_refs(%{ - resource: resource, - public?: false - }) - |> Ash.Filter.move_to_relationship_path(relationship_path) - |> case do - {:ok, expr} -> - expr = - AshSqlite.Expr.dynamic_expr(query, expr, query.__ash_bindings__, false, type) - - {:cont, {:ok, query_expr ++ [{order, expr}]}} - - {:error, error} -> - {:halt, {:error, error}} - end - - {order, sort}, {:ok, query_expr} -> - expr = - Ecto.Query.dynamic(field(as(^binding), ^sort)) - - {:cont, {:ok, query_expr ++ [{order, expr}]}} - end) - |> case do - {:ok, []} -> - if return_order_by? do - {:ok, order_to_fragments([])} - else - {:ok, query} - end - - {:ok, sort_exprs} -> - if return_order_by? do - {:ok, order_to_fragments(sort_exprs)} - else - new_query = Ecto.Query.order_by(query, ^sort_exprs) - - sort_expr = List.last(new_query.order_bys) - - new_query = - new_query - |> Map.update!(:windows, fn windows -> - order_by_expr = %{sort_expr | expr: [order_by: sort_expr.expr]} - Keyword.put(windows, :order, order_by_expr) - end) - |> Map.update!(:__ash_bindings__, &Map.put(&1, :__order__?, true)) - - {:ok, new_query} - end - - {:error, error} -> - {:error, error} - end - end - - def order_to_fragments([]), do: [] - - def order_to_fragments(order) when is_list(order) do - Enum.map(order, &do_order_to_fragments(&1)) - end - - def do_order_to_fragments({order, sort}) do - case order do - :asc -> - Ecto.Query.dynamic([row], fragment("? ASC", ^sort)) - - :desc -> - Ecto.Query.dynamic([row], fragment("? 
DESC", ^sort)) - - :asc_nulls_last -> - Ecto.Query.dynamic([row], fragment("? ASC NULLS LAST", ^sort)) - - :asc_nulls_first -> - Ecto.Query.dynamic([row], fragment("? ASC NULLS FIRST", ^sort)) - - :desc_nulls_first -> - Ecto.Query.dynamic([row], fragment("? DESC NULLS FIRST", ^sort)) - - :desc_nulls_last -> - Ecto.Query.dynamic([row], fragment("? DESC NULLS LAST", ^sort)) - "DESC NULLS LAST" - end - end - - def order_to_postgres_order(dir) do - case dir do - :asc -> nil - :asc_nils_last -> " ASC NULLS LAST" - :asc_nils_first -> " ASC NULLS FIRST" - :desc -> " DESC" - :desc_nils_last -> " DESC NULLS LAST" - :desc_nils_first -> " DESC NULLS FIRST" - end - end - - defp sanitize_sort(sort) do - sort - |> List.wrap() - |> Enum.map(fn - {sort, {order, context}} -> - {ash_to_ecto_order(order), {sort, context}} - - {sort, order} -> - {ash_to_ecto_order(order), sort} - - sort -> - sort - end) - end - - defp ash_to_ecto_order(:asc_nils_last), do: :asc_nulls_last - defp ash_to_ecto_order(:asc_nils_first), do: :asc_nulls_first - defp ash_to_ecto_order(:desc_nils_last), do: :desc_nulls_last - defp ash_to_ecto_order(:desc_nils_first), do: :desc_nulls_first - defp ash_to_ecto_order(other), do: other -end diff --git a/lib/sql_implementation.ex b/lib/sql_implementation.ex new file mode 100644 index 0000000..86ebda6 --- /dev/null +++ b/lib/sql_implementation.ex @@ -0,0 +1,443 @@ +defmodule AshSqlite.SqlImplementation do + @moduledoc false + use AshSql.Implementation + + require Ecto.Query + + @impl true + def manual_relationship_function, do: :ash_sqlite_join + + @impl true + def manual_relationship_subquery_function, do: :ash_sqlite_subquery + + @impl true + def expr( + query, + %like{arguments: [arg1, arg2], embedded?: pred_embedded?}, + bindings, + embedded?, + acc, + type + ) + when like in [AshSqlite.Functions.Like, AshSqlite.Functions.ILike] do + {arg1, acc} = + AshSql.Expr.dynamic_expr(query, arg1, bindings, pred_embedded? || embedded?, :string, acc) + + {arg2, acc} = + AshSql.Expr.dynamic_expr(query, arg2, bindings, pred_embedded? 
|| embedded?, :string, acc)
+
+    inner_dyn =
+      if like == AshSqlite.Functions.Like do
+        Ecto.Query.dynamic(like(^arg1, ^arg2))
+      else
+        Ecto.Query.dynamic(like(fragment("LOWER(?)", ^arg1), fragment("LOWER(?)", ^arg2)))
+      end
+
+    if type != Ash.Type.Boolean do
+      {:ok, inner_dyn, acc}
+    else
+      {:ok, Ecto.Query.dynamic(type(^inner_dyn, ^type)), acc}
+    end
+  end
+
+  def expr(
+        query,
+        %Ash.Query.Function.GetPath{
+          arguments: [%Ash.Query.Ref{attribute: %{type: type}}, right]
+        } = get_path,
+        bindings,
+        embedded?,
+        acc,
+        nil
+      )
+      when is_atom(type) and is_list(right) do
+    if Ash.Type.embedded_type?(type) do
+      type = determine_type_at_path(type, right)
+
+      do_get_path(query, get_path, bindings, embedded?, acc, type)
+    else
+      do_get_path(query, get_path, bindings, embedded?, acc)
+    end
+  end
+
+  def expr(
+        query,
+        %Ash.Query.Function.GetPath{
+          arguments: [%Ash.Query.Ref{attribute: %{type: {:array, type}}}, right]
+        } = get_path,
+        bindings,
+        embedded?,
+        acc,
+        nil
+      )
+      when is_atom(type) and is_list(right) do
+    if Ash.Type.embedded_type?(type) do
+      type = determine_type_at_path(type, right)
+      do_get_path(query, get_path, bindings, embedded?, acc, type)
+    else
+      do_get_path(query, get_path, bindings, embedded?, acc)
+    end
+  end
+
+  def expr(
+        query,
+        %Ash.Query.Function.GetPath{} = get_path,
+        bindings,
+        embedded?,
+        acc,
+        type
+      ) do
+    do_get_path(query, get_path, bindings, embedded?, acc, type)
+  end
+
+  @impl true
+  def expr(
+        _query,
+        _expr,
+        _bindings,
+        _embedded?,
+        _acc,
+        _type
+      ) do
+    :error
+  end
+
+  @impl true
+  def type_expr(expr, nil), do: expr
+
+  def type_expr(expr, type) when is_atom(type) do
+    type = Ash.Type.get_type(type)
+
+    cond do
+      !Ash.Type.ash_type?(type) ->
+        Ecto.Query.dynamic(type(^expr, ^type))
+
+      Ash.Type.storage_type(type, []) == :ci_string ->
+        Ecto.Query.dynamic(fragment("(? COLLATE NOCASE)", ^expr))
+
+      true ->
+        Ecto.Query.dynamic(type(^expr, ^Ash.Type.storage_type(type, [])))
+    end
+  end
+
+  def type_expr(expr, type) do
+    case type do
+      {:parameterized, inner_type, constraints} ->
+        if inner_type.type(constraints) == :ci_string do
+          Ecto.Query.dynamic(fragment("(? COLLATE NOCASE)", ^expr))
+        else
+          Ecto.Query.dynamic(type(^expr, ^type))
+        end
+
+      nil ->
+        expr
+
+      type ->
+        Ecto.Query.dynamic(type(^expr, ^type))
+    end
+  end
+
+  @impl true
+  def table(resource) do
+    AshSqlite.DataLayer.Info.table(resource)
+  end
+
+  @impl true
+  def schema(_resource) do
+    nil
+  end
+
+  @impl true
+  def repo(resource, _kind) do
+    AshSqlite.DataLayer.Info.repo(resource)
+  end
+
+  @impl true
+  def multicolumn_distinct?, do: false
+
+  @impl true
+  def parameterized_type(type, constraints, no_maps? \\ false)
+
+  def parameterized_type({:parameterized, _, _} = type, _, _) do
+    type
+  end
+
+  def parameterized_type({:in, type}, constraints, no_maps?) do
+    parameterized_type({:array, type}, constraints, no_maps?)
+  end
+
+  def parameterized_type({:array, type}, constraints, no_maps?) do
+    case parameterized_type(type, constraints[:items] || [], no_maps?) do
+      nil ->
+        nil
+
+      type ->
+        {:array, type}
+    end
+  end
+
+  def parameterized_type(type, _constraints, _no_maps?)
+      when type in [Ash.Type.Map, Ash.Type.Map.EctoType],
+      do: nil
+
+  def parameterized_type(type, constraints, no_maps?) do
+    if Ash.Type.ash_type?(type) do
+      cast_in_query? =
+        if function_exported?(Ash.Type, :cast_in_query?, 2) do
+          Ash.Type.cast_in_query?(type, constraints)
+        else
+          Ash.Type.cast_in_query?(type)
+        end
+
+      if cast_in_query? do
+        parameterized_type(Ash.Type.ecto_type(type), constraints, no_maps?)
+      else
+        nil
+      end
+    else
+      if is_atom(type) && :erlang.function_exported(type, :type, 1) do
+        {:parameterized, type, constraints || []}
+      else
+        type
+      end
+    end
+  end
+
+  @impl true
+  def determine_types(mod, values) do
+    Code.ensure_compiled(mod)
+
+    cond do
+      :erlang.function_exported(mod, :types, 0) ->
+        mod.types()
+
+      :erlang.function_exported(mod, :args, 0) ->
+        mod.args()
+
+      true ->
+        [:any]
+    end
+    |> Enum.map(fn types ->
+      case types do
+        :same ->
+          types =
+            for _ <- values do
+              :same
+            end
+
+          closest_fitting_type(types, values)
+
+        :any ->
+          for _ <- values do
+            :any
+          end
+
+        types ->
+          closest_fitting_type(types, values)
+      end
+    end)
+    |> Enum.filter(fn types ->
+      Enum.all?(types, &(vagueness(&1) == 0))
+    end)
+    |> case do
+      [type] ->
+        if type == :any || type == {:in, :any} do
+          nil
+        else
+          type
+        end
+
+      # There are things we could likely do here
+      # We only say "we know what types these are" when we explicitly know
+      _ ->
+        Enum.map(values, fn _ -> nil end)
+    end
+  end
+
+  defp closest_fitting_type(types, values) do
+    types_with_values = Enum.zip(types, values)
+
+    types_with_values
+    |> fill_in_known_types()
+    |> clarify_types()
+  end
+
+  defp clarify_types(types) do
+    basis =
+      types
+      |> Enum.map(&elem(&1, 0))
+      |> Enum.min_by(&vagueness(&1))
+
+    Enum.map(types, fn {type, _value} ->
+      replace_same(type, basis)
+    end)
+  end
+
+  defp replace_same({:in, type}, basis) do
+    {:in, replace_same(type, basis)}
+  end
+
+  defp replace_same(:same, :same) do
+    :any
+  end
+
+  defp replace_same(:same, {:in, :same}) do
+    {:in, :any}
+  end
+
+  defp replace_same(:same, basis) do
+    basis
+  end
+
+  defp replace_same(other, _basis) do
+    other
+  end
+
+  defp fill_in_known_types(types) do
+    Enum.map(types, &fill_in_known_type/1)
+  end
+
+  defp fill_in_known_type(
+         {vague_type, %Ash.Query.Ref{attribute: %{type: type, constraints: constraints}}} = ref
+       )
+       when vague_type in [:any, :same] do
+    if Ash.Type.ash_type?(type) do
+      type = type |> parameterized_type(constraints, true) |> array_to_in()
+
+      {type || :any, ref}
+    else
+      type =
+        if is_atom(type) && :erlang.function_exported(type, :type, 1) do
+          {:parameterized, type, []} |> array_to_in()
+        else
+          type |> array_to_in()
+        end
+
+      {type, ref}
+    end
+  end
+
+  defp fill_in_known_type(
+         {{:array, type}, %Ash.Query.Ref{attribute: %{type: {:array, type}} = attribute} = ref}
+       ) do
+    {:in, fill_in_known_type({type, %{ref | attribute: %{attribute | type: type}}})}
+  end
+
+  defp fill_in_known_type({type, value}), do: {array_to_in(type), value}
+
+  defp array_to_in({:array, v}), do: {:in, array_to_in(v)}
+
+  defp array_to_in({:parameterized, type, constraints}),
+    do: {:parameterized, array_to_in(type), constraints}
+
+  defp array_to_in(v), do: v
+
+  defp vagueness({:in, type}), do: vagueness(type)
+  defp vagueness(:same), do: 2
+  defp vagueness(:any), do: 1
+  defp vagueness(_), do: 0
+
+  defp do_get_path(
+         query,
+         %Ash.Query.Function.GetPath{arguments: [left, right], embedded?: pred_embedded?},
+         bindings,
+         embedded?,
+         acc,
+         type \\ nil
+       ) do
+    path = "$." <> Enum.join(right, ".")
+
+    {expr, acc} =
+      AshSql.Expr.dynamic_expr(
+        query,
+        %Ash.Query.Function.Fragment{
+          embedded?: pred_embedded?,
+          arguments: [
+            raw: "json_extract(",
+            expr: left,
+            raw: ", ",
+            expr: path,
+            raw: ")"
+          ]
+        },
+        bindings,
+        embedded?,
+        type,
+        acc
+      )
+
+    if type do
+      {expr, acc} =
+        AshSql.Expr.dynamic_expr(
+          query,
+          %Ash.Query.Function.Type{arguments: [expr, type, []]},
+          bindings,
+          embedded?,
+          type,
+          acc
+        )
+
+      {:ok, expr, acc}
+    else
+      {:ok, expr, acc}
+    end
+  end
+
+  defp determine_type_at_path(type, path) do
+    path
+    |> Enum.reject(&is_integer/1)
+    |> do_determine_type_at_path(type)
+    |> case do
+      nil ->
+        nil
+
+      {type, constraints} ->
+        AshSqlite.Types.parameterized_type(type, constraints)
+    end
+  end
+
+  defp do_determine_type_at_path([], _), do: nil
+
+  defp do_determine_type_at_path([item], type) do
+    case Ash.Resource.Info.attribute(type, item) do
+      nil ->
+        nil
+
+      %{type: {:array, type}, constraints: constraints} ->
+        constraints = constraints[:items] || []
+
+        {type, constraints}
+
+      %{type: type, constraints: constraints} ->
+        {type, constraints}
+    end
+  end
+
+  defp do_determine_type_at_path([item | rest], type) do
+    case Ash.Resource.Info.attribute(type, item) do
+      nil ->
+        nil
+
+      %{type: {:array, type}} ->
+        if Ash.Type.embedded_type?(type) do
+          type
+        else
+          nil
+        end
+
+      %{type: type} ->
+        if Ash.Type.embedded_type?(type) do
+          type
+        else
+          nil
+        end
+    end
+    |> case do
+      nil ->
+        nil
+
+      type ->
+        do_determine_type_at_path(rest, type)
+    end
+  end
+end
diff --git a/lib/types/ci_string_wrapper.ex b/lib/types/ci_string_wrapper.ex
deleted file mode 100644
index 9e363b6..0000000
--- a/lib/types/ci_string_wrapper.ex
+++ /dev/null
@@ -1,14 +0,0 @@
-defmodule AshSqlite.Type.CiStringWrapper do
-  @moduledoc false
-  use Ash.Type
-
-  @impl true
-  def storage_type(_), do: :ci_string
-
-  @impl true
-  defdelegate cast_input(value, constraints), to: Ash.Type.CiString
-  @impl true
-  defdelegate cast_stored(value, constraints), to: Ash.Type.CiString
-  @impl true
-  defdelegate dump_to_native(value, constraints), to: Ash.Type.CiString
-end
diff --git a/lib/types/string_wrapper.ex b/lib/types/string_wrapper.ex
deleted file mode 100644
index cb1faaf..0000000
--- a/lib/types/string_wrapper.ex
+++ /dev/null
@@ -1,14 +0,0 @@
-defmodule AshSqlite.Type.StringWrapper do
-  @moduledoc false
-  use Ash.Type
-
-  @impl true
-  def storage_type(_), do: :string
-
-  @impl true
-  defdelegate cast_input(value, constraints), to: Ash.Type.String
-  @impl true
-  defdelegate cast_stored(value, constraints), to: Ash.Type.String
-  @impl true
-  defdelegate dump_to_native(value, constraints), to: Ash.Type.String
-end
diff --git a/mix.exs b/mix.exs
index 5f77a49..fe6fd9c 100644
--- a/mix.exs
+++ b/mix.exs
@@ -169,10 +169,11 @@ defmodule AshSqlite.MixProject do
   defp deps do
     [
       {:ecto_sql, "~> 3.9"},
-      {:ecto_sqlite3, "~> 0.12.0"},
+      {:ecto_sqlite3, "~> 0.12"},
+      {:ash_sql, "~> 0.1.0-rc.2"},
       {:ecto, "~> 3.9"},
       {:jason, "~> 1.0"},
-      {:ash, ash_version("~> 2.15 and >= 2.15.12")},
+      {:ash, ash_version("~> 3.0.0-rc.0")},
       {:git_ops, "~> 2.5", only: [:dev, :test]},
       {:ex_doc, "~> 0.22", only: [:dev, :test], runtime: false},
       {:ex_check, "~> 0.14", only: [:dev, :test]},
diff --git a/mix.lock b/mix.lock
index 099c305..5eb6b74 100644
--- a/mix.lock
+++ b/mix.lock
@@ -1,46 +1,39 @@
 %{
-  "ash": {:hex, :ash, "2.15.15", "8649aad00ba93a6e8792889f27f36954376745dde600c739bc180054d6a76469", [:mix], [{:comparable, "~> 1.0", [hex: :comparable, repo: "hexpm", optional: false]}, {:decimal, "~> 2.0", [hex:
:decimal, repo: "hexpm", optional: false]}, {:earmark, "~> 1.4", [hex: :earmark, repo: "hexpm", optional: false]}, {:ecto, "~> 3.7", [hex: :ecto, repo: "hexpm", optional: false]}, {:ets, "~> 0.8", [hex: :ets, repo: "hexpm", optional: false]}, {:jason, ">= 1.0.0", [hex: :jason, repo: "hexpm", optional: false]}, {:picosat_elixir, "~> 0.2", [hex: :picosat_elixir, repo: "hexpm", optional: false]}, {:plug, ">= 0.0.0", [hex: :plug, repo: "hexpm", optional: true]}, {:spark, ">= 1.1.20 and < 2.0.0-0", [hex: :spark, repo: "hexpm", optional: false]}, {:stream_data, "~> 0.6", [hex: :stream_data, repo: "hexpm", optional: false]}, {:telemetry, "~> 1.1", [hex: :telemetry, repo: "hexpm", optional: false]}], "hexpm", "4da1105adcc4991841889a238ea7475f74923c36d465f886db862333cb54ecb0"}, - "bunt": {:hex, :bunt, "0.2.0", "951c6e801e8b1d2cbe58ebbd3e616a869061ddadcc4863d0a2182541acae9a38", [:mix], [], "hexpm", "7af5c7e09fe1d40f76c8e4f9dd2be7cebd83909f31fee7cd0e9eadc567da8353"}, - "cc_precompiler": {:hex, :cc_precompiler, "0.1.8", "933a5f4da3b19ee56539a076076ce4d7716d64efc8db46fd066996a7e46e2bfd", [:mix], [{:elixir_make, "~> 0.7.3", [hex: :elixir_make, repo: "hexpm", optional: false]}], "hexpm", "176bdf4366956e456bf761b54ad70bc4103d0269ca9558fd7cee93d1b3f116db"}, - "certifi": {:hex, :certifi, "2.9.0", "6f2a475689dd47f19fb74334859d460a2dc4e3252a3324bd2111b8f0429e7e21", [:rebar3], [], "hexpm", "266da46bdb06d6c6d35fde799bcb28d36d985d424ad7c08b5bb48f5b5cdd4641"}, + "ash": {:hex, :ash, "3.0.0-rc.6", "78d9bc068a0c632e4fe2db8a8802f772c65329c8bc15877ceb6eb2ac83e1fa8b", [:mix], [{:comparable, "~> 1.0", [hex: :comparable, repo: "hexpm", optional: false]}, {:decimal, "~> 2.0", [hex: :decimal, repo: "hexpm", optional: false]}, {:ecto, "~> 3.7", [hex: :ecto, repo: "hexpm", optional: false]}, {:ets, "~> 0.8", [hex: :ets, repo: "hexpm", optional: false]}, {:jason, ">= 1.0.0", [hex: :jason, repo: "hexpm", optional: false]}, {:picosat_elixir, "~> 0.2", [hex: :picosat_elixir, repo: "hexpm", optional: true]}, {:plug, ">= 0.0.0", [hex: :plug, repo: "hexpm", optional: true]}, {:reactor, "~> 0.8", [hex: :reactor, repo: "hexpm", optional: false]}, {:simple_sat, ">= 0.1.1 and < 1.0.0-0", [hex: :simple_sat, repo: "hexpm", optional: true]}, {:spark, ">= 2.1.7 and < 3.0.0-0", [hex: :spark, repo: "hexpm", optional: false]}, {:splode, "~> 0.2", [hex: :splode, repo: "hexpm", optional: false]}, {:stream_data, "~> 0.6", [hex: :stream_data, repo: "hexpm", optional: false]}, {:telemetry, "~> 1.1", [hex: :telemetry, repo: "hexpm", optional: false]}], "hexpm", "3e0ccc857572d10972868886aff46f9b1d11c90f8b357f85f2887e71f702e916"}, + "ash_sql": {:hex, :ash_sql, "0.1.1-rc.2", "281e036180ea069c24239ea051fd6551708c21a0690b099acb326d3d7005302e", [:mix], [{:ash, "~> 3.0.0-rc.0", [hex: :ash, repo: "hexpm", optional: false]}, {:ecto, "~> 3.9", [hex: :ecto, repo: "hexpm", optional: false]}, {:ecto_sql, "~> 3.9", [hex: :ecto_sql, repo: "hexpm", optional: false]}], "hexpm", "76a21857b8d823ee47732c20746830732be9a005c72b11db6bd8e203e459a11c"}, + "bunt": {:hex, :bunt, "1.0.0", "081c2c665f086849e6d57900292b3a161727ab40431219529f13c4ddcf3e7a44", [:mix], [], "hexpm", "dc5f86aa08a5f6fa6b8096f0735c4e76d54ae5c9fa2c143e5a1fc7c1cd9bb6b5"}, + "cc_precompiler": {:hex, :cc_precompiler, "0.1.10", "47c9c08d8869cf09b41da36538f62bc1abd3e19e41701c2cea2675b53c704258", [:mix], [{:elixir_make, "~> 0.7", [hex: :elixir_make, repo: "hexpm", optional: false]}], "hexpm", "f6e046254e53cd6b41c6bacd70ae728011aa82b2742a80d6e2214855c6e06b22"}, "comparable": {:hex, :comparable, "1.0.0", 
"bb669e91cedd14ae9937053e5bcbc3c52bb2f22422611f43b6e38367d94a495f", [:mix], [{:typable, "~> 0.1", [hex: :typable, repo: "hexpm", optional: false]}], "hexpm", "277c11eeb1cd726e7cd41c6c199e7e52fa16ee6830b45ad4cdc62e51f62eb60c"}, - "credo": {:hex, :credo, "1.6.4", "ddd474afb6e8c240313f3a7b0d025cc3213f0d171879429bf8535d7021d9ad78", [:mix], [{:bunt, "~> 0.2.0", [hex: :bunt, repo: "hexpm", optional: false]}, {:file_system, "~> 0.2.8", [hex: :file_system, repo: "hexpm", optional: false]}, {:jason, "~> 1.0", [hex: :jason, repo: "hexpm", optional: false]}], "hexpm", "c28f910b61e1ff829bffa056ef7293a8db50e87f2c57a9b5c3f57eee124536b7"}, - "db_connection": {:hex, :db_connection, "2.5.0", "bb6d4f30d35ded97b29fe80d8bd6f928a1912ca1ff110831edcd238a1973652c", [:mix], [{:telemetry, "~> 0.4 or ~> 1.0", [hex: :telemetry, repo: "hexpm", optional: false]}], "hexpm", "c92d5ba26cd69ead1ff7582dbb860adeedfff39774105a4f1c92cbb654b55aa2"}, + "credo": {:hex, :credo, "1.7.5", "643213503b1c766ec0496d828c90c424471ea54da77c8a168c725686377b9545", [:mix], [{:bunt, "~> 0.2.1 or ~> 1.0", [hex: :bunt, repo: "hexpm", optional: false]}, {:file_system, "~> 0.2 or ~> 1.0", [hex: :file_system, repo: "hexpm", optional: false]}, {:jason, "~> 1.0", [hex: :jason, repo: "hexpm", optional: false]}], "hexpm", "f799e9b5cd1891577d8c773d245668aa74a2fcd15eb277f51a0131690ebfb3fd"}, + "db_connection": {:hex, :db_connection, "2.6.0", "77d835c472b5b67fc4f29556dee74bf511bbafecdcaf98c27d27fa5918152086", [:mix], [{:telemetry, "~> 0.4 or ~> 1.0", [hex: :telemetry, repo: "hexpm", optional: false]}], "hexpm", "c2f992d15725e721ec7fbc1189d4ecdb8afef76648c746a8e1cad35e3b8a35f3"}, "decimal": {:hex, :decimal, "2.1.1", "5611dca5d4b2c3dd497dec8f68751f1f1a54755e8ed2a966c2633cf885973ad6", [:mix], [], "hexpm", "53cfe5f497ed0e7771ae1a475575603d77425099ba5faef9394932b35020ffcc"}, - "dialyxir": {:hex, :dialyxir, "1.1.0", "c5aab0d6e71e5522e77beff7ba9e08f8e02bad90dfbeffae60eaf0cb47e29488", [:mix], [{:erlex, ">= 0.2.6", [hex: :erlex, repo: "hexpm", optional: false]}], "hexpm", "07ea8e49c45f15264ebe6d5b93799d4dd56a44036cf42d0ad9c960bc266c0b9a"}, - "earmark": {:hex, :earmark, "1.4.46", "8c7287bd3137e99d26ae4643e5b7ef2129a260e3dcf41f251750cb4563c8fb81", [:mix], [], "hexpm", "798d86db3d79964e759ddc0c077d5eb254968ed426399fbf5a62de2b5ff8910a"}, - "earmark_parser": {:hex, :earmark_parser, "1.4.35", "437773ca9384edf69830e26e9e7b2e0d22d2596c4a6b17094a3b29f01ea65bb8", [:mix], [], "hexpm", "8652ba3cb85608d0d7aa2d21b45c6fad4ddc9a1f9a1f1b30ca3a246f0acc33f6"}, - "ecto": {:hex, :ecto, "3.10.3", "eb2ae2eecd210b4eb8bece1217b297ad4ff824b4384c0e3fdd28aaf96edd6135", [:mix], [{:decimal, "~> 1.6 or ~> 2.0", [hex: :decimal, repo: "hexpm", optional: false]}, {:jason, "~> 1.0", [hex: :jason, repo: "hexpm", optional: true]}, {:telemetry, "~> 0.4 or ~> 1.0", [hex: :telemetry, repo: "hexpm", optional: false]}], "hexpm", "44bec74e2364d491d70f7e42cd0d690922659d329f6465e89feb8a34e8cd3433"}, - "ecto_sql": {:hex, :ecto_sql, "3.10.2", "6b98b46534b5c2f8b8b5f03f126e75e2a73c64f3c071149d32987a5378b0fdbd", [:mix], [{:db_connection, "~> 2.4.1 or ~> 2.5", [hex: :db_connection, repo: "hexpm", optional: false]}, {:ecto, "~> 3.10.0", [hex: :ecto, repo: "hexpm", optional: false]}, {:myxql, "~> 0.6.0", [hex: :myxql, repo: "hexpm", optional: true]}, {:postgrex, "~> 0.16.0 or ~> 0.17.0 or ~> 1.0", [hex: :postgrex, repo: "hexpm", optional: true]}, {:tds, "~> 2.1.1 or ~> 2.2", [hex: :tds, repo: "hexpm", optional: true]}, {:telemetry, "~> 0.4.0 or ~> 1.0", [hex: :telemetry, repo: "hexpm", optional: false]}], "hexpm", 
"68c018debca57cb9235e3889affdaec7a10616a4e3a80c99fa1d01fdafaa9007"}, - "ecto_sqlite3": {:hex, :ecto_sqlite3, "0.12.0", "9ee845ac45a76e3c5c0fe65898f3538f5b0969912a95f0beef3d4ae8e63f6a06", [:mix], [{:decimal, "~> 1.6 or ~> 2.0", [hex: :decimal, repo: "hexpm", optional: false]}, {:ecto, "~> 3.10", [hex: :ecto, repo: "hexpm", optional: false]}, {:ecto_sql, "~> 3.10", [hex: :ecto_sql, repo: "hexpm", optional: false]}, {:exqlite, "~> 0.9", [hex: :exqlite, repo: "hexpm", optional: false]}], "hexpm", "4eaf8550df1fd0043bcf039a5dce407fd8afc30a115ced173fe6b9815eeedb55"}, - "elixir_make": {:hex, :elixir_make, "0.7.7", "7128c60c2476019ed978210c245badf08b03dbec4f24d05790ef791da11aa17c", [:mix], [{:castore, "~> 0.1 or ~> 1.0", [hex: :castore, repo: "hexpm", optional: true]}], "hexpm", "5bc19fff950fad52bbe5f211b12db9ec82c6b34a9647da0c2224b8b8464c7e6c"}, + "dialyxir": {:hex, :dialyxir, "1.4.3", "edd0124f358f0b9e95bfe53a9fcf806d615d8f838e2202a9f430d59566b6b53b", [:mix], [{:erlex, ">= 0.2.6", [hex: :erlex, repo: "hexpm", optional: false]}], "hexpm", "bf2cfb75cd5c5006bec30141b131663299c661a864ec7fbbc72dfa557487a986"}, + "earmark_parser": {:hex, :earmark_parser, "1.4.39", "424642f8335b05bb9eb611aa1564c148a8ee35c9c8a8bba6e129d51a3e3c6769", [:mix], [], "hexpm", "06553a88d1f1846da9ef066b87b57c6f605552cfbe40d20bd8d59cc6bde41944"}, + "ecto": {:hex, :ecto, "3.11.2", "e1d26be989db350a633667c5cda9c3d115ae779b66da567c68c80cfb26a8c9ee", [:mix], [{:decimal, "~> 2.0", [hex: :decimal, repo: "hexpm", optional: false]}, {:jason, "~> 1.0", [hex: :jason, repo: "hexpm", optional: true]}, {:telemetry, "~> 0.4 or ~> 1.0", [hex: :telemetry, repo: "hexpm", optional: false]}], "hexpm", "3c38bca2c6f8d8023f2145326cc8a80100c3ffe4dcbd9842ff867f7fc6156c65"}, + "ecto_sql": {:hex, :ecto_sql, "3.11.1", "e9abf28ae27ef3916b43545f9578b4750956ccea444853606472089e7d169470", [:mix], [{:db_connection, "~> 2.4.1 or ~> 2.5", [hex: :db_connection, repo: "hexpm", optional: false]}, {:ecto, "~> 3.11.0", [hex: :ecto, repo: "hexpm", optional: false]}, {:myxql, "~> 0.6.0", [hex: :myxql, repo: "hexpm", optional: true]}, {:postgrex, "~> 0.16.0 or ~> 0.17.0 or ~> 1.0", [hex: :postgrex, repo: "hexpm", optional: true]}, {:tds, "~> 2.1.1 or ~> 2.2", [hex: :tds, repo: "hexpm", optional: true]}, {:telemetry, "~> 0.4.0 or ~> 1.0", [hex: :telemetry, repo: "hexpm", optional: false]}], "hexpm", "ce14063ab3514424276e7e360108ad6c2308f6d88164a076aac8a387e1fea634"}, + "ecto_sqlite3": {:hex, :ecto_sqlite3, "0.15.1", "40f2fbd9e246455f8c42e7e0a77009ef806caa1b3ce6f717b2a0a80e8432fcfd", [:mix], [{:decimal, "~> 1.6 or ~> 2.0", [hex: :decimal, repo: "hexpm", optional: false]}, {:ecto, "~> 3.11", [hex: :ecto, repo: "hexpm", optional: false]}, {:ecto_sql, "~> 3.11", [hex: :ecto_sql, repo: "hexpm", optional: false]}, {:exqlite, "~> 0.19", [hex: :exqlite, repo: "hexpm", optional: false]}], "hexpm", "28b16e177123c688948357176662bf9ff9084daddf950ef5b6baf3ee93707064"}, + "elixir_make": {:hex, :elixir_make, "0.8.3", "d38d7ee1578d722d89b4d452a3e36bcfdc644c618f0d063b874661876e708683", [:mix], [{:castore, "~> 0.1 or ~> 1.0", [hex: :castore, repo: "hexpm", optional: true]}, {:certifi, "~> 2.0", [hex: :certifi, repo: "hexpm", optional: true]}], "hexpm", "5c99a18571a756d4af7a4d89ca75c28ac899e6103af6f223982f09ce44942cc9"}, "erlex": {:hex, :erlex, "0.2.6", "c7987d15e899c7a2f34f5420d2a2ea0d659682c06ac607572df55a43753aa12e", [:mix], [], "hexpm", "2ed2e25711feb44d52b17d2780eabf998452f6efda104877a3881c2f8c0c0c75"}, "ets": {:hex, :ets, "0.9.0", 
"79c6a6c205436780486f72d84230c6cba2f8a9920456750ddd1e47389107d5fd", [:mix], [], "hexpm", "2861fdfb04bcaeff370f1a5904eec864f0a56dcfebe5921ea9aadf2a481c822b"}, - "ex_check": {:hex, :ex_check, "0.14.0", "d6fbe0bcc51cf38fea276f5bc2af0c9ae0a2bb059f602f8de88709421dae4f0e", [:mix], [], "hexpm", "8a602e98c66e6a4be3a639321f1f545292042f290f91fa942a285888c6868af0"}, - "ex_doc": {:hex, :ex_doc, "0.30.6", "5f8b54854b240a2b55c9734c4b1d0dd7bdd41f71a095d42a70445c03cf05a281", [:mix], [{:earmark_parser, "~> 1.4.31", [hex: :earmark_parser, repo: "hexpm", optional: false]}, {:makeup_elixir, "~> 0.14", [hex: :makeup_elixir, repo: "hexpm", optional: false]}, {:makeup_erlang, "~> 0.1", [hex: :makeup_erlang, repo: "hexpm", optional: false]}], "hexpm", "bd48f2ddacf4e482c727f9293d9498e0881597eae6ddc3d9562bd7923375109f"}, - "excoveralls": {:hex, :excoveralls, "0.14.4", "295498f1ae47bdc6dce59af9a585c381e1aefc63298d48172efaaa90c3d251db", [:mix], [{:hackney, "~> 1.16", [hex: :hackney, repo: "hexpm", optional: false]}, {:jason, "~> 1.0", [hex: :jason, repo: "hexpm", optional: false]}], "hexpm", "e3ab02f2df4c1c7a519728a6f0a747e71d7d6e846020aae338173619217931c1"}, - "exqlite": {:hex, :exqlite, "0.14.0", "f275c6fe1ce35d383b4ed52461ca98c02354eeb2c651c13f5b4badcfd39b743f", [:make, :mix], [{:cc_precompiler, "~> 0.1", [hex: :cc_precompiler, repo: "hexpm", optional: false]}, {:db_connection, "~> 2.1", [hex: :db_connection, repo: "hexpm", optional: false]}, {:elixir_make, "~> 0.7", [hex: :elixir_make, repo: "hexpm", optional: false]}, {:table, "~> 0.1.0", [hex: :table, repo: "hexpm", optional: true]}], "hexpm", "e335eca54749d04dcdedcbc87be85e2176030aab3d7b74b6323fda7e3552ee4c"}, - "file_system": {:hex, :file_system, "0.2.10", "fb082005a9cd1711c05b5248710f8826b02d7d1784e7c3451f9c1231d4fc162d", [:mix], [], "hexpm", "41195edbfb562a593726eda3b3e8b103a309b733ad25f3d642ba49696bf715dc"}, + "ex_check": {:hex, :ex_check, "0.16.0", "07615bef493c5b8d12d5119de3914274277299c6483989e52b0f6b8358a26b5f", [:mix], [], "hexpm", "4d809b72a18d405514dda4809257d8e665ae7cf37a7aee3be6b74a34dec310f5"}, + "ex_doc": {:hex, :ex_doc, "0.31.2", "8b06d0a5ac69e1a54df35519c951f1f44a7b7ca9a5bb7a260cd8a174d6322ece", [:mix], [{:earmark_parser, "~> 1.4.39", [hex: :earmark_parser, repo: "hexpm", optional: false]}, {:makeup_c, ">= 0.1.1", [hex: :makeup_c, repo: "hexpm", optional: true]}, {:makeup_elixir, "~> 0.14", [hex: :makeup_elixir, repo: "hexpm", optional: false]}, {:makeup_erlang, "~> 0.1", [hex: :makeup_erlang, repo: "hexpm", optional: false]}], "hexpm", "317346c14febaba9ca40fd97b5b5919f7751fb85d399cc8e7e8872049f37e0af"}, + "excoveralls": {:hex, :excoveralls, "0.18.0", "b92497e69465dc51bc37a6422226ee690ab437e4c06877e836f1c18daeb35da9", [:mix], [{:castore, "~> 1.0", [hex: :castore, repo: "hexpm", optional: true]}, {:jason, "~> 1.0", [hex: :jason, repo: "hexpm", optional: false]}], "hexpm", "1109bb911f3cb583401760be49c02cbbd16aed66ea9509fc5479335d284da60b"}, + "exqlite": {:hex, :exqlite, "0.20.0", "99b711eb1a3309b380ff54901d3d7db8e7afaf4b68a34398a69e1fa1b9b2054e", [:make, :mix], [{:cc_precompiler, "~> 0.1", [hex: :cc_precompiler, repo: "hexpm", optional: false]}, {:db_connection, "~> 2.1", [hex: :db_connection, repo: "hexpm", optional: false]}, {:elixir_make, "~> 0.8", [hex: :elixir_make, repo: "hexpm", optional: false]}, {:table, "~> 0.1.0", [hex: :table, repo: "hexpm", optional: true]}], "hexpm", "385ed37b8317101b7f9b58333910798ebe395e77ee6ca261be74a1a06b3d61f6"}, + "file_system": {:hex, :file_system, "1.0.0", 
"b689cc7dcee665f774de94b5a832e578bd7963c8e637ef940cd44327db7de2cd", [:mix], [], "hexpm", "6752092d66aec5a10e662aefeed8ddb9531d79db0bc145bb8c40325ca1d8536d"}, "git_cli": {:hex, :git_cli, "0.3.0", "a5422f9b95c99483385b976f5d43f7e8233283a47cda13533d7c16131cb14df5", [:mix], [], "hexpm", "78cb952f4c86a41f4d3511f1d3ecb28edb268e3a7df278de2faa1bd4672eaf9b"}, - "git_ops": {:hex, :git_ops, "2.5.5", "4f8369f3c9347e06a7f289de98fadfc95194149156335c5292479a53eddbccd2", [:mix], [{:git_cli, "~> 0.2", [hex: :git_cli, repo: "hexpm", optional: false]}, {:nimble_parsec, "~> 1.0", [hex: :nimble_parsec, repo: "hexpm", optional: false]}], "hexpm", "3b1e3b12968f9da6f79b5e2b2274477206949376e3579d05a5f3d439eda0b746"}, - "hackney": {:hex, :hackney, "1.18.1", "f48bf88f521f2a229fc7bae88cf4f85adc9cd9bcf23b5dc8eb6a1788c662c4f6", [:rebar3], [{:certifi, "~>2.9.0", [hex: :certifi, repo: "hexpm", optional: false]}, {:idna, "~>6.1.0", [hex: :idna, repo: "hexpm", optional: false]}, {:metrics, "~>1.0.0", [hex: :metrics, repo: "hexpm", optional: false]}, {:mimerl, "~>1.1", [hex: :mimerl, repo: "hexpm", optional: false]}, {:parse_trans, "3.3.1", [hex: :parse_trans, repo: "hexpm", optional: false]}, {:ssl_verify_fun, "~>1.1.0", [hex: :ssl_verify_fun, repo: "hexpm", optional: false]}, {:unicode_util_compat, "~>0.7.0", [hex: :unicode_util_compat, repo: "hexpm", optional: false]}], "hexpm", "a4ecdaff44297e9b5894ae499e9a070ea1888c84afdd1fd9b7b2bc384950128e"}, - "idna": {:hex, :idna, "6.1.1", "8a63070e9f7d0c62eb9d9fcb360a7de382448200fbbd1b106cc96d3d8099df8d", [:rebar3], [{:unicode_util_compat, "~>0.7.0", [hex: :unicode_util_compat, repo: "hexpm", optional: false]}], "hexpm", "92376eb7894412ed19ac475e4a86f7b413c1b9fbb5bd16dccd57934157944cea"}, + "git_ops": {:hex, :git_ops, "2.6.0", "e0791ee1cf5db03f2c61b7ebd70e2e95cba2bb9b9793011f26609f22c0900087", [:mix], [{:git_cli, "~> 0.2", [hex: :git_cli, repo: "hexpm", optional: false]}, {:nimble_parsec, "~> 1.0", [hex: :nimble_parsec, repo: "hexpm", optional: false]}], "hexpm", "b98fca849b18aaf490f4ac7d1dd8c6c469b0cc3e6632562d366cab095e666ffe"}, "jason": {:hex, :jason, "1.4.1", "af1504e35f629ddcdd6addb3513c3853991f694921b1b9368b0bd32beb9f1b63", [:mix], [{:decimal, "~> 1.0 or ~> 2.0", [hex: :decimal, repo: "hexpm", optional: true]}], "hexpm", "fbb01ecdfd565b56261302f7e1fcc27c4fb8f32d56eab74db621fc154604a7a1"}, - "makeup": {:hex, :makeup, "1.1.0", "6b67c8bc2882a6b6a445859952a602afc1a41c2e08379ca057c0f525366fc3ca", [:mix], [{:nimble_parsec, "~> 1.2.2 or ~> 1.3", [hex: :nimble_parsec, repo: "hexpm", optional: false]}], "hexpm", "0a45ed501f4a8897f580eabf99a2e5234ea3e75a4373c8a52824f6e873be57a6"}, - "makeup_elixir": {:hex, :makeup_elixir, "0.16.1", "cc9e3ca312f1cfeccc572b37a09980287e243648108384b97ff2b76e505c3555", [:mix], [{:makeup, "~> 1.0", [hex: :makeup, repo: "hexpm", optional: false]}, {:nimble_parsec, "~> 1.2.3 or ~> 1.3", [hex: :nimble_parsec, repo: "hexpm", optional: false]}], "hexpm", "e127a341ad1b209bd80f7bd1620a15693a9908ed780c3b763bccf7d200c767c6"}, - "makeup_erlang": {:hex, :makeup_erlang, "0.1.2", "ad87296a092a46e03b7e9b0be7631ddcf64c790fa68a9ef5323b6cbb36affc72", [:mix], [{:makeup, "~> 1.0", [hex: :makeup, repo: "hexpm", optional: false]}], "hexpm", "f3f5a1ca93ce6e092d92b6d9c049bcda58a3b617a8d888f8e7231c85630e8108"}, - "metrics": {:hex, :metrics, "1.0.1", "25f094dea2cda98213cecc3aeff09e940299d950904393b2a29d191c346a8486", [:rebar3], [], "hexpm", "69b09adddc4f74a40716ae54d140f93beb0fb8978d8636eaded0c31b6f099f16"}, - "mimerl": {:hex, :mimerl, "1.2.0", 
"67e2d3f571088d5cfd3e550c383094b47159f3eee8ffa08e64106cdf5e981be3", [:rebar3], [], "hexpm", "f278585650aa581986264638ebf698f8bb19df297f66ad91b18910dfc6e19323"}, - "nimble_options": {:hex, :nimble_options, "1.0.2", "92098a74df0072ff37d0c12ace58574d26880e522c22801437151a159392270e", [:mix], [], "hexpm", "fd12a8db2021036ce12a309f26f564ec367373265b53e25403f0ee697380f1b8"}, - "nimble_parsec": {:hex, :nimble_parsec, "1.3.1", "2c54013ecf170e249e9291ed0a62e5832f70a476c61da16f6aac6dca0189f2af", [:mix], [], "hexpm", "2682e3c0b2eb58d90c6375fc0cc30bc7be06f365bf72608804fb9cffa5e1b167"}, - "parse_trans": {:hex, :parse_trans, "3.3.1", "16328ab840cc09919bd10dab29e431da3af9e9e7e7e6f0089dd5a2d2820011d8", [:rebar3], [], "hexpm", "07cd9577885f56362d414e8c4c4e6bdf10d43a8767abb92d24cbe8b24c54888b"}, - "picosat_elixir": {:hex, :picosat_elixir, "0.2.3", "bf326d0f179fbb3b706bb2c15fbc367dacfa2517157d090fdfc32edae004c597", [:make, :mix], [{:elixir_make, "~> 0.6", [hex: :elixir_make, repo: "hexpm", optional: false]}], "hexpm", "f76c9db2dec9d2561ffaa9be35f65403d53e984e8cd99c832383b7ab78c16c66"}, - "sobelow": {:hex, :sobelow, "0.11.1", "23438964486f8112b41e743bbfd402da3e5b296fdc9eacab29914b79c48916dd", [:mix], [{:jason, "~> 1.0", [hex: :jason, repo: "hexpm", optional: false]}], "hexpm", "9897363a7eff96f4809304a90aad819e2ad5e5d24db547af502885146746a53c"}, - "sourceror": {:hex, :sourceror, "0.14.0", "b6b8552d0240400d66b6f107c1bab7ac1726e998efc797f178b7b517e928e314", [:mix], [], "hexpm", "809c71270ad48092d40bbe251a133e49ae229433ce103f762a2373b7a10a8d8b"}, - "spark": {:hex, :spark, "1.1.44", "be9f2669b03ae43447bda77045598a4500988538a7d0ba576b8e306332822147", [:mix], [{:jason, "~> 1.4", [hex: :jason, repo: "hexpm", optional: false]}, {:nimble_options, "~> 0.5 or ~> 1.0", [hex: :nimble_options, repo: "hexpm", optional: false]}, {:sourceror, "~> 0.1", [hex: :sourceror, repo: "hexpm", optional: false]}], "hexpm", "e49bf5ca770cb0bb9cac7ed8da5eb7871156b3236c8c535f3f4caa93377059a3"}, - "ssl_verify_fun": {:hex, :ssl_verify_fun, "1.1.7", "354c321cf377240c7b8716899e182ce4890c5938111a1296add3ec74cf1715df", [:make, :mix, :rebar3], [], "hexpm", "fe4c190e8f37401d30167c8c405eda19469f34577987c76dde613e838bbc67f8"}, + "libgraph": {:hex, :libgraph, "0.16.0", "3936f3eca6ef826e08880230f806bfea13193e49bf153f93edcf0239d4fd1d07", [:mix], [], "hexpm", "41ca92240e8a4138c30a7e06466acc709b0cbb795c643e9e17174a178982d6bf"}, + "makeup": {:hex, :makeup, "1.1.1", "fa0bc768698053b2b3869fa8a62616501ff9d11a562f3ce39580d60860c3a55e", [:mix], [{:nimble_parsec, "~> 1.2.2 or ~> 1.3", [hex: :nimble_parsec, repo: "hexpm", optional: false]}], "hexpm", "5dc62fbdd0de44de194898b6710692490be74baa02d9d108bc29f007783b0b48"}, + "makeup_elixir": {:hex, :makeup_elixir, "0.16.2", "627e84b8e8bf22e60a2579dad15067c755531fea049ae26ef1020cad58fe9578", [:mix], [{:makeup, "~> 1.0", [hex: :makeup, repo: "hexpm", optional: false]}, {:nimble_parsec, "~> 1.2.3 or ~> 1.3", [hex: :nimble_parsec, repo: "hexpm", optional: false]}], "hexpm", "41193978704763f6bbe6cc2758b84909e62984c7752b3784bd3c218bb341706b"}, + "makeup_erlang": {:hex, :makeup_erlang, "0.1.5", "e0ff5a7c708dda34311f7522a8758e23bfcd7d8d8068dc312b5eb41c6fd76eba", [:mix], [{:makeup, "~> 1.0", [hex: :makeup, repo: "hexpm", optional: false]}], "hexpm", "94d2e986428585a21516d7d7149781480013c56e30c6a233534bedf38867a59a"}, + "nimble_parsec": {:hex, :nimble_parsec, "1.4.0", "51f9b613ea62cfa97b25ccc2c1b4216e81df970acd8e16e8d1bdc58fef21370d", [:mix], [], "hexpm", "9c565862810fb383e9838c1dd2d7d2c437b3d13b267414ba6af33e50d2d1cf28"}, + 
"reactor": {:hex, :reactor, "0.8.1", "1aec71d16083901277727c8162f6dd0f07e80f5ca98911b6ef4f2c95e6e62758", [:mix], [{:libgraph, "~> 0.16", [hex: :libgraph, repo: "hexpm", optional: false]}, {:spark, "~> 2.0", [hex: :spark, repo: "hexpm", optional: false]}, {:splode, "~> 0.2", [hex: :splode, repo: "hexpm", optional: false]}, {:telemetry, "~> 1.2", [hex: :telemetry, repo: "hexpm", optional: false]}], "hexpm", "ae3936d97a3e4a316744f70c77b85345b08b70da334024c26e6b5eb8ede1246b"}, + "sobelow": {:hex, :sobelow, "0.13.0", "218afe9075904793f5c64b8837cc356e493d88fddde126a463839351870b8d1e", [:mix], [{:jason, "~> 1.0", [hex: :jason, repo: "hexpm", optional: false]}], "hexpm", "cd6e9026b85fc35d7529da14f95e85a078d9dd1907a9097b3ba6ac7ebbe34a0d"}, + "sourceror": {:hex, :sourceror, "1.0.2", "c5e86fdc14881f797749d1fe5df017ca66727a8146e7ee3e736605a3df78f3e6", [:mix], [], "hexpm", "832335e87d0913658f129d58b2a7dc0490ddd4487b02de6d85bca0169ec2bd79"}, + "spark": {:hex, :spark, "2.1.11", "8093149dfd583b5ce2c06e1fea1faaf4125b50e4703138b2cbefb78c8f4aa07f", [:mix], [{:jason, "~> 1.4", [hex: :jason, repo: "hexpm", optional: false]}, {:sourceror, "~> 1.0", [hex: :sourceror, repo: "hexpm", optional: false]}], "hexpm", "1877d92ab993b860e9d828bfd72d50367c0d3a53dd84f4de5d221baf66ae8723"}, + "splode": {:hex, :splode, "0.2.1", "020079ec06c9e00f8b6586852e781b5e07aee6ba588f3f45dd993831c87b0511", [:mix], [], "hexpm", "d232a933666061fe1f659d9906042fa94b9b393bb1129a4fde6fa680033b2611"}, "stream_data": {:hex, :stream_data, "0.6.0", "e87a9a79d7ec23d10ff83eb025141ef4915eeb09d4491f79e52f2562b73e5f47", [:mix], [], "hexpm", "b92b5031b650ca480ced047578f1d57ea6dd563f5b57464ad274718c9c29501c"}, "telemetry": {:hex, :telemetry, "1.2.1", "68fdfe8d8f05a8428483a97d7aab2f268aaff24b49e0f599faa091f1d4e7f61c", [:rebar3], [], "hexpm", "dad9ce9d8effc621708f99eac538ef1cbe05d6a874dd741de2e689c47feafed5"}, "typable": {:hex, :typable, "0.3.0", "0431e121d124cd26f312123e313d2689b9a5322b15add65d424c07779eaa3ca1", [:mix], [], "hexpm", "880a0797752da1a4c508ac48f94711e04c86156f498065a83d160eef945858f8"}, - "unicode_util_compat": {:hex, :unicode_util_compat, "0.7.0", "bc84380c9ab48177092f43ac89e4dfa2c6d62b40b8bd132b1059ecc7232f9a78", [:rebar3], [], "hexpm", "25eee6d67df61960cf6a794239566599b09e17e668d3700247bc498638152521"}, } diff --git a/test/atomics_test.exs b/test/atomics_test.exs index 76dccab..ae1ad57 100644 --- a/test/atomics_test.exs +++ b/test/atomics_test.exs @@ -1,6 +1,6 @@ defmodule AshSqlite.AtomicsTest do use AshSqlite.RepoCase, async: false - alias AshSqlite.Test.{Api, Post} + alias AshSqlite.Test.Post import Ash.Expr @@ -10,40 +10,40 @@ defmodule AshSqlite.AtomicsTest do Post |> Ash.Changeset.for_create(:create, %{id: id, title: "foo", price: 1}, upsert?: true) |> Ash.Changeset.atomic_update(:price, expr(price + 1)) - |> Api.create!() + |> Ash.create!() Post |> Ash.Changeset.for_create(:create, %{id: id, title: "foo", price: 1}, upsert?: true) |> Ash.Changeset.atomic_update(:price, expr(price + 1)) - |> Api.create!() + |> Ash.create!() - assert [%{price: 2}] = Post |> Api.read!() + assert [%{price: 2}] = Post |> Ash.read!() end test "a basic atomic works" do post = Post |> Ash.Changeset.for_create(:create, %{title: "foo", price: 1}) - |> Api.create!() + |> Ash.create!() assert %{price: 2} = post |> Ash.Changeset.for_update(:update, %{}) |> Ash.Changeset.atomic_update(:price, expr(price + 1)) - |> Api.update!() + |> Ash.update!() end test "an atomic that violates a constraint will return the proper error" do post = Post |> 
Ash.Changeset.for_create(:create, %{title: "foo", price: 1}) - |> Api.create!() + |> Ash.create!() assert_raise Ash.Error.Invalid, ~r/does not exist/, fn -> post |> Ash.Changeset.for_update(:update, %{}) |> Ash.Changeset.atomic_update(:organization_id, Ash.UUID.generate()) - |> Api.update!() + |> Ash.update!() end end @@ -51,13 +51,13 @@ defmodule AshSqlite.AtomicsTest do post = Post |> Ash.Changeset.for_create(:create, %{title: "foo", price: 1}) - |> Api.create!() + |> Ash.create!() post = post |> Ash.Changeset.for_update(:update, %{}) |> Ash.Changeset.atomic_update(:score, expr(score_after_winning)) - |> Api.update!() + |> Ash.update!() assert post.score == 1 end @@ -66,7 +66,7 @@ defmodule AshSqlite.AtomicsTest do post = Post |> Ash.Changeset.for_create(:create, %{title: "foo", price: 1}) - |> Api.create!() + |> Ash.create!() assert Post.increment_score!(post, 2).score == 2 diff --git a/test/bulk_create_test.exs b/test/bulk_create_test.exs index a1c4a23..4b6cda6 100644 --- a/test/bulk_create_test.exs +++ b/test/bulk_create_test.exs @@ -1,20 +1,20 @@ defmodule AshSqlite.BulkCreateTest do use AshSqlite.RepoCase, async: false - alias AshSqlite.Test.{Api, Post} + alias AshSqlite.Test.Post describe "bulk creates" do test "bulk creates insert each input" do - Api.bulk_create!([%{title: "fred"}, %{title: "george"}], Post, :create) + Ash.bulk_create!([%{title: "fred"}, %{title: "george"}], Post, :create) assert [%{title: "fred"}, %{title: "george"}] = Post |> Ash.Query.sort(:title) - |> Api.read!() + |> Ash.read!() end test "bulk creates can be streamed" do assert [{:ok, %{title: "fred"}}, {:ok, %{title: "george"}}] = - Api.bulk_create!([%{title: "fred"}, %{title: "george"}], Post, :create, + Ash.bulk_create!([%{title: "fred"}, %{title: "george"}], Post, :create, return_stream?: true, return_records?: true ) @@ -26,7 +26,7 @@ defmodule AshSqlite.BulkCreateTest do {:ok, %{title: "fred", uniq_one: "one", uniq_two: "two", price: 10}}, {:ok, %{title: "george", uniq_one: "three", uniq_two: "four", price: 20}} ] = - Api.bulk_create!( + Ash.bulk_create!( [ %{title: "fred", uniq_one: "one", uniq_two: "two", price: 10}, %{title: "george", uniq_one: "three", uniq_two: "four", price: 20} @@ -42,7 +42,7 @@ defmodule AshSqlite.BulkCreateTest do {:ok, %{title: "fred", uniq_one: "one", uniq_two: "two", price: 1000}}, {:ok, %{title: "george", uniq_one: "three", uniq_two: "four", price: 20_000}} ] = - Api.bulk_create!( + Ash.bulk_create!( [ %{title: "something", uniq_one: "one", uniq_two: "two", price: 1000}, %{title: "else", uniq_one: "three", uniq_two: "four", price: 20_000} @@ -65,7 +65,7 @@ defmodule AshSqlite.BulkCreateTest do end test "bulk creates can create relationships" do - Api.bulk_create!( + Ash.bulk_create!( [%{title: "fred", rating: %{score: 5}}, %{title: "george", rating: %{score: 0}}], Post, :create @@ -78,14 +78,14 @@ defmodule AshSqlite.BulkCreateTest do Post |> Ash.Query.sort(:title) |> Ash.Query.load(:ratings) - |> Api.read!() + |> Ash.read!() end end describe "validation errors" do test "skips invalid by default" do assert %{records: [_], errors: [_]} = - Api.bulk_create!([%{title: "fred"}, %{title: "not allowed"}], Post, :create, + Ash.bulk_create!([%{title: "fred"}, %{title: "not allowed"}], Post, :create, return_records?: true, return_errors?: true ) @@ -93,7 +93,7 @@ defmodule AshSqlite.BulkCreateTest do test "returns errors in the stream" do assert [{:ok, _}, {:error, _}] = - Api.bulk_create!([%{title: "fred"}, %{title: "not allowed"}], Post, :create, + Ash.bulk_create!([%{title: 
"fred"}, %{title: "not allowed"}], Post, :create, return_records?: true, return_stream?: true, return_errors?: true @@ -107,9 +107,9 @@ defmodule AshSqlite.BulkCreateTest do org = AshSqlite.Test.Organization |> Ash.Changeset.for_create(:create, %{name: "foo"}) - |> Api.create!() + |> Ash.create!() - Api.bulk_create( + Ash.bulk_create( [ %{title: "fred", organization_id: org.id}, %{title: "george", organization_id: Ash.UUID.generate()} @@ -122,11 +122,11 @@ defmodule AshSqlite.BulkCreateTest do assert [] = Post |> Ash.Query.sort(:title) - |> Api.read!() + |> Ash.read!() end test "database errors don't affect other batches" do - Api.bulk_create( + Ash.bulk_create( [%{title: "george", organization_id: Ash.UUID.generate()}, %{title: "fred"}], Post, :create, @@ -137,7 +137,7 @@ defmodule AshSqlite.BulkCreateTest do assert [%{title: "fred"}] = Post |> Ash.Query.sort(:title) - |> Api.read!() + |> Ash.read!() end end end diff --git a/test/calculation_test.exs b/test/calculation_test.exs index 894f9fb..996a214 100644 --- a/test/calculation_test.exs +++ b/test/calculation_test.exs @@ -1,6 +1,6 @@ defmodule AshSqlite.CalculationTest do use AshSqlite.RepoCase, async: false - alias AshSqlite.Test.{Account, Api, Author, Comment, Post, User} + alias AshSqlite.Test.{Account, Author, Comment, Post, User} require Ash.Query @@ -8,26 +8,26 @@ defmodule AshSqlite.CalculationTest do author = Author |> Ash.Changeset.for_create(:create, %{bio: %{title: "Mr.", bio: "Bones"}}) - |> Api.create!() + |> Ash.create!() assert %{title: "Mr."} = Author |> Ash.Query.filter(id == ^author.id) |> Ash.Query.load(:title) - |> Api.read_one!() + |> Ash.read_one!() end test "calculations can use the || operator" do author = Author |> Ash.Changeset.for_create(:create, %{bio: %{title: "Mr.", bio: "Bones"}}) - |> Api.create!() + |> Ash.create!() assert %{first_name_or_bob: "bob"} = Author |> Ash.Query.filter(id == ^author.id) |> Ash.Query.load(:first_name_or_bob) - |> Api.read_one!() + |> Ash.read_one!() end test "calculations can use the && operator" do @@ -37,24 +37,24 @@ defmodule AshSqlite.CalculationTest do first_name: "fred", bio: %{title: "Mr.", bio: "Bones"} }) - |> Api.create!() + |> Ash.create!() assert %{first_name_and_bob: "bob"} = Author |> Ash.Query.filter(id == ^author.id) |> Ash.Query.load(:first_name_and_bob) - |> Api.read_one!() + |> Ash.read_one!() end test "concat calculation can be filtered on" do author = Author - |> Ash.Changeset.new(%{first_name: "is", last_name: "match"}) - |> Api.create!() + |> Ash.Changeset.for_create(:create, %{first_name: "is", last_name: "match"}) + |> Ash.create!() Author - |> Ash.Changeset.new(%{first_name: "not", last_name: "match"}) - |> Api.create!() + |> Ash.Changeset.for_create(:create, %{first_name: "not", last_name: "match"}) + |> Ash.create!() author_id = author.id @@ -62,18 +62,18 @@ defmodule AshSqlite.CalculationTest do Author |> Ash.Query.load(:full_name) |> Ash.Query.filter(full_name == "is match") - |> Api.read_one!() + |> Ash.read_one!() end test "conditional calculations can be filtered on" do author = Author - |> Ash.Changeset.new(%{first_name: "tom"}) - |> Api.create!() + |> Ash.Changeset.for_create(:create, %{first_name: "tom"}) + |> Ash.create!() Author - |> Ash.Changeset.new(%{first_name: "tom", last_name: "holland"}) - |> Api.create!() + |> Ash.Changeset.for_create(:create, %{first_name: "tom", last_name: "holland"}) + |> Ash.create!() author_id = author.id @@ -81,45 +81,45 @@ defmodule AshSqlite.CalculationTest do Author |> Ash.Query.load([:conditional_full_name, 
:full_name]) |> Ash.Query.filter(conditional_full_name == "(none)") - |> Api.read_one!() + |> Ash.read_one!() end test "parameterized calculations can be filtered on" do Author - |> Ash.Changeset.new(%{first_name: "tom", last_name: "holland"}) - |> Api.create!() + |> Ash.Changeset.for_create(:create, %{first_name: "tom", last_name: "holland"}) + |> Ash.create!() assert %{param_full_name: "tom holland"} = Author |> Ash.Query.load(:param_full_name) - |> Api.read_one!() + |> Ash.read_one!() assert %{param_full_name: "tom~holland"} = Author |> Ash.Query.load(param_full_name: [separator: "~"]) - |> Api.read_one!() + |> Ash.read_one!() assert %{} = Author |> Ash.Query.filter(param_full_name(separator: "~") == "tom~holland") - |> Api.read_one!() + |> Ash.read_one!() end test "parameterized related calculations can be filtered on" do author = Author - |> Ash.Changeset.new(%{first_name: "tom", last_name: "holland"}) - |> Api.create!() + |> Ash.Changeset.for_create(:create, %{first_name: "tom", last_name: "holland"}) + |> Ash.create!() Comment - |> Ash.Changeset.new(%{title: "match"}) + |> Ash.Changeset.for_create(:create, %{title: "match"}) |> Ash.Changeset.manage_relationship(:author, author, type: :append_and_remove) - |> Api.create!() + |> Ash.create!() assert %{title: "match"} = Comment |> Ash.Query.filter(author.param_full_name(separator: "~") == "tom~holland") - |> Api.read_one!() + |> Ash.read_one!() assert %{title: "match"} = Comment @@ -127,137 +127,94 @@ defmodule AshSqlite.CalculationTest do author.param_full_name(separator: "~") == "tom~holland" and author.param_full_name(separator: " ") == "tom holland" ) - |> Api.read_one!() + |> Ash.read_one!() end test "parameterized calculations can be sorted on" do Author - |> Ash.Changeset.new(%{first_name: "tom", last_name: "holland"}) - |> Api.create!() + |> Ash.Changeset.for_create(:create, %{first_name: "tom", last_name: "holland"}) + |> Ash.create!() Author - |> Ash.Changeset.new(%{first_name: "abc", last_name: "def"}) - |> Api.create!() + |> Ash.Changeset.for_create(:create, %{first_name: "abc", last_name: "def"}) + |> Ash.create!() assert [%{first_name: "abc"}, %{first_name: "tom"}] = Author |> Ash.Query.sort(param_full_name: [separator: "~"]) - |> Api.read!() + |> Ash.read!() end test "calculations using if and literal boolean results can run" do Post |> Ash.Query.load(:was_created_in_the_last_month) |> Ash.Query.filter(was_created_in_the_last_month == true) - |> Api.read!() + |> Ash.read!() end test "nested conditional calculations can be loaded" do Author - |> Ash.Changeset.new(%{last_name: "holland"}) - |> Api.create!() + |> Ash.Changeset.for_create(:create, %{last_name: "holland"}) + |> Ash.create!() Author - |> Ash.Changeset.new(%{first_name: "tom"}) - |> Api.create!() + |> Ash.Changeset.for_create(:create, %{first_name: "tom"}) + |> Ash.create!() assert [%{nested_conditional: "No First Name"}, %{nested_conditional: "No Last Name"}] = Author |> Ash.Query.load(:nested_conditional) |> Ash.Query.sort(:nested_conditional) - |> Api.read!() + |> Ash.read!() end test "loading a calculation loads its dependent loads" do user = User |> Ash.Changeset.for_create(:create, %{is_active: true}) - |> Api.create!() + |> Ash.create!() account = Account |> Ash.Changeset.for_create(:create, %{is_active: true}) |> Ash.Changeset.manage_relationship(:user, user, type: :append_and_remove) - |> Api.create!() - |> Api.load!([:active]) + |> Ash.create!() + |> Ash.load!([:active]) assert account.active end - # describe "string join expression" do - # test "no 
nil values" do - # author = - # Author - # |> Ash.Changeset.for_create(:create, %{ - # first_name: "Bill", - # last_name: "Jones", - # bio: %{title: "Mr.", bio: "Bones"} - # }) - # |> Api.create!() - - # assert %{ - # full_name_with_nils: "Bill Jones", - # full_name_with_nils_no_joiner: "BillJones" - # } = - # Author - # |> Ash.Query.filter(id == ^author.id) - # |> Ash.Query.load(:full_name_with_nils) - # |> Ash.Query.load(:full_name_with_nils_no_joiner) - # |> Api.read_one!() - # end - - # test "with nil value" do - # author = - # Author - # |> Ash.Changeset.for_create(:create, %{ - # first_name: "Bill", - # bio: %{title: "Mr.", bio: "Bones"} - # }) - # |> Api.create!() - - # assert %{ - # full_name_with_nils: "Bill", - # full_name_with_nils_no_joiner: "Bill" - # } = - # Author - # |> Ash.Query.filter(id == ^author.id) - # |> Ash.Query.load(:full_name_with_nils) - # |> Ash.Query.load(:full_name_with_nils_no_joiner) - # |> Api.read_one!() - # end - # end - describe "-/1" do test "makes numbers negative" do Post - |> Ash.Changeset.new(%{title: "match", score: 42}) - |> Api.create!() + |> Ash.Changeset.for_create(:create, %{title: "match", score: 42}) + |> Ash.create!() assert [%{negative_score: -42}] = Post |> Ash.Query.load(:negative_score) - |> Api.read!() + |> Ash.read!() end end describe "maps" do test "maps can be constructed" do Post - |> Ash.Changeset.new(%{title: "match", score: 42}) - |> Api.create!() + |> Ash.Changeset.for_create(:create, %{title: "match", score: 42}) + |> Ash.create!() assert [%{score_map: %{negative_score: %{foo: -42}}}] = Post |> Ash.Query.load(:score_map) - |> Api.read!() + |> Ash.read!() end end test "dependent calc" do post = Post - |> Ash.Changeset.new(%{title: "match", price: 10_024}) - |> Api.create!() + |> Ash.Changeset.for_create(:create, %{title: "match", price: 10_024}) + |> Ash.create!() Post.get_by_id(post.id, query: Post |> Ash.Query.select([:id]) |> Ash.Query.load([:price_string_with_currency_sign]) @@ -267,10 +224,14 @@ defmodule AshSqlite.CalculationTest do test "nested get_path works" do assert "thing" = Post - |> Ash.Changeset.new(%{title: "match", price: 10_024, stuff: %{foo: %{bar: "thing"}}}) + |> Ash.Changeset.for_create(:create, %{ + title: "match", + price: 10_024, + stuff: %{foo: %{bar: "thing"}} + }) |> Ash.Changeset.deselect(:stuff) - |> Api.create!() - |> Api.load!(:foo_bar_from_stuff) + |> Ash.create!() + |> Ash.load!(:foo_bar_from_stuff) |> Map.get(:foo_bar_from_stuff) end @@ -282,19 +243,19 @@ defmodule AshSqlite.CalculationTest do last_name: "Jones", bio: %{title: "Mr.", bio: "Bones"} }) - |> Api.create!() + |> Ash.create!() assert %AshSqlite.Test.Money{} = Post - |> Ash.Changeset.new(%{title: "match", price: 10_024}) + |> Ash.Changeset.for_create(:create, %{title: "match", price: 10_024}) |> Ash.Changeset.manage_relationship(:author, author, type: :append_and_remove) - |> Api.create!() - |> Api.load!(:calc_returning_json) + |> Ash.create!() + |> Ash.load!(:calc_returning_json) |> Map.get(:calc_returning_json) assert [%AshSqlite.Test.Money{}] = author - |> Api.load!(posts: :calc_returning_json) + |> Ash.load!(posts: :calc_returning_json) |> Map.get(:posts) |> Enum.map(&Map.get(&1, :calc_returning_json)) end diff --git a/test/custom_index_test.exs b/test/custom_index_test.exs index 9a28653..d45b0f4 100644 --- a/test/custom_index_test.exs +++ b/test/custom_index_test.exs @@ -1,24 +1,28 @@ defmodule AshSqlite.Test.CustomIndexTest do use AshSqlite.RepoCase, async: false - alias AshSqlite.Test.{Api, Post} + alias AshSqlite.Test.Post 
require Ash.Query test "unique constraint errors are properly caught" do Post - |> Ash.Changeset.new(%{title: "first", uniq_custom_one: "what", uniq_custom_two: "what2"}) - |> Api.create!() + |> Ash.Changeset.for_create(:create, %{ + title: "first", + uniq_custom_one: "what", + uniq_custom_two: "what2" + }) + |> Ash.create!() assert_raise Ash.Error.Invalid, ~r/Invalid value provided for uniq_custom_one: dude what the heck/, fn -> Post - |> Ash.Changeset.new(%{ + |> Ash.Changeset.for_create(:create, %{ title: "first", uniq_custom_one: "what", uniq_custom_two: "what2" }) - |> Api.create!() + |> Ash.create!() end end end diff --git a/test/embeddable_resource_test.exs b/test/embeddable_resource_test.exs index 4385e00..8bb95c3 100644 --- a/test/embeddable_resource_test.exs +++ b/test/embeddable_resource_test.exs @@ -1,33 +1,33 @@ defmodule AshSqlite.EmbeddableResourceTest do @moduledoc false use AshSqlite.RepoCase, async: false - alias AshSqlite.Test.{Api, Author, Bio, Post} + alias AshSqlite.Test.{Author, Bio, Post} require Ash.Query setup do post = Post - |> Ash.Changeset.new(%{title: "title"}) - |> Api.create!() + |> Ash.Changeset.for_create(:create, %{title: "title"}) + |> Ash.create!() %{post: post} end test "calculations can load json", %{post: post} do assert %{calc_returning_json: %AshSqlite.Test.Money{amount: 100, currency: :usd}} = - Api.load!(post, :calc_returning_json) + Ash.load!(post, :calc_returning_json) end test "embeds with list attributes set to nil are loaded as nil" do post = Author - |> Ash.Changeset.new(%{bio: %Bio{list_of_strings: nil}}) - |> Api.create!() + |> Ash.Changeset.for_create(:create, %{bio: %Bio{list_of_strings: nil}}) + |> Ash.create!() assert is_nil(post.bio.list_of_strings) - post = Api.reload!(post) + post = Ash.reload!(post) assert is_nil(post.bio.list_of_strings) end diff --git a/test/enum_test.exs b/test/enum_test.exs index b25cee5..a0cff4b 100644 --- a/test/enum_test.exs +++ b/test/enum_test.exs @@ -1,13 +1,13 @@ defmodule AshSqlite.EnumTest do @moduledoc false use AshSqlite.RepoCase, async: false - alias AshSqlite.Test.{Api, Post} + alias AshSqlite.Test.Post require Ash.Query test "valid values are properly inserted" do Post - |> Ash.Changeset.new(%{title: "title", status: :open}) - |> Api.create!() + |> Ash.Changeset.for_create(:create, %{title: "title", status: :open}) + |> Ash.create!() end end diff --git a/test/filter_test.exs b/test/filter_test.exs index f918d62..6fc8095 100644 --- a/test/filter_test.exs +++ b/test/filter_test.exs @@ -1,20 +1,20 @@ defmodule AshSqlite.FilterTest do use AshSqlite.RepoCase, async: false - alias AshSqlite.Test.{Api, Author, Comment, Post} + alias AshSqlite.Test.{Author, Comment, Post} require Ash.Query describe "with no filter applied" do test "with no data" do - assert [] = Api.read!(Post) + assert [] = Ash.read!(Post) end test "with data" do Post - |> Ash.Changeset.new(%{title: "title"}) - |> Api.create!() + |> Ash.Changeset.for_create(:create, %{title: "title"}) + |> Ash.create!() - assert [%Post{title: "title"}] = Api.read!(Post) + assert [%Post{title: "title"}] = Ash.read!(Post) end end @@ -23,7 +23,7 @@ defmodule AshSqlite.FilterTest do assert_raise Ash.Error.Invalid, fn -> Post |> Ash.Query.filter(id == "foo") - |> Api.read!() + |> Ash.read!() end end end @@ -33,33 +33,33 @@ defmodule AshSqlite.FilterTest do results = Post |> Ash.Query.filter(title == "title") - |> Api.read!() + |> Ash.read!() assert [] = results end test "with data that matches" do Post - |> Ash.Changeset.new(%{title: "title"}) - |> 
Api.create!() + |> Ash.Changeset.for_create(:create, %{title: "title"}) + |> Ash.create!() results = Post |> Ash.Query.filter(title == "title") - |> Api.read!() + |> Ash.read!() assert [%Post{title: "title"}] = results end test "with some data that matches and some data that doesnt" do Post - |> Ash.Changeset.new(%{title: "title"}) - |> Api.create!() + |> Ash.Changeset.for_create(:create, %{title: "title"}) + |> Ash.create!() results = Post |> Ash.Query.filter(title == "no_title") - |> Api.read!() + |> Ash.read!() assert [] = results end @@ -67,18 +67,18 @@ defmodule AshSqlite.FilterTest do test "with related data that doesn't match" do post = Post - |> Ash.Changeset.new(%{title: "title"}) - |> Api.create!() + |> Ash.Changeset.for_create(:create, %{title: "title"}) + |> Ash.create!() Comment - |> Ash.Changeset.new(%{title: "not match"}) + |> Ash.Changeset.for_create(:create, %{title: "not match"}) |> Ash.Changeset.manage_relationship(:post, post, type: :append_and_remove) - |> Api.create!() + |> Ash.create!() results = Post |> Ash.Query.filter(comments.title == "match") - |> Api.read!() + |> Ash.read!() assert [] = results end @@ -86,31 +86,31 @@ defmodule AshSqlite.FilterTest do test "with related data two steps away that matches" do author = Author - |> Ash.Changeset.new(%{first_name: "match"}) - |> Api.create!() + |> Ash.Changeset.for_create(:create, %{first_name: "match"}) + |> Ash.create!() post = Post - |> Ash.Changeset.new(%{title: "title"}) + |> Ash.Changeset.for_create(:create, %{title: "title"}) |> Ash.Changeset.manage_relationship(:author, author, type: :append_and_remove) - |> Api.create!() + |> Ash.create!() Post - |> Ash.Changeset.new(%{title: "title2"}) + |> Ash.Changeset.for_create(:create, %{title: "title2"}) |> Ash.Changeset.manage_relationship(:linked_posts, [post], type: :append_and_remove) |> Ash.Changeset.manage_relationship(:author, author, type: :append_and_remove) - |> Api.create!() + |> Ash.create!() Comment - |> Ash.Changeset.new(%{title: "not match"}) + |> Ash.Changeset.for_create(:create, %{title: "not match"}) |> Ash.Changeset.manage_relationship(:post, post, type: :append_and_remove) |> Ash.Changeset.manage_relationship(:author, author, type: :append_and_remove) - |> Api.create!() + |> Ash.create!() results = Comment |> Ash.Query.filter(author.posts.linked_posts.title == "title") - |> Api.read!() + |> Ash.read!() assert [_] = results end @@ -118,18 +118,18 @@ defmodule AshSqlite.FilterTest do test "with related data that does match" do post = Post - |> Ash.Changeset.new(%{title: "title"}) - |> Api.create!() + |> Ash.Changeset.for_create(:create, %{title: "title"}) + |> Ash.create!() Comment - |> Ash.Changeset.new(%{title: "match"}) + |> Ash.Changeset.for_create(:create, %{title: "match"}) |> Ash.Changeset.manage_relationship(:post, post, type: :append_and_remove) - |> Api.create!() + |> Ash.create!() results = Post |> Ash.Query.filter(comments.title == "match") - |> Api.read!() + |> Ash.read!() assert [%Post{title: "title"}] = results end @@ -137,23 +137,23 @@ defmodule AshSqlite.FilterTest do test "with related data that does and doesn't match" do post = Post - |> Ash.Changeset.new(%{title: "title"}) - |> Api.create!() + |> Ash.Changeset.for_create(:create, %{title: "title"}) + |> Ash.create!() Comment - |> Ash.Changeset.new(%{title: "match"}) + |> Ash.Changeset.for_create(:create, %{title: "match"}) |> Ash.Changeset.manage_relationship(:post, post, type: :append_and_remove) - |> Api.create!() + |> Ash.create!() Comment - |> Ash.Changeset.new(%{title: "not 
match"}) + |> Ash.Changeset.for_create(:create, %{title: "not match"}) |> Ash.Changeset.manage_relationship(:post, post, type: :append_and_remove) - |> Api.create!() + |> Ash.create!() results = Post |> Ash.Query.filter(comments.title == "match") - |> Api.read!() + |> Ash.read!() assert [%Post{title: "title"}] = results end @@ -162,22 +162,22 @@ defmodule AshSqlite.FilterTest do describe "in" do test "it properly filters" do Post - |> Ash.Changeset.new(%{title: "title"}) - |> Api.create!() + |> Ash.Changeset.for_create(:create, %{title: "title"}) + |> Ash.create!() Post - |> Ash.Changeset.new(%{title: "title1"}) - |> Api.create!() + |> Ash.Changeset.for_create(:create, %{title: "title1"}) + |> Ash.create!() Post - |> Ash.Changeset.new(%{title: "title2"}) - |> Api.create!() + |> Ash.Changeset.for_create(:create, %{title: "title2"}) + |> Ash.create!() assert [%Post{title: "title1"}, %Post{title: "title2"}] = Post |> Ash.Query.filter(title in ["title1", "title2"]) |> Ash.Query.sort(title: :asc) - |> Api.read!() + |> Ash.read!() end end @@ -186,37 +186,37 @@ defmodule AshSqlite.FilterTest do results = Post |> Ash.Query.filter(title == "title" or score == 1) - |> Api.read!() + |> Ash.read!() assert [] = results end test "with data that doesn't match" do Post - |> Ash.Changeset.new(%{title: "no title", score: 2}) - |> Api.create!() + |> Ash.Changeset.for_create(:create, %{title: "no title", score: 2}) + |> Ash.create!() results = Post |> Ash.Query.filter(title == "title" or score == 1) - |> Api.read!() + |> Ash.read!() assert [] = results end test "with data that matches both conditions" do Post - |> Ash.Changeset.new(%{title: "title", score: 0}) - |> Api.create!() + |> Ash.Changeset.for_create(:create, %{title: "title", score: 0}) + |> Ash.create!() Post - |> Ash.Changeset.new(%{score: 1, title: "nothing"}) - |> Api.create!() + |> Ash.Changeset.for_create(:create, %{score: 1, title: "nothing"}) + |> Ash.create!() results = Post |> Ash.Query.filter(title == "title" or score == 1) - |> Api.read!() + |> Ash.read!() |> Enum.sort_by(& &1.score) assert [%Post{title: "title", score: 0}, %Post{title: "nothing", score: 1}] = results @@ -224,17 +224,17 @@ defmodule AshSqlite.FilterTest do test "with data that matches one condition and data that matches nothing" do Post - |> Ash.Changeset.new(%{title: "title", score: 0}) - |> Api.create!() + |> Ash.Changeset.for_create(:create, %{title: "title", score: 0}) + |> Ash.create!() Post - |> Ash.Changeset.new(%{score: 2, title: "nothing"}) - |> Api.create!() + |> Ash.Changeset.for_create(:create, %{score: 2, title: "nothing"}) + |> Ash.create!() results = Post |> Ash.Query.filter(title == "title" or score == 1) - |> Api.read!() + |> Ash.read!() |> Enum.sort_by(& &1.score) assert [%Post{title: "title", score: 0}] = results @@ -243,18 +243,18 @@ defmodule AshSqlite.FilterTest do test "with related data in an or statement that matches, while basic filter doesn't match" do post = Post - |> Ash.Changeset.new(%{title: "doesn't match"}) - |> Api.create!() + |> Ash.Changeset.for_create(:create, %{title: "doesn't match"}) + |> Ash.create!() Comment - |> Ash.Changeset.new(%{title: "match"}) + |> Ash.Changeset.for_create(:create, %{title: "match"}) |> Ash.Changeset.manage_relationship(:post, post, type: :append_and_remove) - |> Api.create!() + |> Ash.create!() results = Post |> Ash.Query.filter(title == "match" or comments.title == "match") - |> Api.read!() + |> Ash.read!() assert [%Post{title: "doesn't match"}] = results end @@ -262,18 +262,18 @@ defmodule 
AshSqlite.FilterTest do test "with related data in an or statement that doesn't match, while basic filter does match" do post = Post - |> Ash.Changeset.new(%{title: "match"}) - |> Api.create!() + |> Ash.Changeset.for_create(:create, %{title: "match"}) + |> Ash.create!() Comment - |> Ash.Changeset.new(%{title: "doesn't match"}) + |> Ash.Changeset.for_create(:create, %{title: "doesn't match"}) |> Ash.Changeset.manage_relationship(:post, post, type: :append_and_remove) - |> Api.create!() + |> Ash.create!() results = Post |> Ash.Query.filter(title == "match" or comments.title == "match") - |> Api.read!() + |> Ash.read!() assert [%Post{title: "match"}] = results end @@ -281,25 +281,25 @@ defmodule AshSqlite.FilterTest do test "with related data and an inner join condition" do post = Post - |> Ash.Changeset.new(%{title: "match"}) - |> Api.create!() + |> Ash.Changeset.for_create(:create, %{title: "match"}) + |> Ash.create!() Comment - |> Ash.Changeset.new(%{title: "match"}) + |> Ash.Changeset.for_create(:create, %{title: "match"}) |> Ash.Changeset.manage_relationship(:post, post, type: :append_and_remove) - |> Api.create!() + |> Ash.create!() results = Post |> Ash.Query.filter(title == comments.title) - |> Api.read!() + |> Ash.read!() assert [%Post{title: "match"}] = results results = Post |> Ash.Query.filter(title != comments.title) - |> Api.read!() + |> Ash.read!() assert [] = results end @@ -311,13 +311,13 @@ defmodule AshSqlite.FilterTest do |> Ash.Changeset.for_create(:create, bio: %{title: "Dr.", bio: "Strange", years_of_experience: 10} ) - |> Api.create!() + |> Ash.create!() Author |> Ash.Changeset.for_create(:create, bio: %{title: "Highlander", bio: "There can be only one."} ) - |> Api.create!() + |> Ash.create!() :ok end @@ -326,261 +326,172 @@ defmodule AshSqlite.FilterTest do assert [%{bio: %{title: "Dr."}}] = Author |> Ash.Query.filter(bio[:title] == "Dr.") - |> Api.read!() + |> Ash.read!() end test "works using simple equality for integers" do assert [%{bio: %{title: "Dr."}}] = Author |> Ash.Query.filter(bio[:years_of_experience] == 10) - |> Api.read!() + |> Ash.read!() end test "calculations that use embeds can be filtered on" do assert [%{bio: %{title: "Dr."}}] = Author |> Ash.Query.filter(title == "Dr.") - |> Api.read!() + |> Ash.read!() end end describe "basic expressions" do test "basic expressions work" do Post - |> Ash.Changeset.new(%{title: "match", score: 4}) - |> Api.create!() + |> Ash.Changeset.for_create(:create, %{title: "match", score: 4}) + |> Ash.create!() Post - |> Ash.Changeset.new(%{title: "non_match", score: 2}) - |> Api.create!() + |> Ash.Changeset.for_create(:create, %{title: "non_match", score: 2}) + |> Ash.create!() assert [%{title: "match"}] = Post |> Ash.Query.filter(score + 1 == 5) - |> Api.read!() + |> Ash.read!() end end describe "case insensitive fields" do test "it matches case insensitively" do Post - |> Ash.Changeset.new(%{title: "match", category: "FoObAr"}) - |> Api.create!() + |> Ash.Changeset.for_create(:create, %{title: "match", category: "FoObAr"}) + |> Ash.create!() Post - |> Ash.Changeset.new(%{category: "bazbuz"}) - |> Api.create!() + |> Ash.Changeset.for_create(:create, %{category: "bazbuz"}) + |> Ash.create!() assert [%{title: "match"}] = Post |> Ash.Query.filter(category == "fOoBaR") - |> Api.read!() + |> Ash.read!() end end - # describe "contains/2" do - # test "it works when it matches" do - # Post - # |> Ash.Changeset.new(%{title: "match"}) - # |> Api.create!() - - # Post - # |> Ash.Changeset.new(%{title: "bazbuz"}) - # |> Api.create!() 
- - # assert [%{title: "match"}] = - # Post - # |> Ash.Query.filter(contains(title, "atc")) - # |> Api.read!() - # end - - # test "it works when a case insensitive string is provided as a value" do - # Post - # |> Ash.Changeset.new(%{title: "match"}) - # |> Api.create!() - - # Post - # |> Ash.Changeset.new(%{title: "bazbuz"}) - # |> Api.create!() - - # assert [%{title: "match"}] = - # Post - # |> Ash.Query.filter(contains(title, ^%Ash.CiString{string: "ATC"})) - # |> Api.read!() - # end - - # test "it works on a case insensitive column" do - # Post - # |> Ash.Changeset.new(%{category: "match"}) - # |> Api.create!() - - # Post - # |> Ash.Changeset.new(%{category: "bazbuz"}) - # |> Api.create!() - - # assert [%{category: %Ash.CiString{string: "match"}}] = - # Post - # |> Ash.Query.filter(contains(category, ^"ATC")) - # |> Api.read!() - # end - - # test "it works on a case insensitive calculation" do - # Post - # |> Ash.Changeset.new(%{category: "match"}) - # |> Api.create!() - - # Post - # |> Ash.Changeset.new(%{category: "bazbuz"}) - # |> Api.create!() - - # assert [%{category: %Ash.CiString{string: "match"}}] = - # Post - # |> Ash.Query.filter(contains(category_label, ^"ATC")) - # |> Api.read!() - # end - - # test "it works on related values" do - # post = - # Post - # |> Ash.Changeset.new(%{title: "match"}) - # |> Api.create!() - - # Comment - # |> Ash.Changeset.new(%{title: "abba"}) - # |> Ash.Changeset.manage_relationship(:post, post, type: :append_and_remove) - # |> Api.create!() - - # post2 = - # Post - # |> Ash.Changeset.new(%{title: "no_match"}) - # |> Api.create!() - - # Comment - # |> Ash.Changeset.new(%{title: "acca"}) - # |> Ash.Changeset.manage_relationship(:post, post2, type: :append_and_remove) - # |> Api.create!() - - # assert [%{title: "match"}] = - # Post - # |> Ash.Query.filter(contains(comments.title, ^"bb")) - # |> Api.read!() - # end - # end - describe "exists/2" do test "it works with single relationships" do post = Post - |> Ash.Changeset.new(%{title: "match"}) - |> Api.create!() + |> Ash.Changeset.for_create(:create, %{title: "match"}) + |> Ash.create!() Comment - |> Ash.Changeset.new(%{title: "abba"}) + |> Ash.Changeset.for_create(:create, %{title: "abba"}) |> Ash.Changeset.manage_relationship(:post, post, type: :append_and_remove) - |> Api.create!() + |> Ash.create!() post2 = Post - |> Ash.Changeset.new(%{title: "no_match"}) - |> Api.create!() + |> Ash.Changeset.for_create(:create, %{title: "no_match"}) + |> Ash.create!() Comment - |> Ash.Changeset.new(%{title: "acca"}) + |> Ash.Changeset.for_create(:create, %{title: "acca"}) |> Ash.Changeset.manage_relationship(:post, post2, type: :append_and_remove) - |> Api.create!() + |> Ash.create!() assert [%{title: "match"}] = Post |> Ash.Query.filter(exists(comments, title == ^"abba")) - |> Api.read!() + |> Ash.read!() end test "it works with many to many relationships" do post = Post - |> Ash.Changeset.new(%{title: "a"}) - |> Api.create!() + |> Ash.Changeset.for_create(:create, %{title: "a"}) + |> Ash.create!() Post - |> Ash.Changeset.new(%{title: "b"}) + |> Ash.Changeset.for_create(:create, %{title: "b"}) |> Ash.Changeset.manage_relationship(:linked_posts, [post], type: :append_and_remove) - |> Api.create!() + |> Ash.create!() assert [%{title: "b"}] = Post |> Ash.Query.filter(exists(linked_posts, title == ^"a")) - |> Api.read!() + |> Ash.read!() end test "it works with join association relationships" do post = Post - |> Ash.Changeset.new(%{title: "a"}) - |> Api.create!() + |> Ash.Changeset.for_create(:create, %{title: 
"a"}) + |> Ash.create!() Post - |> Ash.Changeset.new(%{title: "b"}) + |> Ash.Changeset.for_create(:create, %{title: "b"}) |> Ash.Changeset.manage_relationship(:linked_posts, [post], type: :append_and_remove) - |> Api.create!() + |> Ash.create!() assert [%{title: "b"}] = Post |> Ash.Query.filter(exists(linked_posts, title == ^"a")) - |> Api.read!() + |> Ash.read!() end test "it works with nested relationships as the path" do post = Post - |> Ash.Changeset.new(%{title: "a"}) - |> Api.create!() + |> Ash.Changeset.for_create(:create, %{title: "a"}) + |> Ash.create!() Comment - |> Ash.Changeset.new(%{title: "comment"}) + |> Ash.Changeset.for_create(:create, %{title: "comment"}) |> Ash.Changeset.manage_relationship(:post, post, type: :append_and_remove) - |> Api.create!() + |> Ash.create!() Post - |> Ash.Changeset.new(%{title: "b"}) + |> Ash.Changeset.for_create(:create, %{title: "b"}) |> Ash.Changeset.manage_relationship(:linked_posts, [post], type: :append_and_remove) - |> Api.create!() + |> Ash.create!() assert [%{title: "b"}] = Post |> Ash.Query.filter(exists(linked_posts.comments, title == ^"comment")) - |> Api.read!() + |> Ash.read!() end test "it works with an `at_path`" do post = Post - |> Ash.Changeset.new(%{title: "a"}) - |> Api.create!() + |> Ash.Changeset.for_create(:create, %{title: "a"}) + |> Ash.create!() other_post = Post - |> Ash.Changeset.new(%{title: "other_a"}) - |> Api.create!() + |> Ash.Changeset.for_create(:create, %{title: "other_a"}) + |> Ash.create!() Comment - |> Ash.Changeset.new(%{title: "comment"}) + |> Ash.Changeset.for_create(:create, %{title: "comment"}) |> Ash.Changeset.manage_relationship(:post, post, type: :append_and_remove) - |> Api.create!() + |> Ash.create!() Comment - |> Ash.Changeset.new(%{title: "comment"}) + |> Ash.Changeset.for_create(:create, %{title: "comment"}) |> Ash.Changeset.manage_relationship(:post, other_post, type: :append_and_remove) - |> Api.create!() + |> Ash.create!() Post - |> Ash.Changeset.new(%{title: "b"}) + |> Ash.Changeset.for_create(:create, %{title: "b"}) |> Ash.Changeset.manage_relationship(:linked_posts, [post], type: :append_and_remove) - |> Api.create!() + |> Ash.create!() Post - |> Ash.Changeset.new(%{title: "b"}) + |> Ash.Changeset.for_create(:create, %{title: "b"}) |> Ash.Changeset.manage_relationship(:linked_posts, [other_post], type: :append_and_remove) - |> Api.create!() + |> Ash.create!() assert [%{title: "b"}] = Post @@ -588,7 +499,7 @@ defmodule AshSqlite.FilterTest do linked_posts.title == "a" and linked_posts.exists(comments, title == ^"comment") ) - |> Api.read!() + |> Ash.read!() assert [%{title: "b"}] = Post @@ -596,66 +507,66 @@ defmodule AshSqlite.FilterTest do linked_posts.title == "a" and linked_posts.exists(comments, title == ^"comment") ) - |> Api.read!() + |> Ash.read!() end test "it works with nested relationships inside of exists" do post = Post - |> Ash.Changeset.new(%{title: "a"}) - |> Api.create!() + |> Ash.Changeset.for_create(:create, %{title: "a"}) + |> Ash.create!() Comment - |> Ash.Changeset.new(%{title: "comment"}) + |> Ash.Changeset.for_create(:create, %{title: "comment"}) |> Ash.Changeset.manage_relationship(:post, post, type: :append_and_remove) - |> Api.create!() + |> Ash.create!() Post - |> Ash.Changeset.new(%{title: "b"}) + |> Ash.Changeset.for_create(:create, %{title: "b"}) |> Ash.Changeset.manage_relationship(:linked_posts, [post], type: :append_and_remove) - |> Api.create!() + |> Ash.create!() assert [%{title: "b"}] = Post |> Ash.Query.filter(exists(linked_posts, comments.title == 
^"comment")) - |> Api.read!() + |> Ash.read!() end end describe "filtering on enum types" do test "it allows simple filtering" do Post - |> Ash.Changeset.new(status_enum: "open") - |> Api.create!() + |> Ash.Changeset.for_create(:create, status_enum: "open") + |> Ash.create!() assert %{status_enum: :open} = Post |> Ash.Query.filter(status_enum == ^"open") - |> Api.read_one!() + |> Ash.read_one!() end test "it allows simple filtering without casting" do Post - |> Ash.Changeset.new(status_enum_no_cast: "open") - |> Api.create!() + |> Ash.Changeset.for_create(:create, status_enum_no_cast: "open") + |> Ash.create!() assert %{status_enum_no_cast: :open} = Post |> Ash.Query.filter(status_enum_no_cast == ^"open") - |> Api.read_one!() + |> Ash.read_one!() end end describe "atom filters" do test "it works on matches" do Post - |> Ash.Changeset.new(%{title: "match"}) - |> Api.create!() + |> Ash.Changeset.for_create(:create, %{title: "match"}) + |> Ash.create!() result = Post |> Ash.Query.filter(type == :sponsored) - |> Api.read!() + |> Ash.read!() assert [%Post{title: "match"}] = result end @@ -664,20 +575,20 @@ defmodule AshSqlite.FilterTest do describe "like" do test "like builds and matches" do Post - |> Ash.Changeset.new(%{title: "MaTcH"}) - |> Api.create!() + |> Ash.Changeset.for_create(:create, %{title: "MaTcH"}) + |> Ash.create!() results = Post |> Ash.Query.filter(like(title, "%aTc%")) - |> Api.read!() + |> Ash.read!() assert [%Post{title: "MaTcH"}] = results results = Post |> Ash.Query.filter(like(title, "%atc%")) - |> Api.read!() + |> Ash.read!() assert [] = results end @@ -686,20 +597,20 @@ defmodule AshSqlite.FilterTest do describe "ilike" do test "ilike builds and matches" do Post - |> Ash.Changeset.new(%{title: "MaTcH"}) - |> Api.create!() + |> Ash.Changeset.for_create(:create, %{title: "MaTcH"}) + |> Ash.create!() results = Post |> Ash.Query.filter(ilike(title, "%aTc%")) - |> Api.read!() + |> Ash.read!() assert [%Post{title: "MaTcH"}] = results results = Post |> Ash.Query.filter(ilike(title, "%atc%")) - |> Api.read!() + |> Ash.read!() assert [%Post{title: "MaTcH"}] = results end @@ -709,22 +620,22 @@ defmodule AshSqlite.FilterTest do test "double replacement works" do post = Post - |> Ash.Changeset.new(%{title: "match", score: 4}) - |> Api.create!() + |> Ash.Changeset.for_create(:create, %{title: "match", score: 4}) + |> Ash.create!() Post - |> Ash.Changeset.new(%{title: "non_match", score: 2}) - |> Api.create!() + |> Ash.Changeset.for_create(:create, %{title: "non_match", score: 2}) + |> Ash.create!() assert [%{title: "match"}] = Post |> Ash.Query.filter(fragment("? = ?", title, ^post.title)) - |> Api.read!() + |> Ash.read!() assert [] = Post |> Ash.Query.filter(fragment("? 
= ?", title, "nope")) - |> Api.read!() + |> Ash.read!() end end @@ -732,13 +643,13 @@ defmodule AshSqlite.FilterTest do test "it doesn't raise an error" do Comment |> Ash.Query.filter(not is_nil(popular_ratings.id)) - |> Api.read!() + |> Ash.read!() end test "it doesn't raise an error when nested" do Post |> Ash.Query.filter(not is_nil(comments.popular_ratings.id)) - |> Api.read!() + |> Ash.read!() end end end diff --git a/test/load_test.exs b/test/load_test.exs index 9fe831a..495636d 100644 --- a/test/load_test.exs +++ b/test/load_test.exs @@ -1,6 +1,6 @@ defmodule AshSqlite.Test.LoadTest do use AshSqlite.RepoCase, async: false - alias AshSqlite.Test.{Api, Comment, Post} + alias AshSqlite.Test.{Comment, Post} require Ash.Query @@ -8,18 +8,18 @@ defmodule AshSqlite.Test.LoadTest do assert %Post{comments: %Ash.NotLoaded{type: :relationship}} = post = Post - |> Ash.Changeset.new(%{title: "title"}) - |> Api.create!() + |> Ash.Changeset.for_create(:create, %{title: "title"}) + |> Ash.create!() Comment - |> Ash.Changeset.new(%{title: "match"}) + |> Ash.Changeset.for_create(:create, %{title: "match"}) |> Ash.Changeset.manage_relationship(:post, post, type: :append_and_remove) - |> Api.create!() + |> Ash.create!() results = Post |> Ash.Query.load(:comments) - |> Api.read!() + |> Ash.read!() assert [%Post{comments: [%{title: "match"}]}] = results end @@ -28,18 +28,18 @@ defmodule AshSqlite.Test.LoadTest do assert %Comment{post: %Ash.NotLoaded{type: :relationship}} = comment = Comment - |> Ash.Changeset.new(%{}) - |> Api.create!() + |> Ash.Changeset.for_create(:create, %{}) + |> Ash.create!() Post - |> Ash.Changeset.new(%{title: "match"}) + |> Ash.Changeset.for_create(:create, %{title: "match"}) |> Ash.Changeset.manage_relationship(:comments, [comment], type: :append_and_remove) - |> Api.create!() + |> Ash.create!() results = Comment |> Ash.Query.load(:post) - |> Api.read!() + |> Ash.read!() assert [%Comment{post: %{title: "match"}}] = results end @@ -47,29 +47,29 @@ defmodule AshSqlite.Test.LoadTest do test "many_to_many loads work" do source_post = Post - |> Ash.Changeset.new(%{title: "source"}) - |> Api.create!() + |> Ash.Changeset.for_create(:create, %{title: "source"}) + |> Ash.create!() destination_post = Post - |> Ash.Changeset.new(%{title: "destination"}) - |> Api.create!() + |> Ash.Changeset.for_create(:create, %{title: "destination"}) + |> Ash.create!() destination_post2 = Post - |> Ash.Changeset.new(%{title: "destination"}) - |> Api.create!() + |> Ash.Changeset.for_create(:create, %{title: "destination"}) + |> Ash.create!() source_post |> Ash.Changeset.new() |> Ash.Changeset.manage_relationship(:linked_posts, [destination_post, destination_post2], type: :append_and_remove ) - |> Api.update!() + |> Ash.update!() results = source_post - |> Api.load!(:linked_posts) + |> Ash.load!(:linked_posts) assert %{linked_posts: [%{title: "destination"}, %{title: "destination"}]} = results end @@ -77,29 +77,29 @@ defmodule AshSqlite.Test.LoadTest do test "many_to_many loads work when nested" do source_post = Post - |> Ash.Changeset.new(%{title: "source"}) - |> Api.create!() + |> Ash.Changeset.for_create(:create, %{title: "source"}) + |> Ash.create!() destination_post = Post - |> Ash.Changeset.new(%{title: "destination"}) - |> Api.create!() + |> Ash.Changeset.for_create(:create, %{title: "destination"}) + |> Ash.create!() source_post |> Ash.Changeset.new() |> Ash.Changeset.manage_relationship(:linked_posts, [destination_post], type: :append_and_remove ) - |> Api.update!() + |> Ash.update!() 
destination_post |> Ash.Changeset.new() |> Ash.Changeset.manage_relationship(:linked_posts, [source_post], type: :append_and_remove) - |> Api.update!() + |> Ash.update!() results = source_post - |> Api.load!(linked_posts: :linked_posts) + |> Ash.load!(linked_posts: :linked_posts) assert %{linked_posts: [%{title: "destination", linked_posts: [%{title: "source"}]}]} = results @@ -221,25 +221,25 @@ defmodule AshSqlite.Test.LoadTest do test "loading many to many relationships on records works without loading its join relationship when using code interface" do source_post = Post - |> Ash.Changeset.new(%{title: "source"}) - |> Api.create!() + |> Ash.Changeset.for_create(:create, %{title: "source"}) + |> Ash.create!() destination_post = Post - |> Ash.Changeset.new(%{title: "abc"}) - |> Api.create!() + |> Ash.Changeset.for_create(:create, %{title: "abc"}) + |> Ash.create!() destination_post2 = Post - |> Ash.Changeset.new(%{title: "def"}) - |> Api.create!() + |> Ash.Changeset.for_create(:create, %{title: "def"}) + |> Ash.create!() source_post |> Ash.Changeset.new() |> Ash.Changeset.manage_relationship(:linked_posts, [destination_post, destination_post2], type: :append_and_remove ) - |> Api.update!() + |> Ash.update!() assert %{linked_posts: [_, _]} = Post.get_by_id!(source_post.id, load: [:linked_posts]) end diff --git a/test/manual_relationships_test.exs b/test/manual_relationships_test.exs index 12d831b..5eaafe5 100644 --- a/test/manual_relationships_test.exs +++ b/test/manual_relationships_test.exs @@ -1,43 +1,43 @@ defmodule AshSqlite.Test.ManualRelationshipsTest do use AshSqlite.RepoCase, async: false - alias AshSqlite.Test.{Api, Comment, Post} + alias AshSqlite.Test.{Comment, Post} require Ash.Query describe "manual first" do test "relationships can be filtered on with no data" do Post - |> Ash.Changeset.new(%{title: "title"}) - |> Api.create!() + |> Ash.Changeset.for_create(:create, %{title: "title"}) + |> Ash.create!() assert [] = - Post |> Ash.Query.filter(comments_containing_title.title == "title") |> Api.read!() + Post |> Ash.Query.filter(comments_containing_title.title == "title") |> Ash.read!() end test "relationships can be filtered on with data" do post = Post - |> Ash.Changeset.new(%{title: "title"}) - |> Api.create!() + |> Ash.Changeset.for_create(:create, %{title: "title"}) + |> Ash.create!() Comment - |> Ash.Changeset.new(%{title: "title2"}) - |> Api.create!() + |> Ash.Changeset.for_create(:create, %{title: "title2"}) + |> Ash.create!() Comment - |> Ash.Changeset.new(%{title: "title2"}) + |> Ash.Changeset.for_create(:create, %{title: "title2"}) |> Ash.Changeset.manage_relationship(:post, post, type: :append_and_remove) - |> Api.create!() + |> Ash.create!() Comment - |> Ash.Changeset.new(%{title: "no match"}) + |> Ash.Changeset.for_create(:create, %{title: "no match"}) |> Ash.Changeset.manage_relationship(:post, post, type: :append_and_remove) - |> Api.create!() + |> Ash.create!() assert [_] = Post |> Ash.Query.filter(comments_containing_title.title == "title2") - |> Api.read!() + |> Ash.read!() end end @@ -45,44 +45,44 @@ defmodule AshSqlite.Test.ManualRelationshipsTest do test "relationships can be filtered on with no data" do post = Post - |> Ash.Changeset.new(%{title: "title"}) - |> Api.create!() + |> Ash.Changeset.for_create(:create, %{title: "title"}) + |> Ash.create!() Comment - |> Ash.Changeset.new(%{title: "no match"}) + |> Ash.Changeset.for_create(:create, %{title: "no match"}) |> Ash.Changeset.manage_relationship(:post, post, type: :append_and_remove) - |> Api.create!() 
+ |> Ash.create!() assert [] = Comment |> Ash.Query.filter(post.comments_containing_title.title == "title2") - |> Api.read!() + |> Ash.read!() end test "relationships can be filtered on with data" do post = Post - |> Ash.Changeset.new(%{title: "title"}) - |> Api.create!() + |> Ash.Changeset.for_create(:create, %{title: "title"}) + |> Ash.create!() Comment - |> Ash.Changeset.new(%{title: "title2"}) - |> Api.create!() + |> Ash.Changeset.for_create(:create, %{title: "title2"}) + |> Ash.create!() Comment - |> Ash.Changeset.new(%{title: "title2"}) + |> Ash.Changeset.for_create(:create, %{title: "title2"}) |> Ash.Changeset.manage_relationship(:post, post, type: :append_and_remove) - |> Api.create!() + |> Ash.create!() Comment - |> Ash.Changeset.new(%{title: "no match"}) + |> Ash.Changeset.for_create(:create, %{title: "no match"}) |> Ash.Changeset.manage_relationship(:post, post, type: :append_and_remove) - |> Api.create!() + |> Ash.create!() assert [_, _] = Comment |> Ash.Query.filter(post.comments_containing_title.title == "title2") - |> Api.read!() + |> Ash.read!() end end @@ -90,27 +90,27 @@ defmodule AshSqlite.Test.ManualRelationshipsTest do test "relationships can be filtered on with data" do post = Post - |> Ash.Changeset.new(%{title: "title"}) - |> Api.create!() + |> Ash.Changeset.for_create(:create, %{title: "title"}) + |> Ash.create!() Comment - |> Ash.Changeset.new(%{title: "title2"}) - |> Api.create!() + |> Ash.Changeset.for_create(:create, %{title: "title2"}) + |> Ash.create!() Comment - |> Ash.Changeset.new(%{title: "title2"}) + |> Ash.Changeset.for_create(:create, %{title: "title2"}) |> Ash.Changeset.manage_relationship(:post, post, type: :append_and_remove) - |> Api.create!() + |> Ash.create!() Comment - |> Ash.Changeset.new(%{title: "no match"}) + |> Ash.Changeset.for_create(:create, %{title: "no match"}) |> Ash.Changeset.manage_relationship(:post, post, type: :append_and_remove) - |> Api.create!() + |> Ash.create!() assert [_, _] = Comment |> Ash.Query.filter(post.comments_containing_title.post.title == "title") - |> Api.read!() + |> Ash.read!() end end end diff --git a/test/migration_generator_test.exs b/test/migration_generator_test.exs index 6030541..ee21de0 100644 --- a/test/migration_generator_test.exs +++ b/test/migration_generator_test.exs @@ -10,6 +10,7 @@ defmodule AshSqlite.MigrationGeneratorTest do defmodule unquote(mod) do use Ash.Resource, + domain: nil, data_layer: AshSqlite.DataLayer sqlite do @@ -34,25 +35,17 @@ defmodule AshSqlite.MigrationGeneratorTest do end end - defmacrop defapi(resources) do + defmacrop defdomain(resources) do quote do Code.compiler_options(ignore_module_conflict: true) - defmodule Registry do - use Ash.Registry - - entries do - for resource <- unquote(resources) do - entry(resource) - end - end - end - - defmodule Api do - use Ash.Api + defmodule Domain do + use Ash.Domain resources do - registry(Registry) + for resource <- unquote(resources) do + resource(resource) + end end end @@ -89,11 +82,11 @@ defmodule AshSqlite.MigrationGeneratorTest do end end - defapi([Post]) + defdomain([Post]) Mix.shell(Mix.Shell.Process) - AshSqlite.MigrationGenerator.generate(Api, + AshSqlite.MigrationGenerator.generate(Domain, snapshot_path: "test_snapshots_path", migration_path: "test_migration_path", quiet: true, @@ -169,11 +162,11 @@ defmodule AshSqlite.MigrationGeneratorTest do end end - defapi([Post]) + defdomain([Post]) Mix.shell(Mix.Shell.Process) - AshSqlite.MigrationGenerator.generate(Api, + AshSqlite.MigrationGenerator.generate(Domain, 
snapshot_path: "test_snapshots_path", migration_path: "test_migration_path", quiet: true, @@ -199,9 +192,9 @@ defmodule AshSqlite.MigrationGeneratorTest do end end - defapi([Post]) + defdomain([Post]) - AshSqlite.MigrationGenerator.generate(Api, + AshSqlite.MigrationGenerator.generate(Domain, snapshot_path: "test_snapshots_path", migration_path: "test_migration_path", quiet: true, @@ -228,9 +221,9 @@ defmodule AshSqlite.MigrationGeneratorTest do end end - defapi([Post]) + defdomain([Post]) - AshSqlite.MigrationGenerator.generate(Api, + AshSqlite.MigrationGenerator.generate(Domain, snapshot_path: "test_snapshots_path", migration_path: "test_migration_path", quiet: true, @@ -252,11 +245,11 @@ defmodule AshSqlite.MigrationGeneratorTest do end end - defapi([Post]) + defdomain([Post]) send(self(), {:mix_shell_input, :yes?, true}) - AshSqlite.MigrationGenerator.generate(Api, + AshSqlite.MigrationGenerator.generate(Domain, snapshot_path: "test_snapshots_path", migration_path: "test_migration_path", quiet: true, @@ -277,11 +270,11 @@ defmodule AshSqlite.MigrationGeneratorTest do end end - defapi([Post]) + defdomain([Post]) send(self(), {:mix_shell_input, :yes?, false}) - AshSqlite.MigrationGenerator.generate(Api, + AshSqlite.MigrationGenerator.generate(Domain, snapshot_path: "test_snapshots_path", migration_path: "test_migration_path", quiet: true, @@ -304,12 +297,12 @@ defmodule AshSqlite.MigrationGeneratorTest do end end - defapi([Post]) + defdomain([Post]) send(self(), {:mix_shell_input, :yes?, true}) send(self(), {:mix_shell_input, :prompt, "subject"}) - AshSqlite.MigrationGenerator.generate(Api, + AshSqlite.MigrationGenerator.generate(Domain, snapshot_path: "test_snapshots_path", migration_path: "test_migration_path", quiet: true, @@ -335,11 +328,11 @@ defmodule AshSqlite.MigrationGeneratorTest do end end - defapi([Post]) + defdomain([Post]) send(self(), {:mix_shell_input, :yes?, false}) - AshSqlite.MigrationGenerator.generate(Api, + AshSqlite.MigrationGenerator.generate(Domain, snapshot_path: "test_snapshots_path", migration_path: "test_migration_path", quiet: true, @@ -368,9 +361,9 @@ defmodule AshSqlite.MigrationGeneratorTest do end end - defapi([Post, Post2]) + defdomain([Post, Post2]) - AshSqlite.MigrationGenerator.generate(Api, + AshSqlite.MigrationGenerator.generate(Domain, snapshot_path: "test_snapshots_path", migration_path: "test_migration_path", quiet: true, @@ -401,11 +394,11 @@ defmodule AshSqlite.MigrationGeneratorTest do end end - defapi([Post]) + defdomain([Post]) Mix.shell(Mix.Shell.Process) - AshSqlite.MigrationGenerator.generate(Api, + AshSqlite.MigrationGenerator.generate(Domain, snapshot_path: "test_snapshots_path", migration_path: "test_migration_path", quiet: true, @@ -435,14 +428,14 @@ defmodule AshSqlite.MigrationGeneratorTest do end end - defapi([Post]) + defdomain([Post]) - [api: Api] + [domain: Domain] end - test "returns code(1) if snapshots and resources don't fit", %{api: api} do + test "returns code(1) if snapshots and resources don't fit", %{domain: domain} do assert catch_exit( - AshSqlite.MigrationGenerator.generate(api, + AshSqlite.MigrationGenerator.generate(domain, snapshot_path: "test_snapshot_path", migration_path: "test_migration_path", check: true @@ -482,9 +475,9 @@ defmodule AshSqlite.MigrationGeneratorTest do end end - defapi([Post, Post2]) + defdomain([Post, Post2]) - AshSqlite.MigrationGenerator.generate(Api, + AshSqlite.MigrationGenerator.generate(Domain, snapshot_path: "test_snapshots_path", migration_path: "test_migration_path", quiet: true, 
@@ -517,9 +510,9 @@ defmodule AshSqlite.MigrationGeneratorTest do end end - defapi([Post, Post2]) + defdomain([Post, Post2]) - AshSqlite.MigrationGenerator.generate(Api, + AshSqlite.MigrationGenerator.generate(Domain, snapshot_path: "test_snapshots_path", migration_path: "test_migration_path", quiet: true, @@ -552,9 +545,9 @@ defmodule AshSqlite.MigrationGeneratorTest do end end - defapi([Post, Post2]) + defdomain([Post, Post2]) - AshSqlite.MigrationGenerator.generate(Api, + AshSqlite.MigrationGenerator.generate(Domain, snapshot_path: "test_snapshots_path", migration_path: "test_migration_path", quiet: true, @@ -578,7 +571,7 @@ defmodule AshSqlite.MigrationGeneratorTest do end end - AshSqlite.MigrationGenerator.generate(Api, + AshSqlite.MigrationGenerator.generate(Domain, snapshot_path: "test_snapshots_path", migration_path: "test_migration_path", quiet: true, @@ -615,6 +608,7 @@ defmodule AshSqlite.MigrationGeneratorTest do defmodule Comment do use Ash.Resource, + domain: nil, data_layer: AshSqlite.DataLayer sqlite do @@ -634,6 +628,7 @@ defmodule AshSqlite.MigrationGeneratorTest do defmodule Post do use Ash.Resource, + domain: nil, data_layer: AshSqlite.DataLayer sqlite do @@ -662,16 +657,16 @@ defmodule AshSqlite.MigrationGeneratorTest do end end - defapi([Post, Comment]) + defdomain([Post, Comment]) - AshSqlite.MigrationGenerator.generate(Api, + AshSqlite.MigrationGenerator.generate(Domain, snapshot_path: "test_snapshots_path", migration_path: "test_migration_path", quiet: true, format: false ) - [api: Api] + [domain: Domain] end test "it uses the relationship's table context if it is set" do @@ -698,17 +693,16 @@ defmodule AshSqlite.MigrationGeneratorTest do end end - defapi([Post]) + defdomain([Post]) - log = - capture_log(fn -> - AshSqlite.MigrationGenerator.generate(Api, - snapshot_path: "test_snapshots_path", - migration_path: "test_migration_path", - quiet: true, - format: false - ) - end) + capture_log(fn -> + AshSqlite.MigrationGenerator.generate(Domain, + snapshot_path: "test_snapshots_path", + migration_path: "test_migration_path", + quiet: true, + format: false + ) + end) assert [file1] = Enum.sort(Path.wildcard("test_migration_path/**/*_migrate_resources*.exs")) @@ -735,6 +729,7 @@ defmodule AshSqlite.MigrationGeneratorTest do defmodule Comment do use Ash.Resource, + domain: nil, data_layer: AshSqlite.DataLayer sqlite do @@ -751,11 +746,11 @@ defmodule AshSqlite.MigrationGeneratorTest do end end - defapi([Post, Comment]) + defdomain([Post, Comment]) Mix.shell(Mix.Shell.Process) - AshSqlite.MigrationGenerator.generate(Api, + AshSqlite.MigrationGenerator.generate(Domain, snapshot_path: "test_snapshots_path", migration_path: "test_migration_path", quiet: true, @@ -776,6 +771,7 @@ defmodule AshSqlite.MigrationGeneratorTest do defmodule Comment do use Ash.Resource, + domain: nil, data_layer: AshSqlite.DataLayer sqlite do @@ -792,9 +788,9 @@ defmodule AshSqlite.MigrationGeneratorTest do end end - defapi([Post, Comment]) + defdomain([Post, Comment]) - AshSqlite.MigrationGenerator.generate(Api, + AshSqlite.MigrationGenerator.generate(Domain, snapshot_path: "test_snapshots_path", migration_path: "test_migration_path", quiet: true, diff --git a/test/polymorphism_test.exs b/test/polymorphism_test.exs index 15ae06e..519a0ea 100644 --- a/test/polymorphism_test.exs +++ b/test/polymorphism_test.exs @@ -1,29 +1,29 @@ defmodule AshSqlite.PolymorphismTest do use AshSqlite.RepoCase, async: false - alias AshSqlite.Test.{Api, Post, Rating} + alias AshSqlite.Test.{Post, Rating} require Ash.Query 
test "you can create related data" do Post |> Ash.Changeset.for_create(:create, rating: %{score: 10}) - |> Api.create!() + |> Ash.create!() assert [%{score: 10}] = Rating |> Ash.Query.set_context(%{data_layer: %{table: "post_ratings"}}) - |> Api.read!() + |> Ash.read!() end test "you can read related data" do Post |> Ash.Changeset.for_create(:create, rating: %{score: 10}) - |> Api.create!() + |> Ash.create!() assert [%{score: 10}] = Post |> Ash.Query.load(:ratings) - |> Api.read_one!() + |> Ash.read_one!() |> Map.get(:ratings) end end diff --git a/test/primary_key_test.exs b/test/primary_key_test.exs index 53dacb4..40b5340 100644 --- a/test/primary_key_test.exs +++ b/test/primary_key_test.exs @@ -1,16 +1,17 @@ defmodule AshSqlite.Test.PrimaryKeyTest do @moduledoc false use AshSqlite.RepoCase, async: false - alias AshSqlite.Test.{Api, IntegerPost, Post, PostView} + alias AshSqlite.Test.{IntegerPost, Post, PostView} require Ash.Query test "creates record with integer primary key" do - assert %IntegerPost{} = IntegerPost |> Ash.Changeset.new(%{title: "title"}) |> Api.create!() + assert %IntegerPost{} = + IntegerPost |> Ash.Changeset.for_create(:create, %{title: "title"}) |> Ash.create!() end test "creates record with uuid primary key" do - assert %Post{} = Post |> Ash.Changeset.new(%{title: "title"}) |> Api.create!() + assert %Post{} = Post |> Ash.Changeset.for_create(:create, %{title: "title"}) |> Ash.create!() end describe "resources without a primary key" do @@ -18,12 +19,12 @@ defmodule AshSqlite.Test.PrimaryKeyTest do post = Post |> Ash.Changeset.for_action(:create, %{title: "not very interesting"}) - |> Api.create!() + |> Ash.create!() assert {:ok, view} = PostView |> Ash.Changeset.for_action(:create, %{browser: :firefox, post_id: post.id}) - |> Api.create() + |> Ash.create() assert view.browser == :firefox assert view.post_id == post.id @@ -34,14 +35,14 @@ defmodule AshSqlite.Test.PrimaryKeyTest do post = Post |> Ash.Changeset.for_action(:create, %{title: "not very interesting"}) - |> Api.create!() + |> Ash.create!() expected = PostView |> Ash.Changeset.for_action(:create, %{browser: :firefox, post_id: post.id}) - |> Api.create!() + |> Ash.create!() - assert {:ok, [actual]} = Api.read(PostView) + assert {:ok, [actual]} = Ash.read(PostView) assert actual.time == expected.time assert actual.browser == expected.browser diff --git a/test/select_test.exs b/test/select_test.exs index a2fbca2..85af50a 100644 --- a/test/select_test.exs +++ b/test/select_test.exs @@ -1,15 +1,15 @@ defmodule AshSqlite.SelectTest do @moduledoc false use AshSqlite.RepoCase, async: false - alias AshSqlite.Test.{Api, Post} + alias AshSqlite.Test.Post require Ash.Query test "values not selected in the query are not present in the response" do Post - |> Ash.Changeset.new(%{title: "title"}) - |> Api.create!() + |> Ash.Changeset.for_create(:create, %{title: "title"}) + |> Ash.create!() - assert [%{title: nil}] = Api.read!(Ash.Query.select(Post, :id)) + assert [%{title: %Ash.NotLoaded{}}] = Ash.read!(Ash.Query.select(Post, :id)) end end diff --git a/test/sort_test.exs b/test/sort_test.exs index 103b938..c17f4b3 100644 --- a/test/sort_test.exs +++ b/test/sort_test.exs @@ -1,29 +1,29 @@ defmodule AshSqlite.SortTest do @moduledoc false use AshSqlite.RepoCase, async: false - alias AshSqlite.Test.{Api, Comment, Post, PostLink} + alias AshSqlite.Test.{Comment, Post, PostLink} require Ash.Query test "multi-column sorts work" do Post - |> Ash.Changeset.new(%{title: "aaa", score: 0}) - |> Api.create!() + |> 
Ash.Changeset.for_create(:create, %{title: "aaa", score: 0}) + |> Ash.create!() Post - |> Ash.Changeset.new(%{title: "aaa", score: 1}) - |> Api.create!() + |> Ash.Changeset.for_create(:create, %{title: "aaa", score: 1}) + |> Ash.create!() Post - |> Ash.Changeset.new(%{title: "bbb", score: 0}) - |> Api.create!() + |> Ash.Changeset.for_create(:create, %{title: "bbb", score: 0}) + |> Ash.create!() assert [ %{title: "aaa", score: 0}, %{title: "aaa", score: 1}, %{title: "bbb"} ] = - Api.read!( + Ash.read!( Post |> Ash.Query.sort(title: :asc, score: :asc) ) @@ -32,31 +32,31 @@ defmodule AshSqlite.SortTest do test "multi-column sorts work on inclusion" do post = Post - |> Ash.Changeset.new(%{title: "aaa", score: 0}) - |> Api.create!() + |> Ash.Changeset.for_create(:create, %{title: "aaa", score: 0}) + |> Ash.create!() Post - |> Ash.Changeset.new(%{title: "aaa", score: 1}) - |> Api.create!() + |> Ash.Changeset.for_create(:create, %{title: "aaa", score: 1}) + |> Ash.create!() Post - |> Ash.Changeset.new(%{title: "bbb", score: 0}) - |> Api.create!() + |> Ash.Changeset.for_create(:create, %{title: "bbb", score: 0}) + |> Ash.create!() Comment - |> Ash.Changeset.new(%{title: "aaa", likes: 1}) + |> Ash.Changeset.for_create(:create, %{title: "aaa", likes: 1}) |> Ash.Changeset.manage_relationship(:post, post, type: :append_and_remove) - |> Api.create!() + |> Ash.create!() Comment - |> Ash.Changeset.new(%{title: "bbb", likes: 1}) + |> Ash.Changeset.for_create(:create, %{title: "bbb", likes: 1}) |> Ash.Changeset.manage_relationship(:post, post, type: :append_and_remove) - |> Api.create!() + |> Ash.create!() Comment - |> Ash.Changeset.new(%{title: "aaa", likes: 2}) + |> Ash.Changeset.for_create(:create, %{title: "aaa", likes: 2}) |> Ash.Changeset.manage_relationship(:post, post, type: :append_and_remove) - |> Api.create!() + |> Ash.create!() posts = Post @@ -68,7 +68,7 @@ defmodule AshSqlite.SortTest do |> Ash.Query.limit(1) ) |> Ash.Query.sort([:title, :score]) - |> Api.read!() + |> Ash.read!() assert [ %{title: "aaa", comments: [%{title: "aaa"}]}, @@ -79,23 +79,23 @@ defmodule AshSqlite.SortTest do test "multicolumn sort works with a select statement" do Post - |> Ash.Changeset.new(%{title: "aaa", score: 0}) - |> Api.create!() + |> Ash.Changeset.for_create(:create, %{title: "aaa", score: 0}) + |> Ash.create!() Post - |> Ash.Changeset.new(%{title: "aaa", score: 1}) - |> Api.create!() + |> Ash.Changeset.for_create(:create, %{title: "aaa", score: 1}) + |> Ash.create!() Post - |> Ash.Changeset.new(%{title: "bbb", score: 0}) - |> Api.create!() + |> Ash.Changeset.for_create(:create, %{title: "bbb", score: 0}) + |> Ash.create!() assert [ %{title: "aaa", score: 0}, %{title: "aaa", score: 1}, %{title: "bbb"} ] = - Api.read!( + Ash.read!( Post |> Ash.Query.sort(title: :asc, score: :asc) |> Ash.Query.select([:title, :score]) @@ -105,43 +105,43 @@ defmodule AshSqlite.SortTest do test "sorting when joining to a many to many relationship sorts properly" do post1 = Post - |> Ash.Changeset.new(%{title: "aaa", score: 0}) - |> Api.create!() + |> Ash.Changeset.for_create(:create, %{title: "aaa", score: 0}) + |> Ash.create!() post2 = Post - |> Ash.Changeset.new(%{title: "bbb", score: 1}) - |> Api.create!() + |> Ash.Changeset.for_create(:create, %{title: "bbb", score: 1}) + |> Ash.create!() post3 = Post - |> Ash.Changeset.new(%{title: "ccc", score: 0}) - |> Api.create!() + |> Ash.Changeset.for_create(:create, %{title: "ccc", score: 0}) + |> Ash.create!() PostLink |> Ash.Changeset.new() |> 
Ash.Changeset.manage_relationship(:source_post, post1, type: :append) |> Ash.Changeset.manage_relationship(:destination_post, post3, type: :append) - |> Api.create!() + |> Ash.create!() PostLink |> Ash.Changeset.new() |> Ash.Changeset.manage_relationship(:source_post, post2, type: :append) |> Ash.Changeset.manage_relationship(:destination_post, post2, type: :append) - |> Api.create!() + |> Ash.create!() PostLink |> Ash.Changeset.new() |> Ash.Changeset.manage_relationship(:source_post, post3, type: :append) |> Ash.Changeset.manage_relationship(:destination_post, post1, type: :append) - |> Api.create!() + |> Ash.create!() assert [ %{title: "aaa"}, %{title: "bbb"}, %{title: "ccc"} ] = - Api.read!( + Ash.read!( Post |> Ash.Query.sort(title: :asc) |> Ash.Query.filter(linked_posts.title in ["aaa", "bbb", "ccc"]) @@ -152,7 +152,7 @@ defmodule AshSqlite.SortTest do %{title: "bbb"}, %{title: "aaa"} ] = - Api.read!( + Ash.read!( Post |> Ash.Query.sort(title: :desc) |> Ash.Query.filter(linked_posts.title in ["aaa", "bbb", "ccc"] or title == "aaa") @@ -163,7 +163,7 @@ defmodule AshSqlite.SortTest do %{title: "bbb"}, %{title: "aaa"} ] = - Api.read!( + Ash.read!( Post |> Ash.Query.sort(title: :desc) |> Ash.Query.filter( diff --git a/test/support/api.ex b/test/support/api.ex deleted file mode 100644 index 75ffabe..0000000 --- a/test/support/api.ex +++ /dev/null @@ -1,8 +0,0 @@ -defmodule AshSqlite.Test.Api do - @moduledoc false - use Ash.Api - - resources do - registry(AshSqlite.Test.Registry) - end -end diff --git a/test/support/concat.ex b/test/support/concat.ex index a83e4ba..0977a28 100644 --- a/test/support/concat.ex +++ b/test/support/concat.ex @@ -1,6 +1,6 @@ defmodule AshSqlite.Test.Concat do @moduledoc false - use Ash.Calculation + use Ash.Resource.Calculation require Ash.Query def init(opts) do @@ -11,16 +11,16 @@ defmodule AshSqlite.Test.Concat do end end - def expression(opts, %{separator: separator}) do + def expression(opts, %{arguments: %{separator: separator}}) do Enum.reduce(opts[:keys], nil, fn key, expr -> if expr do if separator do - Ash.Query.expr(^expr <> ^separator <> ref(^key)) + expr(^expr <> ^separator <> ^ref(key)) else - Ash.Query.expr(^expr <> ref(^key)) + expr(^expr <> ^ref(key)) end else - Ash.Query.expr(ref(^key)) + expr(^ref(key)) end end) end diff --git a/test/support/domain.ex b/test/support/domain.ex new file mode 100644 index 0000000..90c0680 --- /dev/null +++ b/test/support/domain.ex @@ -0,0 +1,23 @@ +defmodule AshSqlite.Test.Domain do + @moduledoc false + use Ash.Domain + + resources do + resource(AshSqlite.Test.Post) + resource(AshSqlite.Test.Comment) + resource(AshSqlite.Test.IntegerPost) + resource(AshSqlite.Test.Rating) + resource(AshSqlite.Test.PostLink) + resource(AshSqlite.Test.PostView) + resource(AshSqlite.Test.Author) + resource(AshSqlite.Test.Profile) + resource(AshSqlite.Test.User) + resource(AshSqlite.Test.Account) + resource(AshSqlite.Test.Organization) + resource(AshSqlite.Test.Manager) + end + + authorization do + authorize(:when_requested) + end +end diff --git a/test/support/registry.ex b/test/support/registry.ex deleted file mode 100644 index f9ef255..0000000 --- a/test/support/registry.ex +++ /dev/null @@ -1,19 +0,0 @@ -defmodule AshSqlite.Test.Registry do - @moduledoc false - use Ash.Registry - - entries do - entry(AshSqlite.Test.Post) - entry(AshSqlite.Test.Comment) - entry(AshSqlite.Test.IntegerPost) - entry(AshSqlite.Test.Rating) - entry(AshSqlite.Test.PostLink) - entry(AshSqlite.Test.PostView) - entry(AshSqlite.Test.Author) - 
entry(AshSqlite.Test.Profile) - entry(AshSqlite.Test.User) - entry(AshSqlite.Test.Account) - entry(AshSqlite.Test.Organization) - entry(AshSqlite.Test.Manager) - end -end diff --git a/test/support/relationships/comments_containing_title.ex b/test/support/relationships/comments_containing_title.ex index dcac7ec..d80d049 100644 --- a/test/support/relationships/comments_containing_title.ex +++ b/test/support/relationships/comments_containing_title.ex @@ -13,7 +13,7 @@ defmodule AshSqlite.Test.Post.CommentsContainingTitle do query |> Ash.Query.filter(post_id in ^post_ids) |> Ash.Query.filter(contains(title, post.title)) - |> AshSqlite.Test.Api.read!(actor: actor, authorize?: authorize?) + |> Ash.read!(actor: actor, authorize?: authorize?) |> Enum.group_by(& &1.post_id)} end diff --git a/test/support/resources/account.ex b/test/support/resources/account.ex index 79bcea2..92903ce 100644 --- a/test/support/resources/account.ex +++ b/test/support/resources/account.ex @@ -1,21 +1,23 @@ defmodule AshSqlite.Test.Account do @moduledoc false - use Ash.Resource, data_layer: AshSqlite.DataLayer + use Ash.Resource, domain: AshSqlite.Test.Domain, data_layer: AshSqlite.DataLayer actions do + default_accept(:*) defaults([:create, :read, :update, :destroy]) end attributes do uuid_primary_key(:id) - attribute(:is_active, :boolean) + attribute(:is_active, :boolean, public?: true) end calculations do calculate( :active, :boolean, - expr(is_active) + expr(is_active), + public?: true ) end @@ -25,6 +27,6 @@ defmodule AshSqlite.Test.Account do end relationships do - belongs_to(:user, AshSqlite.Test.User) + belongs_to(:user, AshSqlite.Test.User, public?: true) end end diff --git a/test/support/resources/author.ex b/test/support/resources/author.ex index ad853a1..d953ea2 100644 --- a/test/support/resources/author.ex +++ b/test/support/resources/author.ex @@ -1,6 +1,7 @@ defmodule AshSqlite.Test.Author do @moduledoc false use Ash.Resource, + domain: AshSqlite.Test.Domain, data_layer: AshSqlite.DataLayer sqlite do @@ -10,19 +11,20 @@ defmodule AshSqlite.Test.Author do attributes do uuid_primary_key(:id, writable?: true) - attribute(:first_name, :string) - attribute(:last_name, :string) - attribute(:bio, AshSqlite.Test.Bio) - attribute(:badges, {:array, :atom}) + attribute(:first_name, :string, public?: true) + attribute(:last_name, :string, public?: true) + attribute(:bio, AshSqlite.Test.Bio, public?: true) + attribute(:badges, {:array, :atom}, public?: true) end actions do + default_accept(:*) defaults([:create, :read, :update, :destroy]) end relationships do - has_one(:profile, AshSqlite.Test.Profile) - has_many(:posts, AshSqlite.Test.Post) + has_one(:profile, AshSqlite.Test.Profile, public?: true) + has_many(:posts, AshSqlite.Test.Post, public?: true) end calculations do diff --git a/test/support/resources/bio.ex b/test/support/resources/bio.ex index 27d889f..ce87602 100644 --- a/test/support/resources/bio.ex +++ b/test/support/resources/bio.ex @@ -3,15 +3,17 @@ defmodule AshSqlite.Test.Bio do use Ash.Resource, data_layer: :embedded actions do + default_accept(:*) defaults([:create, :read, :update, :destroy]) end attributes do - attribute(:title, :string) - attribute(:bio, :string) - attribute(:years_of_experience, :integer) + attribute(:title, :string, public?: true) + attribute(:bio, :string, public?: true) + attribute(:years_of_experience, :integer, public?: true) attribute :list_of_strings, {:array, :string} do + public?(true) allow_nil?(true) default(nil) end diff --git a/test/support/resources/comment.ex 
b/test/support/resources/comment.ex index b5dc7a3..7c6e2fb 100644 --- a/test/support/resources/comment.ex +++ b/test/support/resources/comment.ex @@ -1,6 +1,7 @@ defmodule AshSqlite.Test.Comment do @moduledoc false use Ash.Resource, + domain: AshSqlite.Test.Domain, data_layer: AshSqlite.DataLayer, authorizers: [ Ash.Policy.Authorizer @@ -23,6 +24,7 @@ defmodule AshSqlite.Test.Comment do end actions do + default_accept(:*) defaults([:read, :update, :destroy]) create :create do @@ -35,22 +37,24 @@ defmodule AshSqlite.Test.Comment do attributes do uuid_primary_key(:id) - attribute(:title, :string) - attribute(:likes, :integer) - attribute(:arbitrary_timestamp, :utc_datetime_usec) - create_timestamp(:created_at, writable?: true) + attribute(:title, :string, public?: true) + attribute(:likes, :integer, public?: true) + attribute(:arbitrary_timestamp, :utc_datetime_usec, public?: true) + create_timestamp(:created_at, writable?: true, public?: true) end relationships do - belongs_to(:post, AshSqlite.Test.Post) - belongs_to(:author, AshSqlite.Test.Author) + belongs_to(:post, AshSqlite.Test.Post, public?: true) + belongs_to(:author, AshSqlite.Test.Author, public?: true) has_many(:ratings, AshSqlite.Test.Rating, + public?: true, destination_attribute: :resource_id, relationship_context: %{data_layer: %{table: "comment_ratings"}} ) has_many(:popular_ratings, AshSqlite.Test.Rating, + public?: true, destination_attribute: :resource_id, relationship_context: %{data_layer: %{table: "comment_ratings"}}, filter: expr(score > 5) diff --git a/test/support/resources/integer_post.ex b/test/support/resources/integer_post.ex index 60c3f4a..874bfa3 100644 --- a/test/support/resources/integer_post.ex +++ b/test/support/resources/integer_post.ex @@ -1,6 +1,7 @@ defmodule AshSqlite.Test.IntegerPost do @moduledoc false use Ash.Resource, + domain: AshSqlite.Test.Domain, data_layer: AshSqlite.DataLayer sqlite do @@ -9,11 +10,12 @@ defmodule AshSqlite.Test.IntegerPost do end actions do + default_accept(:*) defaults([:create, :read, :update, :destroy]) end attributes do integer_primary_key(:id) - attribute(:title, :string) + attribute(:title, :string, public?: true) end end diff --git a/test/support/resources/manager.ex b/test/support/resources/manager.ex index 3d3c5fd..725f596 100644 --- a/test/support/resources/manager.ex +++ b/test/support/resources/manager.ex @@ -1,6 +1,7 @@ defmodule AshSqlite.Test.Manager do @moduledoc false use Ash.Resource, + domain: AshSqlite.Test.Domain, data_layer: AshSqlite.DataLayer sqlite do @@ -9,6 +10,7 @@ defmodule AshSqlite.Test.Manager do end actions do + default_accept(:*) defaults([:read, :update, :destroy]) create :create do @@ -25,14 +27,15 @@ defmodule AshSqlite.Test.Manager do attributes do uuid_primary_key(:id) - attribute(:name, :string) - attribute(:code, :string, allow_nil?: false) - attribute(:must_be_present, :string, allow_nil?: false) - attribute(:role, :string) + attribute(:name, :string, public?: true) + attribute(:code, :string, allow_nil?: false, public?: true) + attribute(:must_be_present, :string, allow_nil?: false, public?: true) + attribute(:role, :string, public?: true) end relationships do belongs_to :organization, AshSqlite.Test.Organization do + public?(true) attribute_writable?(true) end end diff --git a/test/support/resources/organization.ex b/test/support/resources/organization.ex index f1387ef..f2a1524 100644 --- a/test/support/resources/organization.ex +++ b/test/support/resources/organization.ex @@ -1,6 +1,7 @@ defmodule AshSqlite.Test.Organization do 
@moduledoc false use Ash.Resource, + domain: AshSqlite.Test.Domain, data_layer: AshSqlite.DataLayer sqlite do @@ -9,17 +10,12 @@ defmodule AshSqlite.Test.Organization do end actions do + default_accept(:*) defaults([:create, :read, :update, :destroy]) end attributes do uuid_primary_key(:id, writable?: true) - attribute(:name, :string) + attribute(:name, :string, public?: true) end - - # relationships do - # has_many(:users, AshSqlite.Test.User) - # has_many(:posts, AshSqlite.Test.Post) - # has_many(:managers, AshSqlite.Test.Manager) - # end end diff --git a/test/support/resources/post.ex b/test/support/resources/post.ex index f793b2f..968e121 100644 --- a/test/support/resources/post.ex +++ b/test/support/resources/post.ex @@ -1,6 +1,7 @@ defmodule AshSqlite.Test.Post do @moduledoc false use Ash.Resource, + domain: AshSqlite.Test.Domain, data_layer: AshSqlite.DataLayer, authorizers: [ Ash.Policy.Authorizer @@ -31,6 +32,7 @@ defmodule AshSqlite.Test.Post do end actions do + default_accept(:*) defaults([:update, :destroy]) read :read do @@ -66,71 +68,82 @@ defmodule AshSqlite.Test.Post do attributes do uuid_primary_key(:id, writable?: true) - attribute(:title, :string) - attribute(:score, :integer) - attribute(:public, :boolean) - attribute(:category, :ci_string) - attribute(:type, :atom, default: :sponsored, private?: true, writable?: false) - attribute(:price, :integer) - attribute(:decimal, :decimal, default: Decimal.new(0)) - attribute(:status, AshSqlite.Test.Types.Status) - attribute(:status_enum, AshSqlite.Test.Types.StatusEnum) - attribute(:status_enum_no_cast, AshSqlite.Test.Types.StatusEnumNoCast, source: :status_enum) - attribute(:stuff, :map) - attribute(:uniq_one, :string) - attribute(:uniq_two, :string) - attribute(:uniq_custom_one, :string) - attribute(:uniq_custom_two, :string) + attribute(:title, :string, public?: true) + attribute(:score, :integer, public?: true) + attribute(:public, :boolean, public?: true) + attribute(:category, :ci_string, public?: true) + attribute(:type, :atom, default: :sponsored, writable?: false) + attribute(:price, :integer, public?: true) + attribute(:decimal, :decimal, default: Decimal.new(0), public?: true) + attribute(:status, AshSqlite.Test.Types.Status, public?: true) + attribute(:status_enum, AshSqlite.Test.Types.StatusEnum, public?: true) + + attribute(:status_enum_no_cast, AshSqlite.Test.Types.StatusEnumNoCast, + source: :status_enum, + public?: true + ) + + attribute(:stuff, :map, public?: true) + attribute(:uniq_one, :string, public?: true) + attribute(:uniq_two, :string, public?: true) + attribute(:uniq_custom_one, :string, public?: true) + attribute(:uniq_custom_two, :string, public?: true) create_timestamp(:created_at) update_timestamp(:updated_at) end code_interface do - define_for(AshSqlite.Test.Api) define(:get_by_id, action: :read, get_by: [:id]) define(:increment_score, args: [{:optional, :amount}]) end relationships do belongs_to :organization, AshSqlite.Test.Organization do + public?(true) attribute_writable?(true) end - belongs_to(:author, AshSqlite.Test.Author) + belongs_to(:author, AshSqlite.Test.Author, public?: true) - has_many(:comments, AshSqlite.Test.Comment, destination_attribute: :post_id) + has_many(:comments, AshSqlite.Test.Comment, destination_attribute: :post_id, public?: true) has_many :comments_matching_post_title, AshSqlite.Test.Comment do + public?(true) filter(expr(title == parent_expr(title))) end has_many :popular_comments, AshSqlite.Test.Comment do + public?(true) destination_attribute(:post_id) 
filter(expr(likes > 10)) end has_many :comments_containing_title, AshSqlite.Test.Comment do + public?(true) manual(AshSqlite.Test.Post.CommentsContainingTitle) end has_many(:ratings, AshSqlite.Test.Rating, + public?: true, destination_attribute: :resource_id, relationship_context: %{data_layer: %{table: "post_ratings"}} ) has_many(:post_links, AshSqlite.Test.PostLink, + public?: true, destination_attribute: :source_post_id, filter: [state: :active] ) many_to_many(:linked_posts, __MODULE__, + public?: true, through: AshSqlite.Test.PostLink, join_relationship: :post_links, source_attribute_on_join_resource: :source_post_id, destination_attribute_on_join_resource: :destination_post_id ) - has_many(:views, AshSqlite.Test.PostView) + has_many(:views, AshSqlite.Test.PostView, public?: true) end validations do @@ -191,10 +204,10 @@ end defmodule CalculatePostPriceString do @moduledoc false - use Ash.Calculation + use Ash.Resource.Calculation @impl true - def select(_, _, _), do: [:price] + def load(_, _, _), do: [:price] @impl true def calculate(records, _, _) do @@ -208,7 +221,7 @@ end defmodule CalculatePostPriceStringWithSymbol do @moduledoc false - use Ash.Calculation + use Ash.Resource.Calculation @impl true def load(_, _, _), do: [:price_string] diff --git a/test/support/resources/post_link.ex b/test/support/resources/post_link.ex index a91b4cd..a794d73 100644 --- a/test/support/resources/post_link.ex +++ b/test/support/resources/post_link.ex @@ -1,6 +1,7 @@ defmodule AshSqlite.Test.PostLink do @moduledoc false use Ash.Resource, + domain: AshSqlite.Test.Domain, data_layer: AshSqlite.DataLayer sqlite do @@ -9,6 +10,7 @@ defmodule AshSqlite.Test.PostLink do end actions do + default_accept(:*) defaults([:create, :read, :update, :destroy]) end @@ -18,6 +20,7 @@ defmodule AshSqlite.Test.PostLink do attributes do attribute :state, :atom do + public?(true) constraints(one_of: [:active, :archived]) default(:active) end @@ -25,11 +28,13 @@ defmodule AshSqlite.Test.PostLink do relationships do belongs_to :source_post, AshSqlite.Test.Post do + public?(true) allow_nil?(false) primary_key?(true) end belongs_to :destination_post, AshSqlite.Test.Post do + public?(true) allow_nil?(false) primary_key?(true) end diff --git a/test/support/resources/post_views.ex b/test/support/resources/post_views.ex index 45599f7..c87307a 100644 --- a/test/support/resources/post_views.ex +++ b/test/support/resources/post_views.ex @@ -1,18 +1,20 @@ defmodule AshSqlite.Test.PostView do @moduledoc false - use Ash.Resource, data_layer: AshSqlite.DataLayer + use Ash.Resource, domain: AshSqlite.Test.Domain, data_layer: AshSqlite.DataLayer actions do + default_accept(:*) defaults([:create, :read]) end attributes do create_timestamp(:time) - attribute(:browser, :atom, constraints: [one_of: [:firefox, :chrome, :edge]]) + attribute(:browser, :atom, constraints: [one_of: [:firefox, :chrome, :edge]], public?: true) end relationships do belongs_to :post, AshSqlite.Test.Post do + public?(true) allow_nil?(false) attribute_writable?(true) end diff --git a/test/support/resources/profile.ex b/test/support/resources/profile.ex index cefbb48..043a91a 100644 --- a/test/support/resources/profile.ex +++ b/test/support/resources/profile.ex @@ -1,6 +1,7 @@ defmodule AshSqlite.Test.Profile do @moduledoc false use Ash.Resource, + domain: AshSqlite.Test.Domain, data_layer: AshSqlite.DataLayer sqlite do @@ -10,14 +11,15 @@ defmodule AshSqlite.Test.Profile do attributes do uuid_primary_key(:id, writable?: true) - attribute(:description, :string) + 
attribute(:description, :string, public?: true) end actions do + default_accept(:*) defaults([:create, :read, :update, :destroy]) end relationships do - belongs_to(:author, AshSqlite.Test.Author) + belongs_to(:author, AshSqlite.Test.Author, public?: true) end end diff --git a/test/support/resources/rating.ex b/test/support/resources/rating.ex index fa6f8e4..90f5760 100644 --- a/test/support/resources/rating.ex +++ b/test/support/resources/rating.ex @@ -1,6 +1,7 @@ defmodule AshSqlite.Test.Rating do @moduledoc false use Ash.Resource, + domain: AshSqlite.Test.Domain, data_layer: AshSqlite.DataLayer sqlite do @@ -9,12 +10,13 @@ defmodule AshSqlite.Test.Rating do end actions do + default_accept(:*) defaults([:create, :read, :update, :destroy]) end attributes do uuid_primary_key(:id) - attribute(:score, :integer) - attribute(:resource_id, :uuid) + attribute(:score, :integer, public?: true) + attribute(:resource_id, :uuid, public?: true) end end diff --git a/test/support/resources/user.ex b/test/support/resources/user.ex index 26b98cb..7baab1c 100644 --- a/test/support/resources/user.ex +++ b/test/support/resources/user.ex @@ -1,14 +1,15 @@ defmodule AshSqlite.Test.User do @moduledoc false - use Ash.Resource, data_layer: AshSqlite.DataLayer + use Ash.Resource, domain: AshSqlite.Test.Domain, data_layer: AshSqlite.DataLayer actions do + default_accept(:*) defaults([:create, :read, :update, :destroy]) end attributes do uuid_primary_key(:id) - attribute(:is_active, :boolean) + attribute(:is_active, :boolean, public?: true) end sqlite do @@ -17,7 +18,7 @@ defmodule AshSqlite.Test.User do end relationships do - belongs_to(:organization, AshSqlite.Test.Organization) - has_many(:accounts, AshSqlite.Test.Account) + belongs_to(:organization, AshSqlite.Test.Organization, public?: true) + has_many(:accounts, AshSqlite.Test.Account, public?: true) end end diff --git a/test/support/types/money.ex b/test/support/types/money.ex index b486eeb..d576d6b 100644 --- a/test/support/types/money.ex +++ b/test/support/types/money.ex @@ -5,11 +5,13 @@ defmodule AshSqlite.Test.Money do attributes do attribute :amount, :integer do + public?(true) allow_nil?(false) constraints(min: 0) end attribute :currency, :atom do + public?(true) constraints(one_of: [:eur, :usd]) end end diff --git a/test/type_test.exs b/test/type_test.exs index 5b41e4d..815eb4a 100644 --- a/test/type_test.exs +++ b/test/type_test.exs @@ -1,6 +1,6 @@ defmodule AshSqlite.Test.TypeTest do use AshSqlite.RepoCase, async: false - alias AshSqlite.Test.{Api, Post} + alias AshSqlite.Test.Post require Ash.Query @@ -9,6 +9,6 @@ defmodule AshSqlite.Test.TypeTest do Post |> Ash.Query.filter(fragment("? 
= ?", id, type(^uuid, :uuid))) - |> Api.read!() + |> Ash.read!() end end diff --git a/test/unique_identity_test.exs b/test/unique_identity_test.exs index 23ffc6b..6ef6d54 100644 --- a/test/unique_identity_test.exs +++ b/test/unique_identity_test.exs @@ -1,34 +1,43 @@ defmodule AshSqlite.Test.UniqueIdentityTest do use AshSqlite.RepoCase, async: false - alias AshSqlite.Test.{Api, Post} + alias AshSqlite.Test.Post require Ash.Query test "unique constraint errors are properly caught" do post = Post - |> Ash.Changeset.new(%{title: "title"}) - |> Api.create!() + |> Ash.Changeset.for_create(:create, %{title: "title"}) + |> Ash.create!() assert_raise Ash.Error.Invalid, ~r/Invalid value provided for id: has already been taken/, fn -> Post - |> Ash.Changeset.new(%{id: post.id}) - |> Api.create!() + |> Ash.Changeset.for_create(:create, %{id: post.id}) + |> Ash.create!() end end test "a unique constraint can be used to upsert when the resource has a base filter" do post = Post - |> Ash.Changeset.new(%{title: "title", uniq_one: "fred", uniq_two: "astair", price: 10}) - |> Api.create!() + |> Ash.Changeset.for_create(:create, %{ + title: "title", + uniq_one: "fred", + uniq_two: "astair", + price: 10 + }) + |> Ash.create!() new_post = Post - |> Ash.Changeset.new(%{title: "title2", uniq_one: "fred", uniq_two: "astair"}) - |> Api.create!(upsert?: true, upsert_identity: :uniq_one_and_two) + |> Ash.Changeset.for_create(:create, %{ + title: "title2", + uniq_one: "fred", + uniq_two: "astair" + }) + |> Ash.create!(upsert?: true, upsert_identity: :uniq_one_and_two) assert new_post.id == post.id assert new_post.price == 10 diff --git a/test/upsert_test.exs b/test/upsert_test.exs index daf94f3..cde27e8 100644 --- a/test/upsert_test.exs +++ b/test/upsert_test.exs @@ -1,6 +1,6 @@ defmodule AshSqlite.Test.UpsertTest do use AshSqlite.RepoCase, async: false - alias AshSqlite.Test.{Api, Post} + alias AshSqlite.Test.Post require Ash.Query @@ -13,7 +13,7 @@ defmodule AshSqlite.Test.UpsertTest do id: id, title: "title2" }) - |> Api.create!(upsert?: true) + |> Ash.create!(upsert?: true) assert new_post.id == id assert new_post.created_at == new_post.updated_at @@ -24,7 +24,7 @@ defmodule AshSqlite.Test.UpsertTest do id: id, title: "title2" }) - |> Api.create!(upsert?: true) + |> Ash.create!(upsert?: true) assert updated_post.id == id assert updated_post.created_at == new_post.created_at @@ -40,7 +40,7 @@ defmodule AshSqlite.Test.UpsertTest do id: id, title: "title2" }) - |> Api.create!(upsert?: true) + |> Ash.create!(upsert?: true) assert new_post.id == id assert new_post.created_at == new_post.updated_at @@ -52,7 +52,7 @@ defmodule AshSqlite.Test.UpsertTest do title: "title2", decimal: Decimal.new(5) }) - |> Api.create!(upsert?: true) + |> Ash.create!(upsert?: true) assert updated_post.id == id assert Decimal.equal?(updated_post.decimal, Decimal.new(5))