init: copy and gouge ash_postgres

Zach Daniel 2023-09-22 22:52:22 -04:00
commit 6fb41026f8
112 changed files with 16375 additions and 0 deletions

21
.check.exs Normal file

@ -0,0 +1,21 @@
[
## all available options with default values (see `mix check` docs for description)
# parallel: true,
# skipped: true,
retry: false,
## list of tools (see `mix check` docs for defaults)
tools: [
## curated tools may be disabled (e.g. the check for compilation warnings)
# {:compiler, false},
## ...or adjusted (e.g. use one-line formatter for more compact credo output)
# {:credo, "mix credo --format oneline"},
{:check_formatter, command: "mix spark.formatter --check"},
{:check_migrations, command: "mix test.check_migrations"}
## custom new tools may be added (mix tasks or arbitrary commands)
# {:my_mix_task, command: "mix release", env: %{"MIX_ENV" => "prod"}},
# {:my_arbitrary_tool, command: "npm test", cd: "assets"},
# {:my_arbitrary_script, command: ["my_script", "argument with spaces"], cd: "scripts"}
]
]

184
.credo.exs Normal file

@ -0,0 +1,184 @@
# This file contains the configuration for Credo and you are probably reading
# this after creating it with `mix credo.gen.config`.
#
# If you find anything wrong or unclear in this file, please report an
# issue on GitHub: https://github.com/rrrene/credo/issues
#
%{
#
# You can have as many configs as you like in the `configs:` field.
configs: [
%{
#
# Run any config using `mix credo -C <name>`. If no config name is given
# "default" is used.
#
name: "default",
#
# These are the files included in the analysis:
files: %{
#
# You can give explicit globs or simply directories.
# In the latter case `**/*.{ex,exs}` will be used.
#
included: [
"lib/",
"src/",
"test/",
"web/",
"apps/*/lib/",
"apps/*/src/",
"apps/*/test/",
"apps/*/web/"
],
excluded: [~r"/_build/", ~r"/deps/", ~r"/node_modules/"]
},
#
# Load and configure plugins here:
#
plugins: [],
#
# If you create your own checks, you must specify the source files for
# them here, so they can be loaded by Credo before running the analysis.
#
requires: [],
#
# If you want to enforce a style guide and need a more traditional linting
# experience, you can change `strict` to `true` below:
#
strict: false,
#
# To modify the timeout for parsing files, change this value:
#
parse_timeout: 5000,
#
# If you want to use uncolored output by default, you can change `color`
# to `false` below:
#
color: true,
#
# You can customize the parameters of any check by adding a second element
# to the tuple.
#
# To disable a check put `false` as second element:
#
# {Credo.Check.Design.DuplicatedCode, false}
#
checks: [
#
## Consistency Checks
#
{Credo.Check.Consistency.ExceptionNames, []},
{Credo.Check.Consistency.LineEndings, []},
{Credo.Check.Consistency.ParameterPatternMatching, []},
{Credo.Check.Consistency.SpaceAroundOperators, false},
{Credo.Check.Consistency.SpaceInParentheses, []},
{Credo.Check.Consistency.TabsOrSpaces, []},
#
## Design Checks
#
# You can customize the priority of any check
# Priority values are: `low, normal, high, higher`
#
{Credo.Check.Design.AliasUsage, false},
# You can also customize the exit_status of each check.
# If you don't want TODO comments to cause `mix credo` to fail, just
# set this value to 0 (zero).
#
{Credo.Check.Design.TagTODO, false},
{Credo.Check.Design.TagFIXME, []},
#
## Readability Checks
#
{Credo.Check.Readability.AliasOrder, []},
{Credo.Check.Readability.FunctionNames, []},
{Credo.Check.Readability.LargeNumbers, []},
{Credo.Check.Readability.MaxLineLength, [priority: :low, max_length: 120]},
{Credo.Check.Readability.ModuleAttributeNames, []},
{Credo.Check.Readability.ModuleDoc, []},
{Credo.Check.Readability.ModuleNames, []},
{Credo.Check.Readability.ParenthesesInCondition, false},
{Credo.Check.Readability.ParenthesesOnZeroArityDefs, []},
{Credo.Check.Readability.PredicateFunctionNames, []},
{Credo.Check.Readability.PreferImplicitTry, []},
{Credo.Check.Readability.RedundantBlankLines, []},
{Credo.Check.Readability.Semicolons, []},
{Credo.Check.Readability.SpaceAfterCommas, []},
{Credo.Check.Readability.StringSigils, []},
{Credo.Check.Readability.TrailingBlankLine, []},
{Credo.Check.Readability.TrailingWhiteSpace, []},
{Credo.Check.Readability.UnnecessaryAliasExpansion, []},
{Credo.Check.Readability.VariableNames, []},
#
## Refactoring Opportunities
#
{Credo.Check.Refactor.CondStatements, []},
{Credo.Check.Refactor.CyclomaticComplexity, false},
{Credo.Check.Refactor.FunctionArity, []},
{Credo.Check.Refactor.LongQuoteBlocks, []},
{Credo.Check.Refactor.MapInto, []},
{Credo.Check.Refactor.MatchInCondition, []},
{Credo.Check.Refactor.NegatedConditionsInUnless, []},
{Credo.Check.Refactor.NegatedConditionsWithElse, []},
{Credo.Check.Refactor.Nesting, [max_nesting: 5]},
{Credo.Check.Refactor.UnlessWithElse, []},
{Credo.Check.Refactor.WithClauses, []},
#
## Warnings
#
{Credo.Check.Warning.BoolOperationOnSameValues, []},
{Credo.Check.Warning.ExpensiveEmptyEnumCheck, []},
{Credo.Check.Warning.IExPry, []},
{Credo.Check.Warning.IoInspect, []},
{Credo.Check.Warning.LazyLogging, []},
{Credo.Check.Warning.MixEnv, false},
{Credo.Check.Warning.OperationOnSameValues, []},
{Credo.Check.Warning.OperationWithConstantResult, []},
{Credo.Check.Warning.RaiseInsideRescue, []},
{Credo.Check.Warning.UnusedEnumOperation, []},
{Credo.Check.Warning.UnusedFileOperation, []},
{Credo.Check.Warning.UnusedKeywordOperation, []},
{Credo.Check.Warning.UnusedListOperation, []},
{Credo.Check.Warning.UnusedPathOperation, []},
{Credo.Check.Warning.UnusedRegexOperation, []},
{Credo.Check.Warning.UnusedStringOperation, []},
{Credo.Check.Warning.UnusedTupleOperation, []},
{Credo.Check.Warning.UnsafeExec, []},
#
# Checks scheduled for next check update (opt-in for now, just replace `false` with `[]`)
#
# Controversial and experimental checks (opt-in, just replace `false` with `[]`)
#
{Credo.Check.Readability.StrictModuleLayout, false},
{Credo.Check.Consistency.MultiAliasImportRequireUse, false},
{Credo.Check.Consistency.UnusedVariableNames, false},
{Credo.Check.Design.DuplicatedCode, false},
{Credo.Check.Readability.AliasAs, false},
{Credo.Check.Readability.MultiAlias, false},
{Credo.Check.Readability.Specs, false},
{Credo.Check.Readability.SinglePipe, false},
{Credo.Check.Readability.WithCustomTaggedTuple, false},
{Credo.Check.Refactor.ABCSize, false},
{Credo.Check.Refactor.AppendSingleItem, false},
{Credo.Check.Refactor.DoubleBooleanNegation, false},
{Credo.Check.Refactor.ModuleDependencies, false},
{Credo.Check.Refactor.NegatedIsNil, false},
{Credo.Check.Refactor.PipeChainStart, false},
{Credo.Check.Refactor.VariableRebinding, false},
{Credo.Check.Warning.LeakyEnvironment, false},
{Credo.Check.Warning.MapGetUnsafePass, false},
{Credo.Check.Warning.UnsafeToAtom, false}
#
# Custom checks can be created using `mix credo.gen.check`.
#
]
}
]
}

54
.formatter.exs Normal file

@ -0,0 +1,54 @@
spark_locals_without_parens = [
base_filter_sql: 1,
check: 1,
check_constraint: 2,
check_constraint: 3,
code?: 1,
concurrently: 1,
create?: 1,
deferrable: 1,
down: 1,
exclusion_constraint_names: 1,
foreign_key_names: 1,
identity_index_names: 1,
ignore?: 1,
include: 1,
index: 1,
index: 2,
message: 1,
migrate?: 1,
migration_defaults: 1,
migration_ignore_attributes: 1,
migration_types: 1,
name: 1,
on_delete: 1,
on_update: 1,
polymorphic?: 1,
polymorphic_name: 1,
polymorphic_on_delete: 1,
polymorphic_on_update: 1,
prefix: 1,
reference: 1,
reference: 2,
repo: 1,
schema: 1,
skip_unique_indexes: 1,
statement: 1,
statement: 2,
table: 1,
template: 1,
unique: 1,
unique_index_names: 1,
up: 1,
update?: 1,
using: 1,
where: 1
]
[
inputs: ["{mix,.formatter}.exs", "{config,lib,test}/**/*.{ex,exs}"],
locals_without_parens: spark_locals_without_parens,
export: [
locals_without_parens: spark_locals_without_parens
]
]

76
.github/CODE_OF_CONDUCT.md vendored Normal file

@ -0,0 +1,76 @@
# Contributor Covenant Code of Conduct
## Our Pledge
In the interest of fostering an open and welcoming environment, we as
contributors and maintainers pledge to making participation in our project and
our community a harassment-free experience for everyone, regardless of age, body
size, disability, ethnicity, sex characteristics, gender identity and expression,
level of experience, education, socio-economic status, nationality, personal
appearance, race, religion, or sexual identity and orientation.
## Our Standards
Examples of behavior that contributes to creating a positive environment
include:
* Using welcoming and inclusive language
* Being respectful of differing viewpoints and experiences
* Gracefully accepting constructive criticism
* Focusing on what is best for the community
* Showing empathy towards other community members
Examples of unacceptable behavior by participants include:
* The use of sexualized language or imagery and unwelcome sexual attention or
advances
* Trolling, insulting/derogatory comments, and personal or political attacks
* Public or private harassment
* Publishing others' private information, such as a physical or electronic
address, without explicit permission
* Other conduct which could reasonably be considered inappropriate in a
professional setting
## Our Responsibilities
Project maintainers are responsible for clarifying the standards of acceptable
behavior and are expected to take appropriate and fair corrective action in
response to any instances of unacceptable behavior.
Project maintainers have the right and responsibility to remove, edit, or
reject comments, commits, code, wiki edits, issues, and other contributions
that are not aligned to this Code of Conduct, or to ban temporarily or
permanently any contributor for other behaviors that they deem inappropriate,
threatening, offensive, or harmful.
## Scope
This Code of Conduct applies both within project spaces and in public spaces
when an individual is representing the project or its community. Examples of
representing a project or community include using an official project e-mail
address, posting via an official social media account, or acting as an appointed
representative at an online or offline event. Representation of a project may be
further defined and clarified by project maintainers.
## Enforcement
Instances of abusive, harassing, or otherwise unacceptable behavior may be
reported by contacting the project team at zach@zachdaniel.dev. All
complaints will be reviewed and investigated and will result in a response that
is deemed necessary and appropriate to the circumstances. The project team is
obligated to maintain confidentiality with regard to the reporter of an incident.
Further details of specific enforcement policies may be posted separately.
Project maintainers who do not follow or enforce the Code of Conduct in good
faith may face temporary or permanent repercussions as determined by other
members of the project's leadership.
## Attribution
This Code of Conduct is adapted from the [Contributor Covenant][homepage], version 1.4,
available at https://www.contributor-covenant.org/version/1/4/code-of-conduct.html
[homepage]: https://www.contributor-covenant.org
For answers to common questions about this code of conduct, see
https://www.contributor-covenant.org/faq

2
.github/CONTRIBUTING.md vendored Normal file

@ -0,0 +1,2 @@
# Contributing Guidelines
Contributing guidelines can be found in the core project, [ash](https://github.com/ash-project/ash/blob/main/.github/CONTRIBUTING.md)

27
.github/ISSUE_TEMPLATE/bug_report.md vendored Normal file

@ -0,0 +1,27 @@
---
name: Bug report
about: Create a report to help us improve
title: ''
labels: bug, needs review
assignees: ''
---
**Describe the bug**
A clear and concise description of what the bug is. If you are not sure if the bug is related to `ash` or an extension, log it with [ash](https://github.com/ash-project/ash/issues/new) and we will move it.
**To Reproduce**
A minimal set of resource definitions and calls that can reproduce the bug.
**Expected behavior**
A clear and concise description of what you expected to happen.
**Runtime**
- Elixir version
- Erlang version
- OS
- Ash version
- any related extension versions
**Additional context**
Add any other context about the problem here.


@ -0,0 +1,36 @@
---
name: Proposal
about: Suggest an idea for this project
title: ''
labels: enhancement, needs review
assignees: ''
---
**Is your feature request related to a problem? Please describe.**
A clear and concise description of what the problem is. Ex. I'm always frustrated when [...]
**Describe the solution you'd like**
A clear and concise description of what you want to happen.
**Describe alternatives you've considered**
A clear and concise description of any alternative solutions or features you've considered.
**Express the feature either with a change to resource syntax, or with a change to the resource interface**
For example
```elixir
attributes do
attribute :foo, :integer, bar: 10 # <- Adding `bar` here would cause <x>
end
```
Or
```elixir
Api.read(:resource, bar: 10) # <- Adding `bar` here would cause <x>
```
**Additional context**
Add any other context or screenshots about the feature request here.

4
.github/PULL_REQUEST_TEMPLATE.md vendored Normal file

@ -0,0 +1,4 @@
### Contributor checklist
- [ ] Bug fixes include regression tests
- [ ] Features include unit/acceptance tests

15
.github/workflows/elixir.yml vendored Normal file

@ -0,0 +1,15 @@
name: CI
on:
push:
tags:
- "v*"
branches: [main]
pull_request:
branches: [main]
jobs:
ash-ci:
uses: ash-project/ash/.github/workflows/ash-ci.yml@main
with:
sqlite: true
secrets:
hex_api_key: ${{ secrets.HEX_API_KEY }}

27
.gitignore vendored Normal file

@ -0,0 +1,27 @@
# The directory Mix will write compiled artifacts to.
/_build/
# If you run "mix test --cover", coverage assets end up here.
/cover/
# The directory Mix downloads your dependencies sources to.
/deps/
# Where third-party dependencies like ExDoc output generated docs.
/doc/
# Ignore .fetch files in case you like to edit your project deps locally.
/.fetch
# If the VM crashes, it generates a dump, let's ignore it too.
erl_crash.dump
# Also ignore archive artifacts (built via "mix archive.build").
*.ez
# Ignore package tarball (built via "mix hex.build").
ash_sqlite-*.tar
test_migration_path
test_snapshots_path

2
.tool-versions Normal file

@ -0,0 +1,2 @@
erlang 26.0.2
elixir 1.15.4

7
.vscode/settings.json vendored Normal file

@ -0,0 +1,7 @@
{
"cSpell.words": [
"citext",
"mapset",
"strpos"
]
}

21
LICENSE Normal file

@ -0,0 +1,21 @@
MIT License
Copyright (c) 2020 Zachary Scott Daniel
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.

52
README.md Normal file

@ -0,0 +1,52 @@
# AshSqlite
![Elixir CI](https://github.com/ash-project/ash_sqlite/workflows/Elixir%20CI/badge.svg)
[![License: MIT](https://img.shields.io/badge/License-MIT-yellow.svg)](https://opensource.org/licenses/MIT)
[![Coverage Status](https://coveralls.io/repos/github/ash-project/ash_sqlite/badge.svg?branch=main)](https://coveralls.io/github/ash-project/ash_sqlite?branch=main)
[![Hex version badge](https://img.shields.io/hexpm/v/ash_sqlite.svg)](https://hex.pm/packages/ash_sqlite)
## DSL
See `AshSqlite.DataLayer` for DSL documentation.
## Usage
Add `ash_sqlite` to your `mix.exs` file.
```elixir
{:ash_sqlite, "~> 0.1.0"}
```
To use this data layer, you need to change your Ecto repos from `use Ecto.Repo`
to `use AshSqlite.Repo`, because AshSqlite adds functionality to Ecto repos.
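For example, a minimal repo module might look like this (a sketch; module and OTP app names are illustrative):
```elixir
defmodule MyApp.Repo do
  use AshSqlite.Repo, otp_app: :my_app
end
```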
Then, configure each of your `Ash.Resource` resources by adding `use Ash.Resource, data_layer: AshSqlite.DataLayer` like so:
```elixir
defmodule MyApp.SomeResource do
use Ash.Resource, data_layer: AshSqlite.DataLayer
sqlite do
repo MyApp.Repo
table "table_name"
end
attributes do
# ... Attribute definitions
end
end
```
## Generating Migrations
See the documentation for `Mix.Tasks.AshSqlite.GenerateMigrations` for how to generate migrations from your resources.
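For example (the migration name is illustrative):
```bash
mix ash_sqlite.generate_migrations --name add_tickets
```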
## Contributors
Ash is made possible by its excellent community!
<a href="https://github.com/ash-project/ash_sqlite/graphs/contributors">
<img src="https://contrib.rocks/image?repo=ash-project/ash_sqlite" />
</a>
[Become a contributor](https://ash-hq.org/docs/guides/ash/latest/how_to/contribute.md)

52
config/config.exs Normal file

@ -0,0 +1,52 @@
import Config
config :ash, :use_all_identities_in_manage_relationship?, false
if Mix.env() == :dev do
config :git_ops,
mix_project: AshSqlite.MixProject,
changelog_file: "CHANGELOG.md",
repository_url: "https://github.com/ash-project/ash_sqlite",
# Instructs the tool to manage your mix version in your `mix.exs` file
# See below for more information
manage_mix_version?: true,
# Instructs the tool to manage the version in your README.md
# Pass in `true` to use `"README.md"` or a string to customize
manage_readme_version: ["README.md", "documentation/tutorials/get-started-with-sqlite.md"],
version_tag_prefix: "v"
end
if Mix.env() == :test do
config :ash, :validate_api_resource_inclusion?, false
config :ash, :validate_api_config_inclusion?, false
config :ash_sqlite, AshSqlite.TestRepo,
username: "sqlite",
database: "ash_sqlite",
hostname: "localhost",
pool: Ecto.Adapters.SQL.Sandbox
# sobelow_skip ["Config.Secrets"]
config :ash_sqlite, AshSqlite.TestRepo, password: "sqlite"
config :ash_sqlite, AshSqlite.TestRepo, migration_primary_key: [name: :id, type: :binary_id]
config :ash_sqlite, AshSqlite.TestNoSandboxRepo,
username: "sqlite",
database: "ash_sqlite_test",
hostname: "localhost"
# sobelow_skip ["Config.Secrets"]
config :ash_sqlite, AshSqlite.TestNoSandboxRepo, password: "sqlite"
config :ash_sqlite, AshSqlite.TestNoSandboxRepo,
migration_primary_key: [name: :id, type: :binary_id]
config :ash_sqlite,
ecto_repos: [AshSqlite.TestRepo, AshSqlite.TestNoSandboxRepo],
ash_apis: [
AshSqlite.Test.Api
]
config :logger, level: :warning
end


@ -0,0 +1,87 @@
# Join Manual Relationships
See [Defining Manual Relationships](https://hexdocs.pm/ash/defining-manual-relationships.html) for an idea of manual relationships in general.
Manual relationships allow for expressing complex/non-typical relationships between resources in a standard way.
Individual data layers may interact with manual relationships in their own way, so see their corresponding guides.
## Example
```elixir
# in the resource
relationships do
has_many :tickets_above_threshold, Helpdesk.Support.Ticket do
manual Helpdesk.Support.Ticket.Relationships.TicketsAboveThreshold
end
end
# implementation
defmodule Helpdesk.Support.Ticket.Relationships.TicketsAboveThreshold do
use Ash.Resource.ManualRelationship
use AshSqlite.ManualRelationship
require Ash.Query
require Ecto.Query
def load(records, _opts, %{query: query, actor: actor, authorize?: authorize?}) do
# Use existing records to limit results
rep_ids = Enum.map(records, & &1.id)
# Using Ash to get the destination records is ideal, so you can authorize access like normal,
# but if you need to use a raw Ecto query here, you can, as long as you return the right structure.
{:ok,
query
|> Ash.Query.filter(representative_id in ^rep_ids)
|> Ash.Query.filter(priority > representative.priority_threshold)
|> Helpdesk.Support.read!(actor: actor, authorize?: authorize?)
# Return the items grouped by the primary key of the source, i.e. representative.id => [...tickets above threshold]
|> Enum.group_by(& &1.representative_id)}
end
# query is the "source" query that is being built.
# _opts are options provided to the manual relationship, i.e `{Manual, opt: :val}`
# current_binding is what the source of the relationship is bound to. Access fields with `as(^current_binding).field`
# as_binding is the binding that your join should create. When you join, make sure you say `as: ^as_binding` on the
# part of the query that represents the destination of the relationship
# type is `:inner` or `:left`.
# destination_query is what you should join to, to add the destination to the query, i.e. `join: dest in ^destination_query`
def ash_sqlite_join(query, _opts, current_binding, as_binding, :inner, destination_query) do
{:ok,
Ecto.Query.from(_ in query,
join: dest in ^destination_query,
as: ^as_binding,
on: dest.representative_id == as(^current_binding).id,
on: dest.priority > as(^current_binding).priority_threshold
)}
end
def ash_sqlite_join(query, _opts, current_binding, as_binding, :left, destination_query) do
{:ok,
Ecto.Query.from(_ in query,
left_join: dest in ^destination_query,
as: ^as_binding,
on: dest.representative_id == as(^current_binding).id,
on: dest.priority > as(^current_binding).priority_threshold
)}
end
# _opts are options provided to the manual relationship, i.e `{Manual, opt: :val}`
# current_binding is what the source of the relationship is bound to. Access fields with `parent_as(^current_binding).field`
# as_binding is the binding that has already been created for your join. Access fields on it via `as(^as_binding)`
# destination_query is what you should use as the basis of your query
def ash_sqlite_subquery(_opts, current_binding, as_binding, destination_query) do
{:ok,
Ecto.Query.from(_ in destination_query,
where: parent_as(^current_binding).id == as(^as_binding).representative_id,
where: as(^as_binding).priority > parent_as(^current_binding).priority_threshold
)}
end
end
```


@ -0,0 +1,11 @@
# Testing With Sqlite
Testing resources with SQLite generally requires passing `async: false` to
your tests, due to SQLite's limitation of having a single write transaction
open at any one time.
This should be coupled with the following configuration, to make sure that Ash does not spawn any tasks:
```elixir
config :ash, :disable_async?, true
```
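In practice, a test module then looks something like this (a minimal sketch; module and test names are illustrative):
```elixir
defmodule MyApp.TicketTest do
  # SQLite only allows a single write transaction at a time,
  # so these tests must run synchronously
  use ExUnit.Case, async: false

  test "tickets can be created" do
    # ... exercise your resource actions here
  end
end
```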


@ -0,0 +1,25 @@
# Using Fragments
Fragments allow you to use arbitrary SQLite expressions in your queries. Fragments often serve as an escape hatch, letting you do things that aren't officially supported by Ash.
## Examples
Use simple expressions
```elixir
fragment("? / ?", points, count)
```
Call functions
```elixir
fragment("repeat('hello', 4)")
```
Use entire queries
```elixir
fragment("points > (SELECT SUM(points) FROM games WHERE user_id = ? AND id != ?)", user_id, id)
```
Using entire queries like the above is a last resort, but it can often help avoid adding extra structure unnecessarily.
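As a usage sketch, fragments are typically embedded in a filter expression like so (resource, api, and field names are illustrative):
```elixir
require Ash.Query

MyApp.User
# lower() is a builtin SQLite function; the ? is filled by `email`
|> Ash.Query.filter(fragment("lower(?)", email) == "user@example.com")
|> MyApp.Accounts.read!()
```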


@ -0,0 +1,106 @@
# Migrations
## Migration Generator Primer
<iframe width="560" height="315" src="https://www.youtube.com/embed/GtsL_lIis4Q?si=5G6-5ckzBEzL4zko" title="YouTube video player" frameborder="0" allow="accelerometer; autoplay; clipboard-write; encrypted-media; gyroscope; picture-in-picture; web-share" allowfullscreen></iframe>
## Tasks
The available tasks are:
- `mix ash_sqlite.generate_migrations`
- `mix ash_sqlite.create`
- `mix ash_sqlite.drop`
AshSqlite is built on top of Ecto, so much of its behavior is pass-through/orchestration of that tooling.
## Basic Workflow
- Make resource changes
- Run `mix ash_sqlite.generate_migrations` to generate migrations and resource snapshots
- Run `mix ash_sqlite.migrate` to run those migrations
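Concretely, one iteration of that loop looks like this (the migration name is illustrative):
```bash
mix ash_sqlite.generate_migrations --name add_tickets_and_representatives
mix ash_sqlite.migrate
```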
For more information on generating migrations, see the module documentation here:
`Mix.Tasks.AshSqlite.GenerateMigrations`, or run `mix help ash_sqlite.generate_migrations`.
For running your migrations, there is a mix task that will find all of the repos configured in your apis and run their
migrations. It is a thin wrapper around `mix ecto.migrate`. Ours is called `mix ash_sqlite.migrate`.
If you want to run or roll back individual migrations, use the corresponding `mix ecto.migrate` and `mix ecto.rollback` tasks directly.
### Regenerating Migrations
Often, you will run into a situation where you want to make a slight change to a resource after you've already generated and run migrations. If you are using git and would like to undo those changes and then regenerate the migrations, this script may prove useful:
```bash
#!/bin/bash
# Get count of untracked migrations
N_MIGRATIONS=$(git ls-files --others priv/repo/migrations | wc -l)
# Rollback untracked migrations
mix ecto.rollback -n $N_MIGRATIONS
# Delete untracked migrations and snapshots
git ls-files --others priv/repo/migrations | xargs rm
git ls-files --others priv/resource_snapshots | xargs rm
# Regenerate migrations
mix ash_sqlite.generate_migrations
# Run migrations if flag
if echo $* | grep -e "-m" -q
then
mix ecto.migrate
fi
```
After saving this file to something like `regen.sh`, make it executable with `chmod +x regen.sh`. Now you can run it with `./regen.sh`. If you would like the migrations to automatically run after regeneration, add the `-m` flag: `./regen.sh -m`.
## Multiple Repos
If you are using multiple repos, you will likely need to use `mix ecto.migrate` and manage it separately for each repo, as the options would
be applied to both repos, which wouldn't make sense.
## Running Migrations in Production
Define a module similar to the following:
```elixir
defmodule MyApp.Release do
@moduledoc """
Houses tasks that need to be executed in the released application (because mix is not present in releases).
"""
@app :my_app
def migrate do
load_app()
for repo <- repos() do
{:ok, _, _} = Ecto.Migrator.with_repo(repo, &Ecto.Migrator.run(&1, :up, all: true))
end
end
def rollback(repo, version) do
load_app()
{:ok, _, _} = Ecto.Migrator.with_repo(repo, &Ecto.Migrator.run(&1, :down, to: version))
end
defp repos do
apis()
|> Enum.flat_map(fn api ->
api
|> Ash.Api.Info.resources()
|> Enum.map(&AshSqlite.repo/1)
end)
|> Enum.uniq()
end
defp apis do
Application.fetch_env!(:my_app, :ash_apis)
end
defp load_app do
Application.load(@app)
end
end
```
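With a release built, the migrate task can then be invoked via `eval` (a sketch; the release name is illustrative):
```bash
bin/my_app eval "MyApp.Release.migrate()"
```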


@ -0,0 +1,82 @@
# Polymorphic Resources
To support leveraging the same resource backed by multiple tables (useful for things like polymorphic associations), AshSqlite supports setting the `data_layer.table` context for a given resource. For this example, let's assume that you have a `MyApp.Post` resource and a `MyApp.Comment` resource. For each of those resources, users can submit `reactions`. However, you want a separate table for `post_reactions` and `comment_reactions`. You could accomplish that like so:
```elixir
defmodule MyApp.Reaction do
use Ash.Resource,
data_layer: AshSqlite.DataLayer
sqlite do
polymorphic? true # Without this, `table` is a required configuration
end
attributes do
attribute(:resource_id, :uuid)
end
...
end
```
Then, in your related resources, you set the table context like so:
```elixir
defmodule MyApp.Post do
use Ash.Resource,
data_layer: AshSqlite.DataLayer
...
relationships do
has_many :reactions, MyApp.Reaction,
relationship_context: %{data_layer: %{table: "post_reactions"}},
destination_attribute: :resource_id
end
end
defmodule MyApp.Comment do
use Ash.Resource,
data_layer: AshSqlite.DataLayer
...
relationships do
has_many :reactions, MyApp.Reaction,
relationship_context: %{data_layer: %{table: "comment_reactions"}},
destination_attribute: :resource_id
end
end
```
With this, when loading or editing related data, Ash will automatically set that context.
For managing related data, see `Ash.Changeset.manage_relationship/4` and other relationship functions
in `Ash.Changeset`.
## Table specific actions
To make actions use a specific table, you can use the `set_context` query preparation/change.
For example:
```elixir
defmodule MyApp.Reaction do
actions do
read :for_comments do
prepare set_context(%{data_layer: %{table: "comment_reactions"}})
end
read :for_posts do
prepare set_context(%{data_layer: %{table: "post_reactions"}})
end
end
end
```
## Migrations
When a resource is marked as `polymorphic? true`, the migration generator will look at
all resources that are related to it and that set the `%{data_layer: %{table: "table"}}` context.
For each of those, a migration is generated/managed automatically. This means that adding reactions
to a new resource is as easy as adding the relationship and table context, and then running
`mix ash_sqlite.generate_migrations`.


@ -0,0 +1,23 @@
# References
To configure the foreign keys on a resource, we use the `references` block.
For example:
```elixir
references do
reference :post, on_delete: :delete, on_update: :update, name: "comments_to_posts_fkey"
end
```
## Important
No resource logic is applied with these operations! No authorization rules or validations take place, and no notifications are issued. This operation happens *directly* in the database.
## Nothing vs Restrict
The difference between `:nothing` and `:restrict` is subtle and, if you are unsure, choose `:nothing` (the default behavior). `:restrict` will prevent the deletion from happening *before* the end of the database transaction, whereas `:nothing` allows the transaction to complete before doing so. This allows for things like updating or deleting the destination row and *then* updating or deleting the reference (as long as you are in a transaction).
## On Delete
This option is called `on_delete`, instead of `on_destroy`, because it is hooking into the database level deletion, *not* a `destroy` action in your resource.


@ -0,0 +1,24 @@
# Sqlite Expressions
In addition to the expressions listed in the [Ash expressions guide](https://hexdocs.pm/ash/expressions.html), AshSqlite provides the following expressions:
## Fragments
`fragment` allows you to embed raw SQL into the query. Use question marks to interpolate values from the outer expression.
For example:
```elixir
Ash.Query.filter(User, fragment("? IS NOT NULL", first_name))
```
## Like
This wraps the builtin sqlite `LIKE` operator.
Please be aware that these match *patterns*, not raw text. Use `contains/1` if you want to match text without pattern support; `%` and `_` have semantic meaning!
For example:
```elixir
Ash.Query.filter(User, like(name, "%obo%")) # name contains obo anywhere in the string, case sensitively
```
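For comparison, a sketch using `contains`, which matches text literally:
```elixir
Ash.Query.filter(User, contains(name, "obo")) # "obo" is matched literally; % and _ have no special meaning
```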


@ -0,0 +1,286 @@
# Get Started With Sqlite
## Goals
In this guide we will:
1. Set up AshSqlite, which includes setting up [Ecto](https://hexdocs.pm/ecto/Ecto.html)
2. Add AshSqlite to the resources created in [the Ash getting started guide](https://hexdocs.pm/ash/get-started.html)
3. Show how the various features of AshSqlite can help you work quickly and cleanly against a SQLite database
4. Highlight some of the more advanced features you can use when using AshSqlite
5. Point you to additional resources you may need on your journey
## Requirements
- A working SQLite installation
- If you would like to follow along, you will need to begin with [the Ash getting started guide](https://hexdocs.pm/ash/get-started.html)
## Steps
### Add AshSqlite
Add the `:ash_sqlite` dependency to your application
`{:ash_sqlite, "~> 1.3.6"}`
Add `:ash_sqlite` to your `.formatter.exs` file
```elixir
[
# import the formatter rules from `:ash_sqlite`
import_deps: [..., :ash_sqlite],
inputs: [...]
]
```
### Create and configure your Repo
Create `lib/helpdesk/repo.ex` with the following contents. `AshSqlite.Repo` is a thin wrapper around `Ecto.Repo`, so see its documentation for how to use it if you need to use it directly. For standard Ash usage, all you will need to do is configure your resources to use your repo.
```elixir
# in lib/helpdesk/repo.ex
defmodule Helpdesk.Repo do
use AshSqlite.Repo, otp_app: :helpdesk
end
```
Next we will need to create configuration files for various environments. Run the following to create the configuration files we need.
```bash
mkdir -p config
touch config/config.exs
touch config/dev.exs
touch config/runtime.exs
touch config/test.exs
```
Place the following contents in those files, ensuring that the credentials match the user you created for your database. For most conventional installations this will work out of the box. If you've followed other guides before this one, they may have had you create these files already, so just make sure these contents are there.
```elixir
# in config/config.exs
import Config
# This should already have been added in the first
# getting started guide
config :helpdesk,
ash_apis: [Helpdesk.Support]
config :helpdesk,
ecto_repos: [Helpdesk.Repo]
# Import environment specific config. This must remain at the bottom
# of this file so it overrides the configuration defined above.
import_config "#{config_env()}.exs"
```
```elixir
# in config/dev.exs
import Config
# Configure your database
config :helpdesk, Helpdesk.Repo,
username: "sqlite",
password: "sqlite",
hostname: "localhost",
database: "helpdesk_dev",
port: 5432,
show_sensitive_data_on_connection_error: true,
pool_size: 10
```
```elixir
# in config/runtime.exs
import Config
if config_env() == :prod do
database_url =
System.get_env("DATABASE_URL") ||
raise """
environment variable DATABASE_URL is missing.
For example: ecto://USER:PASS@HOST/DATABASE
"""
config :helpdesk, Helpdesk.Repo,
url: database_url,
pool_size: String.to_integer(System.get_env("POOL_SIZE") || "10")
end
```
```elixir
# in config/test.exs
import Config
# Configure your database
#
# The MIX_TEST_PARTITION environment variable can be used
# to provide built-in test partitioning in CI environment.
# Run `mix help test` for more information.
config :helpdesk, Helpdesk.Repo,
username: "sqlite",
password: "sqlite",
hostname: "localhost",
database: "helpdesk_test#{System.get_env("MIX_TEST_PARTITION")}",
pool: Ecto.Adapters.SQL.Sandbox,
pool_size: 10
```
And finally, add the repo to your application
```elixir
# in lib/helpdesk/application.ex
def start(_type, _args) do
children = [
# Starts a worker by calling: Helpdesk.Worker.start_link(arg)
# {Helpdesk.Worker, arg}
Helpdesk.Repo
]
...
```
### Add AshSqlite to our resources
Now we can add the data layer to our resources. The basic configuration for a resource requires the `d:AshSqlite.sqlite|table` and the `d:AshSqlite.sqlite|repo`.
```elixir
# in lib/helpdesk/support/resources/ticket.ex
use Ash.Resource,
data_layer: AshSqlite.DataLayer
sqlite do
table "tickets"
repo Helpdesk.Repo
end
```
```elixir
# in lib/helpdesk/support/resources/representative.ex
use Ash.Resource,
data_layer: AshSqlite.DataLayer
sqlite do
table "representatives"
repo Helpdesk.Repo
end
```
### Create the database and tables
First, we'll create the database with `mix ash_sqlite.create`.
Then we will generate database migrations. This is one of the many ways that AshSqlite can save time and reduce complexity.
```bash
mix ash_sqlite.generate_migrations --name add_tickets_and_representatives
```
If you are unfamiliar with database migrations, it is a good idea to get a rough idea of what they are and how they work. See the links at the bottom of this guide for more. A rough overview of how migrations work is that each time you need to make changes to your database, they are saved as small, reproducible scripts that can be applied in order. This is necessary both for clean deploys as well as working with multiple developers making changes to the structure of a single database.
Typically, you need to write these by hand. AshSqlite, however, will store snapshots each time you run the command to generate migrations and will figure out what migrations need to be created.
You should always look at the generated migrations to ensure that they look correct. Do so now by looking at the generated file in `priv/repo/migrations`.
Finally, we will create the local database and apply the generated migrations:
```bash
mix ash_sqlite.create
mix ash_sqlite.migrate
```
### Try it out
And now we're ready to try it out! Run the following in iex.
Let's create some data. We'll make a representative and give them some open and some closed tickets.
```elixir
require Ash.Query
representative = (
Helpdesk.Support.Representative
|> Ash.Changeset.for_create(:create, %{name: "Joe Armstrong"})
|> Helpdesk.Support.create!()
)
for i <- 0..5 do
ticket =
Helpdesk.Support.Ticket
|> Ash.Changeset.for_create(:open, %{subject: "Issue #{i}"})
|> Helpdesk.Support.create!()
|> Ash.Changeset.for_update(:assign, %{representative_id: representative.id})
|> Helpdesk.Support.update!()
if rem(i, 2) == 0 do
ticket
|> Ash.Changeset.for_update(:close)
|> Helpdesk.Support.update!()
end
end
```
And now we can read that data. You should see some debug logs that show the SQL queries AshSqlite is generating.
```elixir
require Ash.Query
# Show the tickets where the subject contains "2"
Helpdesk.Support.Ticket
|> Ash.Query.filter(contains(subject, "2"))
|> Helpdesk.Support.read!()
```
```elixir
require Ash.Query
# Show the tickets that are closed and their subject does not contain "4"
Helpdesk.Support.Ticket
|> Ash.Query.filter(status == :closed and not(contains(subject, "4")))
|> Helpdesk.Support.read!()
```
And, naturally, now that we are storing this in SQLite, this database is persisted even if we stop/start our application. The nice thing, however, is that this was the _exact_ same code that we ran against our resources when they were backed by ETS.
### Calculations
Calculations can be pushed down into SQL using expressions.
For example, we can determine the percentage of tickets that are open:
```elixir
# in lib/helpdesk/support/resources/representative.ex
calculations do
calculate :percent_open, :float, expr(open_tickets / total_tickets)
end
```
Calculations can be loaded.
```elixir
require Ash.Query
Helpdesk.Support.Representative
|> Ash.Query.filter(percent_open > 0.25)
|> Ash.Query.sort(:percent_open)
|> Ash.Query.load(:percent_open)
|> Helpdesk.Support.read!()
```
### Rich Configuration Options
Take a look at the DSL documentation for more information on what you can configure. You can add check constraints, configure the behavior of foreign keys and more!
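For instance, here is a hedged sketch of a resource using a couple of those options (the index fields, constraint name, and check expression are illustrative):
```elixir
sqlite do
  table "tickets"
  repo Helpdesk.Repo

  custom_indexes do
    # a plain (non-unique) index to speed up lookups by subject
    index [:subject]
  end

  check_constraints do
    # surfaces a friendly error when the database-level check fails
    check_constraint :priority, "priority_must_be_positive",
      check: "priority > 0",
      message: "priority must be positive"
  end
end
```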
### What next?
- Check out the data layer docs: `AshSqlite.DataLayer`
- [Ecto's documentation](https://hexdocs.pm/ecto/Ecto.html). AshSqlite (and much of Ash itself) is made possible by the amazing Ecto. If you find yourself looking for escape hatches when using Ash or ways to work directly with your database, you will want to know how Ecto works. Ash and AshSqlite intentionally do not hide Ecto, and in fact encourage its use whenever you need an escape hatch.
- [Ecto's Migration documentation](https://hexdocs.pm/ecto_sql/Ecto.Migration.html) to read more about migrations. Even with the ash_sqlite migration generator, you will very likely need to modify your own migrations some day.

7
lib/ash_sqlite.ex Normal file

@ -0,0 +1,7 @@
defmodule AshSqlite do
@moduledoc """
The AshSqlite extension gives you tools to map a resource to a sqlite database table.
For more, check out the [getting started guide](/documentation/tutorials/get-started-with-sqlite.md)
"""
end

73
lib/calculation.ex Normal file

@ -0,0 +1,73 @@
defmodule AshSqlite.Calculation do
@moduledoc false
require Ecto.Query
def add_calculations(query, [], _, _), do: {:ok, query}
def add_calculations(query, calculations, resource, source_binding) do
query = AshSqlite.DataLayer.default_bindings(query, resource)
query =
if query.select do
query
else
Ecto.Query.select_merge(query, %{})
end
dynamics =
Enum.map(calculations, fn {calculation, expression} ->
type =
AshSqlite.Types.parameterized_type(
calculation.type,
Map.get(calculation, :constraints, [])
)
expr =
AshSqlite.Expr.dynamic_expr(
query,
expression,
query.__ash_bindings__,
false,
type
)
expr =
if type do
Ecto.Query.dynamic(type(^expr, ^type))
else
expr
end
{calculation.load, calculation.name, expr}
end)
{:ok, add_calculation_selects(query, dynamics)}
end
defp add_calculation_selects(query, dynamics) do
{in_calculations, in_body} =
Enum.split_with(dynamics, fn {load, _name, _dynamic} -> is_nil(load) end)
calcs =
in_body
|> Map.new(fn {load, _, dynamic} ->
{load, dynamic}
end)
calcs =
if Enum.empty?(in_calculations) do
calcs
else
Map.put(
calcs,
:calculations,
Map.new(in_calculations, fn {_, name, dynamic} ->
{name, dynamic}
end)
)
end
Ecto.Query.select_merge(query, ^calcs)
end
end

30
lib/check_constraint.ex Normal file

@ -0,0 +1,30 @@
defmodule AshSqlite.CheckConstraint do
@moduledoc "Represents a configured check constraint on the table backing a resource"
defstruct [:attribute, :name, :message, :check]
def schema do
[
attribute: [
type: :any,
doc:
"The attribute or list of attributes to which an error will be added if the check constraint fails",
required: true
],
name: [
type: :string,
required: true,
doc: "The name of the constraint"
],
message: [
type: :string,
doc: "The message to be added if the check constraint fails"
],
check: [
type: :string,
doc:
"The contents of the check. If this is set, the migration generator will include it when generating migrations"
]
]
end
end

20
lib/custom_extension.ex Normal file

@ -0,0 +1,20 @@
defmodule AshSqlite.CustomExtension do
@moduledoc """
A custom extension implementation.
"""
@callback install(version :: integer) :: String.t()
@callback uninstall(version :: integer) :: String.t()
defmacro __using__(name: name, latest_version: latest_version) do
quote do
@behaviour AshSqlite.CustomExtension
@extension_name unquote(name)
@extension_latest_version unquote(latest_version)
def extension, do: {@extension_name, @extension_latest_version, &install/1, &uninstall/1}
end
end
end
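As a usage sketch (the extension name and SQL are illustrative):
```elixir
defmodule MyApp.MyExtension do
  use AshSqlite.CustomExtension, name: "my_extension", latest_version: 1

  @impl true
  def install(_version) do
    "CREATE TABLE IF NOT EXISTS my_extension_data (id INTEGER PRIMARY KEY)"
  end

  @impl true
  def uninstall(_version) do
    "DROP TABLE IF EXISTS my_extension_data"
  end
end
```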

120
lib/custom_index.ex Normal file

@ -0,0 +1,120 @@
defmodule AshSqlite.CustomIndex do
@moduledoc "Represents a custom index on the table backing a resource"
@fields [
:table,
:fields,
:name,
:unique,
:concurrently,
:using,
:prefix,
:where,
:include,
:message
]
defstruct @fields
def fields, do: @fields
@schema [
fields: [
type: {:wrap_list, {:or, [:atom, :string]}},
doc: "The fields to include in the index."
],
name: [
type: :string,
doc: "the name of the index. Defaults to \"\#\{table\}_\#\{column\}_index\"."
],
unique: [
type: :boolean,
doc: "indicates whether the index should be unique.",
default: false
],
concurrently: [
type: :boolean,
doc: "indicates whether the index should be created/dropped concurrently.",
default: false
],
using: [
type: :string,
doc: "configures the index type."
],
prefix: [
type: :string,
doc: "specify an optional prefix for the index."
],
where: [
type: :string,
doc: "specify conditions for a partial index."
],
message: [
type: :string,
doc: "A custom message to use for unique indexes that have been violated"
],
include: [
type: {:list, :string},
doc:
"specify fields for a covering index. This is not supported by all databases. For more information on SQLite support, please read the official docs."
]
]
def schema, do: @schema
# sobelow_skip ["DOS.StringToAtom"]
def transform(%__MODULE__{fields: fields} = index) do
index = %{
index
| fields:
Enum.map(fields, fn field ->
if is_atom(field) do
field
else
String.to_atom(field)
end
end)
}
cond do
index.name ->
if Regex.match?(~r/^[0-9a-zA-Z_]+$/, index.name) do
{:ok, index}
else
{:error,
"Custom index name #{index.name} is not valid. Must have letters, numbers and underscores only"}
end
mismatched_field =
Enum.find(index.fields, fn field ->
!Regex.match?(~r/^[0-9a-zA-Z_]+$/, to_string(field))
end) ->
{:error,
"""
Custom index field #{mismatched_field} contains invalid index name characters.
A name must be set manually, i.e
`name: "your_desired_index_name"`
Index names must have letters, numbers and underscores only
"""}
true ->
{:ok, index}
end
end
def name(_resource, %{name: name}) when is_binary(name) do
name
end
# sobelow_skip ["DOS.StringToAtom"]
def name(table, %{fields: fields}) do
[table, fields, "index"]
|> List.flatten()
|> Enum.map(&to_string(&1))
|> Enum.map(&String.replace(&1, ~r"[^\w_]", "_"))
|> Enum.map_join("_", &String.replace_trailing(&1, "_", ""))
|> String.to_atom()
end
end

1829
lib/data_layer.ex Normal file

File diff suppressed because it is too large

128
lib/data_layer/info.ex Normal file

@ -0,0 +1,128 @@
defmodule AshSqlite.DataLayer.Info do
@moduledoc "Introspection functions for "
alias Spark.Dsl.Extension
@doc "The configured repo for a resource"
def repo(resource) do
Extension.get_opt(resource, [:sqlite], :repo, nil, true)
end
@doc "The configured table for a resource"
def table(resource) do
Extension.get_opt(resource, [:sqlite], :table, nil, true)
end
@doc "The configured schema for a resource"
def schema(resource) do
Extension.get_opt(resource, [:sqlite], :schema, nil, true)
end
@doc "The configured references for a resource"
def references(resource) do
Extension.get_entities(resource, [:sqlite, :references])
end
@doc "The configured reference for a given relationship of a resource"
def reference(resource, relationship) do
resource
|> Extension.get_entities([:sqlite, :references])
|> Enum.find(&(&1.relationship == relationship))
end
@doc "A keyword list of customized migration types"
def migration_types(resource) do
Extension.get_opt(resource, [:sqlite], :migration_types, [])
end
@doc "A keyword list of customized migration defaults"
def migration_defaults(resource) do
Extension.get_opt(resource, [:sqlite], :migration_defaults, [])
end
@doc "A list of attributes to be ignored when generating migrations"
def migration_ignore_attributes(resource) do
Extension.get_opt(resource, [:sqlite], :migration_ignore_attributes, [])
end
@doc "The configured check_constraints for a resource"
def check_constraints(resource) do
Extension.get_entities(resource, [:sqlite, :check_constraints])
end
@doc "The configured custom_indexes for a resource"
def custom_indexes(resource) do
Extension.get_entities(resource, [:sqlite, :custom_indexes])
end
@doc "The configured custom_statements for a resource"
def custom_statements(resource) do
Extension.get_entities(resource, [:sqlite, :custom_statements])
end
@doc "The configured polymorphic_reference_on_delete for a resource"
def polymorphic_on_delete(resource) do
Extension.get_opt(resource, [:sqlite, :references], :polymorphic_on_delete, nil, true)
end
@doc "The configured polymorphic_reference_on_update for a resource"
def polymorphic_on_update(resource) do
Extension.get_opt(resource, [:sqlite, :references], :polymorphic_on_update, nil, true)
end
@doc "The configured polymorphic_reference_name for a resource"
def polymorphic_name(resource) do
Extension.get_opt(resource, [:sqlite, :references], :polymorphic_name, nil, true)
end
@doc "The configured polymorphic? for a resource"
def polymorphic?(resource) do
Extension.get_opt(resource, [:sqlite], :polymorphic?, nil, true)
end
@doc "The configured unique_index_names"
def unique_index_names(resource) do
Extension.get_opt(resource, [:sqlite], :unique_index_names, [], true)
end
@doc "The configured exclusion_constraint_names"
def exclusion_constraint_names(resource) do
Extension.get_opt(resource, [:sqlite], :exclusion_constraint_names, [], true)
end
@doc "The configured identity_index_names"
def identity_index_names(resource) do
Extension.get_opt(resource, [:sqlite], :identity_index_names, [], true)
end
@doc "Identities not to include in the migrations"
def skip_identities(resource) do
Extension.get_opt(resource, [:sqlite], :skip_identities, [], true)
end
@doc "The configured foreign_key_names"
def foreign_key_names(resource) do
Extension.get_opt(resource, [:sqlite], :foreign_key_names, [], true)
end
@doc "Whether or not the resource should be included when generating migrations"
def migrate?(resource) do
Extension.get_opt(resource, [:sqlite], :migrate?, nil, true)
end
@doc "A list of keys to always include in upserts."
def global_upsert_keys(resource) do
Extension.get_opt(resource, [:sqlite], :global_upsert_keys, [])
end
@doc "A stringified version of the base_filter, to be used in a where clause when generating unique indexes"
def base_filter_sql(resource) do
Extension.get_opt(resource, [:sqlite], :base_filter_sql, nil)
end
@doc "Skip generating unique indexes when generating migrations"
def skip_unique_indexes(resource) do
Extension.get_opt(resource, [:sqlite], :skip_unique_indexes, [])
end
end
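A hedged usage sketch (resource and repo names are illustrative):
```elixir
AshSqlite.DataLayer.Info.table(MyApp.Post)
# => "posts"

AshSqlite.DataLayer.Info.repo(MyApp.Post)
# => MyApp.Repo
```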


@ -0,0 +1,89 @@
defprotocol EctoMigrationDefault do
@moduledoc """
Allows configuring how values are translated to default values in migrations.
Still a work in progress, but covers most standard values aside from maps.
"""
@fallback_to_any true
@doc "Returns the text (elixir code) that will be placed into a migration as the default value"
def to_default(value)
end
defimpl EctoMigrationDefault, for: Any do
require Logger
def to_default(value) do
Logger.warning("""
You have specified a default value for a type that cannot be explicitly
converted to an Ecto default:
`#{inspect(value)}`
The default value in the migration will be set to `nil` and you can edit
your migration accordingly.
To prevent this warning, implement the `EctoMigrationDefault` protocol
for the appropriate Elixir type in your Ash project, or configure its
default value in `migration_defaults` in the sqlite section. Use `\\\"nil\\\"`
for no default.
""")
"nil"
end
end
defimpl EctoMigrationDefault, for: Integer do
def to_default(value) do
to_string(value)
end
end
defimpl EctoMigrationDefault, for: Float do
def to_default(value) do
to_string(value)
end
end
defimpl EctoMigrationDefault, for: Decimal do
def to_default(value) do
~s["#{value}"]
end
end
defimpl EctoMigrationDefault, for: BitString do
def to_default(value) do
inspect(value)
end
end
defimpl EctoMigrationDefault, for: DateTime do
def to_default(value) do
~s[fragment("'#{to_string(value)}'")]
end
end
defimpl EctoMigrationDefault, for: NaiveDateTime do
def to_default(value) do
~s[fragment("'#{to_string(value)}'")]
end
end
defimpl EctoMigrationDefault, for: Date do
def to_default(value) do
~s[fragment("'#{to_string(value)}'")]
end
end
defimpl EctoMigrationDefault, for: Time do
def to_default(value) do
~s[fragment("'#{to_string(value)}'")]
end
end
defimpl EctoMigrationDefault, for: Atom do
def to_default(value) when value in [nil, true, false], do: inspect(value)
def to_default(value) do
inspect(to_string(value))
end
end
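A hedged sketch of implementing the protocol for a custom type (the struct is illustrative):
```elixir
defimpl EctoMigrationDefault, for: MyApp.Money do
  # render the default as the raw integer amount, e.g. `0`
  def to_default(%MyApp.Money{amount: amount}) do
    to_string(amount)
  end
end
```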

1459
lib/expr.ex Normal file

File diff suppressed because it is too large

72
lib/functions/fragment.ex Normal file

@ -0,0 +1,72 @@
defmodule AshSqlite.Functions.Fragment do
@moduledoc """
A function that maps to ecto's `fragment` function
https://hexdocs.pm/ecto/Ecto.Query.API.html#fragment/1
"""
use Ash.Query.Function, name: :fragment
def private?, do: true
# Varargs is special, and should only be used in rare circumstances (like this one)
# no type casting or help can be provided for these functions.
def args, do: :var_args
def new([fragment | _]) when not is_binary(fragment) do
{:error, "First argument to `fragment` must be a string."}
end
def new([fragment | rest]) do
split = split_fragment(fragment)
if Enum.count(split, &(&1 == :slot)) != length(rest) do
{:error,
"fragment(...) expects extra arguments in the same amount of question marks in string. " <>
"It received #{Enum.count(split, &(&1 == :slot))} extra argument(s) but expected #{length(rest)}"}
else
{:ok, %__MODULE__{arguments: merge_fragment(split, rest)}}
end
end
def casted_new([fragment | _]) when not is_binary(fragment) do
{:error, "First argument to `fragment` must be a string."}
end
def casted_new([fragment | rest]) do
split = split_fragment(fragment)
if Enum.count(split, &(&1 == :slot)) != length(rest) do
{:error,
"fragment(...) expects extra arguments in the same amount of question marks in string. " <>
"It received #{Enum.count(split, &(&1 == :slot))} extra argument(s) but expected #{length(rest)}"}
else
{:ok, %__MODULE__{arguments: merge_fragment(split, rest, :casted_expr)}}
end
end
defp merge_fragment(expr, args, tag \\ :expr)
defp merge_fragment([], [], _tag), do: []
defp merge_fragment([:slot | rest], [arg | rest_args], tag) do
[{tag, arg} | merge_fragment(rest, rest_args, tag)]
end
defp merge_fragment([val | rest], rest_args, tag) do
[{:raw, val} | merge_fragment(rest, rest_args, tag)]
end
defp split_fragment(frag, consumed \\ "")
defp split_fragment(<<>>, consumed),
do: [consumed]
defp split_fragment(<<??, rest::binary>>, consumed),
do: [consumed, :slot | split_fragment(rest, "")]
defp split_fragment(<<?\\, ??, rest::binary>>, consumed),
do: split_fragment(rest, consumed <> <<??>>)
defp split_fragment(<<first::utf8, rest::binary>>, consumed),
do: split_fragment(rest, consumed <> <<first::utf8>>)
end

9
lib/functions/like.ex Normal file

@ -0,0 +1,9 @@
defmodule AshSqlite.Functions.Like do
@moduledoc """
Maps to the builtin sqlite function `like`.
"""
use Ash.Query.Function, name: :like
def args, do: [[:string, :string]]
end

775
lib/join.ex Normal file

@ -0,0 +1,775 @@
defmodule AshSqlite.Join do
@moduledoc false
import Ecto.Query, only: [from: 2, subquery: 1]
alias Ash.Query.{BooleanExpression, Not, Ref}
@known_inner_join_operators [
Eq,
GreaterThan,
GreaterThanOrEqual,
In,
LessThanOrEqual,
LessThan,
NotEq
]
|> Enum.map(&Module.concat(Ash.Query.Operator, &1))
@known_inner_join_functions [
Ago,
Contains
]
|> Enum.map(&Module.concat(Ash.Query.Function, &1))
@known_inner_join_predicates @known_inner_join_functions ++ @known_inner_join_operators
def join_all_relationships(
query,
filter,
opts \\ [],
relationship_paths \\ nil,
path \\ [],
source \\ nil
) do
relationship_paths =
cond do
relationship_paths ->
relationship_paths
opts[:no_this?] ->
filter
|> Ash.Filter.map(fn
%Ash.Query.Parent{} ->
# Removing any `Parent` from the filter
nil
other ->
other
end)
|> Ash.Filter.relationship_paths()
|> to_joins(filter)
true ->
filter
|> Ash.Filter.relationship_paths()
|> to_joins(filter)
end
Enum.reduce_while(relationship_paths, {:ok, query}, fn
{_join_type, []}, {:ok, query} ->
{:cont, {:ok, query}}
{join_type, [relationship | rest_rels]}, {:ok, query} ->
source = source || relationship.source
current_path = path ++ [relationship]
current_join_type = join_type
look_for_join_types =
case join_type do
:left ->
[:left, :inner]
:inner ->
[:left, :inner]
other ->
[other]
end
case get_binding(source, Enum.map(current_path, & &1.name), query, look_for_join_types) do
binding when is_integer(binding) ->
case join_all_relationships(
query,
filter,
opts,
[{join_type, rest_rels}],
current_path,
source
) do
{:ok, query} ->
{:cont, {:ok, query}}
{:error, error} ->
{:halt, {:error, error}}
end
nil ->
case join_relationship(
query,
relationship,
Enum.map(path, & &1.name),
current_join_type,
source,
filter
) do
{:ok, joined_query} ->
joined_query_with_distinct = add_distinct(relationship, join_type, joined_query)
case join_all_relationships(
joined_query_with_distinct,
filter,
opts,
[{join_type, rest_rels}],
current_path,
source
) do
{:ok, query} ->
{:cont, {:ok, query}}
{:error, error} ->
{:halt, {:error, error}}
end
{:error, error} ->
{:halt, {:error, error}}
end
end
end)
end
defp to_joins(paths, filter) do
paths
|> Enum.map(fn path ->
if can_inner_join?(path, filter) do
{:inner,
AshSqlite.Join.relationship_path_to_relationships(
filter.resource,
path
)}
else
{:left,
AshSqlite.Join.relationship_path_to_relationships(
filter.resource,
path
)}
end
end)
end
def relationship_path_to_relationships(resource, path, acc \\ [])
def relationship_path_to_relationships(_resource, [], acc), do: Enum.reverse(acc)
def relationship_path_to_relationships(resource, [relationship | rest], acc) do
relationship = Ash.Resource.Info.relationship(resource, relationship)
relationship_path_to_relationships(relationship.destination, rest, [relationship | acc])
end
def maybe_get_resource_query(
resource,
relationship,
root_query,
path \\ [],
bindings \\ nil,
start_binding \\ nil,
is_subquery? \\ true
) do
resource
|> Ash.Query.new(nil, base_filter?: false)
|> Ash.Query.set_context(%{data_layer: %{start_bindings_at: start_binding}})
|> Ash.Query.set_context((bindings || root_query.__ash_bindings__).context)
|> Ash.Query.set_context(relationship.context)
|> case do
%{valid?: true} = query ->
ash_query = query
initial_query = %{
AshSqlite.DataLayer.resource_to_query(resource, nil)
| prefix: Map.get(root_query, :prefix)
}
case Ash.Query.data_layer_query(query,
initial_query: initial_query
) do
{:ok, query} ->
query =
query
|> do_base_filter(
root_query,
ash_query,
resource,
path,
bindings
)
|> do_relationship_filter(
relationship,
root_query,
ash_query,
resource,
path,
bindings,
is_subquery?
)
{:ok, query}
{:error, error} ->
{:error, error}
end
query ->
{:error, query}
end
end
defp do_relationship_filter(query, %{filter: nil}, _, _, _, _, _, _), do: query
defp do_relationship_filter(
query,
relationship,
root_query,
ash_query,
resource,
path,
bindings,
is_subquery?
) do
filter =
resource
|> Ash.Filter.parse!(
relationship.filter,
ash_query.calculations,
Map.update(
ash_query.context,
:parent_stack,
[relationship.source],
&[relationship.source | &1]
)
)
base_bindings = bindings || query.__ash_bindings__
parent_binding =
case :lists.droplast(path) do
[] ->
base_bindings.bindings
|> Enum.find_value(fn {key, %{type: type}} ->
if type == :root do
key
end
end)
path ->
get_binding(
root_query.__ash_bindings__.resource,
path,
%{query | __ash_bindings__: base_bindings},
[
:inner,
:left
]
)
end
parent_bindings = %{
base_bindings
| resource: relationship.source,
calculations: %{},
parent_resources: [],
context: relationship.context,
current: parent_binding + 1
}
parent_bindings =
if bindings do
Map.put(parent_bindings, :parent_is_parent_as?, !is_subquery?)
else
parent_bindings
|> Map.update!(:bindings, &Map.take(&1, [parent_binding]))
end
has_bindings? = not is_nil(bindings)
bindings =
base_bindings
|> Map.put(:parent_bindings, parent_bindings)
|> Map.put(:parent_resources, [
relationship.source | parent_bindings[:parent_resources] || []
])
dynamic =
if has_bindings? do
filter =
if is_subquery? do
Ash.Filter.move_to_relationship_path(filter, path)
else
filter
end
AshSqlite.Expr.dynamic_expr(root_query, filter, bindings, true)
else
AshSqlite.Expr.dynamic_expr(query, filter, bindings, true)
end
{:ok, query} = join_all_relationships(query, filter)
from(row in query, where: ^dynamic)
end
defp do_base_filter(query, root_query, ash_query, resource, path, bindings) do
case Ash.Resource.Info.base_filter(resource) do
nil ->
query
filter ->
filter =
resource
|> Ash.Filter.parse!(
filter,
ash_query.calculations,
ash_query.context
)
dynamic =
if bindings do
filter = Ash.Filter.move_to_relationship_path(filter, path)
AshSqlite.Expr.dynamic_expr(root_query, filter, bindings, true)
else
AshSqlite.Expr.dynamic_expr(query, filter, query.__ash_bindings__, true)
end
from(row in query, where: ^dynamic)
end
end
def set_join_prefix(join_query, query, resource) do
%{
join_query
| prefix:
AshSqlite.DataLayer.Info.schema(resource) ||
AshSqlite.DataLayer.Info.repo(resource).config()[:default_prefix] ||
"public"
}
end
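# A relationship path can be inner joined when the filter references it
# through one of the null-rejecting predicates above, outside of any `or`
# or `not`: such a reference guarantees a matching related row exists.
# References under an `or` (tracked via `seen_an_or?`) or a `not` can be
# satisfied without a related row, so those force a left join instead.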
defp can_inner_join?(path, expr, seen_an_or? \\ false)
defp can_inner_join?(path, %{expression: expr}, seen_an_or?),
do: can_inner_join?(path, expr, seen_an_or?)
defp can_inner_join?(_path, expr, _seen_an_or?) when expr in [nil, true, false], do: true
defp can_inner_join?(path, %BooleanExpression{op: :and, left: left, right: right}, seen_an_or?) do
can_inner_join?(path, left, seen_an_or?) || can_inner_join?(path, right, seen_an_or?)
end
defp can_inner_join?(path, %BooleanExpression{op: :or, left: left, right: right}, _) do
can_inner_join?(path, left, true) && can_inner_join?(path, right, true)
end
defp can_inner_join?(
_,
%Not{},
_
) do
false
end
defp can_inner_join?(
search_path,
%struct{__operator__?: true, left: %Ref{relationship_path: relationship_path}},
seen_an_or?
)
when search_path == relationship_path and struct in @known_inner_join_predicates do
not seen_an_or?
end
defp can_inner_join?(
search_path,
%struct{__operator__?: true, right: %Ref{relationship_path: relationship_path}},
seen_an_or?
)
when search_path == relationship_path and struct in @known_inner_join_predicates do
not seen_an_or?
end
defp can_inner_join?(
search_path,
%struct{__function__?: true, arguments: arguments},
seen_an_or?
)
when struct in @known_inner_join_predicates do
if Enum.any?(arguments, &match?(%Ref{relationship_path: ^search_path}, &1)) do
not seen_an_or?
else
true
end
end
defp can_inner_join?(_, _, _), do: false
@doc false
def get_binding(resource, candidate_path, %{__ash_bindings__: _} = query, types) do
types = List.wrap(types)
Enum.find_value(query.__ash_bindings__.bindings, fn
{binding, %{path: path, source: source, type: type}} ->
if type in types &&
Ash.SatSolver.synonymous_relationship_paths?(resource, path, candidate_path, source) do
binding
end
_ ->
nil
end)
end
def get_binding(_, _, _, _), do: nil
defp add_distinct(relationship, _join_type, joined_query) do
if !joined_query.__ash_bindings__.in_group? &&
(relationship.cardinality == :many || Map.get(relationship, :from_many?)) &&
!joined_query.distinct do
from(row in joined_query,
distinct: ^Ash.Resource.Info.primary_key(joined_query.__ash_bindings__.resource)
)
else
joined_query
end
end
defp join_relationship(
query,
relationship,
path,
join_type,
source,
filter
) do
case Map.get(query.__ash_bindings__.bindings, path) do
%{type: existing_join_type} when join_type != existing_join_type ->
raise "unreachable?"
nil ->
do_join_relationship(
query,
relationship,
path,
join_type,
source,
filter
)
_ ->
{:ok, query}
end
end
defp do_join_relationship(
query,
%{manual: {module, opts}} = relationship,
path,
kind,
source,
filter
) do
full_path = path ++ [relationship.name]
initial_ash_bindings = query.__ash_bindings__
binding_data = %{type: kind, path: full_path, source: source}
query = AshSqlite.DataLayer.add_binding(query, binding_data)
used_calculations =
Ash.Filter.used_calculations(
filter,
relationship.destination,
full_path
)
use_root_query_bindings? = true
root_bindings =
if use_root_query_bindings? do
query.__ash_bindings__
end
case maybe_get_resource_query(
relationship.destination,
relationship,
query,
full_path,
root_bindings
) do
{:error, error} ->
{:error, error}
{:ok, relationship_destination} ->
relationship_destination =
relationship_destination
|> Ecto.Queryable.to_query()
|> set_join_prefix(query, relationship.destination)
binding_kinds =
case kind do
:left ->
[:left, :inner]
:inner ->
[:left, :inner]
other ->
[other]
end
current_binding =
Enum.find_value(initial_ash_bindings.bindings, 0, fn {binding, data} ->
if data.type in binding_kinds && data.path == path do
binding
end
end)
module.ash_sqlite_join(
query,
opts,
current_binding,
initial_ash_bindings.current,
kind,
relationship_destination
)
end
rescue
e in UndefinedFunctionError ->
if e.function == :ash_sqlite_join do
reraise """
AshSqlite cannot join to a manual relationship #{inspect(module)} that does not implement the `AshSqlite.ManualRelationship` behaviour.
""",
__STACKTRACE__
else
reraise e, __STACKTRACE__
end
end
defp do_join_relationship(
query,
%{type: :many_to_many} = relationship,
path,
kind,
source,
filter
) do
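# A many-to-many join adds two bindings: the "through" (join) resource
# first, then the destination, which is why the destination is joined
# `as: initial_ash_bindings.current + 1` below.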
join_relationship =
Ash.Resource.Info.relationship(relationship.source, relationship.join_relationship)
join_path = path ++ [join_relationship.name]
full_path = path ++ [relationship.name]
initial_ash_bindings = query.__ash_bindings__
binding_data = %{type: kind, path: full_path, source: source}
query =
query
|> AshSqlite.DataLayer.add_binding(%{
path: join_path,
type: :left,
source: source
})
|> AshSqlite.DataLayer.add_binding(binding_data)
used_calculations =
Ash.Filter.used_calculations(
filter,
relationship.destination,
full_path
)
use_root_query_bindings? = true
root_bindings =
if use_root_query_bindings? do
query.__ash_bindings__
end
with {:ok, relationship_through} <-
maybe_get_resource_query(
relationship.through,
join_relationship,
query,
join_path,
root_bindings
),
{:ok, relationship_destination} <-
maybe_get_resource_query(
relationship.destination,
relationship,
query,
path,
root_bindings
) do
relationship_through =
relationship_through
|> Ecto.Queryable.to_query()
|> set_join_prefix(query, relationship.through)
relationship_destination =
relationship_destination
|> Ecto.Queryable.to_query()
|> set_join_prefix(query, relationship.destination)
binding_kinds =
case kind do
:left ->
[:left, :inner]
:inner ->
[:left, :inner]
other ->
[other]
end
current_binding =
Enum.find_value(initial_ash_bindings.bindings, 0, fn {binding, data} ->
if data.type in binding_kinds && data.path == path do
binding
end
end)
query =
case kind do
:inner ->
from([{row, current_binding}] in query,
join: through in ^relationship_through,
as: ^initial_ash_bindings.current,
on:
field(row, ^relationship.source_attribute) ==
field(through, ^relationship.source_attribute_on_join_resource),
join: destination in ^relationship_destination,
as: ^(initial_ash_bindings.current + 1),
on:
field(destination, ^relationship.destination_attribute) ==
field(through, ^relationship.destination_attribute_on_join_resource)
)
_ ->
from([{row, current_binding}] in query,
left_join: through in ^relationship_through,
as: ^initial_ash_bindings.current,
on:
field(row, ^relationship.source_attribute) ==
field(through, ^relationship.source_attribute_on_join_resource),
left_join: destination in ^relationship_destination,
as: ^(initial_ash_bindings.current + 1),
on:
field(destination, ^relationship.destination_attribute) ==
field(through, ^relationship.destination_attribute_on_join_resource)
)
end
{:ok, query}
end
end
defp do_join_relationship(
query,
relationship,
path,
kind,
source,
filter
) do
full_path = path ++ [relationship.name]
initial_ash_bindings = query.__ash_bindings__
binding_data = %{type: kind, path: full_path, source: source}
query = AshSqlite.DataLayer.add_binding(query, binding_data)
used_calculations =
Ash.Filter.used_calculations(
filter,
relationship.destination,
full_path
)
use_root_query_bindings? = true
root_bindings =
if use_root_query_bindings? do
query.__ash_bindings__
end
case maybe_get_resource_query(
relationship.destination,
relationship,
query,
full_path,
root_bindings
) do
{:error, error} ->
{:error, error}
{:ok, relationship_destination} ->
relationship_destination =
relationship_destination
|> Ecto.Queryable.to_query()
|> set_join_prefix(query, relationship.destination)
binding_kinds =
case kind do
:left ->
[:left, :inner]
:inner ->
[:left, :inner]
other ->
[other]
end
current_binding =
Enum.find_value(initial_ash_bindings.bindings, 0, fn {binding, data} ->
if data.type in binding_kinds && data.path == path do
binding
end
end)
query =
case {kind, Map.get(relationship, :no_attributes?)} do
{:inner, true} ->
from([{row, current_binding}] in query,
join: destination in ^relationship_destination,
as: ^initial_ash_bindings.current,
on: true
)
{_, true} ->
from([{row, current_binding}] in query,
left_join: destination in ^relationship_destination,
as: ^initial_ash_bindings.current,
on: true
)
{:inner, _} ->
from([{row, current_binding}] in query,
join: destination in ^relationship_destination,
as: ^initial_ash_bindings.current,
on:
field(row, ^relationship.source_attribute) ==
field(
destination,
^relationship.destination_attribute
)
)
_ ->
from([{row, current_binding}] in query,
left_join: destination in ^relationship_destination,
as: ^initial_ash_bindings.current,
on:
field(row, ^relationship.source_attribute) ==
field(
destination,
^relationship.destination_attribute
)
)
end
{:ok, query}
end
end
end

View file

@ -0,0 +1,25 @@
defmodule AshSqlite.ManualRelationship do
@moduledoc "A behavior for sqlite-specific manual relationship functionality"
@callback ash_sqlite_join(
source_query :: Ecto.Query.t(),
opts :: Keyword.t(),
current_binding :: term,
destination_binding :: term,
type :: :inner | :left,
destination_query :: Ecto.Query.t()
) :: {:ok, Ecto.Query.t()} | {:error, term}
@callback ash_sqlite_subquery(
opts :: Keyword.t(),
current_binding :: term,
destination_binding :: term,
destination_query :: Ecto.Query.t()
) :: {:ok, Ecto.Query.t()} | {:error, term}
defmacro __using__(_) do
quote do
@behaviour AshSqlite.ManualRelationship
end
end
end
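For illustration, a minimal sketch of a module implementing the join callback above (resource, column, and module names are hypothetical; `ash_sqlite_subquery/4` and the `Ash.Resource.ManualRelationship` callbacks needed in real use are omitted):

```elixir
defmodule MyApp.Post.PopularComments do
  use Ash.Resource.ManualRelationship
  use AshSqlite.ManualRelationship

  require Ecto.Query

  # Join comments to their post, keeping only "popular" ones.
  def ash_sqlite_join(query, _opts, current_binding, as_binding, :inner, destination_query) do
    {:ok,
     Ecto.Query.from(_ in query,
       join: dest in ^destination_query,
       as: ^as_binding,
       on: dest.post_id == as(^current_binding).id and dest.score > 10
     )}
  end

  def ash_sqlite_join(query, _opts, current_binding, as_binding, :left, destination_query) do
    {:ok,
     Ecto.Query.from(_ in query,
       left_join: dest in ^destination_query,
       as: ^as_binding,
       on: dest.post_id == as(^current_binding).id and dest.score > 10
     )}
  end
end
```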

File diff suppressed because it is too large

View file

@ -0,0 +1,902 @@
defmodule AshSqlite.MigrationGenerator.Operation do
@moduledoc false
defmodule Helper do
@moduledoc false
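# Joins option fragments into one comma-separated argument list, dropping
# nils, e.g. join(["add :title", nil, "null: false"]) => "add :title, null: false".
# The trailing ", )" cleanup turns "references(:posts, )" into "references(:posts)".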
def join(list),
do:
list
|> List.flatten()
|> Enum.reject(&is_nil/1)
|> Enum.join(", ")
|> String.replace(", )", ")")
def maybe_add_default("nil"), do: nil
def maybe_add_default(value), do: "default: #{value}"
def maybe_add_primary_key(true), do: "primary_key: true"
def maybe_add_primary_key(_), do: nil
def maybe_add_null(false), do: "null: false"
def maybe_add_null(_), do: nil
def maybe_add_prefix(nil), do: nil
def maybe_add_prefix(prefix), do: "prefix: #{prefix}"
def in_quotes(nil), do: nil
def in_quotes(value), do: "\"#{value}\""
def as_atom(value) when is_atom(value), do: Macro.inspect_atom(:remote_call, value)
# sobelow_skip ["DOS.StringToAtom"]
def as_atom(value), do: Macro.inspect_atom(:remote_call, String.to_atom(value))
def option(key, value) do
if value do
"#{as_atom(key)}: #{inspect(value)}"
end
end
def on_delete(%{on_delete: on_delete}) when on_delete in [:delete, :nilify] do
"on_delete: :#{on_delete}_all"
end
def on_delete(%{on_delete: on_delete}) when is_atom(on_delete) and not is_nil(on_delete) do
"on_delete: :#{on_delete}"
end
def on_delete(_), do: nil
def on_update(%{on_update: on_update}) when on_update in [:update, :nilify] do
"on_update: :#{on_update}_all"
end
def on_update(%{on_update: on_update}) when is_atom(on_update) and not is_nil(on_update) do
"on_update: :#{on_update}"
end
def on_update(_), do: nil
def reference_type(
%{type: :integer},
%{destination_attribute_generated: true, destination_attribute_default: "nil"}
) do
:bigint
end
def reference_type(%{type: type}, _) do
type
end
end
defmodule CreateTable do
@moduledoc false
defstruct [:table, :schema, :multitenancy, :old_multitenancy]
end
defmodule AddAttribute do
@moduledoc false
defstruct [:attribute, :table, :schema, :multitenancy, :old_multitenancy]
import Helper
def up(%{
multitenancy: %{strategy: :attribute, attribute: source_attribute},
attribute:
%{
references:
%{
table: table,
destination_attribute: reference_attribute,
schema: destination_schema,
multitenancy: %{strategy: :attribute, attribute: destination_attribute}
} = reference
} = attribute
}) do
with_match =
if destination_attribute != reference_attribute do
"with: [#{as_atom(source_attribute)}: :#{as_atom(destination_attribute)}], match: :full"
end
size =
if attribute[:size] do
"size: #{attribute[:size]}"
end
[
"add #{inspect(attribute.source)}",
"references(:#{as_atom(table)}",
[
"column: #{inspect(reference_attribute)}",
with_match,
"name: #{inspect(reference.name)}",
"type: #{inspect(reference_type(attribute, reference))}",
option("prefix", destination_schema),
on_delete(reference),
on_update(reference),
size
],
")",
maybe_add_default(attribute.default),
maybe_add_primary_key(attribute.primary_key?),
maybe_add_null(attribute.allow_nil?)
]
|> join()
end
def up(%{
attribute:
%{
references:
%{
table: table,
schema: destination_schema,
destination_attribute: destination_attribute
} = reference
} = attribute
}) do
size =
if attribute[:size] do
"size: #{attribute[:size]}"
end
[
"add #{inspect(attribute.source)}",
"references(:#{as_atom(table)}",
[
"column: #{inspect(destination_attribute)}",
"name: #{inspect(reference.name)}",
"type: #{inspect(reference_type(attribute, reference))}",
option("prefix", destination_schema),
size,
on_delete(reference),
on_update(reference)
],
")",
maybe_add_default(attribute.default),
maybe_add_primary_key(attribute.primary_key?),
maybe_add_null(attribute.allow_nil?)
]
|> join()
end
def up(%{attribute: %{type: :bigint, default: "nil", generated?: true} = attribute}) do
[
"add #{inspect(attribute.source)}",
":bigserial",
maybe_add_null(attribute.allow_nil?),
maybe_add_primary_key(attribute.primary_key?)
]
|> join()
end
def up(%{attribute: %{type: :integer, default: "nil", generated?: true} = attribute}) do
[
"add #{inspect(attribute.source)}",
":serial",
maybe_add_null(attribute.allow_nil?),
maybe_add_primary_key(attribute.primary_key?)
]
|> join()
end
def up(%{attribute: attribute}) do
size =
if attribute[:size] do
"size: #{attribute[:size]}"
end
[
"add #{inspect(attribute.source)}",
"#{inspect(attribute.type)}",
maybe_add_null(attribute.allow_nil?),
maybe_add_default(attribute.default),
size,
maybe_add_primary_key(attribute.primary_key?)
]
|> join()
end
def down(
%{
attribute: attribute,
table: table,
multitenancy: multitenancy
} = op
) do
AshSqlite.MigrationGenerator.Operation.RemoveAttribute.up(%{
op
| attribute: attribute,
table: table,
multitenancy: multitenancy
})
end
end
defmodule AlterDeferrability do
@moduledoc false
defstruct [:table, :schema, :references, :direction, no_phase: true]
def up(%{direction: :up, table: table, references: %{name: name, deferrable: true}}) do
"execute(\"ALTER TABLE #{table} alter CONSTRAINT #{name} DEFERRABLE INITIALLY IMMEDIATE\");"
end
def up(%{direction: :up, table: table, references: %{name: name, deferrable: :initially}}) do
"execute(\"ALTER TABLE #{table} alter CONSTRAINT #{name} DEFERRABLE INITIALLY DEFERRED\");"
end
def up(%{direction: :up, table: table, references: %{name: name}}) do
"execute(\"ALTER TABLE #{table} alter CONSTRAINT #{name} NOT DEFERRABLE\");"
end
def up(_), do: ""
def down(%{direction: :down} = data), do: up(%{data | direction: :up})
def down(_), do: ""
end
defmodule AlterAttribute do
@moduledoc false
defstruct [
:old_attribute,
:new_attribute,
:table,
:schema,
:multitenancy,
:old_multitenancy
]
import Helper
defp alter_opts(attribute, old_attribute) do
primary_key =
cond do
attribute.primary_key? and !old_attribute.primary_key? ->
", primary_key: true"
old_attribute.primary_key? and !attribute.primary_key? ->
", primary_key: false"
true ->
nil
end
default =
if attribute.default != old_attribute.default do
if is_nil(attribute.default) do
", default: nil"
else
", default: #{attribute.default}"
end
end
null =
if attribute.allow_nil? != old_attribute.allow_nil? do
", null: #{attribute.allow_nil?}"
end
"#{null}#{default}#{primary_key}"
end
def up(%{
multitenancy: multitenancy,
old_attribute: old_attribute,
new_attribute: attribute,
schema: schema
}) do
type_or_reference =
if AshSqlite.MigrationGenerator.has_reference?(multitenancy, attribute) and
Map.get(old_attribute, :references) != Map.get(attribute, :references) do
reference(multitenancy, attribute, schema)
else
inspect(attribute.type)
end
"modify #{inspect(attribute.source)}, #{type_or_reference}#{alter_opts(attribute, old_attribute)}"
end
defp reference(
%{strategy: :attribute, attribute: source_attribute},
%{
references:
%{
multitenancy: %{strategy: :attribute, attribute: destination_attribute},
table: table,
schema: destination_schema,
destination_attribute: reference_attribute
} = reference
} = attribute,
schema
) do
destination_schema =
if schema != destination_schema do
destination_schema
end
with_match =
if destination_attribute != reference_attribute do
"with: [#{as_atom(source_attribute)}: :#{as_atom(destination_attribute)}], match: :full"
end
size =
if attribute[:size] do
"size: #{attribute[:size]}"
end
join([
"references(:#{as_atom(table)}, column: #{inspect(reference_attribute)}",
with_match,
"name: #{inspect(reference.name)}",
"type: #{inspect(reference_type(attribute, reference))}",
size,
option("prefix", destination_schema),
on_delete(reference),
on_update(reference),
")"
])
end
defp reference(
_,
%{
references:
%{
table: table,
destination_attribute: destination_attribute,
schema: destination_schema
} = reference
} = attribute,
schema
) do
destination_schema =
if schema != destination_schema do
destination_schema
end
size =
if attribute[:size] do
"size: #{attribute[:size]}"
end
join([
"references(:#{as_atom(table)}, column: #{inspect(destination_attribute)}",
"name: #{inspect(reference.name)}",
"type: #{inspect(reference_type(attribute, reference))}",
size,
option("prefix", destination_schema),
on_delete(reference),
on_update(reference),
")"
])
end
def down(op) do
up(%{
op
| old_attribute: op.new_attribute,
new_attribute: op.old_attribute,
old_multitenancy: op.multitenancy,
multitenancy: op.old_multitenancy
})
end
end
defmodule DropForeignKey do
@moduledoc false
# We only run this migration in one direction, based on the input
# This is because the creation of a foreign key is handled by `references/3`
# We only need to drop it before altering an attribute with `references/3`
defstruct [:attribute, :schema, :table, :multitenancy, :direction, no_phase: true]
import Helper
def up(%{table: table, schema: schema, attribute: %{references: reference}, direction: :up}) do
"drop constraint(:#{as_atom(table)}, #{join([inspect(reference.name), option("prefix", schema)])})"
end
def up(_) do
""
end
def down(%{
table: table,
schema: schema,
attribute: %{references: reference},
direction: :down
}) do
"drop constraint(:#{as_atom(table)}, #{join([inspect(reference.name), option("prefix", schema)])})"
end
def down(_) do
""
end
end
defmodule RenameAttribute do
@moduledoc false
defstruct [
:old_attribute,
:new_attribute,
:table,
:schema,
:multitenancy,
:old_multitenancy,
no_phase: true
]
import Helper
def up(%{
old_attribute: old_attribute,
new_attribute: new_attribute,
schema: schema,
table: table
}) do
table_statement = join([":#{as_atom(table)}", option("prefix", schema)])
"rename table(#{table_statement}), #{inspect(old_attribute.source)}, to: #{inspect(new_attribute.source)}"
end
def down(
%{
old_attribute: old_attribute,
new_attribute: new_attribute
} = data
) do
up(%{data | new_attribute: old_attribute, old_attribute: new_attribute})
end
end
defmodule RemoveAttribute do
@moduledoc false
defstruct [:attribute, :schema, :table, :multitenancy, :old_multitenancy, commented?: true]
def up(%{attribute: attribute, commented?: true}) do
"""
# Attribute removal has been commented out to avoid data loss. See the migration generator documentation for more
# If you uncomment this, be sure to also uncomment the corresponding attribute *addition* in the `down` migration
# remove #{inspect(attribute.source)}
"""
end
def up(%{attribute: attribute}) do
"remove #{inspect(attribute.source)}"
end
def down(%{attribute: attribute, multitenancy: multitenancy, commented?: true}) do
prefix = """
# This is the `down` migration of the statement:
#
# remove #{inspect(attribute.source)}
#
"""
contents =
%AshSqlite.MigrationGenerator.Operation.AddAttribute{
attribute: attribute,
multitenancy: multitenancy
}
|> AshSqlite.MigrationGenerator.Operation.AddAttribute.up()
|> String.split("\n")
|> Enum.map_join("\n", &"# #{&1}")
prefix <> "\n" <> contents
end
def down(%{attribute: attribute, multitenancy: multitenancy, table: table, schema: schema}) do
AshSqlite.MigrationGenerator.Operation.AddAttribute.up(
%AshSqlite.MigrationGenerator.Operation.AddAttribute{
attribute: attribute,
table: table,
schema: schema,
multitenancy: multitenancy
}
)
end
end
defmodule AddUniqueIndex do
@moduledoc false
defstruct [:identity, :table, :schema, :multitenancy, :old_multitenancy, no_phase: true]
import Helper
def up(%{
identity: %{name: name, keys: keys, base_filter: base_filter, index_name: index_name},
table: table,
schema: schema,
multitenancy: multitenancy
}) do
keys =
case multitenancy.strategy do
:attribute ->
[multitenancy.attribute | keys]
_ ->
keys
end
index_name = index_name || "#{table}_#{name}_index"
if base_filter do
"create unique_index(:#{as_atom(table)}, [#{Enum.map_join(keys, ", ", &inspect/1)}], where: \"#{base_filter}\", #{join(["name: \"#{index_name}\"", option("prefix", schema)])})"
else
"create unique_index(:#{as_atom(table)}, [#{Enum.map_join(keys, ", ", &inspect/1)}], #{join(["name: \"#{index_name}\"", option("prefix", schema)])})"
end
end
def down(%{
identity: %{name: name, keys: keys, index_name: index_name},
table: table,
schema: schema,
multitenancy: multitenancy
}) do
keys =
case multitenancy.strategy do
:attribute ->
[multitenancy.attribute | keys]
_ ->
keys
end
index_name = index_name || "#{table}_#{name}_index"
"drop_if_exists unique_index(:#{as_atom(table)}, [#{Enum.map_join(keys, ", ", &inspect/1)}], #{join(["name: \"#{index_name}\"", option("prefix", schema)])})"
end
end
defmodule AddCustomStatement do
@moduledoc false
defstruct [:statement, :table, no_phase: true]
def up(%{statement: %{up: up, code?: false}}) do
"""
execute(\"\"\"
#{String.trim(up)}
\"\"\")
"""
end
def up(%{statement: %{up: up, code?: true}}) do
up
end
def down(%{statement: %{down: down, code?: false}}) do
"""
execute(\"\"\"
#{String.trim(down)}
\"\"\")
"""
end
def down(%{statement: %{down: down, code?: true}}) do
down
end
end
defmodule RemoveCustomStatement do
@moduledoc false
defstruct [:statement, :table, no_phase: true]
def up(%{statement: statement, table: table}) do
AddCustomStatement.down(%AddCustomStatement{statement: statement, table: table})
end
def down(%{statement: statement, table: table}) do
AddCustomStatement.up(%AddCustomStatement{statement: statement, table: table})
end
end
defmodule AddCustomIndex do
@moduledoc false
defstruct [:table, :schema, :index, :base_filter, :multitenancy, no_phase: true]
import Helper
def up(%{
index: index,
table: table,
schema: schema,
base_filter: base_filter,
multitenancy: multitenancy
}) do
keys =
case multitenancy.strategy do
:attribute ->
[to_string(multitenancy.attribute) | Enum.map(index.fields, &to_string/1)]
_ ->
Enum.map(index.fields, &to_string/1)
end
index =
if index.where && base_filter do
%{index | where: base_filter <> " AND " <> index.where}
else
index
end
opts =
join([
option(:name, index.name),
option(:unique, index.unique),
option(:concurrently, index.concurrently),
option(:using, index.using),
option(:prefix, index.prefix),
option(:where, index.where),
option(:include, index.include),
option(:prefix, schema)
])
if opts == "",
do: "create index(:#{as_atom(table)}, [#{Enum.map_join(keys, ", ", &inspect/1)}])",
else:
"create index(:#{as_atom(table)}, [#{Enum.map_join(keys, ", ", &inspect/1)}], #{opts})"
end
def down(%{schema: schema, index: index, table: table, multitenancy: multitenancy}) do
index_name = AshSqlite.CustomIndex.name(table, index)
keys =
case multitenancy.strategy do
:attribute ->
[to_string(multitenancy.attribute) | Enum.map(index.fields, &to_string/1)]
_ ->
Enum.map(index.fields, &to_string/1)
end
"drop_if_exists index(:#{as_atom(table)}, [#{Enum.map_join(keys, ", ", &inspect/1)}], #{join(["name: \"#{index_name}\"", option(:prefix, schema)])})"
end
end
defmodule RemovePrimaryKey do
@moduledoc false
defstruct [:schema, :table, no_phase: true]
def up(%{schema: schema, table: table}) do
if schema do
"drop constraint(#{inspect(table)}, \"#{table}_pkey\", prefix: \"#{schema}\")"
else
"drop constraint(#{inspect(table)}, \"#{table}_pkey\")"
end
end
def down(_) do
""
end
end
defmodule RemovePrimaryKeyDown do
@moduledoc false
defstruct [:schema, :table, no_phase: true]
def up(_) do
""
end
def down(%{schema: schema, table: table}) do
if schema do
"drop constraint(#{inspect(table)}, \"#{table}_pkey\", prefix: \"#{schema}\")"
else
"drop constraint(#{inspect(table)}, \"#{table}_pkey\")"
end
end
end
defmodule RemoveCustomIndex do
@moduledoc false
defstruct [:schema, :table, :index, :base_filter, :multitenancy, no_phase: true]
import Helper
def up(%{index: index, table: table, multitenancy: multitenancy, schema: schema}) do
index_name = AshSqlite.CustomIndex.name(table, index)
keys =
case multitenancy.strategy do
:attribute ->
[to_string(multitenancy.attribute) | Enum.map(index.fields, &to_string/1)]
_ ->
Enum.map(index.fields, &to_string/1)
end
"drop_if_exists index(:#{as_atom(table)}, [#{Enum.map_join(keys, ", ", &inspect/1)}], #{join(["name: \"#{index_name}\"", option(:prefix, schema)])})"
end
def down(%{
index: index,
table: table,
schema: schema,
base_filter: base_filter,
multitenancy: multitenancy
}) do
keys =
case multitenancy.strategy do
:attribute ->
[to_string(multitenancy.attribute) | Enum.map(index.fields, &to_string/1)]
_ ->
Enum.map(index.fields, &to_string/1)
end
index =
if index.where && base_filter do
%{index | where: base_filter <> " AND " <> index.where}
else
index
end
opts =
join([
option(:name, index.name),
option(:unique, index.unique),
option(:concurrently, index.concurrently),
option(:using, index.using),
option(:prefix, index.prefix),
option(:where, index.where),
option(:include, index.include),
option(:prefix, schema)
])
if opts == "" do
"create index(:#{as_atom(table)}, [#{Enum.map_join(keys, ", ", &inspect/1)}])"
else
"create index(:#{as_atom(table)}, [#{Enum.map_join(keys, ", ", &inspect/1)}], #{opts})"
end
end
end
defmodule RenameUniqueIndex do
@moduledoc false
defstruct [
:new_identity,
:old_identity,
:table,
:schema,
:multitenancy,
:old_multitenancy,
no_phase: true
]
defp prefix_name(name, prefix) do
if prefix do
"#{prefix}.#{name}"
else
name
end
end
def up(%{
old_identity: %{index_name: old_index_name, name: old_name},
new_identity: %{index_name: new_index_name},
schema: schema,
table: table
}) do
old_index_name = old_index_name || "#{table}_#{old_name}_index"
"execute(\"ALTER INDEX #{prefix_name(old_index_name, schema)} " <>
"RENAME TO #{prefix_name(new_index_name, schema)}\")\n"
end
def down(%{
old_identity: %{index_name: old_index_name, name: old_name},
new_identity: %{index_name: new_index_name},
schema: schema,
table: table
}) do
old_index_name = old_index_name || "#{table}_#{old_name}_index"
"execute(\"ALTER INDEX #{prefix_name(new_index_name, schema)} " <>
"RENAME TO #{prefix_name(old_index_name, schema)}\")\n"
end
end
defmodule RemoveUniqueIndex do
@moduledoc false
defstruct [:identity, :schema, :table, :multitenancy, :old_multitenancy, no_phase: true]
import Helper
def up(%{
identity: %{name: name, keys: keys, index_name: index_name},
table: table,
schema: schema,
old_multitenancy: multitenancy
}) do
keys =
case multitenancy.strategy do
:attribute ->
[multitenancy.attribute | keys]
_ ->
keys
end
index_name = index_name || "#{table}_#{name}_index"
"drop_if_exists unique_index(:#{as_atom(table)}, [#{Enum.map_join(keys, ", ", &inspect/1)}], #{join(["name: \"#{index_name}\"", option(:prefix, schema)])})"
end
def down(%{
identity: %{name: name, keys: keys, base_filter: base_filter, index_name: index_name},
table: table,
schema: schema,
multitenancy: multitenancy
}) do
keys =
case multitenancy.strategy do
:attribute ->
[multitenancy.attribute | keys]
_ ->
keys
end
index_name = index_name || "#{table}_#{name}_index"
if base_filter do
"create unique_index(:#{as_atom(table)}, [#{Enum.map_join(keys, ", ", &inspect/1)}], where: \"#{base_filter}\", #{join(["name: \"#{index_name}\"", option(:prefix, schema)])})"
else
"create unique_index(:#{as_atom(table)}, [#{Enum.map_join(keys, ", ", &inspect/1)}], #{join(["name: \"#{index_name}\"", option(:prefix, schema)])})"
end
end
end
defmodule AddCheckConstraint do
@moduledoc false
defstruct [:table, :schema, :constraint, :multitenancy, :old_multitenancy, no_phase: true]
import Helper
def up(%{
schema: schema,
constraint: %{
name: name,
check: check,
base_filter: base_filter
},
table: table
}) do
if base_filter do
"create constraint(:#{as_atom(table)}, :#{as_atom(name)}, #{join(["check: \"#{base_filter} AND #{check}\")", option(:prefix, schema)])}"
else
"create constraint(:#{as_atom(table)}, :#{as_atom(name)}, #{join(["check: \"#{check}\")", option(:prefix, schema)])}"
end
end
def down(%{
constraint: %{name: name},
schema: schema,
table: table
}) do
"drop_if_exists constraint(:#{as_atom(table)}, #{join([":#{as_atom(name)}", option(:prefix, schema)])})"
end
end
defmodule RemoveCheckConstraint do
@moduledoc false
defstruct [:table, :schema, :constraint, :multitenancy, :old_multitenancy, no_phase: true]
import Helper
def up(%{constraint: %{name: name}, schema: schema, table: table}) do
"drop_if_exists constraint(:#{as_atom(table)}, #{join([":#{as_atom(name)}", option(:prefix, schema)])})"
end
def down(%{
constraint: %{
name: name,
check: check,
base_filter: base_filter
},
schema: schema,
table: table
}) do
if base_filter do
"create constraint(:#{as_atom(table)}, :#{as_atom(name)}, #{join(["check: \"#{base_filter} AND #{check}\")", option(:prefix, schema)])}"
else
"create constraint(:#{as_atom(table)}, :#{as_atom(name)}, #{join(["check: \"#{check}\")", option(:prefix, schema)])}"
end
end
end
end

View file

@ -0,0 +1,86 @@
defmodule AshSqlite.MigrationGenerator.Phase do
@moduledoc false
defmodule Create do
@moduledoc false
defstruct [:table, :schema, :multitenancy, operations: [], commented?: false]
import AshSqlite.MigrationGenerator.Operation.Helper, only: [as_atom: 1]
def up(%{schema: schema, table: table, operations: operations, multitenancy: multitenancy}) do
opts =
if schema do
", prefix: \"#{schema}\""
else
""
end
"create table(:#{as_atom(table)}, primary_key: false#{opts}) do\n" <>
Enum.map_join(operations, "\n", fn operation -> operation.__struct__.up(operation) end) <>
"\nend"
end
def down(%{schema: schema, table: table, multitenancy: multitenancy}) do
opts =
if schema do
", prefix: \"#{schema}\""
else
""
end
"drop table(:#{as_atom(table)}#{opts})"
end
end
defmodule Alter do
@moduledoc false
defstruct [:schema, :table, :multitenancy, operations: [], commented?: false]
import AshSqlite.MigrationGenerator.Operation.Helper, only: [as_atom: 1]
def up(%{table: table, schema: schema, operations: operations, multitenancy: multitenancy}) do
body =
operations
|> Enum.map_join("\n", fn operation -> operation.__struct__.up(operation) end)
|> String.trim()
if body == "" do
""
else
opts =
if schema do
", prefix: \"#{schema}\""
else
""
end
"alter table(:#{as_atom(table)}#{opts}) do\n" <>
body <>
"\nend"
end
end
def down(%{table: table, schema: schema, operations: operations, multitenancy: multitenancy}) do
body =
operations
|> Enum.reverse()
|> Enum.map_join("\n", fn operation -> operation.__struct__.down(operation) end)
|> String.trim()
if body == "" do
""
else
opts =
if schema do
", prefix: \"#{schema}\""
else
""
end
"alter table(:#{as_atom(table)}#{opts}) do\n" <>
body <>
"\nend"
end
end
end
end

133
lib/mix/helpers.ex Normal file
View file

@ -0,0 +1,133 @@
defmodule AshSqlite.MixHelpers do
@moduledoc false
def apis!(opts, args) do
apps =
if apps_paths = Mix.Project.apps_paths() do
apps_paths |> Map.keys() |> Enum.sort()
else
[Mix.Project.config()[:app]]
end
configured_apis = Enum.flat_map(apps, &Application.get_env(&1, :ash_apis, []))
apis =
if opts[:apis] && opts[:apis] != "" do
opts[:apis]
|> Kernel.||("")
|> String.split(",")
|> Enum.flat_map(fn
"" ->
[]
api ->
[Module.concat([api])]
end)
else
configured_apis
end
apis
|> Enum.map(&ensure_compiled(&1, args))
|> case do
[] ->
raise "must supply the --apis argument, or set `config :my_app, ash_apis: [...]` in config"
apis ->
apis
end
end
def repos!(opts, args) do
apis = apis!(opts, args)
resources =
apis
|> Enum.flat_map(&Ash.Api.Info.resources/1)
|> Enum.filter(&(Ash.DataLayer.data_layer(&1) == AshSqlite.DataLayer))
|> case do
[] ->
raise """
No resources with `data_layer: AshSqlite.DataLayer` found in the apis #{Enum.map_join(apis, ",", &inspect/1)}.
Must be able to find at least one resource with `data_layer: AshSqlite.DataLayer`.
"""
resources ->
resources
end
resources
|> Enum.map(&AshSqlite.DataLayer.Info.repo(&1))
|> Enum.uniq()
|> case do
[] ->
raise """
No repos could be found configured on the resources in the apis: #{Enum.map_join(apis, ",", &inspect/1)}
At least one resource must have a repo configured.
The following resources were found with `data_layer: AshSqlite.DataLayer`:
#{Enum.map_join(resources, "\n", &"* #{inspect(&1)}")}
"""
repos ->
repos
end
end
def delete_flag(args, arg) do
case Enum.split_while(args, &(&1 != arg)) do
{left, [_ | rest]} ->
left ++ rest
_ ->
args
end
end
def delete_arg(args, arg) do
case Enum.split_while(args, &(&1 != arg)) do
{left, [_, _ | rest]} ->
left ++ rest
_ ->
args
end
end
defp ensure_compiled(api, args) do
if Code.ensure_loaded?(Mix.Tasks.App.Config) do
Mix.Task.run("app.config", args)
else
Mix.Task.run("loadpaths", args)
"--no-compile" not in args && Mix.Task.run("compile", args)
end
case Code.ensure_compiled(api) do
{:module, _} ->
api
|> Ash.Api.Info.resources()
|> Enum.each(&Code.ensure_compiled/1)
# TODO: We shouldn't need to make sure that the resources are compiled
api
{:error, error} ->
Mix.raise("Could not load #{inspect(api)}, error: #{inspect(error)}. ")
end
end
def migrations_path(opts, repo) do
opts[:migrations_path] || repo.config()[:migrations_path] || derive_migrations_path(repo)
end
def derive_migrations_path(repo) do
config = repo.config()
priv = config[:priv] || "priv/#{repo |> Module.split() |> List.last() |> Macro.underscore()}"
app = Keyword.fetch!(config, :otp_app)
Application.app_dir(app, Path.join(priv, "migrations"))
end
end
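As the errors raised above indicate, the `--apis` flag can be omitted when the apis are configured at the application level. A minimal sketch (app and module names hypothetical):

```elixir
# config/config.exs
import Config

config :my_app, ash_apis: [MyApp.Api]
```

With that in place, the mix tasks below (e.g. `mix ash_sqlite.generate_migrations`) discover the repos automatically.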

View file

@ -0,0 +1,48 @@
defmodule Mix.Tasks.AshSqlite.Create do
use Mix.Task
@shortdoc "Creates the repository storage"
@switches [
quiet: :boolean,
apis: :string,
no_compile: :boolean,
no_deps_check: :boolean
]
@aliases [
q: :quiet
]
@moduledoc """
Create the storage for the repos of all resources in the given (or configured) apis.
## Examples
mix ash_sqlite.create
mix ash_sqlite.create --apis MyApp.Api1,MyApp.Api2
## Command line options
* `--apis` - the apis whose repos you want to create
* `--quiet` - do not log output
* `--no-compile` - do not compile before creating
* `--no-deps-check` - do not check dependencies before creating
"""
@doc false
def run(args) do
{opts, _} = OptionParser.parse!(args, strict: @switches, aliases: @aliases)
repos = AshSqlite.MixHelpers.repos!(opts, args)
repo_args =
Enum.flat_map(repos, fn repo ->
["-r", to_string(repo)]
end)
rest_opts = AshSqlite.MixHelpers.delete_arg(args, "--apis")
Mix.Task.run("ecto.create", repo_args ++ rest_opts)
end
end

View file

@ -0,0 +1,56 @@
defmodule Mix.Tasks.AshSqlite.Drop do
use Mix.Task
@shortdoc "Drops the repository storage for the repos in the specified (or configured) apis"
@default_opts [force: false, force_drop: false]
@aliases [
f: :force,
q: :quiet
]
@switches [
force: :boolean,
force_drop: :boolean,
quiet: :boolean,
apis: :string,
no_compile: :boolean,
no_deps_check: :boolean
]
@moduledoc """
Drop the storage for the given repository.
## Examples
mix ash_sqlite.drop
mix ash_sqlite.drop --apis MyApp.Api1,MyApp.Api2
## Command line options
* `--apis` - the apis whose repos should be dropped
* `-q`, `--quiet` - run the command quietly
* `-f`, `--force` - do not ask for confirmation when dropping the database.
Confirmation is asked only when `:start_permanent` is set to true
(typically in production)
* `--no-compile` - do not compile before dropping
* `--no-deps-check` - do not check dependencies before dropping
"""
@doc false
def run(args) do
{opts, _} = OptionParser.parse!(args, strict: @switches, aliases: @aliases)
opts = Keyword.merge(@default_opts, opts)
repos = AshSqlite.MixHelpers.repos!(opts, args)
repo_args =
Enum.flat_map(repos, fn repo ->
["-r", to_string(repo)]
end)
rest_opts = AshSqlite.MixHelpers.delete_arg(args, "--apis")
Mix.Task.run("ecto.drop", repo_args ++ rest_opts)
end
end

View file

@ -0,0 +1,96 @@
defmodule Mix.Tasks.AshSqlite.GenerateMigrations do
@moduledoc """
Generates migrations, and stores a snapshot of your resources.
Options:
* `apis` - a comma separated list of API modules, for which migrations will be generated
* `snapshot-path` - a custom path to store the snapshots, defaults to "priv/resource_snapshots"
* `migration-path` - a custom path to store the migrations, defaults to "priv".
Migrations are stored in a folder for each repo, so `priv/repo_name/migrations`
* `drop-columns` - whether or not to drop columns as attributes are removed. See below for more
* `name` - names the generated migrations; a timestamp is prepended automatically. The default is `migrate_resources_<n>`,
where `<n>` is the count of migrations matching `*migrate_resources*` plus one.
For example, `--name add_special_column` would get a name like `20210708181402_add_special_column.exs`
Flags:
* `quiet` - messages for file creations will not be printed
* `no-format` - files that are created will not be formatted with the code formatter
* `dry-run` - no files are created, instead the new migration is printed
* `check` - no files are created; exits with code 1 if the current snapshots do not match the resources
#### Snapshots
Snapshots are stored in a folder for each table that migrations are generated for. Each snapshot is
stored in a file with a timestamp of when it was generated.
This is important because it allows for simultaneous work to be done on separate branches, and for rolling back
changes more easily, e.g. removing a generated migration and deleting the most recent snapshot, without having to redo
all of it.
#### Dropping columns
Generally speaking, it is bad practice to drop columns when you deploy a change that
would remove an attribute. The main reasons for this are backwards compatibility and rolling restarts.
If you deploy an attribute removal and run migrations, then regardless of your deployment strategy, you
won't be able to roll back, because the data has been deleted. In a rolling restart situation, some of
the machines/pods/whatever may still be running after the column has been deleted, causing errors. With
this in mind, it's best not to delete those columns until later, after the data has been confirmed unnecessary.
To that end, the migration generator leaves the column-dropping code commented out. You can pass `--drop-columns`
to tell it to uncomment those statements. Additionally, you can just uncomment that code on a case-by-case
basis.
#### Conflicts/Multiple Resources
The migration generator can support multiple schemas using the same table.
It will raise on conflicts that it can't resolve, like the same field with different
types. It will prompt to resolve conflicts that can be resolved with human input.
For example, if you remove an attribute and add an attribute, it will ask you if you are renaming
the column in question. If not, it will remove one column and add the other.
Additionally, it lowers things to the database where possible:
#### Defaults
There are three anonymous functions that will translate to database-specific defaults currently:
* `&DateTime.utc_now/0`
Non-function default values will be dumped to their native type and inspected. This may not work for some types,
and may require manual intervention/patches to the migration generator code.
#### Identities
Identities will cause the migration generator to generate unique constraints. If multiple
resources target the same table, you will be asked to select the primary key, and any others
will be added as unique constraints.
"""
use Mix.Task
@shortdoc "Generates migrations, and stores a snapshot of your resources"
def run(args) do
{opts, _} =
OptionParser.parse!(args,
strict: [
apis: :string,
snapshot_path: :string,
migration_path: :string,
quiet: :boolean,
name: :string,
no_format: :boolean,
dry_run: :boolean,
check: :boolean,
drop_columns: :boolean
]
)
apis = AshSqlite.MixHelpers.apis!(opts, args)
opts =
opts
|> Keyword.put(:format, !opts[:no_format])
|> Keyword.delete(:no_format)
AshSqlite.MigrationGenerator.generate(apis, opts)
end
end
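As a concrete example of the defaults lowering described above (attribute names hypothetical), an attribute defaulted with one of the supported anonymous functions is translated to a database-level default in the generated migration:

```elixir
attributes do
  uuid_primary_key :id
  attribute :scheduled_at, :utc_datetime, default: &DateTime.utc_now/0
end
```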

View file

@ -0,0 +1,115 @@
defmodule Mix.Tasks.AshSqlite.Migrate do
use Mix.Task
import AshSqlite.MixHelpers,
only: [migrations_path: 2]
@shortdoc "Runs the repository migrations for all repositories in the provided (or congigured) apis"
@aliases [
n: :step
]
@switches [
all: :boolean,
step: :integer,
to: :integer,
quiet: :boolean,
prefix: :string,
pool_size: :integer,
log_sql: :boolean,
strict_version_order: :boolean,
apis: :string,
no_compile: :boolean,
no_deps_check: :boolean,
migrations_path: :keep
]
@moduledoc """
Runs the pending migrations for the given repository.
Migrations are expected at "priv/YOUR_REPO/migrations" directory
of the current application, where "YOUR_REPO" is the last segment
in your repository name. For example, the repository `MyApp.Repo`
will use "priv/repo/migrations". The repository `Whatever.MyRepo`
will use "priv/my_repo/migrations".
This task runs all pending migrations by default. To migrate up to a
specific version number, supply `--to version_number`. To migrate a
specific number of times, use `--step n`.
This is only really useful if your api or apis only use a single repo.
If you have multiple repos and you want to run a single migration and/or
migrate/roll them back to different points, you will need to use the
ecto-specific task, `mix ecto.migrate`, and provide your repo name.
If a repository has not yet been started, one will be started outside
your application supervision tree and shutdown afterwards.
## Examples
mix ash_sqlite.migrate
mix ash_sqlite.migrate --apis MyApp.Api1,MyApp.Api2
mix ash_sqlite.migrate -n 3
mix ash_sqlite.migrate --step 3
mix ash_sqlite.migrate --to 20080906120000
## Command line options
* `--apis` - the apis whose repos should be migrated
* `--all` - run all pending migrations
* `--step`, `-n` - run n number of pending migrations
* `--to` - run all migrations up to and including version
* `--quiet` - do not log migration commands
* `--pool-size` - the pool size if the repository is started only for the task (defaults to 2)
* `--log-sql` - log the raw sql migrations are running
* `--strict-version-order` - abort when applying a migration with old timestamp
* `--no-compile` - does not compile applications before migrating
* `--no-deps-check` - does not check dependencies before migrating
* `--migrations-path` - the path to load the migrations from, defaults to
`"priv/repo/migrations"`. This option may be given multiple times in which case the migrations
are loaded from all the given directories and sorted as if they were in the same one.
Note, if you have migrations paths e.g. `a/` and `b/`, and run
`mix ecto.migrate --migrations-path a/`, the latest migrations from `a/` will be run (even
if `b/` contains the overall latest migrations.)
"""
@impl true
def run(args) do
{opts, _} = OptionParser.parse!(args, strict: @switches, aliases: @aliases)
repos = AshSqlite.MixHelpers.repos!(opts, args)
repo_args =
Enum.flat_map(repos, fn repo ->
["-r", to_string(repo)]
end)
rest_opts =
args
|> AshSqlite.MixHelpers.delete_arg("--apis")
|> AshSqlite.MixHelpers.delete_arg("--migrations-path")
for repo <- repos do
Mix.Task.run(
"ecto.migrate",
repo_args ++ rest_opts ++ ["--migrations-path", migrations_path(opts, repo)]
)
Mix.Task.reenable("ecto.migrate")
end
end
end

View file

@ -0,0 +1,81 @@
defmodule Mix.Tasks.AshSqlite.Rollback do
use Mix.Task
import AshSqlite.MixHelpers,
only: [migrations_path: 2]
@shortdoc "Rolls back the repository migrations for all repositories in the provided (or configured) apis"
@moduledoc """
Reverts applied migrations in the given repository.
Migrations are expected at "priv/YOUR_REPO/migrations" directory
of the current application but it can be configured by specifying
the `:priv` key under the repository configuration.
Runs the latest applied migration by default. To roll back to
a version number, supply `--to version_number`. To roll back a
specific number of times, use `--step n`. To undo all applied
migrations, provide `--all`.
This is only really useful if your api or apis only use a single repo.
If you have multiple repos and you want to run a single migration and/or
migrate/roll them back to different points, you will need to use the
ecto-specific task, `mix ecto.rollback`, and provide your repo name.
## Examples
mix ash_sqlite.rollback
mix ash_sqlite.rollback --apis MyApp.Api1,MyApp.Api2
mix ash_sqlite.rollback -n 3
mix ash_sqlite.rollback --step 3
mix ash_sqlite.rollback -v 20080906120000
mix ash_sqlite.rollback --to 20080906120000
## Command line options
* `--apis` - the apis whose repos should be rolled back
* `--all` - revert all applied migrations
* `--step` / `-n` - revert n number of applied migrations
* `--to` / `-v` - revert all migrations down to and including version
* `--quiet` - do not log migration commands
* `--prefix` - the prefix to run migrations on
* `--pool-size` - the pool size if the repository is started only for the task (defaults to 1)
* `--log-sql` - log the raw sql migrations are running
"""
@doc false
def run(args) do
{opts, _, _} =
OptionParser.parse(args,
switches: [
all: :boolean,
step: :integer,
to: :integer,
start: :boolean,
quiet: :boolean,
prefix: :string,
pool_size: :integer,
log_sql: :boolean
],
aliases: [n: :step, v: :to]
)
repos = AshSqlite.MixHelpers.repos!(opts, args)
repo_args =
Enum.flat_map(repos, fn repo ->
["-r", to_string(repo)]
end)
rest_opts =
args
|> AshSqlite.MixHelpers.delete_arg("--apis")
|> AshSqlite.MixHelpers.delete_arg("--migrations-path")
for repo <- repos do
Mix.Task.run(
"ecto.rollback",
repo_args ++ rest_opts ++ ["--migrations-path", migrations_path(opts, repo)]
)
Mix.Task.reenable("ecto.rollback")
end
end
end

43
lib/reference.ex Normal file
View file

@ -0,0 +1,43 @@
defmodule AshSqlite.Reference do
@moduledoc "Represents the configuration of a reference (i.e foreign key)."
defstruct [:relationship, :on_delete, :on_update, :name, :deferrable, ignore?: false]
def schema do
[
relationship: [
type: :atom,
required: true,
doc: "The relationship to be configured"
],
ignore?: [
type: :boolean,
doc:
"If set to true, no reference is created for the given relationship. This is useful if you need to define it in some custom way"
],
on_delete: [
type: {:one_of, [:delete, :nilify, :nothing, :restrict]},
doc: """
What should happen to records of this resource when the referenced record of the *destination* resource is deleted.
"""
],
on_update: [
type: {:one_of, [:update, :nilify, :nothing, :restrict]},
doc: """
What should happen to records of this resource when the referenced destination_attribute of the *destination* record is updated.
"""
],
deferrable: [
type: {:one_of, [false, true, :initially]},
default: false,
doc: """
Whether or not the constraint is deferrable. This only affects the migration generator.
"""
],
name: [
type: :string,
doc:
"The name of the foreign key to generate in the database. Defaults to <table>_<source_attribute>_fkey"
]
]
end
end
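A hedged sketch of configuring one of these references in a resource's `sqlite` block (relationship, table, and constraint names are hypothetical):

```elixir
sqlite do
  table "posts"
  repo MyApp.Repo

  references do
    reference :author, on_delete: :delete, name: "posts_author_id_fkey"
  end
end
```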

172
lib/repo.ex Normal file
View file

@ -0,0 +1,172 @@
defmodule AshSqlite.Repo do
@moduledoc """
Resources that use `AshSqlite.DataLayer` use a `Repo` to access the database.
This repo is a thin wrapper around an `Ecto.Repo`.
You can use `Ecto.Repo`'s `init/2` to configure your repo like normal, but
instead of returning `{:ok, config}`, use `super(config)` to pass the
configuration to the `AshSqlite.Repo` implementation.
## Transaction Hooks
You can define `on_transaction_begin/1`, which will be invoked whenever a transaction is started for Ash.
This will be invoked with a map containing a `type` key and metadata.
```elixir
%{type: :create, metadata: %{resource: YourApp.YourResource, action: :action}}
```
"""
@doc "Use this to inform the data layer about what extensions are installed"
@callback installed_extensions() :: [String.t()]
@doc """
Use this to inform the data layer about the oldest potential sqlite version it will be run on.
Must be an integer greater than or equal to 13.
"""
@callback min_pg_version() :: integer()
@callback on_transaction_begin(reason :: Ash.DataLayer.transaction_reason()) :: term
@doc "The path where your migrations are stored"
@callback migrations_path() :: String.t() | nil
@doc "Allows overriding a given migration type for *all* fields, for example if you wanted to always use :timestamptz for :utc_datetime fields"
@callback override_migration_type(atom) :: atom
defmacro __using__(opts) do
quote bind_quoted: [opts: opts] do
otp_app = opts[:otp_app] || raise("Must configure OTP app")
use Ecto.Repo,
adapter: Ecto.Adapters.SQLite3,
otp_app: otp_app
@behaviour AshSqlite.Repo
defoverridable insert: 2, insert: 1, insert!: 2, insert!: 1
def installed_extensions, do: []
def migrations_path, do: nil
def default_prefix, do: "public"
def override_migration_type(type), do: type
def min_pg_version, do: 10
def init(_, config) do
new_config =
config
|> Keyword.put(:installed_extensions, installed_extensions())
|> Keyword.put(:migrations_path, migrations_path())
|> Keyword.put(:default_prefix, default_prefix())
{:ok, new_config}
end
def on_transaction_begin(_reason), do: :ok
def insert(struct_or_changeset, opts \\ []) do
struct_or_changeset
|> to_ecto()
|> then(fn value ->
repo = get_dynamic_repo()
Ecto.Repo.Schema.insert(
__MODULE__,
repo,
value,
Ecto.Repo.Supervisor.tuplet(repo, prepare_opts(:insert, opts))
)
end)
|> from_ecto()
end
def insert!(struct_or_changeset, opts \\ []) do
struct_or_changeset
|> to_ecto()
|> then(fn value ->
repo = get_dynamic_repo()
Ecto.Repo.Schema.insert!(
__MODULE__,
repo,
value,
Ecto.Repo.Supervisor.tuplet(repo, prepare_opts(:insert, opts))
)
end)
|> from_ecto()
end
def from_ecto({:ok, result}), do: {:ok, from_ecto(result)}
def from_ecto({:error, _} = other), do: other
def from_ecto(nil), do: nil
def from_ecto(value) when is_list(value) do
Enum.map(value, &from_ecto/1)
end
def from_ecto(%resource{} = record) do
if Spark.Dsl.is?(resource, Ash.Resource) do
empty = struct(resource)
resource
|> Ash.Resource.Info.relationships()
|> Enum.reduce(record, fn relationship, record ->
case Map.get(record, relationship.name) do
%Ecto.Association.NotLoaded{} ->
Map.put(record, relationship.name, Map.get(empty, relationship.name))
value ->
Map.put(record, relationship.name, from_ecto(value))
end
end)
else
record
end
end
def from_ecto(other), do: other
def to_ecto(nil), do: nil
def to_ecto(value) when is_list(value) do
Enum.map(value, &to_ecto/1)
end
def to_ecto(%resource{} = record) do
if Spark.Dsl.is?(resource, Ash.Resource) do
resource
|> Ash.Resource.Info.relationships()
|> Enum.reduce(record, fn relationship, record ->
value =
case Map.get(record, relationship.name) do
%Ash.NotLoaded{} ->
%Ecto.Association.NotLoaded{
__field__: relationship.name,
__cardinality__: relationship.cardinality
}
value ->
to_ecto(value)
end
Map.put(record, relationship.name, value)
end)
else
record
end
end
def to_ecto(other), do: other
defoverridable init: 2,
on_transaction_begin: 1,
installed_extensions: 0,
default_prefix: 0,
override_migration_type: 1,
min_pg_version: 0
end
end
end
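A minimal sketch of a repo built on the callbacks above (names hypothetical). Note that an overridden `init/2` hands its config back through `super/2`, so the defaults injected by `AshSqlite.Repo` are preserved:

```elixir
defmodule MyApp.Repo do
  use AshSqlite.Repo, otp_app: :my_app

  # Invoked with the transaction type and metadata described in the moduledoc.
  def on_transaction_begin(reason) do
    IO.inspect(reason, label: "transaction begin")
  end

  def init(type, config) do
    # Adjust runtime config, then let AshSqlite.Repo layer in its defaults.
    super(type, Keyword.put_new(config, :database, "priv/my_app.db"))
  end
end
```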

139
lib/sort.ex Normal file
View file

@ -0,0 +1,139 @@
defmodule AshSqlite.Sort do
@moduledoc false
require Ecto.Query
def sort(
query,
sort,
resource,
relationship_path \\ [],
binding \\ 0,
return_order_by? \\ false
) do
query = AshSqlite.DataLayer.default_bindings(query, resource)
sort
|> sanitize_sort()
|> Enum.reduce_while({:ok, []}, fn
{order, %Ash.Query.Calculation{} = calc}, {:ok, query_expr} ->
type =
if calc.type do
AshSqlite.Types.parameterized_type(calc.type, calc.constraints)
else
nil
end
calc.opts
|> calc.module.expression(calc.context)
|> Ash.Filter.hydrate_refs(%{
resource: resource,
calculations: %{},
public?: false
})
|> Ash.Filter.move_to_relationship_path(relationship_path)
|> case do
{:ok, expr} ->
expr =
AshSqlite.Expr.dynamic_expr(query, expr, query.__ash_bindings__, false, type)
{:cont, {:ok, query_expr ++ [{order, expr}]}}
{:error, error} ->
{:halt, {:error, error}}
end
{order, sort}, {:ok, query_expr} ->
expr =
Ecto.Query.dynamic(field(as(^binding), ^sort))
{:cont, {:ok, query_expr ++ [{order, expr}]}}
end)
|> case do
{:ok, []} ->
{:ok, query}
{:ok, sort_exprs} ->
if return_order_by? do
{:ok, order_to_fragments(sort_exprs)}
else
new_query = Ecto.Query.order_by(query, ^sort_exprs)
sort_expr = List.last(new_query.order_bys)
new_query =
new_query
|> Map.update!(:windows, fn windows ->
order_by_expr = %{sort_expr | expr: [order_by: sort_expr.expr]}
Keyword.put(windows, :order, order_by_expr)
end)
|> Map.update!(:__ash_bindings__, &Map.put(&1, :__order__?, true))
{:ok, new_query}
end
{:error, error} ->
{:error, error}
end
end
def order_to_fragments([]), do: []
def order_to_fragments(order) when is_list(order) do
Enum.map(order, &do_order_to_fragments(&1))
end
def do_order_to_fragments({order, sort}) do
case order do
:asc ->
Ecto.Query.dynamic([row], fragment("? ASC", ^sort))
:desc ->
Ecto.Query.dynamic([row], fragment("? DESC", ^sort))
:asc_nulls_last ->
Ecto.Query.dynamic([row], fragment("? ASC NULLS LAST", ^sort))
:asc_nulls_first ->
Ecto.Query.dynamic([row], fragment("? ASC NULLS FIRST", ^sort))
:desc_nulls_first ->
Ecto.Query.dynamic([row], fragment("? DESC NULLS FIRST", ^sort))
:desc_nulls_last ->
Ecto.Query.dynamic([row], fragment("? DESC NULLS LAST", ^sort))
"DESC NULLS LAST"
end
end
def order_to_sqlite_order(dir) do
case dir do
:asc -> nil
:asc_nils_last -> " ASC NULLS LAST"
:asc_nils_first -> " ASC NULLS FIRST"
:desc -> " DESC"
:desc_nils_last -> " DESC NULLS LAST"
:desc_nils_first -> " DESC NULLS FIRST"
end
end
defp sanitize_sort(sort) do
sort
|> List.wrap()
|> Enum.map(fn
{sort, {order, context}} ->
{ash_to_ecto_order(order), {sort, context}}
{sort, order} ->
{ash_to_ecto_order(order), sort}
sort ->
sort
end)
end
defp ash_to_ecto_order(:asc_nils_last), do: :asc_nulls_last
defp ash_to_ecto_order(:asc_nils_first), do: :asc_nulls_first
defp ash_to_ecto_order(:desc_nils_last), do: :desc_nulls_last
defp ash_to_ecto_order(:desc_nils_first), do: :desc_nulls_first
defp ash_to_ecto_order(other), do: other
end

45
lib/statement.ex Normal file
View file

@ -0,0 +1,45 @@
defmodule AshSqlite.Statement do
@moduledoc "Represents a custom statement to be run in generated migrations"
@fields [
:name,
:up,
:down,
:code?
]
defstruct @fields
def fields, do: @fields
@schema [
name: [
type: :atom,
required: true,
doc: """
The name of the statement; it must be unique within the resource
"""
],
code?: [
type: :boolean,
default: false,
doc: """
By default, the `up` and `down` strings are placed inside of the Ecto migration's `execute/1` function and assumed to be SQL. Set this option if you want to provide custom Elixir code to be placed directly in the migrations instead.
"""
],
up: [
type: :string,
doc: """
How to create the structure of the statement
""",
required: true
],
down: [
type: :string,
doc: "How to tear down the structure of the statement",
required: true
]
]
def schema, do: @schema
end
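# A usage sketch (this assumes a `custom_statements` DSL section carried over
# from ash_postgres; the statement name and SQL here are hypothetical):
#
#   custom_statements do
#     statement :title_index do
#       up "CREATE INDEX posts_title_index ON posts (title);"
#       down "DROP INDEX posts_title_index;"
#     end
#   end
#
# With `code?: true`, `up` and `down` are emitted into the migration verbatim
# as Elixir code instead of being wrapped in `execute/1`.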

View file

@ -0,0 +1,30 @@
defmodule AshSqlite.Transformers.EnsureTableOrPolymorphic do
@moduledoc false
use Spark.Dsl.Transformer
alias Spark.Dsl.Transformer
def transform(dsl) do
if Transformer.get_option(dsl, [:sqlite], :polymorphic?) ||
Transformer.get_option(dsl, [:sqlite], :table) do
{:ok, dsl}
else
resource = Transformer.get_persisted(dsl, :module)
raise Spark.Error.DslError,
module: resource,
message: """
Must configure a table for #{inspect(resource)}.
For example:
```elixir
sqlite do
table "the_table"
repo YourApp.Repo
end
```
""",
path: [:sqlite, :table]
end
end
end

View file

@ -0,0 +1,23 @@
defmodule AshSqlite.Transformers.ValidateReferences do
@moduledoc false
use Spark.Dsl.Transformer
alias Spark.Dsl.Transformer
def after_compile?, do: true
def transform(dsl) do
dsl
|> AshSqlite.DataLayer.Info.references()
|> Enum.each(fn reference ->
unless Ash.Resource.Info.relationship(dsl, reference.relationship) do
raise Spark.Error.DslError,
path: [:sqlite, :references, reference.relationship],
module: Transformer.get_persisted(dsl, :module),
message:
"Found reference configuration for relationship `#{reference.relationship}`, but no such relationship exists"
end
end)
{:ok, dsl}
end
end
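# The configuration this validates looks roughly like the sketch below
# (option names assumed to mirror ash_postgres; `:post` is hypothetical):
#
#   sqlite do
#     table "comments"
#     repo MyApp.Repo
#
#     references do
#       reference :post, on_delete: :delete
#     end
#   end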

View file

@ -0,0 +1,22 @@
defmodule AshSqlite.Transformers.VerifyRepo do
@moduledoc false
use Spark.Dsl.Transformer
alias Spark.Dsl.Transformer
def after_compile?, do: true
def transform(dsl) do
repo = Transformer.get_option(dsl, [:sqlite], :repo)
cond do
match?({:error, _}, Code.ensure_compiled(repo)) ->
{:error, "Could not find repo module #{repo}"}
repo.__adapter__() != Ecto.Adapters.SQLite3 ->
{:error, "Expected a repo using the sqlite adapter `Ecto.Adapters.SQLite3`"}
true ->
{:ok, dsl}
end
end
end
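# A repo that passes this check might look like the following sketch (the
# module name and otp_app are hypothetical; `use AshSqlite.Repo` is assumed
# to configure the Ecto.Adapters.SQLite3 adapter):
#
#   defmodule MyApp.Repo do
#     use AshSqlite.Repo, otp_app: :my_app
#   end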

19
lib/type.ex Normal file
View file

@ -0,0 +1,19 @@
defmodule AshSqlite.Type do
@moduledoc """
SQLite-specific callbacks for `Ash.Type`.
Use this in addition to `Ash.Type`.
"""
@callback value_to_sqlite_default(Ash.Type.t(), Ash.Type.constraints(), term) ::
{:ok, String.t()} | :error
defmacro __using__(_) do
quote do
@behaviour AshSqlite.Type
def value_to_sqlite_default(_, _, _), do: :error
defoverridable value_to_sqlite_default: 3
end
end
end
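# A usage sketch (the `MyApp.Point` type, its storage choice, and its default
# encoding are all hypothetical; the cast callbacks required by `Ash.Type`
# are elided):
#
#   defmodule MyApp.Point do
#     use Ash.Type
#     use AshSqlite.Type
#
#     @impl Ash.Type
#     def storage_type(_), do: :string
#
#     # ...cast_input/2, cast_stored/2, dump_to_native/2...
#
#     @impl AshSqlite.Type
#     def value_to_sqlite_default(_type, _constraints, {x, y}), do: {:ok, "'#{x},#{y}'"}
#     def value_to_sqlite_default(_type, _constraints, _value), do: :error
#   end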

View file

@ -0,0 +1,14 @@
defmodule Ash.Type.CiStringWrapper do
@moduledoc false
use Ash.Type
@impl true
def storage_type(_), do: :citext
@impl true
defdelegate cast_input(value, constraints), to: Ash.Type.CiString
@impl true
defdelegate cast_stored(value, constraints), to: Ash.Type.CiString
@impl true
defdelegate dump_to_native(value, constraints), to: Ash.Type.CiString
end

View file

@ -0,0 +1,14 @@
defmodule Ash.Type.StringWrapper do
@moduledoc false
use Ash.Type
@impl true
def storage_type(_), do: :text
@impl true
defdelegate cast_input(value, constraints), to: Ash.Type.String
@impl true
defdelegate cast_stored(value, constraints), to: Ash.Type.String
@impl true
defdelegate dump_to_native(value, constraints), to: Ash.Type.String
end

190
lib/types/types.ex Normal file
View file

@ -0,0 +1,190 @@
defmodule AshSqlite.Types do
@moduledoc false
alias Ash.Query.Ref
def parameterized_type({:parameterized, _, _} = type, _) do
type
end
def parameterized_type({:in, type}, constraints) do
parameterized_type({:array, type}, constraints)
end
def parameterized_type({:array, type}, constraints) do
case parameterized_type(type, constraints[:items] || []) do
nil ->
nil
type ->
{:array, type}
end
end
def parameterized_type(Ash.Type.CiString, constraints) do
parameterized_type(Ash.Type.CiStringWrapper, constraints)
end
def parameterized_type(Ash.Type.String.EctoType, constraints) do
parameterized_type(Ash.Type.StringWrapper, constraints)
end
def parameterized_type(type, _constraints) when type in [Ash.Type.Map, Ash.Type.Map.EctoType],
do: nil
def parameterized_type(type, constraints) do
if Ash.Type.ash_type?(type) do
cast_in_query? =
if function_exported?(Ash.Type, :cast_in_query?, 2) do
Ash.Type.cast_in_query?(type, constraints)
else
Ash.Type.cast_in_query?(type)
end
if cast_in_query? do
parameterized_type(Ash.Type.ecto_type(type), constraints)
else
nil
end
else
if is_atom(type) && :erlang.function_exported(type, :type, 1) do
{:parameterized, type, constraints || []}
else
type
end
end
end
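  # For example (comments only; results follow directly from the clauses above):
  #
  #   parameterized_type({:array, Ash.Type.Map}, [])
  #   #=> nil (map columns get no parameterized type)
  #
  #   parameterized_type({:in, Ash.Type.CiString}, items: [])
  #   #=> treated exactly like {:array, Ash.Type.CiString}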
def determine_types(mod, values) do
Code.ensure_compiled(mod)
cond do
:erlang.function_exported(mod, :types, 0) ->
mod.types()
:erlang.function_exported(mod, :args, 0) ->
mod.args()
true ->
[:any]
end
|> Enum.map(fn types ->
case types do
:same ->
types =
for _ <- values do
:same
end
closest_fitting_type(types, values)
:any ->
for _ <- values do
:any
end
types ->
closest_fitting_type(types, values)
end
end)
|> Enum.filter(fn types ->
Enum.all?(types, &(vagueness(&1) == 0))
end)
|> case do
[type] ->
if type == :any || type == {:in, :any} do
nil
else
type
end
# There are things we could likely do here
# We only say "we know what types these are" when we explicitly know
_ ->
Enum.map(values, fn _ -> nil end)
end
end
defp closest_fitting_type(types, values) do
types_with_values = Enum.zip(types, values)
types_with_values
|> fill_in_known_types()
|> clarify_types()
end
defp clarify_types(types) do
basis =
types
|> Enum.map(&elem(&1, 0))
|> Enum.min_by(&vagueness(&1))
Enum.map(types, fn {type, _value} ->
replace_same(type, basis)
end)
end
defp replace_same({:in, type}, basis) do
{:in, replace_same(type, basis)}
end
defp replace_same(:same, :same) do
:any
end
defp replace_same(:same, {:in, :same}) do
{:in, :any}
end
defp replace_same(:same, basis) do
basis
end
defp replace_same(other, _basis) do
other
end
defp fill_in_known_types(types) do
Enum.map(types, &fill_in_known_type/1)
end
defp fill_in_known_type(
{vague_type, %Ref{attribute: %{type: type, constraints: constraints}}} = ref
)
when vague_type in [:any, :same] do
if Ash.Type.ash_type?(type) do
type = type |> parameterized_type(constraints) |> array_to_in()
{type || :any, ref}
else
type =
if is_atom(type) && :erlang.function_exported(type, :type, 1) do
{:parameterized, type, []} |> array_to_in()
else
type |> array_to_in()
end
{type, ref}
end
end
defp fill_in_known_type(
{{:array, type}, %Ref{attribute: %{type: {:array, type}} = attribute} = ref}
) do
{:in, fill_in_known_type({type, %{ref | attribute: %{attribute | type: type}}})}
end
defp fill_in_known_type({type, value}), do: {array_to_in(type), value}
defp array_to_in({:array, v}), do: {:in, array_to_in(v)}
defp array_to_in({:parameterized, type, constraints}),
do: {:parameterized, array_to_in(type), constraints}
defp array_to_in(v), do: v
defp vagueness({:in, type}), do: vagueness(type)
defp vagueness(:same), do: 2
defp vagueness(:any), do: 1
defp vagueness(_), do: 0
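  # The vagueness ranking drives type resolution (comments only):
  #
  #   vagueness(:same)           #=> 2 (must match another argument's type)
  #   vagueness(:any)            #=> 1 (anything goes)
  #   vagueness(Ash.Type.String) #=> 0 (concrete)
  #
  # `closest_fitting_type/2` replaces each `:same` with the least vague type
  # found among the values, and `determine_types/2` only reports types when
  # every slot resolves to vagueness 0.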
end

BIN
logos/small-logo.png Normal file

Binary file not shown (added image, 3 KiB).

231
mix.exs Normal file
View file

@ -0,0 +1,231 @@
defmodule AshSqlite.MixProject do
use Mix.Project
@description """
A SQLite data layer for `Ash` resources. Leverages Ecto's SQLite
support and delegates to a configured repo.
"""
@version "0.1.0"
def project do
[
app: :ash_sqlite,
version: @version,
elixir: "~> 1.11",
start_permanent: Mix.env() == :prod,
deps: deps(),
description: @description,
test_coverage: [tool: ExCoveralls],
elixirc_paths: elixirc_paths(Mix.env()),
preferred_cli_env: [
coveralls: :test,
"coveralls.github": :test,
"test.create": :test,
"test.migrate": :test,
"test.rollback": :test,
"test.check_migrations": :test,
"test.drop": :test,
"test.generate_migrations": :test,
"test.reset": :test
],
dialyzer: [
plt_add_apps: [:ecto, :ash, :mix]
],
docs: docs(),
aliases: aliases(),
package: package(),
source_url: "https://github.com/ash-project/ash_sqlite",
homepage_url: "https://github.com/ash-project/ash_sqlite",
consolidate_protocols: Mix.env() != :test
]
end
if Mix.env() == :test do
def application() do
[applications: [:ecto, :ecto_sql, :jason, :ash, :postgrex], mod: {AshSqlite.TestApp, []}]
end
end
defp elixirc_paths(:test), do: ["lib", "test/support"]
defp elixirc_paths(_), do: ["lib"]
defp package do
[
name: :ash_sqlite,
licenses: ["MIT"],
files: ~w(lib .formatter.exs mix.exs README* LICENSE*
CHANGELOG* documentation),
links: %{
GitHub: "https://github.com/ash-project/ash_sqlite"
}
]
end
defp extras() do
"documentation/**/*.{md,livemd,cheatmd}"
|> Path.wildcard()
|> Enum.map(fn path ->
title =
path
|> Path.basename(".md")
|> Path.basename(".livemd")
|> Path.basename(".cheatmd")
|> String.split(~r/[-_]/)
|> Enum.map_join(" ", &capitalize/1)
|> case do
"F A Q" ->
"FAQ"
other ->
other
end
{String.to_atom(path),
[
title: title
]}
end)
end
defp capitalize(string) do
  string
  |> String.split(" ")
  |> Enum.map(fn word ->
    [hd | tail] = String.graphemes(word)
    String.capitalize(hd) <> Enum.join(tail)
  end)
  |> Enum.join(" ")
end
defp groups_for_extras() do
[
Tutorials: [
~r'documentation/tutorials'
],
"How To": ~r'documentation/how_to',
Topics: ~r'documentation/topics',
DSLs: ~r'documentation/dsls'
]
end
defp docs do
[
main: "get-started-with-sqlite",
source_ref: "v#{@version}",
logo: "logos/small-logo.png",
extras: extras(),
spark: [
mix_tasks: [
SQLite: [
Mix.Tasks.AshSqlite.GenerateMigrations,
Mix.Tasks.AshSqlite.Create,
Mix.Tasks.AshSqlite.Drop,
Mix.Tasks.AshSqlite.Migrate,
Mix.Tasks.AshSqlite.Rollback
]
],
extensions: [
%{
module: AshSqlite.DataLayer,
name: "AshSqlite",
target: "Ash.Resource",
type: "DataLayer"
}
]
],
groups_for_extras: groups_for_extras(),
groups_for_modules: [
AshSqlite: [
AshSqlite,
AshSqlite.Repo,
AshSqlite.DataLayer
],
Utilities: [
AshSqlite.ManualRelationship
],
Introspection: [
AshSqlite.DataLayer.Info,
AshSqlite.CheckConstraint,
AshSqlite.CustomExtension,
AshSqlite.CustomIndex,
AshSqlite.Reference,
AshSqlite.Statement
],
Types: [
AshSqlite.Type
],
"Sqlite Migrations": [
EctoMigrationDefault
],
Expressions: [
AshSqlite.Functions.Fragment,
AshSqlite.Functions.Like
],
Internals: ~r/.*/
]
]
end
# Run "mix help deps" to learn about dependencies.
defp deps do
[
{:ecto_sql, "~> 3.9"},
{:ecto_sqlite3, "~> 0.11"},
{:ecto, "~> 3.9"},
{:jason, "~> 1.0"},
{:postgrex, ">= 0.0.0"},
{:ash, ash_version("~> 2.14 and >= 2.14.18")},
{:git_ops, "~> 2.5", only: [:dev, :test]},
{:ex_doc, "~> 0.22", only: [:dev, :test], runtime: false},
{:ex_check, "~> 0.14", only: [:dev, :test]},
{:credo, ">= 0.0.0", only: [:dev, :test], runtime: false},
{:dialyxir, ">= 0.0.0", only: [:dev, :test], runtime: false},
{:sobelow, ">= 0.0.0", only: [:dev, :test], runtime: false},
{:excoveralls, "~> 0.14", only: [:dev, :test]}
]
end
defp ash_version(default_version) do
case System.get_env("ASH_VERSION") do
nil ->
default_version
"local" ->
[path: "../ash"]
"main" ->
[git: "https://github.com/ash-project/ash.git"]
version when is_binary(version) ->
"~> #{version}"
version ->
version
end
end
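  # For example (shell usage; "local" assumes a sibling ../ash checkout):
  #
  #   ASH_VERSION=local mix deps.get    # path: "../ash"
  #   ASH_VERSION=main mix deps.get     # git main branch
  #   ASH_VERSION=2.14.19 mix deps.get  # becomes "~> 2.14.19"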
defp aliases do
[
sobelow:
"sobelow --skip -i Config.Secrets --ignore-files lib/migration_generator/migration_generator.ex",
credo: "credo --strict",
docs: [
"spark.cheat_sheets",
"docs",
"ash.replace_doc_links",
"spark.cheat_sheets_in_search"
],
"spark.formatter": "spark.formatter --extensions AshSqlite.DataLayer",
"spark.cheat_sheets": "spark.cheat_sheets --extensions AshSqlite.DataLayer",
"spark.cheat_sheets_in_search":
"spark.cheat_sheets_in_search --extensions AshSqlite.DataLayer",
"test.generate_migrations": "ash_sqlite.generate_migrations",
"test.check_migrations": "ash_sqlite.generate_migrations --check",
"test.migrate": "ash_sqlite.migrate",
"test.rollback": "ash_sqlite.rollback",
"test.create": "ash_sqlite.create",
"test.reset": ["test.drop", "test.create", "test.migrate"],
"test.drop": "ash_sqlite.drop"
]
end
end

46
mix.lock Normal file
View file

@ -0,0 +1,46 @@
%{
"ash": {:hex, :ash, "2.14.18", "ac2fd2f274f4989d3c71de3df9a603941bc47ac6c8d27006df78f78844114969", [:mix], [{:comparable, "~> 1.0", [hex: :comparable, repo: "hexpm", optional: false]}, {:decimal, "~> 2.0", [hex: :decimal, repo: "hexpm", optional: false]}, {:earmark, "~> 1.4", [hex: :earmark, repo: "hexpm", optional: true]}, {:ecto, "~> 3.7", [hex: :ecto, repo: "hexpm", optional: false]}, {:ets, "~> 0.8", [hex: :ets, repo: "hexpm", optional: false]}, {:jason, ">= 1.0.0", [hex: :jason, repo: "hexpm", optional: false]}, {:picosat_elixir, "~> 0.2", [hex: :picosat_elixir, repo: "hexpm", optional: false]}, {:plug, ">= 0.0.0", [hex: :plug, repo: "hexpm", optional: true]}, {:spark, ">= 1.1.20 and < 2.0.0-0", [hex: :spark, repo: "hexpm", optional: false]}, {:stream_data, "~> 0.5", [hex: :stream_data, repo: "hexpm", optional: false]}, {:telemetry, "~> 1.1", [hex: :telemetry, repo: "hexpm", optional: false]}], "hexpm", "ec44ad258eb71a2dd5210f67bd882698ea112f6dad79505b156594be06e320e5"},
"bunt": {:hex, :bunt, "0.2.0", "951c6e801e8b1d2cbe58ebbd3e616a869061ddadcc4863d0a2182541acae9a38", [:mix], [], "hexpm", "7af5c7e09fe1d40f76c8e4f9dd2be7cebd83909f31fee7cd0e9eadc567da8353"},
"cc_precompiler": {:hex, :cc_precompiler, "0.1.8", "933a5f4da3b19ee56539a076076ce4d7716d64efc8db46fd066996a7e46e2bfd", [:mix], [{:elixir_make, "~> 0.7.3", [hex: :elixir_make, repo: "hexpm", optional: false]}], "hexpm", "176bdf4366956e456bf761b54ad70bc4103d0269ca9558fd7cee93d1b3f116db"},
"certifi": {:hex, :certifi, "2.9.0", "6f2a475689dd47f19fb74334859d460a2dc4e3252a3324bd2111b8f0429e7e21", [:rebar3], [], "hexpm", "266da46bdb06d6c6d35fde799bcb28d36d985d424ad7c08b5bb48f5b5cdd4641"},
"comparable": {:hex, :comparable, "1.0.0", "bb669e91cedd14ae9937053e5bcbc3c52bb2f22422611f43b6e38367d94a495f", [:mix], [{:typable, "~> 0.1", [hex: :typable, repo: "hexpm", optional: false]}], "hexpm", "277c11eeb1cd726e7cd41c6c199e7e52fa16ee6830b45ad4cdc62e51f62eb60c"},
"credo": {:hex, :credo, "1.6.4", "ddd474afb6e8c240313f3a7b0d025cc3213f0d171879429bf8535d7021d9ad78", [:mix], [{:bunt, "~> 0.2.0", [hex: :bunt, repo: "hexpm", optional: false]}, {:file_system, "~> 0.2.8", [hex: :file_system, repo: "hexpm", optional: false]}, {:jason, "~> 1.0", [hex: :jason, repo: "hexpm", optional: false]}], "hexpm", "c28f910b61e1ff829bffa056ef7293a8db50e87f2c57a9b5c3f57eee124536b7"},
"db_connection": {:hex, :db_connection, "2.5.0", "bb6d4f30d35ded97b29fe80d8bd6f928a1912ca1ff110831edcd238a1973652c", [:mix], [{:telemetry, "~> 0.4 or ~> 1.0", [hex: :telemetry, repo: "hexpm", optional: false]}], "hexpm", "c92d5ba26cd69ead1ff7582dbb860adeedfff39774105a4f1c92cbb654b55aa2"},
"decimal": {:hex, :decimal, "2.1.1", "5611dca5d4b2c3dd497dec8f68751f1f1a54755e8ed2a966c2633cf885973ad6", [:mix], [], "hexpm", "53cfe5f497ed0e7771ae1a475575603d77425099ba5faef9394932b35020ffcc"},
"dialyxir": {:hex, :dialyxir, "1.1.0", "c5aab0d6e71e5522e77beff7ba9e08f8e02bad90dfbeffae60eaf0cb47e29488", [:mix], [{:erlex, ">= 0.2.6", [hex: :erlex, repo: "hexpm", optional: false]}], "hexpm", "07ea8e49c45f15264ebe6d5b93799d4dd56a44036cf42d0ad9c960bc266c0b9a"},
"earmark_parser": {:hex, :earmark_parser, "1.4.35", "437773ca9384edf69830e26e9e7b2e0d22d2596c4a6b17094a3b29f01ea65bb8", [:mix], [], "hexpm", "8652ba3cb85608d0d7aa2d21b45c6fad4ddc9a1f9a1f1b30ca3a246f0acc33f6"},
"ecto": {:hex, :ecto, "3.10.3", "eb2ae2eecd210b4eb8bece1217b297ad4ff824b4384c0e3fdd28aaf96edd6135", [:mix], [{:decimal, "~> 1.6 or ~> 2.0", [hex: :decimal, repo: "hexpm", optional: false]}, {:jason, "~> 1.0", [hex: :jason, repo: "hexpm", optional: true]}, {:telemetry, "~> 0.4 or ~> 1.0", [hex: :telemetry, repo: "hexpm", optional: false]}], "hexpm", "44bec74e2364d491d70f7e42cd0d690922659d329f6465e89feb8a34e8cd3433"},
"ecto_sql": {:hex, :ecto_sql, "3.10.2", "6b98b46534b5c2f8b8b5f03f126e75e2a73c64f3c071149d32987a5378b0fdbd", [:mix], [{:db_connection, "~> 2.4.1 or ~> 2.5", [hex: :db_connection, repo: "hexpm", optional: false]}, {:ecto, "~> 3.10.0", [hex: :ecto, repo: "hexpm", optional: false]}, {:myxql, "~> 0.6.0", [hex: :myxql, repo: "hexpm", optional: true]}, {:postgrex, "~> 0.16.0 or ~> 0.17.0 or ~> 1.0", [hex: :postgrex, repo: "hexpm", optional: true]}, {:tds, "~> 2.1.1 or ~> 2.2", [hex: :tds, repo: "hexpm", optional: true]}, {:telemetry, "~> 0.4.0 or ~> 1.0", [hex: :telemetry, repo: "hexpm", optional: false]}], "hexpm", "68c018debca57cb9235e3889affdaec7a10616a4e3a80c99fa1d01fdafaa9007"},
"ecto_sqlite3": {:hex, :ecto_sqlite3, "0.11.0", "1e094ade9ff1bc7c33c5c6b114f8a5156d0b7c5ddf9038d61cb8fdd61e7c4c55", [:mix], [{:decimal, "~> 1.6 or ~> 2.0", [hex: :decimal, repo: "hexpm", optional: false]}, {:ecto, "~> 3.10", [hex: :ecto, repo: "hexpm", optional: false]}, {:ecto_sql, "~> 3.10", [hex: :ecto_sql, repo: "hexpm", optional: false]}, {:exqlite, "~> 0.9", [hex: :exqlite, repo: "hexpm", optional: false]}], "hexpm", "3d5b9a69b9a9547329413b278b4b072b9bbadf4fd599a746b3d6b0e174a418bb"},
"elixir_make": {:hex, :elixir_make, "0.7.7", "7128c60c2476019ed978210c245badf08b03dbec4f24d05790ef791da11aa17c", [:mix], [{:castore, "~> 0.1 or ~> 1.0", [hex: :castore, repo: "hexpm", optional: true]}], "hexpm", "5bc19fff950fad52bbe5f211b12db9ec82c6b34a9647da0c2224b8b8464c7e6c"},
"erlex": {:hex, :erlex, "0.2.6", "c7987d15e899c7a2f34f5420d2a2ea0d659682c06ac607572df55a43753aa12e", [:mix], [], "hexpm", "2ed2e25711feb44d52b17d2780eabf998452f6efda104877a3881c2f8c0c0c75"},
"ets": {:hex, :ets, "0.9.0", "79c6a6c205436780486f72d84230c6cba2f8a9920456750ddd1e47389107d5fd", [:mix], [], "hexpm", "2861fdfb04bcaeff370f1a5904eec864f0a56dcfebe5921ea9aadf2a481c822b"},
"ex_check": {:hex, :ex_check, "0.14.0", "d6fbe0bcc51cf38fea276f5bc2af0c9ae0a2bb059f602f8de88709421dae4f0e", [:mix], [], "hexpm", "8a602e98c66e6a4be3a639321f1f545292042f290f91fa942a285888c6868af0"},
"ex_doc": {:hex, :ex_doc, "0.30.6", "5f8b54854b240a2b55c9734c4b1d0dd7bdd41f71a095d42a70445c03cf05a281", [:mix], [{:earmark_parser, "~> 1.4.31", [hex: :earmark_parser, repo: "hexpm", optional: false]}, {:makeup_elixir, "~> 0.14", [hex: :makeup_elixir, repo: "hexpm", optional: false]}, {:makeup_erlang, "~> 0.1", [hex: :makeup_erlang, repo: "hexpm", optional: false]}], "hexpm", "bd48f2ddacf4e482c727f9293d9498e0881597eae6ddc3d9562bd7923375109f"},
"excoveralls": {:hex, :excoveralls, "0.14.4", "295498f1ae47bdc6dce59af9a585c381e1aefc63298d48172efaaa90c3d251db", [:mix], [{:hackney, "~> 1.16", [hex: :hackney, repo: "hexpm", optional: false]}, {:jason, "~> 1.0", [hex: :jason, repo: "hexpm", optional: false]}], "hexpm", "e3ab02f2df4c1c7a519728a6f0a747e71d7d6e846020aae338173619217931c1"},
"exqlite": {:hex, :exqlite, "0.14.0", "f275c6fe1ce35d383b4ed52461ca98c02354eeb2c651c13f5b4badcfd39b743f", [:make, :mix], [{:cc_precompiler, "~> 0.1", [hex: :cc_precompiler, repo: "hexpm", optional: false]}, {:db_connection, "~> 2.1", [hex: :db_connection, repo: "hexpm", optional: false]}, {:elixir_make, "~> 0.7", [hex: :elixir_make, repo: "hexpm", optional: false]}, {:table, "~> 0.1.0", [hex: :table, repo: "hexpm", optional: true]}], "hexpm", "e335eca54749d04dcdedcbc87be85e2176030aab3d7b74b6323fda7e3552ee4c"},
"file_system": {:hex, :file_system, "0.2.10", "fb082005a9cd1711c05b5248710f8826b02d7d1784e7c3451f9c1231d4fc162d", [:mix], [], "hexpm", "41195edbfb562a593726eda3b3e8b103a309b733ad25f3d642ba49696bf715dc"},
"git_cli": {:hex, :git_cli, "0.3.0", "a5422f9b95c99483385b976f5d43f7e8233283a47cda13533d7c16131cb14df5", [:mix], [], "hexpm", "78cb952f4c86a41f4d3511f1d3ecb28edb268e3a7df278de2faa1bd4672eaf9b"},
"git_ops": {:hex, :git_ops, "2.5.5", "4f8369f3c9347e06a7f289de98fadfc95194149156335c5292479a53eddbccd2", [:mix], [{:git_cli, "~> 0.2", [hex: :git_cli, repo: "hexpm", optional: false]}, {:nimble_parsec, "~> 1.0", [hex: :nimble_parsec, repo: "hexpm", optional: false]}], "hexpm", "3b1e3b12968f9da6f79b5e2b2274477206949376e3579d05a5f3d439eda0b746"},
"hackney": {:hex, :hackney, "1.18.1", "f48bf88f521f2a229fc7bae88cf4f85adc9cd9bcf23b5dc8eb6a1788c662c4f6", [:rebar3], [{:certifi, "~>2.9.0", [hex: :certifi, repo: "hexpm", optional: false]}, {:idna, "~>6.1.0", [hex: :idna, repo: "hexpm", optional: false]}, {:metrics, "~>1.0.0", [hex: :metrics, repo: "hexpm", optional: false]}, {:mimerl, "~>1.1", [hex: :mimerl, repo: "hexpm", optional: false]}, {:parse_trans, "3.3.1", [hex: :parse_trans, repo: "hexpm", optional: false]}, {:ssl_verify_fun, "~>1.1.0", [hex: :ssl_verify_fun, repo: "hexpm", optional: false]}, {:unicode_util_compat, "~>0.7.0", [hex: :unicode_util_compat, repo: "hexpm", optional: false]}], "hexpm", "a4ecdaff44297e9b5894ae499e9a070ea1888c84afdd1fd9b7b2bc384950128e"},
"idna": {:hex, :idna, "6.1.1", "8a63070e9f7d0c62eb9d9fcb360a7de382448200fbbd1b106cc96d3d8099df8d", [:rebar3], [{:unicode_util_compat, "~>0.7.0", [hex: :unicode_util_compat, repo: "hexpm", optional: false]}], "hexpm", "92376eb7894412ed19ac475e4a86f7b413c1b9fbb5bd16dccd57934157944cea"},
"jason": {:hex, :jason, "1.4.1", "af1504e35f629ddcdd6addb3513c3853991f694921b1b9368b0bd32beb9f1b63", [:mix], [{:decimal, "~> 1.0 or ~> 2.0", [hex: :decimal, repo: "hexpm", optional: true]}], "hexpm", "fbb01ecdfd565b56261302f7e1fcc27c4fb8f32d56eab74db621fc154604a7a1"},
"makeup": {:hex, :makeup, "1.1.0", "6b67c8bc2882a6b6a445859952a602afc1a41c2e08379ca057c0f525366fc3ca", [:mix], [{:nimble_parsec, "~> 1.2.2 or ~> 1.3", [hex: :nimble_parsec, repo: "hexpm", optional: false]}], "hexpm", "0a45ed501f4a8897f580eabf99a2e5234ea3e75a4373c8a52824f6e873be57a6"},
"makeup_elixir": {:hex, :makeup_elixir, "0.16.1", "cc9e3ca312f1cfeccc572b37a09980287e243648108384b97ff2b76e505c3555", [:mix], [{:makeup, "~> 1.0", [hex: :makeup, repo: "hexpm", optional: false]}, {:nimble_parsec, "~> 1.2.3 or ~> 1.3", [hex: :nimble_parsec, repo: "hexpm", optional: false]}], "hexpm", "e127a341ad1b209bd80f7bd1620a15693a9908ed780c3b763bccf7d200c767c6"},
"makeup_erlang": {:hex, :makeup_erlang, "0.1.2", "ad87296a092a46e03b7e9b0be7631ddcf64c790fa68a9ef5323b6cbb36affc72", [:mix], [{:makeup, "~> 1.0", [hex: :makeup, repo: "hexpm", optional: false]}], "hexpm", "f3f5a1ca93ce6e092d92b6d9c049bcda58a3b617a8d888f8e7231c85630e8108"},
"metrics": {:hex, :metrics, "1.0.1", "25f094dea2cda98213cecc3aeff09e940299d950904393b2a29d191c346a8486", [:rebar3], [], "hexpm", "69b09adddc4f74a40716ae54d140f93beb0fb8978d8636eaded0c31b6f099f16"},
"mimerl": {:hex, :mimerl, "1.2.0", "67e2d3f571088d5cfd3e550c383094b47159f3eee8ffa08e64106cdf5e981be3", [:rebar3], [], "hexpm", "f278585650aa581986264638ebf698f8bb19df297f66ad91b18910dfc6e19323"},
"nimble_options": {:hex, :nimble_options, "1.0.2", "92098a74df0072ff37d0c12ace58574d26880e522c22801437151a159392270e", [:mix], [], "hexpm", "fd12a8db2021036ce12a309f26f564ec367373265b53e25403f0ee697380f1b8"},
"nimble_parsec": {:hex, :nimble_parsec, "1.3.1", "2c54013ecf170e249e9291ed0a62e5832f70a476c61da16f6aac6dca0189f2af", [:mix], [], "hexpm", "2682e3c0b2eb58d90c6375fc0cc30bc7be06f365bf72608804fb9cffa5e1b167"},
"parse_trans": {:hex, :parse_trans, "3.3.1", "16328ab840cc09919bd10dab29e431da3af9e9e7e7e6f0089dd5a2d2820011d8", [:rebar3], [], "hexpm", "07cd9577885f56362d414e8c4c4e6bdf10d43a8767abb92d24cbe8b24c54888b"},
"picosat_elixir": {:hex, :picosat_elixir, "0.2.3", "bf326d0f179fbb3b706bb2c15fbc367dacfa2517157d090fdfc32edae004c597", [:make, :mix], [{:elixir_make, "~> 0.6", [hex: :elixir_make, repo: "hexpm", optional: false]}], "hexpm", "f76c9db2dec9d2561ffaa9be35f65403d53e984e8cd99c832383b7ab78c16c66"},
"postgrex": {:hex, :postgrex, "0.17.2", "a3ec9e3239d9b33f1e5841565c4eb200055c52cc0757a22b63ca2d529bbe764c", [:mix], [{:db_connection, "~> 2.1", [hex: :db_connection, repo: "hexpm", optional: false]}, {:decimal, "~> 1.5 or ~> 2.0", [hex: :decimal, repo: "hexpm", optional: false]}, {:jason, "~> 1.0", [hex: :jason, repo: "hexpm", optional: true]}, {:table, "~> 0.1.0", [hex: :table, repo: "hexpm", optional: true]}], "hexpm", "80a918a9e9531d39f7bd70621422f3ebc93c01618c645f2d91306f50041ed90c"},
"sobelow": {:hex, :sobelow, "0.11.1", "23438964486f8112b41e743bbfd402da3e5b296fdc9eacab29914b79c48916dd", [:mix], [{:jason, "~> 1.0", [hex: :jason, repo: "hexpm", optional: false]}], "hexpm", "9897363a7eff96f4809304a90aad819e2ad5e5d24db547af502885146746a53c"},
"sourceror": {:hex, :sourceror, "0.14.0", "b6b8552d0240400d66b6f107c1bab7ac1726e998efc797f178b7b517e928e314", [:mix], [], "hexpm", "809c71270ad48092d40bbe251a133e49ae229433ce103f762a2373b7a10a8d8b"},
"spark": {:hex, :spark, "1.1.39", "f143b84a5b796bf2d83ec8fb4793ee9e66e67510c40d785f9a67050bb88e7677", [:mix], [{:jason, "~> 1.4", [hex: :jason, repo: "hexpm", optional: false]}, {:nimble_options, "~> 0.5 or ~> 1.0", [hex: :nimble_options, repo: "hexpm", optional: false]}, {:sourceror, "~> 0.1", [hex: :sourceror, repo: "hexpm", optional: false]}], "hexpm", "d71bc26014c7e7abcdcf553f4cf7c5a5ff96f8365b1e20be3768ce503aafb203"},
"ssl_verify_fun": {:hex, :ssl_verify_fun, "1.1.7", "354c321cf377240c7b8716899e182ce4890c5938111a1296add3ec74cf1715df", [:make, :mix, :rebar3], [], "hexpm", "fe4c190e8f37401d30167c8c405eda19469f34577987c76dde613e838bbc67f8"},
"stream_data": {:hex, :stream_data, "0.6.0", "e87a9a79d7ec23d10ff83eb025141ef4915eeb09d4491f79e52f2562b73e5f47", [:mix], [], "hexpm", "b92b5031b650ca480ced047578f1d57ea6dd563f5b57464ad274718c9c29501c"},
"telemetry": {:hex, :telemetry, "1.2.1", "68fdfe8d8f05a8428483a97d7aab2f268aaff24b49e0f599faa091f1d4e7f61c", [:rebar3], [], "hexpm", "dad9ce9d8effc621708f99eac538ef1cbe05d6a874dd741de2e689c47feafed5"},
"typable": {:hex, :typable, "0.3.0", "0431e121d124cd26f312123e313d2689b9a5322b15add65d424c07779eaa3ca1", [:mix], [], "hexpm", "880a0797752da1a4c508ac48f94711e04c86156f498065a83d160eef945858f8"},
"unicode_util_compat": {:hex, :unicode_util_compat, "0.7.0", "bc84380c9ab48177092f43ac89e4dfa2c6d62b40b8bd132b1059ecc7232f9a78", [:rebar3], [], "hexpm", "25eee6d67df61960cf6a794239566599b09e17e668d3700247bc498638152521"},
}

14
test/ash_sqlite_test.exs Normal file
View file

@ -0,0 +1,14 @@
defmodule AshSqliteTest do
use AshSqlite.RepoCase, async: false
test "transaction metadata is given to on_transaction_begin" do
AshSqlite.Test.Post
|> Ash.Changeset.new(%{title: "title"})
|> AshSqlite.Test.Api.create!()
assert_receive %{
type: :create,
metadata: %{action: :create, actor: nil, resource: AshSqlite.Test.Post}
}
end
end

59
test/atomics_test.exs Normal file
View file

@ -0,0 +1,59 @@
defmodule AshSqlite.AtomicsTest do
use AshSqlite.RepoCase, async: false
alias AshSqlite.Test.{Api, Post}
import Ash.Expr
test "a basic atomic works" do
post =
Post
|> Ash.Changeset.for_create(:create, %{title: "foo", price: 1})
|> Api.create!()
assert %{price: 2} =
post
|> Ash.Changeset.for_update(:update, %{})
|> Ash.Changeset.atomic_update(:price, expr(price + 1))
|> Api.update!()
end
test "an atomic that violates a constraint will return the proper error" do
post =
Post
|> Ash.Changeset.for_create(:create, %{title: "foo", price: 1})
|> Api.create!()
assert_raise Ash.Error.Invalid, ~r/does not exist/, fn ->
post
|> Ash.Changeset.for_update(:update, %{})
|> Ash.Changeset.atomic_update(:organization_id, Ash.UUID.generate())
|> Api.update!()
end
end
test "an atomic can refer to a calculation" do
post =
Post
|> Ash.Changeset.for_create(:create, %{title: "foo", price: 1})
|> Api.create!()
post =
post
|> Ash.Changeset.for_update(:update, %{})
|> Ash.Changeset.atomic_update(:score, expr(score_after_winning))
|> Api.update!()
assert post.score == 1
end
test "an atomic can be attached to an action" do
post =
Post
|> Ash.Changeset.for_create(:create, %{title: "foo", price: 1})
|> Api.create!()
assert Post.increment_score!(post, 2).score == 2
assert Post.increment_score!(post, 2).score == 4
end
end

239
test/bulk_create_test.exs Normal file
View file

@ -0,0 +1,239 @@
defmodule AshSqlite.BulkCreateTest do
use AshSqlite.RepoCase, async: false
alias AshSqlite.Test.{Api, Post}
describe "bulk creates" do
test "bulk creates insert each input" do
Api.bulk_create!([%{title: "fred"}, %{title: "george"}], Post, :create)
assert [%{title: "fred"}, %{title: "george"}] =
Post
|> Ash.Query.sort(:title)
|> Api.read!()
end
test "bulk creates can be streamed" do
assert [{:ok, %{title: "fred"}}, {:ok, %{title: "george"}}] =
Api.bulk_create!([%{title: "fred"}, %{title: "george"}], Post, :create,
return_stream?: true,
return_records?: true
)
|> Enum.sort_by(fn {:ok, result} -> result.title end)
end
test "bulk creates can upsert" do
assert [
{:ok, %{title: "fred", uniq_one: "one", uniq_two: "two", price: 10}},
{:ok, %{title: "george", uniq_one: "three", uniq_two: "four", price: 20}}
] =
Api.bulk_create!(
[
%{title: "fred", uniq_one: "one", uniq_two: "two", price: 10},
%{title: "george", uniq_one: "three", uniq_two: "four", price: 20}
],
Post,
:create,
return_stream?: true,
return_records?: true
)
|> Enum.sort_by(fn {:ok, result} -> result.title end)
assert [
{:ok, %{title: "fred", uniq_one: "one", uniq_two: "two", price: 1000}},
{:ok, %{title: "george", uniq_one: "three", uniq_two: "four", price: 20_000}}
] =
Api.bulk_create!(
[
%{title: "something", uniq_one: "one", uniq_two: "two", price: 1000},
%{title: "else", uniq_one: "three", uniq_two: "four", price: 20_000}
],
Post,
:create,
upsert?: true,
upsert_identity: :uniq_one_and_two,
upsert_fields: [:price],
return_stream?: true,
return_records?: true
)
|> Enum.sort_by(fn
{:ok, result} ->
result.title
_ ->
nil
end)
end
# Confirmed that this doesn't work because it can't: an upsert must map to a potentially successful insert.
# Leaving this test here for posterity.
# test "bulk creates can upsert with id" do
# org_id = Ash.UUID.generate()
# _new_org =
# Organization
# |> Ash.Changeset.for_create(:create, %{
# id: org_id,
# title: "Avengers"
# })
# |> Api.create!()
# assert [
# {:ok,
# %{
# name: "Bruce Banner",
# code: "BB01",
# must_be_present: "I am Hulk",
# organization_id: org_id
# }},
# {:ok,
# %{
# name: "Tony Stark",
# code: "TS01",
# must_be_present: "I am Iron Man",
# organization_id: org_id
# }}
# ] =
# Api.bulk_create!(
# [
# %{
# name: "Tony Stark",
# code: "TS01",
# must_be_present: "I am Iron Man",
# organization_id: org_id
# },
# %{
# name: "Bruce Banner",
# code: "BB01",
# must_be_present: "I am Hulk",
# organization_id: org_id
# }
# ],
# Manager,
# :create,
# return_stream?: true,
# return_records?: true,
# return_errors?: true
# )
# |> Enum.sort_by(fn {:ok, result} -> result.name end)
# assert [
# {:ok,
# %{
# name: "Bruce Banner",
# code: "BB01",
# must_be_present: "I am Hulk",
# organization_id: org_id,
# role: "bone breaker"
# }},
# {:ok,
# %{
# name: "Tony Stark",
# code: "TS01",
# must_be_present: "I am Iron Man",
# organization_id: org_id,
# role: "master in chief"
# }}
# ] =
# Api.bulk_create!(
# [
# %{
# name: "Tony Stark",
# code: "TS01",
# organization_id: org_id,
# role: "master in chief"
# },
# %{
# name: "Brice Brenner",
# code: "BB01",
# organization_id: org_id,
# role: "bone breaker"
# }
# ],
# Manager,
# :create,
# upsert?: true,
# upsert_identity: :uniq_code,
# upsert_fields: [:role],
# return_stream?: true,
# return_records?: true,
# return_errors?: true
# )
# |> Enum.sort_by(fn
# {:ok, result} ->
# result.name
# _ ->
# nil
# end)
# end
test "bulk creates can create relationships" do
Api.bulk_create!(
[%{title: "fred", rating: %{score: 5}}, %{title: "george", rating: %{score: 0}}],
Post,
:create
)
assert [
%{title: "fred", ratings: [%{score: 5}]},
%{title: "george", ratings: [%{score: 0}]}
] =
Post
|> Ash.Query.sort(:title)
|> Ash.Query.load(:ratings)
|> Api.read!()
end
end
describe "validation errors" do
test "skips invalid by default" do
assert %{records: [_], errors: [_]} =
Api.bulk_create!([%{title: "fred"}, %{title: "not allowed"}], Post, :create,
return_records?: true,
return_errors?: true
)
end
test "returns errors in the stream" do
assert [{:ok, _}, {:error, _}] =
Api.bulk_create!([%{title: "fred"}, %{title: "not allowed"}], Post, :create,
return_records?: true,
return_stream?: true,
return_errors?: true
)
|> Enum.to_list()
end
end
describe "database errors" do
test "database errors affect the entire batch" do
# assert %{records: [_], errors: [_]} =
Api.bulk_create(
[%{title: "fred"}, %{title: "george", organization_id: Ash.UUID.generate()}],
Post,
:create,
return_records?: true
)
assert [] =
Post
|> Ash.Query.sort(:title)
|> Api.read!()
end
test "database errors don't affect other batches" do
Api.bulk_create(
[%{title: "george", organization_id: Ash.UUID.generate()}, %{title: "fred"}],
Post,
:create,
return_records?: true,
batch_size: 1
)
assert [%{title: "fred"}] =
Post
|> Ash.Query.sort(:title)
|> Api.read!()
end
end
end

381
test/calculation_test.exs Normal file
View file

@ -0,0 +1,381 @@
defmodule AshSqlite.CalculationTest do
use AshSqlite.RepoCase, async: false
alias AshSqlite.Test.{Account, Api, Author, Comment, Post, User}
require Ash.Query
import Ash.Expr
test "calculations can refer to embedded attributes" do
author =
Author
|> Ash.Changeset.for_create(:create, %{bio: %{title: "Mr.", bio: "Bones"}})
|> Api.create!()
assert %{title: "Mr."} =
Author
|> Ash.Query.filter(id == ^author.id)
|> Ash.Query.load(:title)
|> Api.read_one!()
end
test "calculations can use the || operator" do
author =
Author
|> Ash.Changeset.for_create(:create, %{bio: %{title: "Mr.", bio: "Bones"}})
|> Api.create!()
assert %{first_name_or_bob: "bob"} =
Author
|> Ash.Query.filter(id == ^author.id)
|> Ash.Query.load(:first_name_or_bob)
|> Api.read_one!()
end
test "calculations can use the && operator" do
author =
Author
|> Ash.Changeset.for_create(:create, %{
first_name: "fred",
bio: %{title: "Mr.", bio: "Bones"}
})
|> Api.create!()
assert %{first_name_and_bob: "bob"} =
Author
|> Ash.Query.filter(id == ^author.id)
|> Ash.Query.load(:first_name_and_bob)
|> Api.read_one!()
end
test "concat calculation can be filtered on" do
author =
Author
|> Ash.Changeset.new(%{first_name: "is", last_name: "match"})
|> Api.create!()
Author
|> Ash.Changeset.new(%{first_name: "not", last_name: "match"})
|> Api.create!()
author_id = author.id
assert %{id: ^author_id} =
Author
|> Ash.Query.load(:full_name)
|> Ash.Query.filter(full_name == "is match")
|> Api.read_one!()
end
test "conditional calculations can be filtered on" do
author =
Author
|> Ash.Changeset.new(%{first_name: "tom"})
|> Api.create!()
Author
|> Ash.Changeset.new(%{first_name: "tom", last_name: "holland"})
|> Api.create!()
author_id = author.id
assert %{id: ^author_id} =
Author
|> Ash.Query.load([:conditional_full_name, :full_name])
|> Ash.Query.filter(conditional_full_name == "(none)")
|> Api.read_one!()
end
test "parameterized calculations can be filtered on" do
Author
|> Ash.Changeset.new(%{first_name: "tom", last_name: "holland"})
|> Api.create!()
assert %{param_full_name: "tom holland"} =
Author
|> Ash.Query.load(:param_full_name)
|> Api.read_one!()
assert %{param_full_name: "tom~holland"} =
Author
|> Ash.Query.load(param_full_name: [separator: "~"])
|> Api.read_one!()
assert %{} =
Author
|> Ash.Query.filter(param_full_name(separator: "~") == "tom~holland")
|> Api.read_one!()
end
test "parameterized related calculations can be filtered on" do
author =
Author
|> Ash.Changeset.new(%{first_name: "tom", last_name: "holland"})
|> Api.create!()
Comment
|> Ash.Changeset.new(%{title: "match"})
|> Ash.Changeset.manage_relationship(:author, author, type: :append_and_remove)
|> Api.create!()
assert %{title: "match"} =
Comment
|> Ash.Query.filter(author.param_full_name(separator: "~") == "tom~holland")
|> Api.read_one!()
assert %{title: "match"} =
Comment
|> Ash.Query.filter(
author.param_full_name(separator: "~") == "tom~holland" and
author.param_full_name(separator: " ") == "tom holland"
)
|> Api.read_one!()
end
test "parameterized calculations can be sorted on" do
Author
|> Ash.Changeset.new(%{first_name: "tom", last_name: "holland"})
|> Api.create!()
Author
|> Ash.Changeset.new(%{first_name: "abc", last_name: "def"})
|> Api.create!()
assert [%{first_name: "abc"}, %{first_name: "tom"}] =
Author
|> Ash.Query.sort(param_full_name: [separator: "~"])
|> Api.read!()
end
test "calculations using if and literal boolean results can run" do
Post
|> Ash.Query.load(:was_created_in_the_last_month)
|> Ash.Query.filter(was_created_in_the_last_month == true)
|> Api.read!()
end
test "nested conditional calculations can be loaded" do
Author
|> Ash.Changeset.new(%{last_name: "holland"})
|> Api.create!()
Author
|> Ash.Changeset.new(%{first_name: "tom"})
|> Api.create!()
assert [%{nested_conditional: "No First Name"}, %{nested_conditional: "No Last Name"}] =
Author
|> Ash.Query.load(:nested_conditional)
|> Ash.Query.sort(:nested_conditional)
|> Api.read!()
end
test "loading a calculation loads its dependent loads" do
user =
User
|> Ash.Changeset.for_create(:create, %{is_active: true})
|> Api.create!()
account =
Account
|> Ash.Changeset.for_create(:create, %{is_active: true})
|> Ash.Changeset.manage_relationship(:user, user, type: :append_and_remove)
|> Api.create!()
|> Api.load!([:active])
assert account.active
end
describe "string join expression" do
test "no nil values" do
author =
Author
|> Ash.Changeset.for_create(:create, %{
first_name: "Bill",
last_name: "Jones",
bio: %{title: "Mr.", bio: "Bones"}
})
|> Api.create!()
assert %{
full_name_with_nils: "Bill Jones",
full_name_with_nils_no_joiner: "BillJones"
} =
Author
|> Ash.Query.filter(id == ^author.id)
|> Ash.Query.load(:full_name_with_nils)
|> Ash.Query.load(:full_name_with_nils_no_joiner)
|> Api.read_one!()
end
test "with nil value" do
author =
Author
|> Ash.Changeset.for_create(:create, %{
first_name: "Bill",
bio: %{title: "Mr.", bio: "Bones"}
})
|> Api.create!()
assert %{
full_name_with_nils: "Bill",
full_name_with_nils_no_joiner: "Bill"
} =
Author
|> Ash.Query.filter(id == ^author.id)
|> Ash.Query.load(:full_name_with_nils)
|> Ash.Query.load(:full_name_with_nils_no_joiner)
|> Api.read_one!()
end
end
test "arguments with cast_in_query?: false are not cast" do
Post
|> Ash.Changeset.new(%{title: "match", score: 42})
|> Api.create!()
Post
|> Ash.Changeset.new(%{title: "not", score: 42})
|> Api.create!()
assert [post] =
Post
|> Ash.Query.filter(similarity(search: expr(query(search: "match"))))
|> Api.read!()
assert post.title == "match"
end
describe "string split expression" do
test "with the default delimiter" do
author =
Author
|> Ash.Changeset.for_create(:create, %{
first_name: "Bill",
last_name: "Jones",
bio: %{title: "Mr.", bio: "Bones"}
})
|> Api.create!()
assert %{
split_full_name: ["Bill", "Jones"]
} =
Author
|> Ash.Query.filter(id == ^author.id)
|> Ash.Query.load(:split_full_name)
|> Api.read_one!()
end
test "trimming whitespace" do
author =
Author
|> Ash.Changeset.for_create(:create, %{
first_name: "Bill ",
last_name: "Jones ",
bio: %{title: "Mr.", bio: "Bones"}
})
|> Api.create!()
assert %{
split_full_name_trim: ["Bill", "Jones"],
split_full_name: ["Bill", "Jones"]
} =
Author
|> Ash.Query.filter(id == ^author.id)
|> Ash.Query.load([:split_full_name_trim, :split_full_name])
|> Api.read_one!()
end
end
describe "-/1" do
test "makes numbers negative" do
Post
|> Ash.Changeset.new(%{title: "match", score: 42})
|> Api.create!()
assert [%{negative_score: -42}] =
Post
|> Ash.Query.load(:negative_score)
|> Api.read!()
end
end
describe "maps" do
test "maps can be constructed" do
Post
|> Ash.Changeset.new(%{title: "match", score: 42})
|> Api.create!()
assert [%{score_map: %{negative_score: %{foo: -42}}}] =
Post
|> Ash.Query.load(:score_map)
|> Api.read!()
end
end
describe "at/2" do
test "selects items by index" do
author =
Author
|> Ash.Changeset.for_create(:create, %{
first_name: "Bill ",
last_name: "Jones ",
bio: %{title: "Mr.", bio: "Bones"}
})
|> Api.create!()
assert %{
first_name_from_split: "Bill"
} =
Author
|> Ash.Query.filter(id == ^author.id)
|> Ash.Query.load([:first_name_from_split])
|> Api.read_one!()
end
end
test "dependent calc" do
post =
Post
|> Ash.Changeset.new(%{title: "match", price: 10_024})
|> Api.create!()
Post.get_by_id(post.id,
query: Post |> Ash.Query.select([:id]) |> Ash.Query.load([:price_string_with_currency_sign])
)
end
test "nested get_path works" do
assert "thing" =
Post
|> Ash.Changeset.new(%{title: "match", price: 10_024, stuff: %{foo: %{bar: "thing"}}})
|> Ash.Changeset.deselect(:stuff)
|> Api.create!()
|> Api.load!(:foo_bar_from_stuff)
|> Map.get(:foo_bar_from_stuff)
end
test "runtime expression calcs" do
author =
Author
|> Ash.Changeset.for_create(:create, %{
first_name: "Bill",
last_name: "Jones",
bio: %{title: "Mr.", bio: "Bones"}
})
|> Api.create!()
assert %AshSqlite.Test.Money{} =
Post
|> Ash.Changeset.new(%{title: "match", price: 10_024})
|> Ash.Changeset.manage_relationship(:author, author, type: :append_and_remove)
|> Api.create!()
|> Api.load!(:calc_returning_json)
|> Map.get(:calc_returning_json)
assert [%AshSqlite.Test.Money{}] =
author
|> Api.load!(posts: :calc_returning_json)
|> Map.get(:posts)
|> Enum.map(&Map.get(&1, :calc_returning_json))
end
end

15
test/constraint_test.exs Normal file
View file

@ -0,0 +1,15 @@
defmodule AshSqlite.ConstraintTest do
@moduledoc false
use AshSqlite.RepoCase, async: false
alias AshSqlite.Test.{Api, Post}
require Ash.Query
test "constraint messages are properly raised" do
assert_raise Ash.Error.Invalid, ~r/yo, bad price/, fn ->
Post
|> Ash.Changeset.new(%{title: "title", price: -1})
|> Api.create!()
end
end
end

View file

@ -0,0 +1,24 @@
defmodule AshSqlite.Test.CustomIndexTest do
use AshSqlite.RepoCase, async: false
alias AshSqlite.Test.{Api, Post}
require Ash.Query
test "unique constraint errors are properly caught" do
Post
|> Ash.Changeset.new(%{title: "first", uniq_custom_one: "what", uniq_custom_two: "what2"})
|> Api.create!()
assert_raise Ash.Error.Invalid,
~r/Invalid value provided for uniq_custom_one: dude what the heck/,
fn ->
Post
|> Ash.Changeset.new(%{
title: "first",
uniq_custom_one: "what",
uniq_custom_two: "what2"
})
|> Api.create!()
end
end
end

171
test/distinct_test.exs Normal file
View file

@ -0,0 +1,171 @@
defmodule AshSqlite.DistinctTest do
@moduledoc false
use AshSqlite.RepoCase, async: false
alias AshSqlite.Test.{Api, Post}
require Ash.Query
setup do
Post
|> Ash.Changeset.new(%{title: "title", score: 1})
|> Api.create!()
Post
|> Ash.Changeset.new(%{title: "title", score: 1})
|> Api.create!()
Post
|> Ash.Changeset.new(%{title: "foo", score: 2})
|> Api.create!()
Post
|> Ash.Changeset.new(%{title: "foo", score: 2})
|> Api.create!()
:ok
end
test "records returned are distinct on the provided field" do
results =
Post
|> Ash.Query.distinct(:title)
|> Ash.Query.sort(:title)
|> Api.read!()
assert [%{title: "foo"}, %{title: "title"}] = results
end
test "distinct pairs well with sort" do
results =
Post
|> Ash.Query.distinct(:title)
|> Ash.Query.sort(title: :desc)
|> Api.read!()
assert [%{title: "title"}, %{title: "foo"}] = results
end
test "distinct pairs well with sort that does not match the distinct" do
results =
Post
|> Ash.Query.distinct(:title)
|> Ash.Query.sort(id: :desc)
|> Ash.Query.limit(3)
|> Api.read!()
assert [_, _] = results
end
test "distinct pairs well with sort that does not match the distinct using a limit" do
results =
Post
|> Ash.Query.distinct(:title)
|> Ash.Query.sort(id: :desc)
|> Ash.Query.limit(3)
|> Api.read!()
assert [_, _] = results
end
test "distinct pairs well with sort that does not match the distinct using a limit #2" do
results =
Post
|> Ash.Query.distinct(:title)
|> Ash.Query.sort(id: :desc)
|> Ash.Query.limit(1)
|> Api.read!()
assert [_] = results
end
test "distinct can use calculations sort that does not match the distinct using a limit #2" do
results =
Post
|> Ash.Query.distinct(:negative_score)
|> Ash.Query.sort(:negative_score)
|> Ash.Query.load(:negative_score)
|> Api.read!()
assert [
%{title: "foo", negative_score: -2},
%{title: "title", negative_score: -1}
] = results
results =
Post
|> Ash.Query.distinct(:negative_score)
|> Ash.Query.sort(negative_score: :desc)
|> Ash.Query.load(:negative_score)
|> Api.read!()
assert [
%{title: "title", negative_score: -1},
%{title: "foo", negative_score: -2}
] = results
results =
Post
|> Ash.Query.distinct(:negative_score)
|> Ash.Query.sort(:title)
|> Ash.Query.load(:negative_score)
|> Api.read!()
assert [
%{title: "foo", negative_score: -2},
%{title: "title", negative_score: -1}
] = results
end
test "distinct, join filters and sort can be combined" do
Post
|> Ash.Changeset.new(%{title: "a", score: 2})
|> Api.create!()
Post
|> Ash.Changeset.new(%{title: "a", score: 1})
|> Api.create!()
assert [] =
Post
|> Ash.Query.distinct(:negative_score)
|> Ash.Query.filter(author.first_name == "a")
|> Ash.Query.sort(:negative_score)
|> Api.read!()
end
test "distinct sort is applied" do
Post
|> Ash.Changeset.new(%{title: "a", score: 2})
|> Api.create!()
Post
|> Ash.Changeset.new(%{title: "a", score: 1})
|> Api.create!()
results =
Post
|> Ash.Query.distinct(:negative_score)
|> Ash.Query.distinct_sort(:title)
|> Ash.Query.sort(:negative_score)
|> Ash.Query.load(:negative_score)
|> Api.read!()
assert [
%{title: "a", negative_score: -2},
%{title: "a", negative_score: -1}
] = results
results =
Post
|> Ash.Query.distinct(:negative_score)
|> Ash.Query.distinct_sort(title: :desc)
|> Ash.Query.sort(:negative_score)
|> Ash.Query.load(:negative_score)
|> Api.read!()
assert [
%{title: "foo", negative_score: -2},
%{title: "title", negative_score: -1}
] = results
end
end

View file

@ -0,0 +1,12 @@
defmodule AshSqlite.EctoCompatibilityTest do
use AshSqlite.RepoCase, async: false
require Ash.Query
test "call Ecto.Repo.insert! via Ash Repo" do
org =
%AshSqlite.Test.Organization{name: "The Org"}
|> AshSqlite.TestRepo.insert!()
assert org.name == "The Org"
end
end

View file

@ -0,0 +1,34 @@
defmodule AshSqlite.EmbeddableResourceTest do
@moduledoc false
use AshSqlite.RepoCase, async: false
alias AshSqlite.Test.{Api, Author, Bio, Post}
require Ash.Query
setup do
post =
Post
|> Ash.Changeset.new(%{title: "title"})
|> Api.create!()
%{post: post}
end
test "calculations can load json", %{post: post} do
assert %{calc_returning_json: %AshSqlite.Test.Money{amount: 100, currency: :usd}} =
Api.load!(post, :calc_returning_json)
end
test "embeds with list attributes set to nil are loaded as nil" do
post =
Author
|> Ash.Changeset.new(%{bio: %Bio{list_of_strings: nil}})
|> Api.create!()
assert is_nil(post.bio.list_of_strings)
post = Api.reload!(post)
assert is_nil(post.bio.list_of_strings)
end
end

13
test/enum_test.exs Normal file
View file

@ -0,0 +1,13 @@
defmodule AshSqlite.EnumTest do
@moduledoc false
use AshSqlite.RepoCase, async: false
alias AshSqlite.Test.{Api, Post}
require Ash.Query
test "valid values are properly inserted" do
Post
|> Ash.Changeset.new(%{title: "title", status: :open})
|> Api.create!()
end
end

850
test/filter_test.exs Normal file
View file

@ -0,0 +1,850 @@
defmodule AshSqlite.FilterTest do
use AshSqlite.RepoCase, async: false
alias AshSqlite.Test.{Api, Author, Comment, Post}
require Ash.Query
describe "with no filter applied" do
test "with no data" do
assert [] = Api.read!(Post)
end
test "with data" do
Post
|> Ash.Changeset.new(%{title: "title"})
|> Api.create!()
assert [%Post{title: "title"}] = Api.read!(Post)
end
end
describe "invalid uuid" do
test "with an invalid uuid, an invalid error is raised" do
assert_raise Ash.Error.Invalid, fn ->
Post
|> Ash.Query.filter(id == "foo")
|> Api.read!()
end
end
end
describe "with a simple filter applied" do
test "with no data" do
results =
Post
|> Ash.Query.filter(title == "title")
|> Api.read!()
assert [] = results
end
test "with data that matches" do
Post
|> Ash.Changeset.new(%{title: "title"})
|> Api.create!()
results =
Post
|> Ash.Query.filter(title == "title")
|> Api.read!()
assert [%Post{title: "title"}] = results
end
test "with some data that matches and some data that doesnt" do
Post
|> Ash.Changeset.new(%{title: "title"})
|> Api.create!()
results =
Post
|> Ash.Query.filter(title == "no_title")
|> Api.read!()
assert [] = results
end
test "with related data that doesn't match" do
post =
Post
|> Ash.Changeset.new(%{title: "title"})
|> Api.create!()
Comment
|> Ash.Changeset.new(%{title: "not match"})
|> Ash.Changeset.manage_relationship(:post, post, type: :append_and_remove)
|> Api.create!()
results =
Post
|> Ash.Query.filter(comments.title == "match")
|> Api.read!()
assert [] = results
end
test "with related data two steps away that matches" do
author =
Author
|> Ash.Changeset.new(%{first_name: "match"})
|> Api.create!()
post =
Post
|> Ash.Changeset.new(%{title: "title"})
|> Ash.Changeset.manage_relationship(:author, author, type: :append_and_remove)
|> Api.create!()
Post
|> Ash.Changeset.new(%{title: "title2"})
|> Ash.Changeset.manage_relationship(:linked_posts, [post], type: :append_and_remove)
|> Ash.Changeset.manage_relationship(:author, author, type: :append_and_remove)
|> Api.create!()
Comment
|> Ash.Changeset.new(%{title: "not match"})
|> Ash.Changeset.manage_relationship(:post, post, type: :append_and_remove)
|> Ash.Changeset.manage_relationship(:author, author, type: :append_and_remove)
|> Api.create!()
results =
Comment
|> Ash.Query.filter(author.posts.linked_posts.title == "title")
|> Api.read!()
assert [_] = results
end
test "with related data that does match" do
post =
Post
|> Ash.Changeset.new(%{title: "title"})
|> Api.create!()
Comment
|> Ash.Changeset.new(%{title: "match"})
|> Ash.Changeset.manage_relationship(:post, post, type: :append_and_remove)
|> Api.create!()
results =
Post
|> Ash.Query.filter(comments.title == "match")
|> Api.read!()
assert [%Post{title: "title"}] = results
end
test "with related data that does and doesn't match" do
post =
Post
|> Ash.Changeset.new(%{title: "title"})
|> Api.create!()
Comment
|> Ash.Changeset.new(%{title: "match"})
|> Ash.Changeset.manage_relationship(:post, post, type: :append_and_remove)
|> Api.create!()
Comment
|> Ash.Changeset.new(%{title: "not match"})
|> Ash.Changeset.manage_relationship(:post, post, type: :append_and_remove)
|> Api.create!()
results =
Post
|> Ash.Query.filter(comments.title == "match")
|> Api.read!()
assert [%Post{title: "title"}] = results
end
end
describe "in" do
test "it properly filters" do
Post
|> Ash.Changeset.new(%{title: "title"})
|> Api.create!()
Post
|> Ash.Changeset.new(%{title: "title1"})
|> Api.create!()
Post
|> Ash.Changeset.new(%{title: "title2"})
|> Api.create!()
assert [%Post{title: "title1"}, %Post{title: "title2"}] =
Post
|> Ash.Query.filter(title in ["title1", "title2"])
|> Ash.Query.sort(title: :asc)
|> Api.read!()
end
end
describe "with a boolean filter applied" do
test "with no data" do
results =
Post
|> Ash.Query.filter(title == "title" or score == 1)
|> Api.read!()
assert [] = results
end
test "with data that doesn't match" do
Post
|> Ash.Changeset.new(%{title: "no title", score: 2})
|> Api.create!()
results =
Post
|> Ash.Query.filter(title == "title" or score == 1)
|> Api.read!()
assert [] = results
end
test "with data that matches both conditions" do
Post
|> Ash.Changeset.new(%{title: "title", score: 0})
|> Api.create!()
Post
|> Ash.Changeset.new(%{score: 1, title: "nothing"})
|> Api.create!()
results =
Post
|> Ash.Query.filter(title == "title" or score == 1)
|> Api.read!()
|> Enum.sort_by(& &1.score)
assert [%Post{title: "title", score: 0}, %Post{title: "nothing", score: 1}] = results
end
test "with data that matches one condition and data that matches nothing" do
Post
|> Ash.Changeset.new(%{title: "title", score: 0})
|> Api.create!()
Post
|> Ash.Changeset.new(%{score: 2, title: "nothing"})
|> Api.create!()
results =
Post
|> Ash.Query.filter(title == "title" or score == 1)
|> Api.read!()
|> Enum.sort_by(& &1.score)
assert [%Post{title: "title", score: 0}] = results
end
test "with related data in an or statement that matches, while basic filter doesn't match" do
post =
Post
|> Ash.Changeset.new(%{title: "doesn't match"})
|> Api.create!()
Comment
|> Ash.Changeset.new(%{title: "match"})
|> Ash.Changeset.manage_relationship(:post, post, type: :append_and_remove)
|> Api.create!()
results =
Post
|> Ash.Query.filter(title == "match" or comments.title == "match")
|> Api.read!()
assert [%Post{title: "doesn't match"}] = results
end
test "with related data in an or statement that doesn't match, while basic filter does match" do
post =
Post
|> Ash.Changeset.new(%{title: "match"})
|> Api.create!()
Comment
|> Ash.Changeset.new(%{title: "doesn't match"})
|> Ash.Changeset.manage_relationship(:post, post, type: :append_and_remove)
|> Api.create!()
results =
Post
|> Ash.Query.filter(title == "match" or comments.title == "match")
|> Api.read!()
assert [%Post{title: "match"}] = results
end
test "with related data and an inner join condition" do
post =
Post
|> Ash.Changeset.new(%{title: "match"})
|> Api.create!()
Comment
|> Ash.Changeset.new(%{title: "match"})
|> Ash.Changeset.manage_relationship(:post, post, type: :append_and_remove)
|> Api.create!()
results =
Post
|> Ash.Query.filter(title == comments.title)
|> Api.read!()
assert [%Post{title: "match"}] = results
results =
Post
|> Ash.Query.filter(title != comments.title)
|> Api.read!()
assert [] = results
end
end
describe "accessing embeds" do
setup do
Author
|> Ash.Changeset.for_create(:create,
bio: %{title: "Dr.", bio: "Strange", years_of_experience: 10}
)
|> Api.create!()
Author
|> Ash.Changeset.for_create(:create,
bio: %{title: "Highlander", bio: "There can be only one."}
)
|> Api.create!()
:ok
end
test "works using simple equality" do
assert [%{bio: %{title: "Dr."}}] =
Author
|> Ash.Query.filter(bio[:title] == "Dr.")
|> Api.read!()
end
test "works using simple equality for integers" do
assert [%{bio: %{title: "Dr."}}] =
Author
|> Ash.Query.filter(bio[:years_of_experience] == 10)
|> Api.read!()
end
test "works using an expression" do
assert [%{bio: %{title: "Highlander"}}] =
Author
|> Ash.Query.filter(contains(type(bio[:bio], :string), "only one."))
|> Api.read!()
end
test "calculations that use embeds can be filtered on" do
assert [%{bio: %{title: "Dr."}}] =
Author
|> Ash.Query.filter(title == "Dr.")
|> Api.read!()
end
end
describe "basic expressions" do
test "basic expressions work" do
Post
|> Ash.Changeset.new(%{title: "match", score: 4})
|> Api.create!()
Post
|> Ash.Changeset.new(%{title: "non_match", score: 2})
|> Api.create!()
assert [%{title: "match"}] =
Post
|> Ash.Query.filter(score + 1 == 5)
|> Api.read!()
end
end
describe "case insensitive fields" do
test "it matches case insensitively" do
Post
|> Ash.Changeset.new(%{title: "match", category: "FoObAr"})
|> Api.create!()
Post
|> Ash.Changeset.new(%{category: "bazbuz"})
|> Api.create!()
assert [%{title: "match"}] =
Post
|> Ash.Query.filter(category == "fOoBaR")
|> Api.read!()
end
end
describe "contains/2" do
test "it works when it matches" do
Post
|> Ash.Changeset.new(%{title: "match"})
|> Api.create!()
Post
|> Ash.Changeset.new(%{title: "bazbuz"})
|> Api.create!()
assert [%{title: "match"}] =
Post
|> Ash.Query.filter(contains(title, "atc"))
|> Api.read!()
end
test "it works when a case insensitive string is provided as a value" do
Post
|> Ash.Changeset.new(%{title: "match"})
|> Api.create!()
Post
|> Ash.Changeset.new(%{title: "bazbuz"})
|> Api.create!()
assert [%{title: "match"}] =
Post
|> Ash.Query.filter(contains(title, ^%Ash.CiString{string: "ATC"}))
|> Api.read!()
end
test "it works on a case insensitive column" do
Post
|> Ash.Changeset.new(%{category: "match"})
|> Api.create!()
Post
|> Ash.Changeset.new(%{category: "bazbuz"})
|> Api.create!()
assert [%{category: %Ash.CiString{string: "match"}}] =
Post
|> Ash.Query.filter(contains(category, ^"ATC"))
|> Api.read!()
end
test "it works on a case insensitive calculation" do
Post
|> Ash.Changeset.new(%{category: "match"})
|> Api.create!()
Post
|> Ash.Changeset.new(%{category: "bazbuz"})
|> Api.create!()
assert [%{category: %Ash.CiString{string: "match"}}] =
Post
|> Ash.Query.filter(contains(category_label, ^"ATC"))
|> Api.read!()
end
test "it works on related values" do
post =
Post
|> Ash.Changeset.new(%{title: "match"})
|> Api.create!()
Comment
|> Ash.Changeset.new(%{title: "abba"})
|> Ash.Changeset.manage_relationship(:post, post, type: :append_and_remove)
|> Api.create!()
post2 =
Post
|> Ash.Changeset.new(%{title: "no_match"})
|> Api.create!()
Comment
|> Ash.Changeset.new(%{title: "acca"})
|> Ash.Changeset.manage_relationship(:post, post2, type: :append_and_remove)
|> Api.create!()
assert [%{title: "match"}] =
Post
|> Ash.Query.filter(contains(comments.title, ^"bb"))
|> Api.read!()
end
end
describe "length/1" do
test "it works with a list attribute" do
author1 =
Author
|> Ash.Changeset.new(%{badges: [:author_of_the_year]})
|> Api.create!()
_author2 =
Author
|> Ash.Changeset.new(%{badges: []})
|> Api.create!()
author1_id = author1.id
assert [%{id: ^author1_id}] =
Author
|> Ash.Query.filter(length(badges) > 0)
|> Api.read!()
end
test "it works with nil" do
author1 =
Author
|> Ash.Changeset.new(%{badges: [:author_of_the_year]})
|> Api.create!()
_author2 =
Author
|> Ash.Changeset.new()
|> Api.create!()
author1_id = author1.id
assert [%{id: ^author1_id}] =
Author
|> Ash.Query.filter(length(badges || []) > 0)
|> Api.read!()
end
test "it works with a list" do
author1 =
Author
|> Ash.Changeset.new()
|> Api.create!()
author1_id = author1.id
explicit_list = [:foo]
assert [%{id: ^author1_id}] =
Author
|> Ash.Query.filter(length(^explicit_list) > 0)
|> Api.read!()
assert [] =
Author
|> Ash.Query.filter(length(^explicit_list) > 1)
|> Api.read!()
end
test "it raises with bad values" do
Author
|> Ash.Changeset.new()
|> Api.create!()
assert_raise(Ash.Error.Unknown, fn ->
Author
|> Ash.Query.filter(length(first_name) > 0)
|> Api.read!()
end)
end
end
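# `length/1` only applies to list values: `badges || []` supplies a default so
# nil lists compare as empty, and non-list inputs (like `first_name`) raise.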
describe "exists/2" do
test "it works with single relationships" do
post =
Post
|> Ash.Changeset.new(%{title: "match"})
|> Api.create!()
Comment
|> Ash.Changeset.new(%{title: "abba"})
|> Ash.Changeset.manage_relationship(:post, post, type: :append_and_remove)
|> Api.create!()
post2 =
Post
|> Ash.Changeset.new(%{title: "no_match"})
|> Api.create!()
Comment
|> Ash.Changeset.new(%{title: "acca"})
|> Ash.Changeset.manage_relationship(:post, post2, type: :append_and_remove)
|> Api.create!()
assert [%{title: "match"}] =
Post
|> Ash.Query.filter(exists(comments, title == ^"abba"))
|> Api.read!()
end
test "it works with many to many relationships" do
post =
Post
|> Ash.Changeset.new(%{title: "a"})
|> Api.create!()
Post
|> Ash.Changeset.new(%{title: "b"})
|> Ash.Changeset.manage_relationship(:linked_posts, [post], type: :append_and_remove)
|> Api.create!()
assert [%{title: "b"}] =
Post
|> Ash.Query.filter(exists(linked_posts, title == ^"a"))
|> Api.read!()
end
test "it works with join association relationships" do
post =
Post
|> Ash.Changeset.new(%{title: "a"})
|> Api.create!()
Post
|> Ash.Changeset.new(%{title: "b"})
|> Ash.Changeset.manage_relationship(:linked_posts, [post], type: :append_and_remove)
|> Api.create!()
assert [%{title: "b"}] =
Post
|> Ash.Query.filter(exists(linked_posts, title == ^"a"))
|> Api.read!()
end
test "it works with nested relationships as the path" do
post =
Post
|> Ash.Changeset.new(%{title: "a"})
|> Api.create!()
Comment
|> Ash.Changeset.new(%{title: "comment"})
|> Ash.Changeset.manage_relationship(:post, post, type: :append_and_remove)
|> Api.create!()
Post
|> Ash.Changeset.new(%{title: "b"})
|> Ash.Changeset.manage_relationship(:linked_posts, [post], type: :append_and_remove)
|> Api.create!()
assert [%{title: "b"}] =
Post
|> Ash.Query.filter(exists(linked_posts.comments, title == ^"comment"))
|> Api.read!()
end
test "it works with an `at_path`" do
post =
Post
|> Ash.Changeset.new(%{title: "a"})
|> Api.create!()
other_post =
Post
|> Ash.Changeset.new(%{title: "other_a"})
|> Api.create!()
Comment
|> Ash.Changeset.new(%{title: "comment"})
|> Ash.Changeset.manage_relationship(:post, post, type: :append_and_remove)
|> Api.create!()
Comment
|> Ash.Changeset.new(%{title: "comment"})
|> Ash.Changeset.manage_relationship(:post, other_post, type: :append_and_remove)
|> Api.create!()
Post
|> Ash.Changeset.new(%{title: "b"})
|> Ash.Changeset.manage_relationship(:linked_posts, [post], type: :append_and_remove)
|> Api.create!()
Post
|> Ash.Changeset.new(%{title: "b"})
|> Ash.Changeset.manage_relationship(:linked_posts, [other_post], type: :append_and_remove)
|> Api.create!()
assert [%{title: "b"}] =
Post
|> Ash.Query.filter(
linked_posts.title == "a" and
linked_posts.exists(comments, title == ^"comment")
)
|> Api.read!()
assert [%{title: "b"}] =
Post
|> Ash.Query.filter(
linked_posts.title == "a" and
linked_posts.exists(comments, title == ^"comment")
)
|> Api.read!()
end
test "it works with nested relationships inside of exists" do
post =
Post
|> Ash.Changeset.new(%{title: "a"})
|> Api.create!()
Comment
|> Ash.Changeset.new(%{title: "comment"})
|> Ash.Changeset.manage_relationship(:post, post, type: :append_and_remove)
|> Api.create!()
Post
|> Ash.Changeset.new(%{title: "b"})
|> Ash.Changeset.manage_relationship(:linked_posts, [post], type: :append_and_remove)
|> Api.create!()
assert [%{title: "b"}] =
Post
|> Ash.Query.filter(exists(linked_posts, comments.title == ^"comment"))
|> Api.read!()
end
end
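# `exists/2` is a semi-join: it asks whether at least one related row matches
# the condition, without duplicating parent rows, and it can be anchored
# part-way down a relationship path (`linked_posts.exists(...)`).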
describe "filtering on enum types" do
test "it allows simple filtering" do
Post
|> Ash.Changeset.new(%{status_enum: "open"})
|> Api.create!()
assert %{status_enum: :open} =
Post
|> Ash.Query.filter(status_enum == ^"open")
|> Api.read_one!()
end
test "it allows simple filtering without casting" do
Post
|> Ash.Changeset.new(%{status_enum_no_cast: "open"})
|> Api.create!()
assert %{status_enum_no_cast: :open} =
Post
|> Ash.Query.filter(status_enum_no_cast == ^"open")
|> Api.read_one!()
end
end
describe "atom filters" do
test "it works on matches" do
Post
|> Ash.Changeset.new(%{title: "match"})
|> Api.create!()
result =
Post
|> Ash.Query.filter(type == :sponsored)
|> Api.read!()
assert [%Post{title: "match"}] = result
end
end
describe "like and ilike" do
test "like builds and matches" do
Post
|> Ash.Changeset.new(%{title: "MaTcH"})
|> Api.create!()
results =
Post
|> Ash.Query.filter(like(title, "%aTc%"))
|> Api.read!()
assert [%Post{title: "MaTcH"}] = results
results =
Post
|> Ash.Query.filter(like(title, "%atc%"))
|> Api.read!()
assert [] = results
end
test "ilike builds and matches" do
Post
|> Ash.Changeset.new(%{title: "MaTcH"})
|> Api.create!()
results =
Post
|> Ash.Query.filter(ilike(title, "%aTc%"))
|> Api.read!()
assert [%Post{title: "MaTcH"}] = results
results =
Post
|> Ash.Query.filter(ilike(title, "%atc%"))
|> Api.read!()
assert [%Post{title: "MaTcH"}] = results
end
end
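# `like/2` matches patterns case-sensitively, while `ilike/2` applies the same
# pattern case-insensitively, as the two assertions above demonstrate.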
describe "trigram_similarity" do
test "it works on matches" do
Post
|> Ash.Changeset.new(%{title: "match"})
|> Api.create!()
results =
Post
|> Ash.Query.filter(trigram_similarity(title, "match") > 0.9)
|> Api.read!()
assert [%Post{title: "match"}] = results
end
test "it works on non-matches" do
Post
|> Ash.Changeset.new(%{title: "match"})
|> Api.create!()
results =
Post
|> Ash.Query.filter(trigram_similarity(title, "match") < 0.1)
|> Api.read!()
assert [] = results
end
end
describe "fragments" do
test "double replacement works" do
post =
Post
|> Ash.Changeset.new(%{title: "match", score: 4})
|> Api.create!()
Post
|> Ash.Changeset.new(%{title: "non_match", score: 2})
|> Api.create!()
assert [%{title: "match"}] =
Post
|> Ash.Query.filter(fragment("? = ?", title, ^post.title))
|> Api.read!()
assert [] =
Post
|> Ash.Query.filter(fragment("? = ?", title, "nope"))
|> Api.read!()
end
end
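# Each `?` in a fragment is a positional placeholder, filled in order by the
# arguments that follow the format string; attribute refs (`title`) and pinned
# values (`^post.title`) can be mixed freely.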
describe "filtering on relationships that themselves have filters" do
test "it doesn't raise an error" do
Comment
|> Ash.Query.filter(not is_nil(popular_ratings.id))
|> Api.read!()
end
test "it doesn't raise an error when nested" do
Post
|> Ash.Query.filter(not is_nil(comments.popular_ratings.id))
|> Api.read!()
end
end
end

245
test/load_test.exs Normal file

@ -0,0 +1,245 @@
defmodule AshSqlite.Test.LoadTest do
use AshSqlite.RepoCase, async: false
alias AshSqlite.Test.{Api, Comment, Post}
require Ash.Query
test "has_many relationships can be loaded" do
assert %Post{comments: %Ash.NotLoaded{type: :relationship}} =
post =
Post
|> Ash.Changeset.new(%{title: "title"})
|> Api.create!()
Comment
|> Ash.Changeset.new(%{title: "match"})
|> Ash.Changeset.manage_relationship(:post, post, type: :append_and_remove)
|> Api.create!()
results =
Post
|> Ash.Query.load(:comments)
|> Api.read!()
assert [%Post{comments: [%{title: "match"}]}] = results
end
test "belongs_to relationships can be loaded" do
assert %Comment{post: %Ash.NotLoaded{type: :relationship}} =
comment =
Comment
|> Ash.Changeset.new(%{})
|> Api.create!()
Post
|> Ash.Changeset.new(%{title: "match"})
|> Ash.Changeset.manage_relationship(:comments, [comment], type: :append_and_remove)
|> Api.create!()
results =
Comment
|> Ash.Query.load(:post)
|> Api.read!()
assert [%Comment{post: %{title: "match"}}] = results
end
test "many_to_many loads work" do
source_post =
Post
|> Ash.Changeset.new(%{title: "source"})
|> Api.create!()
destination_post =
Post
|> Ash.Changeset.new(%{title: "destination"})
|> Api.create!()
destination_post2 =
Post
|> Ash.Changeset.new(%{title: "destination"})
|> Api.create!()
source_post
|> Ash.Changeset.new()
|> Ash.Changeset.manage_relationship(:linked_posts, [destination_post, destination_post2],
type: :append_and_remove
)
|> Api.update!()
results =
source_post
|> Api.load!(:linked_posts)
assert %{linked_posts: [%{title: "destination"}, %{title: "destination"}]} = results
end
test "many_to_many loads work when nested" do
source_post =
Post
|> Ash.Changeset.new(%{title: "source"})
|> Api.create!()
destination_post =
Post
|> Ash.Changeset.new(%{title: "destination"})
|> Api.create!()
source_post
|> Ash.Changeset.new()
|> Ash.Changeset.manage_relationship(:linked_posts, [destination_post],
type: :append_and_remove
)
|> Api.update!()
destination_post
|> Ash.Changeset.new()
|> Ash.Changeset.manage_relationship(:linked_posts, [source_post], type: :append_and_remove)
|> Api.update!()
results =
source_post
|> Api.load!(linked_posts: :linked_posts)
assert %{linked_posts: [%{title: "destination", linked_posts: [%{title: "source"}]}]} =
results
end
describe "lateral join loads" do
test "parent references are resolved" do
post1 =
Post
|> Ash.Changeset.new(%{title: "title"})
|> Api.create!()
post2 =
Post
|> Ash.Changeset.new(%{title: "title"})
|> Api.create!()
post2_id = post2.id
post3 =
Post
|> Ash.Changeset.new(%{title: "no match"})
|> Api.create!()
assert [%{posts_with_matching_title: [%{id: ^post2_id}]}] =
Post
|> Ash.Query.load(:posts_with_matching_title)
|> Ash.Query.filter(id == ^post1.id)
|> Api.read!()
assert [%{posts_with_matching_title: []}] =
Post
|> Ash.Query.load(:posts_with_matching_title)
|> Ash.Query.filter(id == ^post3.id)
|> Api.read!()
end
test "parent references work when joining for filters" do
%{id: post1_id} =
Post
|> Ash.Changeset.new(%{title: "title"})
|> Api.create!()
post2 =
Post
|> Ash.Changeset.new(%{title: "title"})
|> Api.create!()
Post
|> Ash.Changeset.new(%{title: "no match"})
|> Api.create!()
Post
|> Ash.Changeset.new(%{title: "no match"})
|> Api.create!()
assert [%{id: ^post1_id}] =
Post
|> Ash.Query.filter(posts_with_matching_title.id == ^post2.id)
|> Api.read!()
end
test "lateral join loads (loads with limits or offsets) are supported" do
assert %Post{comments: %Ash.NotLoaded{type: :relationship}} =
post =
Post
|> Ash.Changeset.new(%{title: "title"})
|> Api.create!()
Comment
|> Ash.Changeset.new(%{title: "abc"})
|> Ash.Changeset.manage_relationship(:post, post, type: :append_and_remove)
|> Api.create!()
Comment
|> Ash.Changeset.new(%{title: "def"})
|> Ash.Changeset.manage_relationship(:post, post, type: :append_and_remove)
|> Api.create!()
comments_query =
Comment
|> Ash.Query.limit(1)
|> Ash.Query.sort(:title)
results =
Post
|> Ash.Query.load(comments: comments_query)
|> Api.read!()
assert [%Post{comments: [%{title: "abc"}]}] = results
comments_query =
Comment
|> Ash.Query.limit(1)
|> Ash.Query.sort(title: :desc)
results =
Post
|> Ash.Query.load(comments: comments_query)
|> Api.read!()
assert [%Post{comments: [%{title: "def"}]}] = results
comments_query =
Comment
|> Ash.Query.limit(2)
|> Ash.Query.sort(title: :desc)
results =
Post
|> Ash.Query.load(comments: comments_query)
|> Api.read!()
assert [%Post{comments: [%{title: "def"}, %{title: "abc"}]}] = results
end
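# A limit or offset on a relationship load applies per parent record, so it
# cannot be expressed as one flat join; hence the lateral-join style queries
# exercised in the test above.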
test "loading many to many relationships on records works without loading its join relationship when using code interface" do
source_post =
Post
|> Ash.Changeset.new(%{title: "source"})
|> Api.create!()
destination_post =
Post
|> Ash.Changeset.new(%{title: "abc"})
|> Api.create!()
destination_post2 =
Post
|> Ash.Changeset.new(%{title: "def"})
|> Api.create!()
source_post
|> Ash.Changeset.new()
|> Ash.Changeset.manage_relationship(:linked_posts, [destination_post, destination_post2],
type: :append_and_remove
)
|> Api.update!()
assert %{linked_posts: [_, _]} = Post.get_by_id!(source_post.id, load: [:linked_posts])
end
end
end

57
test/lock_test.exs Normal file

@ -0,0 +1,57 @@
defmodule AshSqlite.Test.LockTest do
use AshSqlite.RepoCase, async: false
alias AshSqlite.Test.{Api, Post}
require Ash.Query
setup do
Application.put_env(:ash, :disable_async?, true)
on_exit(fn ->
Application.put_env(:ash, :disable_async?, false)
AshSqlite.TestNoSandboxRepo.delete_all(Post)
end)
end
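# These tests bypass the sandbox (AshSqlite.TestNoSandboxRepo) so that two
# concurrent transactions contend for real locks, which is why async is
# disabled and rows are cleaned up manually on exit.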
test "lock conflicts raise appropriate errors" do
post =
Post
|> Ash.Changeset.for_create(:create, %{title: "locked"})
|> Ash.Changeset.set_context(%{data_layer: %{repo: AshSqlite.TestNoSandboxRepo}})
|> Api.create!()
task1 =
Task.async(fn ->
AshSqlite.TestNoSandboxRepo.transaction(fn ->
Post
|> Ash.Query.lock("FOR UPDATE NOWAIT")
|> Ash.Query.set_context(%{data_layer: %{repo: AshSqlite.TestNoSandboxRepo}})
|> Ash.Query.filter(id == ^post.id)
|> Api.read!()
:timer.sleep(1000)
:ok
end)
end)
task2 =
Task.async(fn ->
try do
AshSqlite.TestNoSandboxRepo.transaction(fn ->
:timer.sleep(100)
Post
|> Ash.Query.lock("FOR UPDATE NOWAIT")
|> Ash.Query.set_context(%{data_layer: %{repo: AshSqlite.TestNoSandboxRepo}})
|> Ash.Query.filter(id == ^post.id)
|> Api.read!()
end)
rescue
e ->
{:error, e}
end
end)
assert [{:ok, :ok}, {:error, %Ash.Error.Invalid{errors: [%Ash.Error.Invalid.Unavailable{}]}}] =
Task.await_many([task1, task2], :infinity)
end
end


@ -0,0 +1,116 @@
defmodule AshSqlite.Test.ManualRelationshipsTest do
use AshSqlite.RepoCase, async: false
alias AshSqlite.Test.{Api, Comment, Post}
require Ash.Query
describe "manual first" do
test "relationships can be filtered on with no data" do
Post
|> Ash.Changeset.new(%{title: "title"})
|> Api.create!()
assert [] =
Post |> Ash.Query.filter(comments_containing_title.title == "title") |> Api.read!()
end
test "relationships can be filtered on with data" do
post =
Post
|> Ash.Changeset.new(%{title: "title"})
|> Api.create!()
Comment
|> Ash.Changeset.new(%{title: "title2"})
|> Api.create!()
Comment
|> Ash.Changeset.new(%{title: "title2"})
|> Ash.Changeset.manage_relationship(:post, post, type: :append_and_remove)
|> Api.create!()
Comment
|> Ash.Changeset.new(%{title: "no match"})
|> Ash.Changeset.manage_relationship(:post, post, type: :append_and_remove)
|> Api.create!()
assert [_] =
Post
|> Ash.Query.filter(comments_containing_title.title == "title2")
|> Api.read!()
end
end
describe "manual last" do
test "relationships can be filtered on with no data" do
post =
Post
|> Ash.Changeset.new(%{title: "title"})
|> Api.create!()
Comment
|> Ash.Changeset.new(%{title: "no match"})
|> Ash.Changeset.manage_relationship(:post, post, type: :append_and_remove)
|> Api.create!()
assert [] =
Comment
|> Ash.Query.filter(post.comments_containing_title.title == "title2")
|> Api.read!()
end
test "relationships can be filtered on with data" do
post =
Post
|> Ash.Changeset.new(%{title: "title"})
|> Api.create!()
Comment
|> Ash.Changeset.new(%{title: "title2"})
|> Api.create!()
Comment
|> Ash.Changeset.new(%{title: "title2"})
|> Ash.Changeset.manage_relationship(:post, post, type: :append_and_remove)
|> Api.create!()
Comment
|> Ash.Changeset.new(%{title: "no match"})
|> Ash.Changeset.manage_relationship(:post, post, type: :append_and_remove)
|> Api.create!()
assert [_, _] =
Comment
|> Ash.Query.filter(post.comments_containing_title.title == "title2")
|> Api.read!()
end
end
describe "manual middle" do
test "relationships can be filtered on with data" do
post =
Post
|> Ash.Changeset.new(%{title: "title"})
|> Api.create!()
Comment
|> Ash.Changeset.new(%{title: "title2"})
|> Api.create!()
Comment
|> Ash.Changeset.new(%{title: "title2"})
|> Ash.Changeset.manage_relationship(:post, post, type: :append_and_remove)
|> Api.create!()
Comment
|> Ash.Changeset.new(%{title: "no match"})
|> Ash.Changeset.manage_relationship(:post, post, type: :append_and_remove)
|> Api.create!()
assert [_, _] =
Comment
|> Ash.Query.filter(post.comments_containing_title.post.title == "title")
|> Api.read!()
end
end
end

File diff suppressed because it is too large


@ -0,0 +1,29 @@
defmodule AshSqlite.PolymorphismTest do
use AshSqlite.RepoCase, async: false
alias AshSqlite.Test.{Api, Post, Rating}
require Ash.Query
test "you can create related data" do
Post
|> Ash.Changeset.for_create(:create, rating: %{score: 10})
|> Api.create!()
assert [%{score: 10}] =
Rating
|> Ash.Query.set_context(%{data_layer: %{table: "post_ratings"}})
|> Api.read!()
end
test "you can read related data" do
Post
|> Ash.Changeset.for_create(:create, rating: %{score: 10})
|> Api.create!()
assert [%{score: 10}] =
Post
|> Ash.Query.load(:ratings)
|> Api.read_one!()
|> Map.get(:ratings)
end
end

51
test/primary_key_test.exs Normal file

@ -0,0 +1,51 @@
defmodule AshSqlite.Test.PrimaryKeyTest do
@moduledoc false
use AshSqlite.RepoCase, async: false
alias AshSqlite.Test.{Api, IntegerPost, Post, PostView}
require Ash.Query
test "creates record with integer primary key" do
assert %IntegerPost{} = IntegerPost |> Ash.Changeset.new(%{title: "title"}) |> Api.create!()
end
test "creates record with uuid primary key" do
assert %Post{} = Post |> Ash.Changeset.new(%{title: "title"}) |> Api.create!()
end
describe "resources without a primary key" do
test "records can be created" do
post =
Post
|> Ash.Changeset.for_action(:create, %{title: "not very interesting"})
|> Api.create!()
assert {:ok, view} =
PostView
|> Ash.Changeset.for_action(:create, %{browser: :firefox, post_id: post.id})
|> Api.create()
assert view.browser == :firefox
assert view.post_id == post.id
assert DateTime.diff(DateTime.utc_now(), view.time, :microsecond) < 1_000_000
end
test "records can be queried" do
post =
Post
|> Ash.Changeset.for_action(:create, %{title: "not very interesting"})
|> Api.create!()
expected =
PostView
|> Ash.Changeset.for_action(:create, %{browser: :firefox, post_id: post.id})
|> Api.create!()
assert {:ok, [actual]} = Api.read(PostView)
assert actual.time == expected.time
assert actual.browser == expected.browser
assert actual.post_id == expected.post_id
end
end
end

15
test/select_test.exs Normal file

@ -0,0 +1,15 @@
defmodule AshSqlite.SelectTest do
@moduledoc false
use AshSqlite.RepoCase, async: false
alias AshSqlite.Test.{Api, Post}
require Ash.Query
test "values not selected in the query are not present in the response" do
Post
|> Ash.Changeset.new(%{title: "title"})
|> Api.create!()
assert [%{title: nil}] = Api.read!(Ash.Query.select(Post, :id))
end
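# Only the selected columns are projected in SQL; attributes left out of the
# select come back as nil on the loaded structs.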
end

175
test/sort_test.exs Normal file

@ -0,0 +1,175 @@
defmodule AshSqlite.SortTest do
@moduledoc false
use AshSqlite.RepoCase, async: false
alias AshSqlite.Test.{Api, Comment, Post, PostLink}
require Ash.Query
test "multi-column sorts work" do
Post
|> Ash.Changeset.new(%{title: "aaa", score: 0})
|> Api.create!()
Post
|> Ash.Changeset.new(%{title: "aaa", score: 1})
|> Api.create!()
Post
|> Ash.Changeset.new(%{title: "bbb", score: 0})
|> Api.create!()
assert [
%{title: "aaa", score: 0},
%{title: "aaa", score: 1},
%{title: "bbb"}
] =
Api.read!(
Post
|> Ash.Query.sort(title: :asc, score: :asc)
)
end
test "multi-column sorts work on inclusion" do
post =
Post
|> Ash.Changeset.new(%{title: "aaa", score: 0})
|> Api.create!()
Post
|> Ash.Changeset.new(%{title: "aaa", score: 1})
|> Api.create!()
Post
|> Ash.Changeset.new(%{title: "bbb", score: 0})
|> Api.create!()
Comment
|> Ash.Changeset.new(%{title: "aaa", likes: 1})
|> Ash.Changeset.manage_relationship(:post, post, type: :append_and_remove)
|> Api.create!()
Comment
|> Ash.Changeset.new(%{title: "bbb", likes: 1})
|> Ash.Changeset.manage_relationship(:post, post, type: :append_and_remove)
|> Api.create!()
Comment
|> Ash.Changeset.new(%{title: "aaa", likes: 2})
|> Ash.Changeset.manage_relationship(:post, post, type: :append_and_remove)
|> Api.create!()
posts =
Post
|> Ash.Query.load(
comments:
Comment
|> Ash.Query.sort([:title, :likes])
|> Ash.Query.select([:title, :likes])
|> Ash.Query.limit(1)
)
|> Ash.Query.sort([:title, :score])
|> Api.read!()
assert [
%{title: "aaa", comments: [%{title: "aaa"}]},
%{title: "aaa"},
%{title: "bbb"}
] = posts
end
test "multicolumn sort works with a select statement" do
Post
|> Ash.Changeset.new(%{title: "aaa", score: 0})
|> Api.create!()
Post
|> Ash.Changeset.new(%{title: "aaa", score: 1})
|> Api.create!()
Post
|> Ash.Changeset.new(%{title: "bbb", score: 0})
|> Api.create!()
assert [
%{title: "aaa", score: 0},
%{title: "aaa", score: 1},
%{title: "bbb"}
] =
Api.read!(
Post
|> Ash.Query.sort(title: :asc, score: :asc)
|> Ash.Query.select([:title, :score])
)
end
test "sorting when joining to a many to many relationship sorts properly" do
post1 =
Post
|> Ash.Changeset.new(%{title: "aaa", score: 0})
|> Api.create!()
post2 =
Post
|> Ash.Changeset.new(%{title: "bbb", score: 1})
|> Api.create!()
post3 =
Post
|> Ash.Changeset.new(%{title: "ccc", score: 0})
|> Api.create!()
PostLink
|> Ash.Changeset.new()
|> Ash.Changeset.manage_relationship(:source_post, post1, type: :append)
|> Ash.Changeset.manage_relationship(:destination_post, post3, type: :append)
|> Api.create!()
PostLink
|> Ash.Changeset.new()
|> Ash.Changeset.manage_relationship(:source_post, post2, type: :append)
|> Ash.Changeset.manage_relationship(:destination_post, post2, type: :append)
|> Api.create!()
PostLink
|> Ash.Changeset.new()
|> Ash.Changeset.manage_relationship(:source_post, post3, type: :append)
|> Ash.Changeset.manage_relationship(:destination_post, post1, type: :append)
|> Api.create!()
assert [
%{title: "aaa"},
%{title: "bbb"},
%{title: "ccc"}
] =
Api.read!(
Post
|> Ash.Query.sort(title: :asc)
|> Ash.Query.filter(linked_posts.title in ["aaa", "bbb", "ccc"])
)
assert [
%{title: "ccc"},
%{title: "bbb"},
%{title: "aaa"}
] =
Api.read!(
Post
|> Ash.Query.sort(title: :desc)
|> Ash.Query.filter(linked_posts.title in ["aaa", "bbb", "ccc"] or title == "aaa")
)
assert [
%{title: "ccc"},
%{title: "bbb"},
%{title: "aaa"}
] =
Api.read!(
Post
|> Ash.Query.sort(title: :desc)
|> Ash.Query.filter(
linked_posts.title in ["aaa", "bbb", "ccc"] or
post_links.source_post_id == ^post2.id
)
)
end
end

8
test/support/api.ex Normal file

@ -0,0 +1,8 @@
defmodule AshSqlite.Test.Api do
@moduledoc false
use Ash.Api
resources do
registry(AshSqlite.Test.Registry)
end
end

35
test/support/concat.ex Normal file

@ -0,0 +1,35 @@
defmodule AshSqlite.Test.Concat do
@moduledoc false
use Ash.Calculation
require Ash.Query
def init(opts) do
if opts[:keys] && is_list(opts[:keys]) && Enum.all?(opts[:keys], &is_atom/1) do
{:ok, opts}
else
{:error, "Expected a `keys` option for which keys to concat"}
end
end
def expression(opts, %{separator: separator}) do
Enum.reduce(opts[:keys], nil, fn key, expr ->
if expr do
if separator do
Ash.Query.expr(^expr <> ^separator <> ref(^key))
else
Ash.Query.expr(^expr <> ref(^key))
end
else
Ash.Query.expr(ref(^key))
end
end)
end
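# calculate/3 is the in-Elixir fallback used when the expression cannot run in
# the data layer (for example, on records already in memory). Resources opt in
# with `{AshSqlite.Test.Concat, keys: [...]}` plus a `:separator` argument, as
# Author's `:param_full_name` calculation does.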
def calculate(records, opts, %{separator: separator}) do
Enum.map(records, fn record ->
Enum.map_join(opts[:keys], separator, fn key ->
to_string(Map.get(record, key))
end)
end)
end
end

19
test/support/registry.ex Normal file

@ -0,0 +1,19 @@
defmodule AshSqlite.Test.Registry do
@moduledoc false
use Ash.Registry
entries do
entry(AshSqlite.Test.Post)
entry(AshSqlite.Test.Comment)
entry(AshSqlite.Test.IntegerPost)
entry(AshSqlite.Test.Rating)
entry(AshSqlite.Test.PostLink)
entry(AshSqlite.Test.PostView)
entry(AshSqlite.Test.Author)
entry(AshSqlite.Test.Profile)
entry(AshSqlite.Test.User)
entry(AshSqlite.Test.Account)
entry(AshSqlite.Test.Organization)
entry(AshSqlite.Test.Manager)
end
end


@ -0,0 +1,48 @@
defmodule AshSqlite.Test.Post.CommentsContainingTitle do
@moduledoc false
use Ash.Resource.ManualRelationship
use AshSqlite.ManualRelationship
require Ash.Query
require Ecto.Query
def load(posts, _opts, %{query: query, actor: actor, authorize?: authorize?}) do
post_ids = Enum.map(posts, & &1.id)
{:ok,
query
|> Ash.Query.filter(post_id in ^post_ids)
|> Ash.Query.filter(contains(title, post.title))
|> AshSqlite.Test.Api.read!(actor: actor, authorize?: authorize?)
|> Enum.group_by(& &1.post_id)}
end
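# The callbacks below let this manual relationship participate in joins and
# filters; instr(haystack, needle) returns the 1-based position of needle
# (0 when absent), so `instr(...) > 0` expresses "title contains post title".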
def ash_sqlite_join(query, _opts, current_binding, as_binding, :inner, destination_query) do
{:ok,
Ecto.Query.from(_ in query,
join: dest in ^destination_query,
as: ^as_binding,
on: dest.post_id == as(^current_binding).id,
on: fragment("strpos(?, ?) > 0", dest.title, as(^current_binding).title)
)}
end
def ash_sqlite_join(query, _opts, current_binding, as_binding, :left, destination_query) do
{:ok,
Ecto.Query.from(_ in query,
left_join: dest in ^destination_query,
as: ^as_binding,
on: dest.post_id == as(^current_binding).id,
on: fragment("strpos(?, ?) > 0", dest.title, as(^current_binding).title)
)}
end
def ash_sqlite_subquery(_opts, current_binding, as_binding, destination_query) do
{:ok,
Ecto.Query.from(_ in destination_query,
where: parent_as(^current_binding).id == as(^as_binding).post_id,
where:
fragment("strpos(?, ?) > 0", as(^as_binding).title, parent_as(^current_binding).title)
)}
end
end

28
test/support/repo_case.ex Normal file

@ -0,0 +1,28 @@
defmodule AshSqlite.RepoCase do
@moduledoc false
use ExUnit.CaseTemplate
alias Ecto.Adapters.SQL.Sandbox
using do
quote do
alias AshSqlite.TestRepo
import Ecto
import Ecto.Query
import AshSqlite.RepoCase
# and any other stuff
end
end
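# Every test checks out its own sandboxed connection; non-async tests switch
# the sandbox to shared mode so processes spawned by the test reuse it.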
setup tags do
:ok = Sandbox.checkout(AshSqlite.TestRepo)
unless tags[:async] do
Sandbox.mode(AshSqlite.TestRepo, {:shared, self()})
end
:ok
end
end


@ -0,0 +1,30 @@
defmodule AshSqlite.Test.Account do
@moduledoc false
use Ash.Resource, data_layer: AshSqlite.DataLayer
actions do
defaults([:create, :read, :update, :destroy])
end
attributes do
uuid_primary_key(:id)
attribute(:is_active, :boolean)
end
calculations do
calculate(
:active,
:boolean,
expr(is_active)
)
end
sqlite do
table "accounts"
repo(AshSqlite.TestRepo)
end
relationships do
belongs_to(:user, AshSqlite.Test.User)
end
end


@ -0,0 +1,80 @@
defmodule AshSqlite.Test.Author do
@moduledoc false
use Ash.Resource,
data_layer: AshSqlite.DataLayer
sqlite do
table("authors")
repo(AshSqlite.TestRepo)
end
attributes do
uuid_primary_key(:id, writable?: true)
attribute(:first_name, :string)
attribute(:last_name, :string)
attribute(:bio, AshSqlite.Test.Bio)
attribute(:badges, {:array, :atom})
end
actions do
defaults([:create, :read, :update, :destroy])
end
relationships do
has_one(:profile, AshSqlite.Test.Profile)
has_many(:posts, AshSqlite.Test.Post)
end
calculations do
calculate(:title, :string, expr(bio[:title]))
calculate(:full_name, :string, expr(first_name <> " " <> last_name))
calculate(:full_name_with_nils, :string, expr(string_join([first_name, last_name], " ")))
calculate(:full_name_with_nils_no_joiner, :string, expr(string_join([first_name, last_name])))
calculate(:split_full_name, {:array, :string}, expr(string_split(full_name)))
calculate(
:split_full_name_trim,
{:array, :string},
expr(string_split(full_name, " ", trim?: true))
)
calculate(:first_name_from_split, :string, expr(at(split_full_name_trim, 0)))
calculate(:first_name_or_bob, :string, expr(first_name || "bob"))
calculate(:first_name_and_bob, :string, expr(first_name && "bob"))
calculate(
:conditional_full_name,
:string,
expr(
if(
is_nil(first_name) or is_nil(last_name),
"(none)",
first_name <> " " <> last_name
)
)
)
calculate(
:nested_conditional,
:string,
expr(
if(
is_nil(first_name),
"No First Name",
if(
is_nil(last_name),
"No Last Name",
first_name <> " " <> last_name
)
)
)
)
calculate :param_full_name,
:string,
{AshSqlite.Test.Concat, keys: [:first_name, :last_name]} do
argument(:separator, :string, default: " ", constraints: [allow_empty?: true, trim?: false])
end
end
end


@ -0,0 +1,19 @@
defmodule AshSqlite.Test.Bio do
@moduledoc false
use Ash.Resource, data_layer: :embedded
actions do
defaults([:create, :read, :update, :destroy])
end
attributes do
attribute(:title, :string)
attribute(:bio, :string)
attribute(:years_of_experience, :integer)
attribute :list_of_strings, {:array, :string} do
allow_nil?(true)
default(nil)
end
end
end


@ -0,0 +1,59 @@
defmodule AshSqlite.Test.Comment do
@moduledoc false
use Ash.Resource,
data_layer: AshSqlite.DataLayer,
authorizers: [
Ash.Policy.Authorizer
]
policies do
bypass action_type(:read) do
# Check that the comment is in the same org (via post) as actor
authorize_if(relates_to_actor_via([:post, :organization, :users]))
end
end
sqlite do
table "comments"
repo(AshSqlite.TestRepo)
references do
reference(:post, on_delete: :delete, on_update: :update, name: "special_name_fkey")
end
end
actions do
defaults([:read, :update, :destroy])
create :create do
primary?(true)
argument(:rating, :map)
change(manage_relationship(:rating, :ratings, on_missing: :ignore, on_match: :create))
end
end
attributes do
uuid_primary_key(:id)
attribute(:title, :string)
attribute(:likes, :integer)
attribute(:arbitrary_timestamp, :utc_datetime_usec)
create_timestamp(:created_at, writable?: true)
end
relationships do
belongs_to(:post, AshSqlite.Test.Post)
belongs_to(:author, AshSqlite.Test.Author)
has_many(:ratings, AshSqlite.Test.Rating,
destination_attribute: :resource_id,
relationship_context: %{data_layer: %{table: "comment_ratings"}}
)
has_many(:popular_ratings, AshSqlite.Test.Rating,
destination_attribute: :resource_id,
relationship_context: %{data_layer: %{table: "comment_ratings"}},
filter: expr(score > 5)
)
end
end


@ -0,0 +1,19 @@
defmodule AshSqlite.Test.IntegerPost do
@moduledoc false
use Ash.Resource,
data_layer: AshSqlite.DataLayer
sqlite do
table "integer_posts"
repo AshSqlite.TestRepo
end
actions do
defaults([:create, :read, :update, :destroy])
end
attributes do
integer_primary_key(:id)
attribute(:title, :string)
end
end


@ -0,0 +1,39 @@
defmodule AshSqlite.Test.Manager do
@moduledoc false
use Ash.Resource,
data_layer: AshSqlite.DataLayer
sqlite do
table("managers")
repo(AshSqlite.TestRepo)
end
actions do
defaults([:read, :update, :destroy])
create :create do
primary?(true)
argument(:organization_id, :uuid, allow_nil?: false)
change(manage_relationship(:organization_id, :organization, type: :append_and_remove))
end
end
identities do
identity(:uniq_code, :code)
end
attributes do
uuid_primary_key(:id)
attribute(:name, :string)
attribute(:code, :string, allow_nil?: false)
attribute(:must_be_present, :string, allow_nil?: false)
attribute(:role, :string)
end
relationships do
belongs_to :organization, AshSqlite.Test.Organization do
attribute_writable?(true)
end
end
end


@ -0,0 +1,25 @@
defmodule AshSqlite.Test.Organization do
@moduledoc false
use Ash.Resource,
data_layer: AshSqlite.DataLayer
sqlite do
table("orgs")
repo(AshSqlite.TestRepo)
end
actions do
defaults([:create, :read, :update, :destroy])
end
attributes do
uuid_primary_key(:id, writable?: true)
attribute(:name, :string)
end
relationships do
has_many(:users, AshSqlite.Test.User)
has_many(:posts, AshSqlite.Test.Post)
has_many(:managers, AshSqlite.Test.Manager)
end
end


@ -0,0 +1,236 @@
defmodule AshSqlite.Test.Post do
@moduledoc false
use Ash.Resource,
data_layer: AshSqlite.DataLayer,
authorizers: [
Ash.Policy.Authorizer
]
policies do
bypass action_type(:read) do
# Check that the post is in the same org as actor
authorize_if(relates_to_actor_via([:organization, :users]))
end
end
sqlite do
table("posts")
repo(AshSqlite.TestRepo)
base_filter_sql("type = 'sponsored'")
check_constraints do
check_constraint(:price, "price_must_be_positive",
message: "yo, bad price",
check: "price > 0"
)
end
custom_indexes do
index([:uniq_custom_one, :uniq_custom_two],
unique: true,
concurrently: true,
message: "dude what the heck"
)
end
end
resource do
base_filter(expr(type == type(:sponsored, ^Ash.Type.Atom)))
end
actions do
defaults([:update, :destroy])
read :read do
primary?(true)
end
read :paginated do
pagination(offset?: true, required?: true, countable: true)
end
create :create do
primary?(true)
argument(:rating, :map)
change(
manage_relationship(:rating, :ratings,
on_missing: :ignore,
on_no_match: :create,
on_match: :create
)
)
end
update :increment_score do
argument(:amount, :integer, default: 1)
change(atomic_update(:score, expr((score || 0) + ^arg(:amount))))
end
end
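# increment_score pushes the bump into the database as a single atomic UPDATE;
# `score || 0` keeps a nil score from swallowing the increment.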
identities do
identity(:uniq_one_and_two, [:uniq_one, :uniq_two])
end
attributes do
uuid_primary_key(:id, writable?: true)
attribute(:title, :string)
attribute(:score, :integer)
attribute(:public, :boolean)
attribute(:category, :ci_string)
attribute(:type, :atom, default: :sponsored, private?: true, writable?: false)
attribute(:price, :integer)
attribute(:decimal, :decimal, default: Decimal.new(0))
attribute(:status, AshSqlite.Test.Types.Status)
attribute(:status_enum, AshSqlite.Test.Types.StatusEnum)
attribute(:status_enum_no_cast, AshSqlite.Test.Types.StatusEnumNoCast, source: :status_enum)
attribute(:point, AshSqlite.Test.Point)
attribute(:stuff, :map)
attribute(:uniq_one, :string)
attribute(:uniq_two, :string)
attribute(:uniq_custom_one, :string)
attribute(:uniq_custom_two, :string)
create_timestamp(:created_at)
update_timestamp(:updated_at)
end
code_interface do
define_for(AshSqlite.Test.Api)
define(:get_by_id, action: :read, get_by: [:id])
define(:increment_score, args: [{:optional, :amount}])
end
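# define_for/1 plus define/2 generate helper functions on this module, e.g.
# `Post.get_by_id!(id, load: [:linked_posts])` as used in the load tests;
# `increment_score` takes the record followed by the optional amount.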
relationships do
belongs_to :organization, AshSqlite.Test.Organization do
attribute_writable?(true)
end
belongs_to(:author, AshSqlite.Test.Author)
has_many :posts_with_matching_title, __MODULE__ do
no_attributes?(true)
filter(expr(parent(title) == title and parent(id) != id))
end
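# `no_attributes?: true` joins with no source/destination key pair; `parent/1`
# refers back to the outer post, so this matches other posts that share the
# same title.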
has_many(:comments, AshSqlite.Test.Comment, destination_attribute: :post_id)
has_many :comments_matching_post_title, AshSqlite.Test.Comment do
filter(expr(title == parent_expr(title)))
end
has_many :popular_comments, AshSqlite.Test.Comment do
destination_attribute(:post_id)
filter(expr(likes > 10))
end
has_many :comments_containing_title, AshSqlite.Test.Comment do
manual(AshSqlite.Test.Post.CommentsContainingTitle)
end
has_many(:ratings, AshSqlite.Test.Rating,
destination_attribute: :resource_id,
relationship_context: %{data_layer: %{table: "post_ratings"}}
)
has_many(:post_links, AshSqlite.Test.PostLink,
destination_attribute: :source_post_id,
filter: [state: :active]
)
many_to_many(:linked_posts, __MODULE__,
through: AshSqlite.Test.PostLink,
join_relationship: :post_links,
source_attribute_on_join_resource: :source_post_id,
destination_attribute_on_join_resource: :destination_post_id
)
has_many(:views, AshSqlite.Test.PostView)
end
validations do
validate(attribute_does_not_equal(:title, "not allowed"))
end
calculations do
calculate(:score_after_winning, :integer, expr((score || 0) + 1))
calculate(:negative_score, :integer, expr(-score))
calculate(:category_label, :string, expr("(" <> category <> ")"))
calculate(:score_with_score, :string, expr(score <> score))
calculate(:foo_bar_from_stuff, :string, expr(stuff[:foo][:bar]))
calculate(
:score_map,
:map,
expr(%{
negative_score: %{foo: negative_score, bar: negative_score}
})
)
calculate(
:calc_returning_json,
AshSqlite.Test.Money,
expr(
fragment("""
'{"amount":100, "currency": "usd"}'::json
""")
)
)
calculate(
:was_created_in_the_last_month,
:boolean,
expr(
# This is written in a silly way on purpose, to test a regression
if(
fragment("(? <= (? - '1 month'::interval))", now(), created_at),
true,
false
)
)
)
calculate(
:price_string,
:string,
CalculatePostPriceString
)
calculate(
:price_string_with_currency_sign,
:string,
CalculatePostPriceStringWithSymbol
)
end
end
defmodule CalculatePostPriceString do
@moduledoc false
use Ash.Calculation
@impl true
def select(_, _, _), do: [:price]
@impl true
def calculate(records, _, _) do
Enum.map(records, fn %{price: price} ->
dollars = div(price, 100)
cents = rem(price, 100)
"#{dollars}.#{cents}"
end)
end
end
defmodule CalculatePostPriceStringWithSymbol do
@moduledoc false
use Ash.Calculation
@impl true
def load(_, _, _), do: [:price_string]
@impl true
def calculate(records, _, _) do
Enum.map(records, fn %{price_string: price_string} ->
"#{price_string}$"
end)
end
end


@ -0,0 +1,37 @@
defmodule AshSqlite.Test.PostLink do
@moduledoc false
use Ash.Resource,
data_layer: AshSqlite.DataLayer
sqlite do
table "post_links"
repo AshSqlite.TestRepo
end
actions do
defaults([:create, :read, :update, :destroy])
end
identities do
identity(:unique_link, [:source_post_id, :destination_post_id])
end
attributes do
attribute :state, :atom do
constraints(one_of: [:active, :archived])
default(:active)
end
end
relationships do
belongs_to :source_post, AshSqlite.Test.Post do
allow_nil?(false)
primary_key?(true)
end
belongs_to :destination_post, AshSqlite.Test.Post do
allow_nil?(false)
primary_key?(true)
end
end
end


@ -0,0 +1,33 @@
defmodule AshSqlite.Test.PostView do
@moduledoc false
use Ash.Resource, data_layer: AshSqlite.DataLayer
actions do
defaults([:create, :read])
end
attributes do
create_timestamp(:time)
attribute(:browser, :atom, constraints: [one_of: [:firefox, :chrome, :edge]])
end
relationships do
belongs_to :post, AshSqlite.Test.Post do
allow_nil?(false)
attribute_writable?(true)
end
end
resource do
require_primary_key?(false)
end
sqlite do
table "post_views"
repo AshSqlite.TestRepo
references do
reference :post, ignore?: true
end
end
end


@ -0,0 +1,24 @@
defmodule AshSqlite.Test.Profile do
@moduledoc false
use Ash.Resource,
data_layer: AshSqlite.DataLayer
sqlite do
table("profile")
schema("profiles")
repo(AshSqlite.TestRepo)
end
attributes do
uuid_primary_key(:id, writable?: true)
attribute(:description, :string)
end
actions do
defaults([:create, :read, :update, :destroy])
end
relationships do
belongs_to(:author, AshSqlite.Test.Author)
end
end


@ -0,0 +1,20 @@
defmodule AshSqlite.Test.Rating do
@moduledoc false
use Ash.Resource,
data_layer: AshSqlite.DataLayer
sqlite do
polymorphic?(true)
repo AshSqlite.TestRepo
end
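# A polymorphic resource has no fixed table; callers choose one per query or
# relationship via context, e.g. `%{data_layer: %{table: "post_ratings"}}`.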
actions do
defaults([:create, :read, :update, :destroy])
end
attributes do
uuid_primary_key(:id)
attribute(:score, :integer)
attribute(:resource_id, :uuid)
end
end


@ -0,0 +1,23 @@
defmodule AshSqlite.Test.User do
@moduledoc false
use Ash.Resource, data_layer: AshSqlite.DataLayer
actions do
defaults([:create, :read, :update, :destroy])
end
attributes do
uuid_primary_key(:id)
attribute(:is_active, :boolean)
end
sqlite do
table "users"
repo(AshSqlite.TestRepo)
end
relationships do
belongs_to(:organization, AshSqlite.Test.Organization)
has_many(:accounts, AshSqlite.Test.Account)
end
end

13
test/support/test_app.ex Normal file

@ -0,0 +1,13 @@
defmodule AshSqlite.TestApp do
@moduledoc false
def start(_type, _args) do
children = [
AshSqlite.TestRepo
]
# See https://hexdocs.pm/elixir/Supervisor.html
# for other strategies and supported options
opts = [strategy: :one_for_one, name: AshSqlite.Supervisor]
Supervisor.start_link(children, opts)
end
end


@ -0,0 +1,38 @@
defmodule AshSqlite.TestCustomExtension do
@moduledoc false
use AshSqlite.CustomExtension, name: "demo-functions", latest_version: 1
@impl true
def install(0) do
"""
execute(\"\"\"
CREATE OR REPLACE FUNCTION ash_demo_functions()
RETURNS boolean AS $$ SELECT TRUE $$
LANGUAGE SQL
IMMUTABLE;
\"\"\")
"""
end
@impl true
def install(1) do
"""
execute(\"\"\"
CREATE OR REPLACE FUNCTION ash_demo_functions()
RETURNS boolean AS $$ SELECT FALSE $$
LANGUAGE SQL
IMMUTABLE;
\"\"\")
"""
end
@impl true
def uninstall(_version) do
"""
execute(\"\"\"
DROP FUNCTION IF EXISTS ash_demo_functions()
\"\"\")
"""
end
end


@ -0,0 +1,13 @@
defmodule AshSqlite.TestNoSandboxRepo do
@moduledoc false
use AshSqlite.Repo,
otp_app: :ash_sqlite
def on_transaction_begin(data) do
send(self(), data)
end
def installed_extensions do
["ash-functions", AshSqlite.TestCustomExtension]
end
end

13
test/support/test_repo.ex Normal file

@ -0,0 +1,13 @@
defmodule AshSqlite.TestRepo do
@moduledoc false
use AshSqlite.Repo,
otp_app: :ash_sqlite
def on_transaction_begin(data) do
send(self(), data)
end
def installed_extensions do
["ash-functions", AshSqlite.TestCustomExtension]
end
end

Some files were not shown because too many files have changed in this diff