feat: create and read works.

James Harton 2023-08-07 17:47:06 +12:00 committed by james
parent cfb8ebb73d
commit 74d878b70e
25 changed files with 1392 additions and 245 deletions

34
.check.exs Normal file
@@ -0,0 +1,34 @@
[
## don't run tools concurrently
# parallel: false,
## don't print info about skipped tools
# skipped: false,
## always run tools in fix mode (put it in ~/.check.exs locally, not in project config)
# fix: true,
## don't retry automatically even if last run resulted in failures
# retry: false,
## list of tools (see `mix check` docs for a list of default curated tools)
tools: [
## curated tools may be disabled (e.g. the check for compilation warnings)
# {:compiler, false},
## ...or have command & args adjusted (e.g. enable skip comments for sobelow)
{:sobelow, false},
## ...or reordered (e.g. to see output from dialyzer before others)
# {:dialyzer, order: -1},
## ...or reconfigured (e.g. disable parallel execution of ex_unit in umbrella)
# {:ex_unit, umbrella: [parallel: false]},
## custom new tools may be added (Mix tasks or arbitrary commands)
# {:my_task, "mix my_task", env: %{"MIX_ENV" => "prod"}},
# {:my_tool, ["my_tool", "arg with spaces"]}
{:spark_formatter, "mix spark.formatter --check"},
{:spark_cheat_sheets, "mix spark.cheat_sheets --check"}
]
]

17
.doctor.exs Normal file
@@ -0,0 +1,17 @@
%Doctor.Config{
ignore_modules: [
~r/^Inspect\./,
~r/^Support\./
],
ignore_paths: [],
min_module_doc_coverage: 40,
min_module_spec_coverage: 0,
min_overall_doc_coverage: 50,
min_overall_spec_coverage: 0,
min_overall_moduledoc_coverage: 100,
exception_moduledoc_required: true,
raise: false,
reporter: Doctor.Reporters.Full,
struct_type_spec_required: true,
umbrella: false
}

.drone.yml

@@ -26,7 +26,6 @@ steps:
- name: restore build cache
image: meltwater/drone-cache
pull: "always"
environment:
AWS_ACCESS_KEY_ID:
from_secret: ACCESS_KEY_ID
@@ -70,10 +69,10 @@ steps:
- mix local.rebar --if-missing --force
- mix deps.get
- mix deps.compile
- mix dialyzer --plt
- name: store ASDF cache
image: meltwater/drone-cache
pull: "always"
environment:
AWS_ACCESS_KEY_ID:
from_secret: ACCESS_KEY_ID
@@ -97,7 +96,6 @@ steps:
- name: store build cache
image: meltwater/drone-cache
pull: "always"
environment:
AWS_ACCESS_KEY_ID:
from_secret: ACCESS_KEY_ID
@@ -123,65 +121,8 @@ steps:
- .mix
- .rebar3
---
kind: pipeline
type: docker
name: test
depends_on:
- build
steps:
- name: restore ASDF cache
image: meltwater/drone-cache
pull: "always"
environment:
AWS_ACCESS_KEY_ID:
from_secret: ACCESS_KEY_ID
AWS_SECRET_ACCESS_KEY:
from_secret: SECRET_ACCESS_KEY
AWS_PLUGIN_PATH_STYLE: true
settings:
restore: true
endpoint:
from_secret: S3_ENDPOINT
bucket:
from_secret: CACHE_BUCKET
region: us-east-1
path-style: true
cache_key: 'asdf-{{ os }}-{{ arch }}-{{ checksum ".tool-versions" }}'
mount:
- .asdf
- name: restore build cache
image: meltwater/drone-cache
pull: "always"
environment:
AWS_ACCESS_KEY_ID:
from_secret: ACCESS_KEY_ID
AWS_SECRET_ACCESS_KEY:
from_secret: SECRET_ACCESS_KEY
AWS_PLUGIN_PATH_STYLE: true
settings:
restore: true
endpoint:
from_secret: S3_ENDPOINT
bucket:
from_secret: CACHE_BUCKET
region: us-east-1
path-style: true
cache_key: 'elixir-{{ checksum "mix.lock" }}-{{ checksum ".tool-versions" }}'
mount:
- deps
- _build
- .hex
- .mix
- .rebar3
- name: mix compile
image: code.harton.nz/james/asdf_container:latest
pull: "always"
environment:
MIX_ENV: test
HEX_HOME: /drone/src/.hex
@@ -189,14 +130,12 @@ steps:
REBAR_BASE_DIR: /drone/src/.rebar3
ASDF_DATA_DIR: /drone/src/.asdf
depends_on:
- restore ASDF cache
- restore build cache
- install dependencies
commands:
- asdf mix compile --warnings-as-errors
- name: mix test
image: code.harton.nz/james/asdf_container:latest
pull: "always"
environment:
MIX_ENV: test
HEX_HOME: /drone/src/.hex
@@ -210,7 +149,6 @@ steps:
- name: mix credo
image: code.harton.nz/james/asdf_container:latest
pull: "always"
environment:
MIX_ENV: test
HEX_HOME: /drone/src/.hex
@@ -224,7 +162,6 @@ steps:
- name: mix hex.audit
image: code.harton.nz/james/asdf_container:latest
pull: "always"
environment:
MIX_ENV: test
HEX_HOME: /drone/src/.hex
@@ -238,7 +175,6 @@ steps:
- name: mix format
image: code.harton.nz/james/asdf_container:latest
pull: "always"
environment:
MIX_ENV: test
HEX_HOME: /drone/src/.hex
@@ -250,9 +186,34 @@ steps:
commands:
- asdf mix format --check-formatted
- name: mix spark.formatter
image: code.harton.nz/james/asdf_container:latest
environment:
MIX_ENV: test
HEX_HOME: /drone/src/.hex
MIX_HOME: /drone/src/.mix
REBAR_BASE_DIR: /drone/src/.rebar3
ASDF_DATA_DIR: /drone/src/.asdf
depends_on:
- mix compile
commands:
- asdf mix spark.formatter --check
- name: mix spark.cheat_sheets
image: code.harton.nz/james/asdf_container:latest
environment:
MIX_ENV: test
HEX_HOME: /drone/src/.hex
MIX_HOME: /drone/src/.mix
REBAR_BASE_DIR: /drone/src/.rebar3
ASDF_DATA_DIR: /drone/src/.asdf
depends_on:
- mix compile
commands:
- asdf mix spark.cheat_sheets --check
- name: mix deps.unlock
image: code.harton.nz/james/asdf_container:latest
pull: "always"
environment:
MIX_ENV: test
HEX_HOME: /drone/src/.hex
@@ -266,7 +227,6 @@ steps:
- name: mix doctor
image: code.harton.nz/james/asdf_container:latest
pull: "always"
environment:
MIX_ENV: test
HEX_HOME: /drone/src/.hex
@@ -280,7 +240,6 @@ steps:
- name: mix git_ops.check_message
image: code.harton.nz/james/asdf_container:latest
pull: "always"
environment:
MIX_ENV: test
HEX_HOME: /drone/src/.hex
@@ -293,73 +252,24 @@ steps:
- git log -1 --format=%s > .last_commit_message
- asdf mix git_ops.check_message .last_commit_message
---
kind: pipeline
type: docker
name: git ops
trigger:
branch:
- main
event:
- push
depends_on:
- test
steps:
- name: restore ASDF cache
image: meltwater/drone-cache
pull: "always"
environment:
AWS_ACCESS_KEY_ID:
from_secret: ACCESS_KEY_ID
AWS_SECRET_ACCESS_KEY:
from_secret: SECRET_ACCESS_KEY
AWS_PLUGIN_PATH_STYLE: true
settings:
restore: true
endpoint:
from_secret: S3_ENDPOINT
bucket:
from_secret: CACHE_BUCKET
region: us-east-1
path-style: true
cache_key: 'asdf-{{ os }}-{{ arch }}-{{ checksum ".tool-versions" }}'
mount:
- .asdf
- name: restore build cache
image: meltwater/drone-cache
pull: "always"
environment:
AWS_ACCESS_KEY_ID:
from_secret: ACCESS_KEY_ID
AWS_SECRET_ACCESS_KEY:
from_secret: SECRET_ACCESS_KEY
AWS_PLUGIN_PATH_STYLE: true
settings:
restore: true
endpoint:
from_secret: S3_ENDPOINT
bucket:
from_secret: CACHE_BUCKET
region: us-east-1
path-style: true
cache_key: 'elixir-{{ checksum "mix.lock" }}-{{ checksum ".tool-versions" }}'
mount:
- deps
- _build
- .hex
- .mix
- .rebar3
- name: mix git_ops.release
image: code.harton.nz/james/asdf_container:latest
pull: "always"
when:
branch:
- main
event:
exclude:
- pull_request
depends_on:
- restore ASDF cache
- restore build cache
- mix test
- mix credo
- mix hex.audit
- mix format
- mix spark.formatter
- mix spark.cheat_sheets
- mix deps.unlock
- mix doctor
- mix git_ops.check_message
environment:
MIX_ENV: test
HEX_HOME: /drone/src/.hex
@@ -384,73 +294,24 @@ steps:
- git push $${GIT_URL} "HEAD:${DRONE_COMMIT_REF}" "refs/tags/v$${NEW_APP_VERSION}"
- fi
---
kind: pipeline
type: docker
name: release
trigger:
ref:
include:
- refs/tags/v**
depends_on:
- test
steps:
- name: restore ASDF cache
image: meltwater/drone-cache
pull: "always"
environment:
AWS_ACCESS_KEY_ID:
from_secret: ACCESS_KEY_ID
AWS_SECRET_ACCESS_KEY:
from_secret: SECRET_ACCESS_KEY
AWS_PLUGIN_PATH_STYLE: true
settings:
restore: true
endpoint:
from_secret: S3_ENDPOINT
bucket:
from_secret: CACHE_BUCKET
region: us-east-1
path-style: true
cache_key: 'asdf-{{ os }}-{{ arch }}-{{ checksum ".tool-versions" }}'
mount:
- .asdf
- name: restore build cache
image: meltwater/drone-cache
pull: "always"
environment:
AWS_ACCESS_KEY_ID:
from_secret: ACCESS_KEY_ID
AWS_SECRET_ACCESS_KEY:
from_secret: SECRET_ACCESS_KEY
AWS_PLUGIN_PATH_STYLE: true
settings:
restore: true
endpoint:
from_secret: S3_ENDPOINT
bucket:
from_secret: CACHE_BUCKET
region: us-east-1
path-style: true
cache_key: 'elixir-{{ checksum "mix.lock" }}-{{ checksum ".tool-versions" }}'
mount:
- deps
- _build
- .hex
- .mix
- .rebar3
- name: build artifacts
image: code.harton.nz/james/asdf_container:latest
pull: "always"
when:
event:
- tag
refs:
include:
- refs/tags/v*
depends_on:
- restore ASDF cache
- restore build cache
- mix test
- mix credo
- mix hex.audit
- mix format
- mix spark.formatter
- mix spark.cheat_sheets
- mix deps.unlock
- mix doctor
- mix git_ops.check_message
environment:
MIX_ENV: test
HEX_HOME: /drone/src/.hex
@@ -472,6 +333,12 @@ steps:
- name: gitea release
image: plugins/gitea-release
when:
event:
- tag
refs:
include:
- refs/tags/v*
depends_on:
- build artifacts
settings:
@@ -483,21 +350,49 @@ steps:
title: tag_subject
note: tag_body
# - name: hex release
# image: code.harton.nz/james/asdf_container:latest
# pull: "always"
# depends_on:
# - restore ASDF cache
# - restore build cache
# environment:
# MIX_ENV: test
# HEX_HOME: /drone/src/.hex
# MIX_HOME: /drone/src/.mix
# REBAR_BASE_DIR: /drone/src/.rebar3
# ASDF_DATA_DIR: /drone/src/.asdf
# ASDF_DIR: /root/.asdf
# HEX_API_KEY:
# from_secret: HEX_API_KEY
# commands:
# - . $ASDF_DIR/asdf.sh
# - mix hex.publish --yes
- name: docs release
when:
event:
- tag
refs:
include:
- refs/tags/v*
image: minio/mc
environment:
S3_ENDPOINT:
from_secret: S3_ENDPOINT
ACCESS_KEY:
from_secret: ACCESS_KEY_ID
SECRET_KEY:
from_secret: SECRET_ACCESS_KEY
depends_on:
- build artifacts
commands:
- mc alias set store $${S3_ENDPOINT} $${ACCESS_KEY} $${SECRET_KEY}
- mc mb -p store/docs.harton.nz
- mc anonymous set download store/docs.harton.nz
- mc mirror --overwrite doc/ store/docs.harton.nz/$${DRONE_REPO}/$${DRONE_TAG}
- mc mirror --overwrite doc/ store/docs.harton.nz/$${DRONE_REPO}
- name: hex release
image: code.harton.nz/james/asdf_container:latest
when:
event:
- tag
refs:
include:
- refs/tags/v*
depends_on:
- build artifacts
environment:
MIX_ENV: test
HEX_HOME: /drone/src/.hex
MIX_HOME: /drone/src/.mix
REBAR_BASE_DIR: /drone/src/.rebar3
ASDF_DATA_DIR: /drone/src/.asdf
ASDF_DIR: /root/.asdf
HEX_API_KEY:
from_secret: HEX_API_KEY
commands:
- . $ASDF_DIR/asdf.sh
- mix hex.publish --yes

.formatter.exs

@@ -1,4 +1,20 @@
# Used by "mix format"
[
inputs: ["{mix,.formatter}.exs", "{config,lib,test}/**/*.{ex,exs}"]
spark_locals_without_parens = [
auto_compact?: 1,
auto_file_sync?: 1,
directory: 1,
name: 1,
otp_app: 1
]
[
import_deps: [:ash, :spark],
inputs: [
"*.{ex,exs}",
"{config,lib,test}/**/*.{ex,exs}"
],
plugins: [Spark.Formatter],
locals_without_parens: spark_locals_without_parens,
export: [
locals_without_parens: spark_locals_without_parens
]
]

config/config.exs

@@ -7,3 +7,7 @@ config :git_ops,
manage_mix_version?: true,
version_tag_prefix: "v",
manage_readme_version: "README.md"
if Mix.env() in ~w[dev test]a do
config :ash_cubdb, ash_apis: [Support.Api]
end

documentation/dsls/DSL:-AshCubDB.DataLayer.md

@@ -0,0 +1,98 @@
# DSL: AshCubDB.DataLayer
A CubDB data layer for Ash.
<!--- ash-hq-hide-start --> <!--- -->
## DSL Documentation
### Index
* cubdb
### Docs
## cubdb
CubDB data layer configuration.
Examples:
```
cubdb do
directory "/opt/storage/my_awesome_resource"
auto_compact? true
auto_file_sync? true
name :my_awesome_resource
end
```
---
* `:directory` - The directory within which to store the CubDB data.
If none is supplied, then one will be automatically generated in the
`priv` directory of the parent OTP application.
* `:otp_app` (`t:atom/0`) - The OTP application in whose `priv` directory data should be stored.
Only used if `directory` is not supplied. When not provided
`Application.get_application/1` will be called for the resource.
* `:auto_compact?` (`t:boolean/0`) - Whether or not to automatically compact the CubDB database.
See [the CubDB documentation](https://hexdocs.pm/cubdb/faq.html#what-is-compaction) for more information. The default value is `true`.
* `:auto_file_sync?` (`t:boolean/0`) - Whether or not to automatically flush the buffer to disk on write.
See [the CubDB documentation](https://hexdocs.pm/cubdb/faq.html#what-does-file-sync-mean) for more information. The default value is `true`.
* `:name` (`t:atom/0`) - The name of the CubDB database.
By default this is the name of the resource module, however in some
(rare) circumstances you may wish to specifically name the database.
<!--- ash-hq-hide-stop --> <!--- -->
## cubdb
CubDB data layer configuration.
### Examples
```
cubdb do
directory "/opt/storage/my_awesome_resource"
auto_compact? true
auto_file_sync? true
name :my_awesome_resource
end
```
### Options
| Name | Type | Default | Docs |
| --- | --- | --- | --- |
| `directory` | `nil \| String.t` | | The directory within which to store the CubDB data. If none is supplied, then one will be automatically generated in the `priv` directory of the parent OTP application. |
| `otp_app` | `atom` | | The OTP application in whose `priv` directory data should be stored. Only used if `directory` is not supplied. When not provided `Application.get_application/1` will be called for the resource. |
| `auto_compact?` | `boolean` | `true` | Whether or not to automatically compact the CubDB database. See [the CubDB documentation](https://hexdocs.pm/cubdb/faq.html#what-is-compaction) for more information. |
| `auto_file_sync?` | `boolean` | `true` | Whether or not to automatically flush the buffer to disk on write. See [the CubDB documentation](https://hexdocs.pm/cubdb/faq.html#what-does-file-sync-mean) for more information. |
| `name` | `atom` | | The name of the CubDB database. By default this is the name of the resource module, however in some (rare) circumstances you may wish to specifically name the database. |
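For reference, a complete (if minimal) resource using this data layer might look like the following sketch; the module name, OTP app and attributes are illustrative:
```
defmodule MyApp.Thing do
  use Ash.Resource, data_layer: AshCubDB.DataLayer

  cubdb do
    # Store data beneath the `priv` directory of `:my_app`.
    otp_app :my_app
    auto_compact? true
    auto_file_sync? true
  end

  attributes do
    uuid_primary_key :id
    attribute :name, :string
  end
end
```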

lib/ash_cub_db.ex

@@ -1,18 +1,161 @@
defmodule AshCubDB do
@moduledoc """
Documentation for `AshCubDB`.
`AshCubDB` is an [Ash DataLayer](https://ash-hq.org/docs/module/ash/latest/ash-datalayer)
which adds support for persisting Ash resources with [CubDB](https://hex.pm/packages/cubdb).
CubDB is an Elixir-based key-value store which supports all Erlang-native
terms. More information can be found in
[the CubDB readme](https://hexdocs.pm/cubdb/readme.html).
"""
alias AshCubDB.{Info, Migration}
@doc """
Hello world.
## Examples
iex> AshCubDB.hello()
:world
Ensure that the CubDB process is running for the specified resource.
"""
def hello do
:world
@spec start(Ash.Resource.t()) :: {:ok, pid} | {:error, any}
def start(resource) do
directory = Info.cubdb_directory!(resource)
auto_compact? = Info.cubdb_auto_compact?(resource)
auto_file_sync? = Info.cubdb_auto_file_sync?(resource)
name = via(resource)
with {:ok, pid} <-
DynamicSupervisor.start_child(
AshCubDB.DynamicSupervisor,
{CubDB, [data_dir: directory, name: name]}
),
:ok <- CubDB.set_auto_compact(pid, auto_compact?),
:ok <- CubDB.set_auto_file_sync(pid, auto_file_sync?),
:ok <- Migration.check(pid, resource) do
{:ok, pid}
else
{:error, {:already_started, pid}} -> {:ok, pid}
{:error, reason} -> {:error, reason}
end
end
@doc """
Stop the CubDB process running for a specific resource.
"""
@spec stop(Ash.Resource.t()) :: :ok
def stop(resource) do
AshCubDB
|> Registry.lookup(resource)
|> Enum.each(&DynamicSupervisor.terminate_child(AshCubDB.DynamicSupervisor, &1))
end
@doc """
Creates a backup of the database into the target directory path.
Wrapper around `CubDB.back_up/2`
"""
@spec back_up(Ash.Resource.t(), Path.t()) :: :ok | {:error, any}
def back_up(resource, target_path) do
case start(resource) do
{:ok, pid} -> CubDB.back_up(pid, target_path)
{:error, reason} -> {:error, reason}
end
end
@doc """
Deletes all entries, resulting in an empty database.
Wrapper around `CubDB.clear/1`
"""
@spec clear(Ash.Resource.t()) :: :ok
def clear(resource) do
case start(resource) do
{:ok, pid} -> CubDB.clear(pid)
_ -> :ok
end
end
@doc """
Runs a database compaction.
Wrapper around `CubDB.compact/1`
"""
@spec compact(Ash.Resource.t()) :: :ok | {:error, any}
def compact(resource) do
case start(resource) do
{:ok, pid} -> CubDB.compact(pid)
{:error, reason} -> {:error, reason}
end
end
@doc """
Returns true if a compaction operation is currently running, false otherwise.
Wrapper around `CubDB.compacting?/1`
"""
@spec compacting?(Ash.Resource.t()) :: boolean
def compacting?(resource) do
case start(resource) do
{:ok, pid} -> CubDB.compacting?(pid)
_ -> false
end
end
@doc """
Returns the path of the current database file.
Wrapper around `CubDB.current_db_file/1`
"""
@spec current_db_file(Ash.Resource.t()) :: String.t()
def current_db_file(resource) do
resource
|> via()
|> CubDB.current_db_file()
end
@doc """
Returns the path of the data directory, as given when the `CubDB` process was started.
Wrapper around `CubDB.data_dir/1`
"""
@spec data_dir(Ash.Resource.t()) :: String.t()
def data_dir(resource) do
resource
|> via()
|> CubDB.data_dir()
end
@doc """
Returns the dirt factor.
Wrapper around `CubDB.dirt_factor/1`
"""
@spec dirt_factor(Ash.Resource.t()) :: float
def dirt_factor(resource) do
resource
|> via()
|> CubDB.dirt_factor()
end
@doc """
Performs an `fsync`, forcing any data buffered by the OS to be flushed to disk.
Wrapper around `CubDB.file_sync/1`
"""
@spec file_sync(Ash.Resource.t()) :: :ok
def file_sync(resource) do
resource
|> via()
|> CubDB.file_sync()
end
@doc """
Stops a running compaction.
Wrapper around `CubDB.halt_compaction/1`
"""
@spec halt_compaction(Ash.Resource.t()) :: :ok | {:error, :no_compaction_running}
def halt_compaction(resource) do
resource
|> via()
|> CubDB.halt_compaction()
end
defp via(resource), do: {:via, Registry, {AshCubDB.Registry, resource}}
end
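Taken together these wrappers form a small lifecycle API around the per-resource CubDB process. A usage sketch (the resource name is hypothetical; `start/1` is also called implicitly by the data layer, so calling it by hand is rarely necessary):
```
{:ok, _pid} = AshCubDB.start(MyApp.Thing)
:ok = AshCubDB.file_sync(MyApp.Thing)
:ok = AshCubDB.back_up(MyApp.Thing, "/tmp/thing_backup")
:ok = AshCubDB.stop(MyApp.Thing)
```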

lib/ash_cub_db/application.ex

@@ -1,19 +1,16 @@
defmodule AshCubDB.Application do
# See https://hexdocs.pm/elixir/Application.html
# for more information on OTP Applications
@moduledoc false
use Application
@doc false
@impl true
def start(_type, _args) do
children = [
# Starts a worker by calling: AshCubDB.Worker.start_link(arg)
# {AshCubDB.Worker, arg}
{DynamicSupervisor, strategy: :one_for_one, name: AshCubDB.DynamicSupervisor},
{Registry, keys: :unique, name: AshCubDB.Registry}
]
# See https://hexdocs.pm/elixir/Supervisor.html
# for other strategies and supported options
opts = [strategy: :one_for_one, name: AshCubDB.Supervisor]
Supervisor.start_link(children, opts)
end

lib/ash_cub_db/data_layer.ex

@@ -0,0 +1,265 @@
defmodule AshCubDB.DataLayer do
alias AshCubDB.{
CacheLayoutTransformer,
ConfigureDirectoryTransformer,
Dir,
Dsl,
Info,
Query,
Serde
}
alias Ash.{
Actions.Sort,
Changeset,
Error,
Error.Changes.InvalidAttribute,
Error.Invalid.TenantRequired,
Filter.Runtime,
Resource
}
alias Ecto.Schema.Metadata
alias Spark.Dsl.Extension
import AshCubDB, only: [start: 1]
@moduledoc """
A CubDB data layer for Ash.
<!--- ash-hq-hide-start --> <!--- -->
## DSL Documentation
### Index
#{Extension.doc_index(Dsl.sections())}
### Docs
#{Extension.doc(Dsl.sections())}
<!--- ash-hq-hide-stop --> <!--- -->
"""
@behaviour Ash.DataLayer
use Extension,
sections: Dsl.sections(),
transformers: [ConfigureDirectoryTransformer, CacheLayoutTransformer]
@doc false
@impl true
def can?(resource, :create), do: Dir.writable?(resource)
def can?(resource, :upsert), do: Dir.writable?(resource)
def can?(resource, :read), do: Dir.readable?(resource)
def can?(_, :multitenancy), do: true
def can?(_, :filter), do: true
def can?(_, {:filter_expr, _}), do: true
def can?(_, :boolean_filter), do: true
def can?(_, _), do: false
@doc false
@impl true
def create(resource, changeset) do
with :ok <- validate_tenant_configuration(resource, changeset.tenant),
{:ok, db} <- start(resource),
{:ok, record} <- Changeset.apply_attributes(changeset),
{:ok, key, data} <- Serde.serialise(record),
{:ok, key} <- maybe_wrap_in_tenant(key, changeset),
:ok <- CubDB.put_new(db, key, data) do
{:ok, set_loaded(record)}
else
{:error, :exists} ->
errors =
resource
|> Resource.Info.primary_key()
|> Enum.map(
&InvalidAttribute.exception(
field: &1,
message: "has already been taken"
)
)
{:error, errors}
{:error, reason} ->
{:error, reason}
end
end
@doc false
@impl true
def upsert(resource, changeset, keys) do
pkey = Resource.Info.primary_key(resource)
keys = keys || pkey
{key_layout, _} = Info.field_layout(resource)
cond do
# If any upsert key is absent we can't possibly match an existing record, so just create.
Enum.any?(keys, &is_nil(Changeset.get_attribute(changeset, &1))) ->
create(resource, changeset)
# When the upsert keys are exactly the primary key we can upsert directly by CubDB key.
Tuple.to_list(key_layout) == Enum.sort(keys) ->
do_direct_upsert(resource, changeset)
# Upserting on arbitrary keys would need a table scan; not implemented yet.
true ->
do_search_upsert(resource, changeset, keys)
end
end
@doc false
@impl true
def run_query(query, resource, parent \\ nil) do
with :ok <- validate_tenant_configuration(resource, query.tenant),
{:ok, db} <- start(resource),
{:ok, stream} <- get_records(resource, db, query.tenant),
{:ok, records} <- filter_matches(stream, query, parent),
{:ok, records} <- runtime_sort(records, query) do
{:ok, records}
else
{:error, reason} -> {:error, Error.to_ash_error(reason)}
end
end
@doc false
@impl true
def resource_to_query(resource, api), do: %Query{resource: resource, api: api}
@doc false
@impl true
def limit(query, limit, _), do: {:ok, %{query | limit: limit}}
@doc false
@impl true
def offset(query, offset, _), do: {:ok, %{query | offset: offset}}
@doc false
@impl true
def add_calculation(query, calculation, _, _),
do: {:ok, %{query | calculations: [calculation | query.calculations]}}
@doc false
@impl true
def add_aggregate(query, aggregate, _),
do: {:ok, %{query | aggregates: [aggregate | query.aggregates]}}
@doc false
@impl true
def set_tenant(_resource, query, tenant) do
{:ok, %{query | tenant: tenant}}
end
@doc false
@impl true
def filter(query, filter, _resource) do
if query.filter do
{:ok, %{query | filter: Ash.Filter.add_to_filter!(query.filter, filter)}}
else
{:ok, %{query | filter: filter}}
end
end
@doc false
@impl true
def sort(query, sort, _resource) do
{:ok, %{query | sort: sort}}
end
@doc false
@impl true
def distinct(query, distinct, _resource) do
{:ok, %{query | distinct: distinct}}
end
@impl true
def distinct_sort(query, distinct_sort, _resource) do
{:ok, %{query | distinct_sort: distinct_sort}}
end
defp set_loaded(record),
do: %{record | __meta__: %Metadata{state: :loaded, schema: record.__struct__}}
defp do_direct_upsert(resource, changeset) do
with :ok <- validate_tenant_configuration(resource, changeset.tenant),
{:ok, db} <- start(resource),
{:ok, record} <- Changeset.apply_attributes(changeset),
{:ok, key, data} <- Serde.serialise(record),
{:ok, key} <- maybe_wrap_in_tenant(key, changeset),
:ok <- CubDB.put(db, key, data) do
{:ok, set_loaded(record)}
end
end
defp do_search_upsert(_resource, _changeset, _keys) do
{:error, :not_implemented}
end
defp get_records(resource, db, tenant) do
stream =
db
|> CubDB.select()
|> Stream.filter(&is_tuple(elem(&1, 0)))
stream =
if Resource.Info.multitenancy_strategy(resource) == :context do
stream
|> Stream.filter(fn {{t, _}, _} -> t == tenant end)
|> Stream.map(fn {{_, key}, value} -> {key, value} end)
else
stream
end
stream =
stream
|> Stream.map(&Serde.deserialise!(resource, &1))
{:ok, stream}
end
defp maybe_wrap_in_tenant(key, changeset) do
if Resource.Info.multitenancy_strategy(changeset.resource) == :context do
{:ok, {changeset.tenant, key}}
else
{:ok, key}
end
end
defp validate_tenant_configuration(resource, tenant) do
strategy = Resource.Info.multitenancy_strategy(resource)
global? = Resource.Info.multitenancy_global?(resource)
case {strategy, global?, tenant} do
{strategy, false, nil} when not is_nil(strategy) ->
{:error, TenantRequired.exception(resource: resource)}
_ ->
:ok
end
end
defp filter_matches(stream, query, _parent) when is_nil(query.filter), do: {:ok, stream}
defp filter_matches(stream, query, parent) do
records =
stream
|> Enum.to_list()
query.api
|> Runtime.filter_matches(records, query.filter, parent: parent)
end
defp runtime_sort(records, query) do
records =
records
|> Sort.runtime_sort(query.distinct_sort || query.sort, api: query.api)
|> Sort.runtime_distinct(query.distinct, api: query.api)
|> Sort.runtime_sort(query.sort, api: query.api)
|> Enum.drop(query.offset || 0)
|> do_limit(query.limit)
{:ok, records}
end
defp do_limit(records, :infinity), do: records
defp do_limit(records, limit), do: Enum.take(records, limit)
end
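For reference, the shape of the stored entries (values illustrative): each record is keyed by its serialised primary-key tuple, and for context-multitenant resources `maybe_wrap_in_tenant/2` wraps that key in the tenant, which is what `get_records/3` unwraps again on read:
```
# without multitenancy:      {{uuid_binary}, {nil, "body", "title"}}
# with context multitenancy: {{:tenant_a, {uuid_binary}}, {nil, "body", "title"}}
```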

41
lib/ash_cub_db/dir.ex Normal file
@@ -0,0 +1,41 @@
defmodule AshCubDB.Dir do
@moduledoc """
Utilities for working with the underlying data directory.
"""
alias AshCubDB.Info
@doc """
Is the directory able to be written to by the current user?
"""
def writable?(resource) do
with {:ok, dir} <- Info.cubdb_directory(resource),
{:ok, stat} when stat.access in ~w[read_write write]a <- dir_stat(dir) do
true
else
_ -> false
end
end
@doc """
Is the directory able to be read from by the current user?
"""
def readable?(resource) do
with {:ok, dir} <- Info.cubdb_directory(resource),
{:ok, stat} when stat.access in ~w[read read_write]a <- dir_stat(dir) do
true
else
_ -> false
end
end
# Note the inverted `with`: the happy path falls through to `else`. Stat the
# directory, creating it first if it doesn't exist yet.
defp dir_stat(directory) do
with {:error, :enoent} <- File.stat(directory),
{:error, error} <- File.mkdir_p(directory) do
{:error, "Unable to create directory: #{inspect(error)}"}
else
:ok -> File.stat(directory)
{:ok, stat} -> {:ok, stat}
{:error, reason} -> {:error, reason}
end
end
end

79
lib/ash_cub_db/dsl.ex Normal file
@@ -0,0 +1,79 @@
defmodule AshCubDB.Dsl do
@moduledoc false
alias Spark.Dsl.Section
@cubdb %Section{
name: :cubdb,
describe: """
CubDB data layer configuration.
""",
examples: [
"""
cubdb do
directory "/opt/storage/my_awesome_resource"
auto_compact? true
auto_file_sync? true
name :my_awesome_resource
end
"""
],
schema: [
directory: [
type: {:or, [nil, :string]},
required: false,
doc: """
The directory within which to store the CubDB data.
If none is supplied, then one will be automatically generated in the
`priv` directory of the parent OTP application.
"""
],
otp_app: [
type: :atom,
required: false,
doc: """
The OTP application in whose `priv` directory data should be stored.
Only used if `directory` is not supplied. When not provided
`Application.get_application/1` will be called for the resource.
"""
],
auto_compact?: [
type: :boolean,
default: true,
required: false,
doc: """
Whether or not to automatically compact the CubDB database.
See [the CubDB documentation](https://hexdocs.pm/cubdb/faq.html#what-is-compaction) for more information.
"""
],
auto_file_sync?: [
type: :boolean,
default: true,
required: false,
doc: """
Whether or not to automatically flush the buffer to disk on write.
See [the CubDB documentation](https://hexdocs.pm/cubdb/faq.html#what-does-file-sync-mean) for more information.
"""
],
name: [
type: :atom,
required: false,
doc: """
The name of the CubDB database.
By default this is the name of the resource module, however in some
(rare) circumstances you may wish to specifically name the database.
"""
]
]
}
@sections [@cubdb]
@doc false
@spec sections :: [Section.t()]
def sections, do: @sections
end

15
lib/ash_cub_db/info.ex Normal file
@@ -0,0 +1,15 @@
defmodule AshCubDB.Info do
@moduledoc """
Auto-generated introspection for the AshCubDB DSL.
"""
use Spark.InfoGenerator, sections: [:cubdb], extension: AshCubDB.DataLayer
alias Spark.Dsl.Extension
@doc """
Retrieve the cached field layout for the resource.
"""
@spec field_layout(Ash.Resource.t() | Spark.Dsl.t()) :: nil | {tuple, tuple}
def field_layout(resource_or_dsl_state),
do: Extension.get_persisted(resource_or_dsl_state, :cubdb_field_layout)
end

lib/ash_cub_db/migration.ex

@@ -0,0 +1,31 @@
defmodule AshCubDB.Migration do
@moduledoc """
We store and check metadata when opening a database to ensure that the
resource and attributes match, and possibly perform migrations.
"""
alias AshCubDB.Info
@doc """
Check that a newly opened database doesn't need to be migrated.
"""
@spec check(GenServer.server(), Ash.Resource.t()) :: :ok | {:error, any}
def check(db, resource) do
layout = Info.field_layout(resource)
case CubDB.fetch(db, :__metadata__) do
:error ->
CubDB.put(db, :__metadata__, %{resource: resource, layout: layout})
{:ok, metadata} when metadata.resource == resource and metadata.layout == layout ->
:ok
{:ok, metadata} when metadata.resource != resource ->
{:error,
"CubDB database refers to resource `#{metadata.resource}`, but should be `#{inspect(resource)}`."}
{:ok, _} ->
{:error, "CubDB database needs to be migrated."}
end
end
end

35
lib/ash_cub_db/query.ex Normal file
@@ -0,0 +1,35 @@
defmodule AshCubDB.Query do
@moduledoc """
A struct which holds information about a resource query as it is being built.
"""
alias Ash.{Api, Filter, Resource}
defstruct aggregates: [],
api: nil,
calculations: [],
distinct: nil,
distinct_sort: nil,
filter: nil,
limit: :infinity,
offset: 0,
relationships: %{},
resource: nil,
sort: nil,
tenant: nil
@type t :: %__MODULE__{
aggregates: [Resource.Aggregate.t()],
api: Api.t(),
calculations: [Resource.Calculation.t()],
distinct: Ash.Sort.t(),
distinct_sort: Ash.Sort.t(),
filter: nil | Filter.t(),
limit: :infinity | non_neg_integer(),
offset: non_neg_integer(),
relationships: %{optional(atom) => Ash.Resource.Relationships.relationship()},
resource: Ash.Resource.t(),
sort: nil | Ash.Sort.t(),
tenant: any
}
end

100
lib/ash_cub_db/serde.ex Normal file
@@ -0,0 +1,100 @@
defmodule AshCubDB.Serde do
@moduledoc """
Handle serialising and deserialising of records into CubDB.
"""
alias Ash.{Resource, Type}
alias AshCubDB.Info
alias Ecto.Schema.Metadata
@doc """
Serialise the record into key and value tuples for storage in CubDB.
"""
@spec serialise(Resource.record()) :: {:ok, tuple, tuple} | {:error, any}
def serialise(record) do
{key_layout, data_layout} =
record.__struct__
|> Info.field_layout()
with {:ok, key} <- serialise_with_layout(record, key_layout),
{:ok, data} <- serialise_with_layout(record, data_layout) do
{:ok, key, data}
end
end
@doc false
@spec deserialise!(Resource.t(), {tuple, tuple}) :: Resource.record() | no_return
def deserialise!(resource, {key, data}) do
case deserialise(resource, key, data) do
{:ok, record} -> record
{:error, reason} -> raise reason
end
end
@doc """
Convert the key and data back into a record.
"""
@spec deserialise(Resource.t(), tuple, tuple) :: {:ok, Resource.record()} | {:error, any}
def deserialise(resource, key, data) do
{key_layout, data_layout} = Info.field_layout(resource)
with {:ok, key_map} <- deserialise_with_layout(resource, key, key_layout),
{:ok, data_map} <- deserialise_with_layout(resource, data, data_layout) do
attrs = Map.merge(key_map, data_map)
record = struct(resource, attrs)
{:ok, %{record | __meta__: %Metadata{state: :loaded, schema: resource}}}
end
end
defp serialise_with_layout(record, layout) do
layout
|> Tuple.to_list()
|> Enum.reduce_while({:ok, {}}, fn attr, {:ok, result} ->
with {:ok, value} <- fetch_record_attribute(record, attr),
{:ok, attr} <- fetch_attribute_definition(record.__struct__, attr),
{:ok, casted} <- Type.dump_to_native(attr.type, value, attr.constraints) do
{:cont, {:ok, Tuple.append(result, casted)}}
else
:error -> {:halt, {:error, "Failed to dump value as type `#{attr.type}`"}}
{:error, reason} -> {:halt, {:error, reason}}
end
end)
end
defp deserialise_with_layout(resource, data, layout) do
layout
|> Tuple.to_list()
|> Enum.zip(Tuple.to_list(data))
|> Enum.reduce_while({:ok, %{}}, fn {attr, value}, {:ok, result} ->
with {:ok, attr} <- fetch_attribute_definition(resource, attr),
{:ok, value} <- Type.cast_stored(attr.type, value, attr.constraints) do
{:cont, {:ok, Map.put(result, attr.name, value)}}
else
:error -> {:halt, {:error, "Failed to load `#{inspect(value)}`."}}
{:error, reason} -> {:halt, {:error, reason}}
end
end)
end
defp fetch_record_attribute(record, attribute_name) do
case Map.fetch(record, attribute_name) do
{:ok, value} ->
{:ok, value}
:error ->
{:error,
"Unable to retreive attribute `#{attribute_name}` from resource `#{inspect(record.__struct__)}`"}
end
end
defp fetch_attribute_definition(resource, attribute_name) do
case Resource.Info.attribute(resource, attribute_name) do
nil ->
{:error, "Attribute `#{attribute_name}` not found on resource `#{inspect(resource)}`"}
attribute ->
{:ok, attribute}
end
end
end
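A round-trip sketch, assuming a resource whose cached layout is `{{:id}, {:author_id, :body, :title}}`:
```
{:ok, key, data} = AshCubDB.Serde.serialise(post)
# key  => {uuid_as_binary}, values dumped via `Type.dump_to_native/3`
# data => {nil, "Post body", "Post title"}
{:ok, record} = AshCubDB.Serde.deserialise(Support.Post, key, data)
# record comes back marked as loaded, values cast via `Type.cast_stored/3`
```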

lib/ash_cub_db/cache_layout_transformer.ex

@@ -0,0 +1,39 @@
defmodule AshCubDB.CacheLayoutTransformer do
@moduledoc false
alias Ash.Resource.Info
alias Spark.{Dsl, Dsl.Transformer, Error.DslError}
use Transformer
@doc false
@impl true
@spec after?(module) :: boolean
def after?(_), do: true
@doc false
@impl true
@spec transform(Dsl.t()) :: {:ok, Dsl.t()} | {:error, DslError.t()}
def transform(dsl_state) do
key =
dsl_state
|> Info.attributes()
|> Enum.filter(& &1.primary_key?)
|> Enum.map(& &1.name)
|> Enum.sort()
|> Enum.uniq()
|> List.to_tuple()
attributes =
dsl_state
|> Info.attributes()
|> Enum.reject(& &1.primary_key?)
|> Enum.map(& &1.name)
|> Enum.sort()
|> Enum.uniq()
|> List.to_tuple()
layout = {key, attributes}
{:ok, Transformer.persist(dsl_state, :cubdb_field_layout, layout)}
end
end
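For example, a resource with primary key `:id` and attributes `:title`, `:body` and a `belongs_to`-generated `:author_id` would persist the layout below; key and data attribute names are each sorted and stored as tuples:
```
{{:id}, {:author_id, :body, :title}}
```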

lib/ash_cub_db/configure_directory_transformer.ex

@@ -0,0 +1,68 @@
defmodule AshCubDB.ConfigureDirectoryTransformer do
@moduledoc false
alias Spark.{Dsl, Dsl.Transformer, Error.DslError}
use Transformer
@doc false
@impl true
@spec transform(Dsl.t()) :: {:ok, Dsl.t()} | {:error, DslError.t()}
def transform(dsl_state) do
module = Transformer.get_persisted(dsl_state, :module)
with nil <- Transformer.get_option(dsl_state, [:cubdb], :directory),
nil <- Transformer.get_option(dsl_state, [:cubdb], :otp_app),
nil <- Application.get_application(module) do
message = """
Unable to infer a data storage path for this resource.
You can either set the `cubdb.directory` DSL option directly, or set the `cubdb.otp_app` option
to use the application's priv directory for storage.
"""
{:error, DslError.exception(module: module, path: [:cubdb], message: message)}
else
path when is_binary(path) ->
verify_directory(dsl_state, path)
otp_app when is_atom(otp_app) ->
dsl_state =
dsl_state
|> Transformer.set_option([:cubdb], :otp_app, otp_app)
|> Transformer.set_option([:cubdb], :directory, generate_directory(dsl_state))
{:ok, dsl_state}
end
end
defp generate_directory(dsl_state) do
otp_app = Transformer.get_option(dsl_state, [:cubdb], :otp_app)
short_name =
dsl_state
|> Transformer.get_persisted(:module)
|> Module.split()
|> List.last()
|> Macro.underscore()
otp_app
|> :code.priv_dir()
|> Path.join("cubdb")
|> Path.join(short_name)
end
defp verify_directory(dsl_state, path) do
case Path.type(path) do
:absolute ->
{:ok, dsl_state}
_ ->
{:error,
DslError.exception(
module: Transformer.get_persisted(dsl_state, :module),
path: [:cubdb],
message: "Directory must be an absolute path"
)}
end
end
end
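The inferred path is `priv/cubdb/<short name>` under the configured OTP application. A sketch for a hypothetical `:my_app` application and `MyApp.Post` resource:
```
:my_app
|> :code.priv_dir()
|> Path.join("cubdb")
|> Path.join("post")
# => ".../my_app/priv/cubdb/post"
```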

35
mix.exs
@@ -13,12 +13,33 @@ defmodule AshCubDB.MixProject do
version: @version,
elixir: "~> 1.15",
start_permanent: Mix.env() == :prod,
consolidate_protocols: Mix.env() != :test,
elixirc_paths: elixirc_paths(Mix.env()),
deps: deps(),
description: @moduledoc,
package: package(),
source_url: "https://code.harton.nz/james/ash_cubdb",
homepage_url: "https://code.harton.nz/james/ash_cubdb",
aliases: aliases()
aliases: aliases(),
dialyzer: [plt_add_apps: [:faker, :smokestack]],
docs: [
main: "AshCubDB",
extra_section: "Guides",
formatters: ["html"],
filter_modules: ~r/^Elixir.AshCubDB/,
source_url_pattern:
"https://code.harton.nz/james/ash_cub_db/src/branch/main/%{path}#L%{line}",
spark: [
extensions: [
%{
module: AshCubDB.DataLayer,
name: "AshCubDB.DataLayer",
target: "Ash.Resource",
type: "Ash.DataLayer"
}
]
]
]
]
end
@@ -45,21 +66,29 @@ defmodule AshCubDB.MixProject do
opts = [only: ~w[dev test]a, runtime: false]
[
{:ash, "~> 2.13"},
{:cubdb, "~> 2.0"},
{:spark, "~> 1.1 and >= 1.1.39"},
{:credo, "~> 1.7", opts},
{:dialyxir, "~> 1.3", opts},
{:doctor, "~> 0.21", opts},
{:earmark, ">= 0.0.0", opts},
{:ex_check, "~> 0.15", opts},
{:ex_doc, ">= 0.0.0", opts},
{:faker, "~> 0.17", opts},
{:git_ops, "~> 2.6", opts},
{:mix_audit, "~> 2.1", opts}
{:mix_audit, "~> 2.1", opts},
{:smokestack, "~> 0.3", opts}
]
end
defp aliases do
[
"spark.formatter": "spark.formatter --extensions=AshCubDB.DataLayer"
"spark.formatter": "spark.formatter --extensions=AshCubDB.DataLayer",
"spark.cheat_sheets": "spark.cheat_sheets --extensions=AshCubDB.DataLayer"
]
end
defp elixirc_paths(env) when env in ~w[dev test]a, do: ~w[lib test/support]
defp elixirc_paths(_), do: ~w[lib]
end

mix.lock

@@ -1,15 +1,21 @@
%{
"ash": {:hex, :ash, "2.14.16", "22b82dbc7ae9cb1abadec28e599059904bbecd09445ba956c8e1d2229d0c0104", [:mix], [{:comparable, "~> 1.0", [hex: :comparable, repo: "hexpm", optional: false]}, {:decimal, "~> 2.0", [hex: :decimal, repo: "hexpm", optional: false]}, {:earmark, "~> 1.4", [hex: :earmark, repo: "hexpm", optional: true]}, {:ecto, "~> 3.7", [hex: :ecto, repo: "hexpm", optional: false]}, {:ets, "~> 0.8.0", [hex: :ets, repo: "hexpm", optional: false]}, {:jason, ">= 1.0.0", [hex: :jason, repo: "hexpm", optional: false]}, {:picosat_elixir, "~> 0.2", [hex: :picosat_elixir, repo: "hexpm", optional: false]}, {:plug, ">= 0.0.0", [hex: :plug, repo: "hexpm", optional: true]}, {:spark, ">= 1.1.20 and < 2.0.0-0", [hex: :spark, repo: "hexpm", optional: false]}, {:stream_data, "~> 0.5.0", [hex: :stream_data, repo: "hexpm", optional: false]}, {:telemetry, "~> 1.1", [hex: :telemetry, repo: "hexpm", optional: false]}], "hexpm", "0d047e50bdaec3f0a182c5449acae6bfaf7587a0156eaa665bb9d897aca6708a"},
"bunt": {:hex, :bunt, "0.2.1", "e2d4792f7bc0ced7583ab54922808919518d0e57ee162901a16a1b6664ef3b14", [:mix], [], "hexpm", "a330bfb4245239787b15005e66ae6845c9cd524a288f0d141c148b02603777a5"},
"comparable": {:hex, :comparable, "1.0.0", "bb669e91cedd14ae9937053e5bcbc3c52bb2f22422611f43b6e38367d94a495f", [:mix], [{:typable, "~> 0.1", [hex: :typable, repo: "hexpm", optional: false]}], "hexpm", "277c11eeb1cd726e7cd41c6c199e7e52fa16ee6830b45ad4cdc62e51f62eb60c"},
"credo": {:hex, :credo, "1.7.0", "6119bee47272e85995598ee04f2ebbed3e947678dee048d10b5feca139435f75", [:mix], [{:bunt, "~> 0.2.1", [hex: :bunt, repo: "hexpm", optional: false]}, {:file_system, "~> 0.2.8", [hex: :file_system, repo: "hexpm", optional: false]}, {:jason, "~> 1.0", [hex: :jason, repo: "hexpm", optional: false]}], "hexpm", "6839fcf63d1f0d1c0f450abc8564a57c43d644077ab96f2934563e68b8a769d7"},
"cubdb": {:hex, :cubdb, "2.0.2", "d4253885084dae37a8ff73887d232864eb38ecac962aa08543e686b0183a1d62", [:mix], [], "hexpm", "c99cc8f9e6c4deb98d16cca5ded1928edd22e48b4736b76e8a1a85367d7fe921"},
"decimal": {:hex, :decimal, "2.1.1", "5611dca5d4b2c3dd497dec8f68751f1f1a54755e8ed2a966c2633cf885973ad6", [:mix], [], "hexpm", "53cfe5f497ed0e7771ae1a475575603d77425099ba5faef9394932b35020ffcc"},
"dialyxir": {:hex, :dialyxir, "1.3.0", "fd1672f0922b7648ff9ce7b1b26fcf0ef56dda964a459892ad15f6b4410b5284", [:mix], [{:erlex, ">= 0.2.6", [hex: :erlex, repo: "hexpm", optional: false]}], "hexpm", "00b2a4bcd6aa8db9dcb0b38c1225b7277dca9bc370b6438715667071a304696f"},
"dialyxir": {:hex, :dialyxir, "1.4.1", "a22ed1e7bd3a3e3f197b68d806ef66acb61ee8f57b3ac85fc5d57354c5482a93", [:mix], [{:erlex, ">= 0.2.6", [hex: :erlex, repo: "hexpm", optional: false]}], "hexpm", "84b795d6d7796297cca5a3118444b80c7d94f7ce247d49886e7c291e1ae49801"},
"doctor": {:hex, :doctor, "0.21.0", "20ef89355c67778e206225fe74913e96141c4d001cb04efdeba1a2a9704f1ab5", [:mix], [{:decimal, "~> 2.0", [hex: :decimal, repo: "hexpm", optional: false]}], "hexpm", "a227831daa79784eb24cdeedfa403c46a4cb7d0eab0e31232ec654314447e4e0"},
"earmark": {:hex, :earmark, "1.4.39", "acdb2f02c536471029dbcc509fbd6b94b89f40ad7729fb3f68f4b6944843f01d", [:mix], [{:earmark_parser, "~> 1.4.33", [hex: :earmark_parser, repo: "hexpm", optional: false]}], "hexpm", "156c9d8ec3cbeccdbf26216d8247bdeeacc8c76b4d9eee7554be2f1b623ea440"},
"earmark_parser": {:hex, :earmark_parser, "1.4.33", "3c3fd9673bb5dcc9edc28dd90f50c87ce506d1f71b70e3de69aa8154bc695d44", [:mix], [], "hexpm", "2d526833729b59b9fdb85785078697c72ac5e5066350663e5be6a1182da61b8f"},
"earmark": {:hex, :earmark, "1.4.40", "ff1a0f8bf3b298113c2a257c4e7a8b29ba9db5d35f5ee6d29291cb8caa09a071", [:mix], [{:earmark_parser, "~> 1.4.35", [hex: :earmark_parser, repo: "hexpm", optional: false]}], "hexpm", "5fb622d5e36046bc313a426211e8bf769ba50db7720744859a21932c6470d75c"},
"earmark_parser": {:hex, :earmark_parser, "1.4.35", "437773ca9384edf69830e26e9e7b2e0d22d2596c4a6b17094a3b29f01ea65bb8", [:mix], [], "hexpm", "8652ba3cb85608d0d7aa2d21b45c6fad4ddc9a1f9a1f1b30ca3a246f0acc33f6"},
"ecto": {:hex, :ecto, "3.10.3", "eb2ae2eecd210b4eb8bece1217b297ad4ff824b4384c0e3fdd28aaf96edd6135", [:mix], [{:decimal, "~> 1.6 or ~> 2.0", [hex: :decimal, repo: "hexpm", optional: false]}, {:jason, "~> 1.0", [hex: :jason, repo: "hexpm", optional: true]}, {:telemetry, "~> 0.4 or ~> 1.0", [hex: :telemetry, repo: "hexpm", optional: false]}], "hexpm", "44bec74e2364d491d70f7e42cd0d690922659d329f6465e89feb8a34e8cd3433"},
"elixir_make": {:hex, :elixir_make, "0.7.7", "7128c60c2476019ed978210c245badf08b03dbec4f24d05790ef791da11aa17c", [:mix], [{:castore, "~> 0.1 or ~> 1.0", [hex: :castore, repo: "hexpm", optional: true]}], "hexpm", "5bc19fff950fad52bbe5f211b12db9ec82c6b34a9647da0c2224b8b8464c7e6c"},
"erlex": {:hex, :erlex, "0.2.6", "c7987d15e899c7a2f34f5420d2a2ea0d659682c06ac607572df55a43753aa12e", [:mix], [], "hexpm", "2ed2e25711feb44d52b17d2780eabf998452f6efda104877a3881c2f8c0c0c75"},
"ets": {:hex, :ets, "0.8.1", "8ff9bcda5682b98493f8878fc9dbd990e48d566cba8cce59f7c2a78130da29ea", [:mix], [], "hexpm", "6be41b50adb5bc5c43626f25ea2d0af1f4a242fb3fad8d53f0c67c20b78915cc"},
"ex_check": {:hex, :ex_check, "0.15.0", "074b94c02de11c37bba1ca82ae5cc4926e6ccee862e57a485b6ba60fca2d8dc1", [:mix], [], "hexpm", "33848031a0c7e4209c3b4369ce154019788b5219956220c35ca5474299fb6a0e"},
"ex_doc": {:hex, :ex_doc, "0.30.4", "e8395c8e3c007321abb30a334f9f7c0858d80949af298302daf77553468c0c39", [:mix], [{:earmark_parser, "~> 1.4.31", [hex: :earmark_parser, repo: "hexpm", optional: false]}, {:makeup_elixir, "~> 0.14", [hex: :makeup_elixir, repo: "hexpm", optional: false]}, {:makeup_erlang, "~> 0.1", [hex: :makeup_erlang, repo: "hexpm", optional: false]}], "hexpm", "9a19f0c50ffaa02435668f5242f2b2a61d46b541ebf326884505dfd3dd7af5e4"},
"ex_doc": {:hex, :ex_doc, "0.30.6", "5f8b54854b240a2b55c9734c4b1d0dd7bdd41f71a095d42a70445c03cf05a281", [:mix], [{:earmark_parser, "~> 1.4.31", [hex: :earmark_parser, repo: "hexpm", optional: false]}, {:makeup_elixir, "~> 0.14", [hex: :makeup_elixir, repo: "hexpm", optional: false]}, {:makeup_erlang, "~> 0.1", [hex: :makeup_erlang, repo: "hexpm", optional: false]}], "hexpm", "bd48f2ddacf4e482c727f9293d9498e0881597eae6ddc3d9562bd7923375109f"},
"faker": {:hex, :faker, "0.17.0", "671019d0652f63aefd8723b72167ecdb284baf7d47ad3a82a15e9b8a6df5d1fa", [:mix], [], "hexpm", "a7d4ad84a93fd25c5f5303510753789fc2433ff241bf3b4144d3f6f291658a6a"},
"file_system": {:hex, :file_system, "0.2.10", "fb082005a9cd1711c05b5248710f8826b02d7d1784e7c3451f9c1231d4fc162d", [:mix], [], "hexpm", "41195edbfb562a593726eda3b3e8b103a309b733ad25f3d642ba49696bf715dc"},
"git_cli": {:hex, :git_cli, "0.3.0", "a5422f9b95c99483385b976f5d43f7e8233283a47cda13533d7c16131cb14df5", [:mix], [], "hexpm", "78cb952f4c86a41f4d3511f1d3ecb28edb268e3a7df278de2faa1bd4672eaf9b"},
"git_ops": {:hex, :git_ops, "2.6.0", "e0791ee1cf5db03f2c61b7ebd70e2e95cba2bb9b9793011f26609f22c0900087", [:mix], [{:git_cli, "~> 0.2", [hex: :git_cli, repo: "hexpm", optional: false]}, {:nimble_parsec, "~> 1.0", [hex: :nimble_parsec, repo: "hexpm", optional: false]}], "hexpm", "b98fca849b18aaf490f4ac7d1dd8c6c469b0cc3e6632562d366cab095e666ffe"},
@@ -18,7 +24,16 @@
"makeup_elixir": {:hex, :makeup_elixir, "0.16.1", "cc9e3ca312f1cfeccc572b37a09980287e243648108384b97ff2b76e505c3555", [:mix], [{:makeup, "~> 1.0", [hex: :makeup, repo: "hexpm", optional: false]}, {:nimble_parsec, "~> 1.2.3 or ~> 1.3", [hex: :nimble_parsec, repo: "hexpm", optional: false]}], "hexpm", "e127a341ad1b209bd80f7bd1620a15693a9908ed780c3b763bccf7d200c767c6"},
"makeup_erlang": {:hex, :makeup_erlang, "0.1.2", "ad87296a092a46e03b7e9b0be7631ddcf64c790fa68a9ef5323b6cbb36affc72", [:mix], [{:makeup, "~> 1.0", [hex: :makeup, repo: "hexpm", optional: false]}], "hexpm", "f3f5a1ca93ce6e092d92b6d9c049bcda58a3b617a8d888f8e7231c85630e8108"},
"mix_audit": {:hex, :mix_audit, "2.1.1", "653aa6d8f291fc4b017aa82bdb79a4017903902ebba57960ef199cbbc8c008a1", [:make, :mix], [{:jason, "~> 1.1", [hex: :jason, repo: "hexpm", optional: false]}, {:yaml_elixir, "~> 2.9", [hex: :yaml_elixir, repo: "hexpm", optional: false]}], "hexpm", "541990c3ab3a7bb8c4aaa2ce2732a4ae160ad6237e5dcd5ad1564f4f85354db1"},
"nimble_options": {:hex, :nimble_options, "1.0.2", "92098a74df0072ff37d0c12ace58574d26880e522c22801437151a159392270e", [:mix], [], "hexpm", "fd12a8db2021036ce12a309f26f564ec367373265b53e25403f0ee697380f1b8"},
"nimble_parsec": {:hex, :nimble_parsec, "1.3.1", "2c54013ecf170e249e9291ed0a62e5832f70a476c61da16f6aac6dca0189f2af", [:mix], [], "hexpm", "2682e3c0b2eb58d90c6375fc0cc30bc7be06f365bf72608804fb9cffa5e1b167"},
"picosat_elixir": {:hex, :picosat_elixir, "0.2.3", "bf326d0f179fbb3b706bb2c15fbc367dacfa2517157d090fdfc32edae004c597", [:make, :mix], [{:elixir_make, "~> 0.6", [hex: :elixir_make, repo: "hexpm", optional: false]}], "hexpm", "f76c9db2dec9d2561ffaa9be35f65403d53e984e8cd99c832383b7ab78c16c66"},
"recase": {:hex, :recase, "0.7.0", "3f2f719f0886c7a3b7fe469058ec539cb7bbe0023604ae3bce920e186305e5ae", [:mix], [], "hexpm", "36f5756a9f552f4a94b54a695870e32f4e72d5fad9c25e61bc4a3151c08a4e0c"},
"smokestack": {:hex, :smokestack, "0.3.1", "f4dbfe28876638c0f185abcd4d4cee299dc091d5c2239b667c65fb785a7d5fb5", [:mix], [{:ash, "~> 2.13", [hex: :ash, repo: "hexpm", optional: false]}, {:recase, "~> 0.7", [hex: :recase, repo: "hexpm", optional: false]}, {:spark, "~> 1.1", [hex: :spark, repo: "hexpm", optional: false]}], "hexpm", "deed6f645859426342723b65169f4794f8c61093423e7b7e014c7de685284f43"},
"sourceror": {:hex, :sourceror, "0.14.0", "b6b8552d0240400d66b6f107c1bab7ac1726e998efc797f178b7b517e928e314", [:mix], [], "hexpm", "809c71270ad48092d40bbe251a133e49ae229433ce103f762a2373b7a10a8d8b"},
"spark": {:hex, :spark, "1.1.39", "f143b84a5b796bf2d83ec8fb4793ee9e66e67510c40d785f9a67050bb88e7677", [:mix], [{:jason, "~> 1.4", [hex: :jason, repo: "hexpm", optional: false]}, {:nimble_options, "~> 0.5 or ~> 1.0", [hex: :nimble_options, repo: "hexpm", optional: false]}, {:sourceror, "~> 0.1", [hex: :sourceror, repo: "hexpm", optional: false]}], "hexpm", "d71bc26014c7e7abcdcf553f4cf7c5a5ff96f8365b1e20be3768ce503aafb203"},
"stream_data": {:hex, :stream_data, "0.5.0", "b27641e58941685c75b353577dc602c9d2c12292dd84babf506c2033cd97893e", [:mix], [], "hexpm", "012bd2eec069ada4db3411f9115ccafa38540a3c78c4c0349f151fc761b9e271"},
"telemetry": {:hex, :telemetry, "1.2.1", "68fdfe8d8f05a8428483a97d7aab2f268aaff24b49e0f599faa091f1d4e7f61c", [:rebar3], [], "hexpm", "dad9ce9d8effc621708f99eac538ef1cbe05d6a874dd741de2e689c47feafed5"},
"typable": {:hex, :typable, "0.3.0", "0431e121d124cd26f312123e313d2689b9a5322b15add65d424c07779eaa3ca1", [:mix], [], "hexpm", "880a0797752da1a4c508ac48f94711e04c86156f498065a83d160eef945858f8"},
"yamerl": {:hex, :yamerl, "0.10.0", "4ff81fee2f1f6a46f1700c0d880b24d193ddb74bd14ef42cb0bcf46e81ef2f8e", [:rebar3], [], "hexpm", "346adb2963f1051dc837a2364e4acf6eb7d80097c0f53cbdc3046ec8ec4b4e6e"},
"yaml_elixir": {:hex, :yaml_elixir, "2.9.0", "9a256da867b37b8d2c1ffd5d9de373a4fda77a32a45b452f1708508ba7bbcb53", [:mix], [{:yamerl, "~> 0.10", [hex: :yamerl, repo: "hexpm", optional: false]}], "hexpm", "0cb0e7d4c56f5e99a6253ed1a670ed0e39c13fc45a6da054033928607ac08dfc"},
}

test/ash_cub_db/data_layer_test.exs

@@ -0,0 +1,141 @@
defmodule AshCubDB.DataLayerTest do
use ExUnit.Case, async: true
alias Ash.Query
alias AshCubDB.Info
alias Support.{Api, Author, Post}
import Support.Factory
require Query
setup do
on_exit(fn ->
AshCubDB.clear(Post)
AshCubDB.clear(Author)
end)
end
describe "transformer" do
test "it correctly infers the data directory" do
assert {:ok, path} = Info.cubdb_directory(Post)
assert path =~ ~r/ash_cubdb\/priv\/cubdb\/post$/
end
end
describe "create" do
test "it creates a record" do
params = params!(Post)
assert {:ok, post} = Post.create(params)
assert [{key, value}] = dump(Post)
assert key == {Ecto.UUID.dump!(post.id)}
assert value == {nil, post.body, post.title}
end
test "it honours context multitenancy" do
insert!(Author, count: 3)
assert {:ok, author} =
Author
|> params!()
|> Author.create(tenant: :tenant)
keys =
dump(Author)
|> Enum.map(&elem(&1, 0))
assert {:tenant, {Ecto.UUID.dump!(author.id)}} in keys
assert Enum.count(keys, &(elem(&1, 0) == nil)) == 3
end
test "it doesn't allow IDs to conflict" do
uuid = Ash.UUID.generate()
params =
params!(Post)
|> Map.put(:id, uuid)
assert {:ok, %{id: ^uuid}} = Post.create(params)
assert {:error, invalid} = Post.create(params)
assert Exception.message(invalid) =~ "id: has already been taken"
end
end
describe "upsert" do
test "it creates a record" do
params = params!(Post)
assert {:ok, post} = Post.create(params, upsert?: true)
assert [{key, value}] = dump(Post)
assert key == {Ecto.UUID.dump!(post.id)}
assert value == {nil, post.body, post.title}
end
test "it updates an existing record" do
params = params!(Post)
assert {:ok, post} = Post.create(params)
params =
params
|> Map.put(:title, Faker.Lorem.sentence())
|> Map.put(:id, post.id)
assert {:ok, updated} = Post.create(params, upsert?: true)
assert updated.id == post.id
assert updated.title == params[:title]
assert updated.title != post.title
end
end
describe "read" do
test "non-tenant scoped read" do
expected = insert!(Post, count: 3)
assert {:ok, actual} = Post.read()
assert Enum.all?(actual, &is_struct(&1, Post))
for post <- expected do
assert post.id in Enum.map(actual, & &1.id)
end
end
test "tenant scoped read" do
insert!(Author, count: 3)
expected =
Author
|> params!(count: 3)
|> Enum.map(&Author.create!(&1, tenant: :wat))
assert {:ok, actual} = Author.read(tenant: :wat)
expected_ids = expected |> Enum.map(& &1.id) |> Enum.sort()
actual_ids = actual |> Enum.map(& &1.id) |> Enum.sort()
assert expected_ids == actual_ids
end
test "filters work" do
expected = insert!(Author, attrs: %{name: "Marty McFly"})
insert!(Author, count: 3)
[actual] =
Author
|> Query.filter(name: "Marty McFly")
|> Api.read!()
assert expected.id == actual.id
end
end
defp dump(resource) do
resource
|> via()
|> CubDB.select()
|> Enum.reject(&(elem(&1, 0) == :__metadata__))
|> Enum.to_list()
end
defp via(resource), do: {:via, Registry, {AshCubDB.Registry, resource}}
end

test/ash_cub_db_test.exs

@@ -1,8 +1,4 @@
defmodule AshCubDBTest do
use ExUnit.Case
doctest AshCubDB
test "greets the world" do
assert AshCubDB.hello() == :world
end
end

9
test/support/api.ex Normal file
@@ -0,0 +1,9 @@
defmodule Support.Api do
@moduledoc false
use Ash.Api
resources do
resource(Support.Author)
resource(Support.Post)
end
end

34
test/support/author.ex Normal file
@@ -0,0 +1,34 @@
defmodule Support.Author do
@moduledoc false
use Ash.Resource, data_layer: AshCubDB.DataLayer
cubdb do
otp_app :ash_cubdb
end
multitenancy do
strategy(:context)
global?(true)
end
attributes do
uuid_primary_key(:id)
attribute(:name, :ci_string)
end
relationships do
has_many(:posts, Support.Post)
end
actions do
defaults(~w[create read]a)
end
code_interface do
define_for(Support.Api)
define(:create)
define(:read)
end
end

13
test/support/factory.ex Normal file
@@ -0,0 +1,13 @@
defmodule Support.Factory do
@moduledoc false
use Smokestack
factory Support.Post do
attribute(:title, &Faker.Lorem.sentence/0)
attribute(:body, &Faker.Lorem.paragraph/0)
end
factory Support.Author do
attribute(:name, &Faker.Person.name/0)
end
end

33
test/support/post.ex Normal file
@@ -0,0 +1,33 @@
defmodule Support.Post do
@moduledoc false
use Ash.Resource, data_layer: AshCubDB.DataLayer
cubdb do
otp_app :ash_cubdb
end
attributes do
uuid_primary_key(:id) do
writable?(true)
end
attribute(:title, :string)
attribute(:body, :string)
end
actions do
# defaults ~w[create read update destroy]a
defaults(~w[create read]a)
end
relationships do
belongs_to(:author, Support.Author)
end
code_interface do
define_for(Support.Api)
define(:create)
define(:read)
end
end