Compare commits

...

119 commits
v0.1.0 ... main

Author SHA1 Message Date
Renovate Bot 6e9d4b00e1 chore(deps): update dependency spark to v2.2.6
All checks were successful
continuous-integration/drone/pr Build is passing
continuous-integration/drone/push Build is passing
2024-06-29 11:40:00 +12:00
Renovate Bot 5532eb56b6 chore(deps): update dependency spark to v2.2.5
All checks were successful
continuous-integration/drone/pr Build is passing
continuous-integration/drone/push Build is passing
2024-06-26 07:30:43 +12:00
Renovate Bot 811846e703 chore(deps): update dependency ash to v3.0.16
All checks were successful
continuous-integration/drone/pr Build is passing
continuous-integration/drone/push Build is passing
2024-06-22 07:11:28 +12:00
Renovate Bot cc47f70b86 chore(deps): update dependency spark to v2.2.4
All checks were successful
continuous-integration/drone/pr Build is passing
continuous-integration/drone/push Build is passing
2024-06-21 06:11:26 +12:00
Renovate Bot cdbc0f6064 chore(deps): update dependency ex_doc to v0.34.1
All checks were successful
continuous-integration/drone/pr Build is passing
continuous-integration/drone/push Build is passing
2024-06-21 02:11:24 +12:00
Renovate Bot 443a8a28d1 chore(deps): update dependency ash to v3.0.15
All checks were successful
continuous-integration/drone/pr Build is passing
continuous-integration/drone/push Build is passing
2024-06-19 11:11:40 +12:00
Renovate Bot cb31be5603 chore(deps): update dependency elixir to v1.17.1
Some checks failed
continuous-integration/drone/pr Build is failing
continuous-integration/drone/push Build is passing
2024-06-19 00:12:25 +12:00
Renovate Bot b34088d4c7 chore(deps): update dependency spark to v2.2.3
All checks were successful
continuous-integration/drone/pr Build is passing
continuous-integration/drone/push Build is passing
2024-06-18 10:11:33 +12:00
Renovate Bot 3c764a608b chore(deps): update dependency spark to v2.2.2
All checks were successful
continuous-integration/drone/pr Build is passing
continuous-integration/drone/push Build is passing
2024-06-18 06:11:23 +12:00
Renovate Bot 0b20baddd2 chore(deps): update dependency ash to v3.0.13
All checks were successful
continuous-integration/drone/pr Build is passing
continuous-integration/drone/push Build is passing
2024-06-18 04:11:25 +12:00
Renovate Bot 8a446f1199 chore(deps): update dependency elixir to v1.17.0
All checks were successful
continuous-integration/drone/pr Build is passing
continuous-integration/drone/push Build is passing
2024-06-15 13:31:25 +12:00
Renovate Bot 073b639574 chore(deps): update dependency ash to v3.0.12
All checks were successful
continuous-integration/drone/push Build is passing
2024-06-15 12:50:13 +12:00
Renovate Bot 4dcea0221c chore(deps): update dependency credo to v1.7.7
All checks were successful
continuous-integration/drone/pr Build is passing
continuous-integration/drone/push Build is passing
2024-06-15 12:38:27 +12:00
Renovate Bot 438844030b chore(deps): update dependency ash to v3.0.11
All checks were successful
continuous-integration/drone/pr Build is passing
continuous-integration/drone/push Build is passing
2024-06-12 03:11:33 +12:00
Renovate Bot 7b26187a28 chore(deps): update dependency spark to v2.1.24
All checks were successful
continuous-integration/drone/pr Build is passing
continuous-integration/drone/push Build is passing
2024-06-11 03:11:23 +12:00
Renovate Bot 535d73c75d chore(deps): update dependency spark to v2.1.23
All checks were successful
continuous-integration/drone/pr Build is passing
continuous-integration/drone/push Build is passing
2024-06-08 02:11:30 +12:00
Renovate Bot 09e0d5d80f chore(deps): update dependency ash to v3.0.10
All checks were successful
continuous-integration/drone/pr Build is passing
continuous-integration/drone/push Build is passing
2024-06-07 06:21:41 +12:00
Renovate Bot d257bf948d chore(deps): update dependency ash to v3.0.9
All checks were successful
continuous-integration/drone/pr Build is passing
continuous-integration/drone/push Build is passing
2024-06-01 03:11:24 +12:00
Renovate Bot 867c38062c chore(deps): update dependency smokestack to v0.9.1
All checks were successful
continuous-integration/drone/pr Build is passing
continuous-integration/drone/push Build is passing
2024-05-31 09:11:23 +12:00
Renovate Bot fc1f834e0e chore(deps): update dependency ex_doc to v0.34.0
All checks were successful
continuous-integration/drone/pr Build is passing
continuous-integration/drone/push Build is passing
2024-05-31 01:11:19 +12:00
Renovate Bot 7f7722d9f3 chore(deps): update dependency smokestack to ~> 0.9.0
All checks were successful
continuous-integration/drone/pr Build is passing
continuous-integration/drone/push Build is passing
2024-05-29 10:11:31 +12:00
Renovate Bot ab8c035786 chore(deps): update dependency ash to v3.0.8
All checks were successful
continuous-integration/drone/pr Build is passing
continuous-integration/drone/push Build is passing
2024-05-29 07:11:28 +12:00
Renovate Bot 8e83412e7a chore(deps): update dependency smokestack to v0.8.1
All checks were successful
continuous-integration/drone/pr Build is passing
continuous-integration/drone/push Build is passing
2024-05-28 17:11:30 +12:00
Renovate Bot bfc16b6aab chore(deps): update dependency smokestack to ~> 0.8.0
All checks were successful
continuous-integration/drone/pr Build is passing
continuous-integration/drone/push Build is passing
2024-05-26 21:11:21 +12:00
Renovate Bot 18f32eac02 chore(deps): update dependency ash to v3.0.7
All checks were successful
continuous-integration/drone/pr Build is passing
continuous-integration/drone/push Build is passing
2024-05-24 18:11:32 +12:00
Renovate Bot a06a887ac5 chore(deps): update dependency ash to v3.0.6
All checks were successful
continuous-integration/drone/pr Build is passing
continuous-integration/drone/push Build is passing
2024-05-24 11:11:31 +12:00
Renovate Bot fcca8ce2f9 chore(deps): update dependency ash to v3.0.5
All checks were successful
continuous-integration/drone/pr Build is passing
continuous-integration/drone/push Build is passing
2024-05-24 03:11:24 +12:00
Renovate Bot fbbb52708e chore(deps): update dependency ash to v3.0.4
All checks were successful
continuous-integration/drone/pr Build is passing
continuous-integration/drone/push Build is passing
2024-05-23 11:11:31 +12:00
Renovate Bot d30cd704be chore(deps): update dependency ash to v3.0.3
All checks were successful
continuous-integration/drone/pr Build is passing
continuous-integration/drone/push Build is passing
2024-05-23 10:11:31 +12:00
Renovate Bot 6ad22b4f17 chore(deps): update dependency ex_doc to v0.33.0
All checks were successful
continuous-integration/drone/pr Build is passing
continuous-integration/drone/push Build is passing
2024-05-22 01:11:17 +12:00
Renovate Bot b4c57add31 chore(deps): update dependency elixir to v1.16.3
All checks were successful
continuous-integration/drone/pr Build is passing
continuous-integration/drone/push Build is passing
2024-05-21 11:11:21 +12:00
Renovate Bot e94be7ebff chore(deps): update dependency erlang to v27
All checks were successful
continuous-integration/drone/pr Build is passing
continuous-integration/drone/push Build is passing
2024-05-20 21:32:34 +12:00
Renovate Bot eaadc6ebc0 chore(deps): update dependency smokestack to ~> 0.7.0
All checks were successful
continuous-integration/drone/pr Build is passing
continuous-integration/drone/push Build is passing
2024-05-20 17:11:28 +12:00
Renovate Bot a07b292295 chore(deps): update dependency ash to v3.0.2
All checks were successful
continuous-integration/drone/pr Build is passing
continuous-integration/drone/push Build is passing
2024-05-16 10:11:24 +12:00
Renovate Bot 4c474f2eb7 chore(deps): update dependency spark to v2.1.22
All checks were successful
continuous-integration/drone/pr Build is passing
continuous-integration/drone/push Build is passing
2024-05-15 04:11:19 +12:00
Renovate Bot ebcfc4a2c9 chore(deps): update dependency ash to v3.0.1
All checks were successful
continuous-integration/drone/pr Build is passing
continuous-integration/drone/push Build is passing
2024-05-15 02:28:19 +12:00
James Harton 6cc4acc9dc
chore: release version v0.6.2
All checks were successful
continuous-integration/drone/push Build is passing
continuous-integration/drone/tag Build is passing
2024-05-11 15:32:18 +12:00
James Harton 45fa64e8bf
chore: update to Ash 3.0 stable. 2024-05-11 15:31:52 +12:00
Renovate Bot d520ffe136 chore(deps): update dependency spark to v2.1.21
All checks were successful
continuous-integration/drone/push Build is passing
2024-05-11 10:39:21 +12:00
Renovate Bot 95295d13aa chore(deps): update dependency git_ops to v2.6.1
All checks were successful
continuous-integration/drone/pr Build is passing
continuous-integration/drone/push Build is passing
2024-05-11 10:29:16 +12:00
Renovate Bot 7e3cb74a95 chore(deps): update dependency ex_doc to v0.32.2
All checks were successful
continuous-integration/drone/pr Build is passing
continuous-integration/drone/push Build is passing
2024-05-10 23:29:31 +12:00
Renovate Bot b9b5feef24 chore(deps): update dependency credo to v1.7.6
All checks were successful
continuous-integration/drone/pr Build is passing
continuous-integration/drone/push Build is passing
2024-05-09 23:48:58 +12:00
Renovate Bot 9efbceeaf7 chore(deps): update dependency erlang to v26.2.5
All checks were successful
continuous-integration/drone/pr Build is passing
continuous-integration/drone/push Build is passing
2024-05-03 03:28:44 +12:00
Renovate Bot 225b596527 chore(deps): update dependency spark to v2.1.20
All checks were successful
continuous-integration/drone/pr Build is passing
continuous-integration/drone/push Build is passing
2024-04-22 05:25:53 +12:00
Renovate Bot daea09231d chore(deps): update dependency spark to v2.1.19
All checks were successful
continuous-integration/drone/pr Build is passing
continuous-integration/drone/push Build is passing
2024-04-22 03:25:58 +12:00
Renovate Bot ec76d46577 chore(deps): update dependency ex_doc to v0.32.1
All checks were successful
continuous-integration/drone/pr Build is passing
continuous-integration/drone/push Build is passing
2024-04-13 08:26:08 +12:00
Renovate Bot 8395b38fb4 chore(deps): update dependency spark to v2.1.18
All checks were successful
continuous-integration/drone/pr Build is passing
continuous-integration/drone/push Build is passing
2024-04-13 06:26:09 +12:00
Renovate Bot d72b656997 chore(deps): update dependency erlang to v26.2.4
All checks were successful
continuous-integration/drone/pr Build is passing
continuous-integration/drone/push Build is passing
2024-04-13 00:27:06 +12:00
Renovate Bot acaa660a3a chore(deps): update dependency spark to v2.1.17
All checks were successful
continuous-integration/drone/pr Build is passing
continuous-integration/drone/push Build is passing
2024-04-12 05:26:02 +12:00
Renovate Bot 8db3da995b chore(deps): update dependency spark to v2.1.16
All checks were successful
continuous-integration/drone/pr Build is passing
continuous-integration/drone/push Build is passing
2024-04-11 05:26:06 +12:00
Renovate Bot 33aa3e9206 chore(deps): update dependency spark to v2.1.15
All checks were successful
continuous-integration/drone/pr Build is passing
continuous-integration/drone/push Build is passing
2024-04-11 04:26:01 +12:00
Renovate Bot 7c356c3b7b chore(deps): update dependency spark to v2.1.14
All checks were successful
continuous-integration/drone/pr Build is passing
continuous-integration/drone/push Build is passing
2024-04-11 02:26:02 +12:00
Renovate Bot dfb453edd5 chore(deps): update dependency ex_doc to v0.32.0
All checks were successful
continuous-integration/drone/pr Build is passing
continuous-integration/drone/push Build is passing
2024-04-10 21:26:19 +12:00
Renovate Bot 973cc7bdbb chore(deps): update dependency spark to v2.1.13
All checks were successful
continuous-integration/drone/pr Build is passing
continuous-integration/drone/push Build is passing
2024-04-06 12:26:07 +13:00
James Harton 7db7b359e7
chore: release version v0.6.1-rc.2
All checks were successful
continuous-integration/drone/pr Build is passing
continuous-integration/drone/push Build is passing
2024-04-03 16:34:26 +13:00
James Harton 4565bfdbf1
improvement: Relax RC version dependencies. 2024-04-03 16:33:53 +13:00
Renovate Bot be8e2d87db chore(deps): update dependency ash to == 3.0.0-rc.8
Some checks failed
renovate/artifacts Artifact file update failure
continuous-integration/drone/pr Build is failing
continuous-integration/drone/push Build is failing
2024-04-03 16:20:56 +13:00
James Harton 08197e5cde
chore: release version v0.6.1-rc.1
All checks were successful
continuous-integration/drone/push Build is passing
2024-04-02 10:40:36 +13:00
James Harton 6355e8da54
fix: deprecation warnings in tests. 2024-04-02 10:40:06 +13:00
James Harton b370512b62
chore(deps): Update ash and smokestack rcs. 2024-04-02 10:39:49 +13:00
James Harton 8115760a49
chore: disable auto-releasing for now. 2024-04-02 10:37:04 +13:00
James Harton 15390ef171
chore: release version v0.6.1-rc.0
All checks were successful
continuous-integration/drone/push Build is passing
2024-03-30 18:05:57 +13:00
James Harton c2ebe18e3c
chore: Update ash and smokestack RC's. 2024-03-30 18:04:54 +13:00
Renovate Bot 69f3ce0608 chore(deps): update dependency spark to v2.1.11
All checks were successful
continuous-integration/drone/pr Build is passing
continuous-integration/drone/push Build is passing
2024-03-30 10:24:16 +13:00
Renovate Bot cb6038223b chore(deps): update dependency spark to v2.1.10
All checks were successful
continuous-integration/drone/pr Build is passing
continuous-integration/drone/push Build is passing
2024-03-30 07:51:09 +13:00
Renovate Bot c32fa9aab2 chore(deps): update dependency spark to v2.1.9
All checks were successful
continuous-integration/drone/pr Build is passing
continuous-integration/drone/push Build is passing
2024-03-29 11:35:29 +13:00
Renovate Bot 6481aec9db chore(deps): update dependency spark to v2.1.8
All checks were successful
continuous-integration/drone/pr Build is passing
continuous-integration/drone/push Build is passing
2024-03-28 11:09:10 +13:00
James Harton 98c3883cb1 chore: release version v0.6.0
All checks were successful
continuous-integration/drone/push Build is passing
2024-03-27 21:37:33 +00:00
James Harton 3aa592c262
chore: release version v0.5.0-rc.0
All checks were successful
continuous-integration/drone/push Build is passing
continuous-integration/drone/tag Build is passing
2024-03-28 10:36:14 +13:00
James Harton 4379a15443
feat!: Ash 3.0 support. 2024-03-28 10:27:46 +13:00
Renovate Bot 258c7df0a1 chore(deps): update dependency ash to v2.21.1
All checks were successful
continuous-integration/drone/pr Build is passing
continuous-integration/drone/push Build is passing
2024-03-25 17:09:19 +13:00
Renovate Bot 333403a7bd chore(deps): update dependency ash to v2.21.0
All checks were successful
continuous-integration/drone/pr Build is passing
continuous-integration/drone/push Build is passing
2024-03-25 13:09:16 +13:00
Renovate Bot ac7de967e1 chore(deps): update dependency smokestack to v0.4.2
All checks were successful
continuous-integration/drone/pr Build is passing
continuous-integration/drone/push Build is passing
2024-03-22 19:21:38 +13:00
Renovate Bot 05aa2e673a chore(deps): update dependency ash to v2.20.3
All checks were successful
continuous-integration/drone/pr Build is passing
continuous-integration/drone/push Build is passing
2024-03-21 09:19:23 +13:00
Renovate Bot f7df16cae5 chore(deps): update dependency ash to v2.20.2
All checks were successful
continuous-integration/drone/pr Build is passing
continuous-integration/drone/push Build is passing
2024-03-19 17:19:31 +13:00
Renovate Bot ab7ebc5f64 chore(deps): update dependency mix_audit to v2.1.3
All checks were successful
continuous-integration/drone/pr Build is passing
continuous-integration/drone/push Build is passing
2024-03-19 14:19:32 +13:00
James Harton 95bbffb767
chore: fix docs release.
All checks were successful
continuous-integration/drone/push Build is passing
2024-03-14 19:21:32 +13:00
Renovate Bot 68fa5d87c5 chore(deps): update dependency mix_audit to v2.1.2
All checks were successful
continuous-integration/drone/push Build is passing
2024-03-14 14:43:43 +13:00
Renovate Bot 313f0908c5 chore(deps): update dependency ash to v2.20.1
All checks were successful
continuous-integration/drone/push Build is passing
2024-03-14 14:36:03 +13:00
Renovate Bot 580d89002c chore(deps): update dependency dialyxir to v1.4.3
All checks were successful
continuous-integration/drone/pr Build is passing
continuous-integration/drone/push Build is passing
2024-03-14 00:38:22 +00:00
Renovate Bot 12b4e77b99 chore(deps): update dependency ex_doc to v0.31.2
All checks were successful
continuous-integration/drone/push Build is passing
2024-03-14 10:22:22 +13:00
Renovate Bot fb8dcd9fe8 chore(deps): update dependency credo to v1.7.5
All checks were successful
continuous-integration/drone/pr Build is passing
continuous-integration/drone/push Build is passing
2024-03-13 20:55:28 +00:00
Renovate Bot 0455f5c4d9 chore(deps): update dependency elixir to v1.16.2
All checks were successful
continuous-integration/drone/pr Build is passing
continuous-integration/drone/push Build is passing
2024-03-11 00:29:12 +13:00
James Harton d6dcf72ce8
chore: fix typo in readme
All checks were successful
continuous-integration/drone/push Build is passing
2024-03-08 14:44:09 +13:00
Renovate Bot 1922576b2b chore(deps): update dependency erlang to v26.2.3
All checks were successful
continuous-integration/drone/pr Build is passing
continuous-integration/drone/push Build is passing
2024-03-07 22:21:38 +13:00
James Harton 3a0729bb74
chore: Update docs and mix links.
All checks were successful
continuous-integration/drone/push Build is passing
2024-03-07 19:25:52 +13:00
Renovate Bot 1a5a0ae77a chore(deps): update dependency ex_check to ~> 0.16
All checks were successful
continuous-integration/drone/pr Build is passing
continuous-integration/drone/push Build is passing
2024-03-01 23:18:10 +13:00
Renovate Bot cd385ab538 chore(deps): update dependency faker to ~> 0.18
All checks were successful
continuous-integration/drone/pr Build is passing
continuous-integration/drone/push Build is passing
2024-02-29 19:28:40 +13:00
Renovate Bot 1acc11e221 chore(deps): update dependency erlang to v26.2.2
All checks were successful
continuous-integration/drone/pr Build is passing
continuous-integration/drone/push Build is passing
2024-02-09 10:35:33 +13:00
James Harton 5a7a78f837 chore: fix spark DSL documentation.
All checks were successful
continuous-integration/drone/push Build is passing
2024-02-05 16:00:30 +13:00
James Harton a502a31983 chore: Update forgejo hostname. 2024-02-05 16:00:21 +13:00
Renovate Bot 9d9aa0e5f9 chore(deps): update dependency elixir to v1.16.1
All checks were successful
continuous-integration/drone/pr Build is passing
continuous-integration/drone/push Build is passing
2024-01-31 23:16:57 +13:00
James Harton 6a7344edb6
docs: Update documentation and documentation configuration.
All checks were successful
continuous-integration/drone/push Build is passing
Thanks Zach!
2024-01-15 10:19:59 +13:00
Renovate Bot 8c26354241 chore(deps): update dependency elixir to v1.16.0
All checks were successful
continuous-integration/drone/push Build is passing
continuous-integration/drone/pr Build is passing
2023-12-23 06:27:50 +13:00
Renovate Bot 63bcf500e2 chore(deps): update dependency erlang to v26.2.1
All checks were successful
continuous-integration/drone/pr Build is passing
continuous-integration/drone/push Build is passing
2023-12-19 05:06:12 +13:00
Renovate Bot 262def5267 chore(deps): update dependency erlang to v26.2
All checks were successful
continuous-integration/drone/pr Build is passing
continuous-integration/drone/push Build is passing
2023-12-13 21:06:49 +13:00
Renovate Bot 466b9d4d3b chore(deps): update dependency elixir to v1.15.7
All checks were successful
continuous-integration/drone/pr Build is passing
continuous-integration/drone/push Build is passing
2023-10-14 23:21:46 +13:00
Renovate Bot c02c99a3e9 chore(deps): update dependency erlang to v26.1.2
Some checks reported errors
continuous-integration/drone/pr Build is passing
continuous-integration/drone/push Build encountered an error
2023-10-12 21:21:44 +13:00
James Harton 8b2f2ade4c chore: release version v0.4.1
All checks were successful
continuous-integration/drone/push Build is passing
2023-10-02 03:19:02 +00:00
James Harton b53fca0be3
docs: update README.md.
All checks were successful
continuous-integration/drone/push Build is passing
2023-10-02 16:15:38 +13:00
James Harton 34dac0337a
fix: correctly enable filtering and sorting. 2023-10-02 16:15:06 +13:00
James Harton 568620c042 chore: release version v0.4.0
All checks were successful
continuous-integration/drone/push Build is passing
2023-10-02 02:43:40 +00:00
James Harton 78587fced6 feat: Support destroying records.
All checks were successful
continuous-integration/drone/push Build is passing
2023-10-02 15:42:13 +13:00
James Harton aa3103739d fix: honour tenancy when updating. 2023-10-02 15:42:13 +13:00
James Harton 74b2291fe2 chore: release version v0.3.1
All checks were successful
continuous-integration/drone/push Build is passing
2023-10-02 02:29:20 +00:00
James Harton 176b6bf0aa
fix: enable sorting capability.
All checks were successful
continuous-integration/drone/push Build is passing
2023-10-02 15:27:02 +13:00
James Harton 4b27f1523c chore: release version v0.3.0
All checks were successful
continuous-integration/drone/push Build is passing
2023-10-02 02:23:00 +00:00
James Harton 6b1954baf6
feat: Support updating existing records.
All checks were successful
continuous-integration/drone/pr Build is passing
continuous-integration/drone/push Build is passing
2023-10-02 15:15:11 +13:00
James Harton 98d722f20d
docs: add current status to README.md. 2023-09-30 19:29:38 +13:00
James Harton 86c573d37e
docs: add DSL cheat sheets to the sidebar. 2023-09-30 10:30:54 +13:00
James Harton 83ff64f256
docs: regenerate Spark DSL cheatsheets.
All checks were successful
continuous-integration/drone/pr Build is passing
continuous-integration/drone/push Build is passing
2023-09-30 10:11:17 +13:00
Renovate Bot e4149b0bbd
chore(deps): update dependency smokestack to ~> 0.4 2023-09-30 10:11:16 +13:00
James Harton e9294ed7fa
chore: tweak .drone.yml.
All checks were successful
continuous-integration/drone/push Build is passing
2023-09-30 10:09:19 +13:00
James Harton 052dc87ef2 chore: release version v0.2.0
All checks were successful
continuous-integration/drone/push Build is passing
2023-09-29 07:31:46 +00:00
James Harton 74d878b70e feat: create and read works.
All checks were successful
continuous-integration/drone/push Build is passing
2023-09-29 20:30:42 +13:00
Renovate Bot cfb8ebb73d chore(deps): update dependency elixir to v1.15.6
All checks were successful
continuous-integration/drone/pr Build is passing
continuous-integration/drone/push Build is passing
2023-09-28 23:21:48 +13:00
Renovate Bot 06279da6c1 chore(deps): update dependency erlang to v26.1.1
Some checks failed
continuous-integration/drone/pr Build is passing
continuous-integration/drone/push Build is failing
2023-09-28 22:21:46 +13:00
Renovate Bot 671bfdd9c3 chore(deps): update dependency elixir to v1.15.5
Some checks failed
continuous-integration/drone/pr Build is passing
continuous-integration/drone/push Build is failing
2023-08-29 00:19:57 +12:00
Renovate Bot 6a784dfd39 chore(deps): add renovate.json
Some checks failed
continuous-integration/drone/pr Build is passing
continuous-integration/drone/push Build is failing
2023-08-07 16:08:51 +12:00
29 changed files with 1781 additions and 335 deletions

34
.check.exs Normal file
View file

@@ -0,0 +1,34 @@
[
## don't run tools concurrently
# parallel: false,
## don't print info about skipped tools
# skipped: false,
## always run tools in fix mode (put it in ~/.check.exs locally, not in project config)
# fix: true,
## don't retry automatically even if last run resulted in failures
# retry: false,
## list of tools (see `mix check` docs for a list of default curated tools)
tools: [
## curated tools may be disabled (e.g. the check for compilation warnings)
# {:compiler, false},
## ...or have command & args adjusted (e.g. enable skip comments for sobelow)
{:sobelow, false},
## ...or reordered (e.g. to see output from dialyzer before others)
# {:dialyzer, order: -1},
## ...or reconfigured (e.g. disable parallel execution of ex_unit in umbrella)
# {:ex_unit, umbrella: [parallel: false]},
## custom new tools may be added (Mix tasks or arbitrary commands)
# {:my_task, "mix my_task", env: %{"MIX_ENV" => "prod"}},
# {:my_tool, ["my_tool", "arg with spaces"]}
{:spark_formatter, "mix spark.formatter --check"},
{:spark_cheat_sheets, "mix spark.cheat_sheets --check"}
]
]

17
.doctor.exs Normal file
View file

@@ -0,0 +1,17 @@
%Doctor.Config{
ignore_modules: [
~r/^Inspect\./,
~r/^Support\./
],
ignore_paths: [],
min_module_doc_coverage: 40,
min_module_spec_coverage: 0,
min_overall_doc_coverage: 50,
min_overall_spec_coverage: 0,
min_overall_moduledoc_coverage: 100,
exception_moduledoc_required: true,
raise: false,
reporter: Doctor.Reporters.Full,
struct_type_spec_required: true,
umbrella: false
}

View file

@@ -26,7 +26,6 @@ steps:
- name: restore build cache
image: meltwater/drone-cache
pull: "always"
environment:
AWS_ACCESS_KEY_ID:
from_secret: ACCESS_KEY_ID
@@ -50,7 +49,7 @@ steps:
- .rebar3
- name: install dependencies
image: code.harton.nz/james/asdf_container:latest
image: harton.dev/james/asdf_container:latest
pull: "always"
environment:
MIX_ENV: test
@@ -65,15 +64,14 @@ steps:
commands:
- asdf_install
- rm -rf .asdf/downloads
- . $ASDF_DIR/asdf.sh
- mix local.hex --if-missing --force
- mix local.rebar --if-missing --force
- mix deps.get
- mix deps.compile
- asdf mix local.hex --if-missing --force
- asdf mix local.rebar --if-missing --force
- asdf mix deps.get
- asdf mix deps.compile
- asdf mix dialyzer --plt
- name: store ASDF cache
image: meltwater/drone-cache
pull: "always"
environment:
AWS_ACCESS_KEY_ID:
from_secret: ACCESS_KEY_ID
@@ -97,7 +95,6 @@ steps:
- name: store build cache
image: meltwater/drone-cache
pull: "always"
environment:
AWS_ACCESS_KEY_ID:
from_secret: ACCESS_KEY_ID
@@ -123,65 +120,8 @@ steps:
- .mix
- .rebar3
---
kind: pipeline
type: docker
name: test
depends_on:
- build
steps:
- name: restore ASDF cache
image: meltwater/drone-cache
pull: "always"
environment:
AWS_ACCESS_KEY_ID:
from_secret: ACCESS_KEY_ID
AWS_SECRET_ACCESS_KEY:
from_secret: SECRET_ACCESS_KEY
AWS_PLUGIN_PATH_STYLE: true
settings:
restore: true
endpoint:
from_secret: S3_ENDPOINT
bucket:
from_secret: CACHE_BUCKET
region: us-east-1
path-style: true
cache_key: 'asdf-{{ os }}-{{ arch }}-{{ checksum ".tool-versions" }}'
mount:
- .asdf
- name: restore build cache
image: meltwater/drone-cache
pull: "always"
environment:
AWS_ACCESS_KEY_ID:
from_secret: ACCESS_KEY_ID
AWS_SECRET_ACCESS_KEY:
from_secret: SECRET_ACCESS_KEY
AWS_PLUGIN_PATH_STYLE: true
settings:
restore: true
endpoint:
from_secret: S3_ENDPOINT
bucket:
from_secret: CACHE_BUCKET
region: us-east-1
path-style: true
cache_key: 'elixir-{{ checksum "mix.lock" }}-{{ checksum ".tool-versions" }}'
mount:
- deps
- _build
- .hex
- .mix
- .rebar3
- name: mix compile
image: code.harton.nz/james/asdf_container:latest
pull: "always"
image: harton.dev/james/asdf_container:latest
environment:
MIX_ENV: test
HEX_HOME: /drone/src/.hex
@@ -189,14 +129,12 @@ steps:
REBAR_BASE_DIR: /drone/src/.rebar3
ASDF_DATA_DIR: /drone/src/.asdf
depends_on:
- restore ASDF cache
- restore build cache
- install dependencies
commands:
- asdf mix compile --warnings-as-errors
- name: mix test
image: code.harton.nz/james/asdf_container:latest
pull: "always"
image: harton.dev/james/asdf_container:latest
environment:
MIX_ENV: test
HEX_HOME: /drone/src/.hex
@@ -209,8 +147,7 @@ steps:
- asdf mix test
- name: mix credo
image: code.harton.nz/james/asdf_container:latest
pull: "always"
image: harton.dev/james/asdf_container:latest
environment:
MIX_ENV: test
HEX_HOME: /drone/src/.hex
@@ -223,8 +160,7 @@ steps:
- asdf mix credo --strict
- name: mix hex.audit
image: code.harton.nz/james/asdf_container:latest
pull: "always"
image: harton.dev/james/asdf_container:latest
environment:
MIX_ENV: test
HEX_HOME: /drone/src/.hex
@@ -237,8 +173,7 @@ steps:
- asdf mix hex.audit
- name: mix format
image: code.harton.nz/james/asdf_container:latest
pull: "always"
image: harton.dev/james/asdf_container:latest
environment:
MIX_ENV: test
HEX_HOME: /drone/src/.hex
@@ -250,9 +185,34 @@ steps:
commands:
- asdf mix format --check-formatted
- name: mix spark.formatter
image: harton.dev/james/asdf_container:latest
environment:
MIX_ENV: test
HEX_HOME: /drone/src/.hex
MIX_HOME: /drone/src/.mix
REBAR_BASE_DIR: /drone/src/.rebar3
ASDF_DATA_DIR: /drone/src/.asdf
depends_on:
- mix compile
commands:
- asdf mix spark.formatter --check
- name: mix spark.cheat_sheets
image: harton.dev/james/asdf_container:latest
environment:
MIX_ENV: test
HEX_HOME: /drone/src/.hex
MIX_HOME: /drone/src/.mix
REBAR_BASE_DIR: /drone/src/.rebar3
ASDF_DATA_DIR: /drone/src/.asdf
depends_on:
- mix compile
commands:
- asdf mix spark.cheat_sheets --check
- name: mix deps.unlock
image: code.harton.nz/james/asdf_container:latest
pull: "always"
image: harton.dev/james/asdf_container:latest
environment:
MIX_ENV: test
HEX_HOME: /drone/src/.hex
@@ -265,8 +225,7 @@ steps:
- asdf mix deps.unlock --check-unused
- name: mix doctor
image: code.harton.nz/james/asdf_container:latest
pull: "always"
image: harton.dev/james/asdf_container:latest
environment:
MIX_ENV: test
HEX_HOME: /drone/src/.hex
@@ -279,8 +238,7 @@ steps:
- asdf mix doctor --full
- name: mix git_ops.check_message
image: code.harton.nz/james/asdf_container:latest
pull: "always"
image: harton.dev/james/asdf_container:latest
environment:
MIX_ENV: test
HEX_HOME: /drone/src/.hex
@ -293,202 +251,24 @@ steps:
- git log -1 --format=%s > .last_commit_message
- asdf mix git_ops.check_message .last_commit_message
---
kind: pipeline
type: docker
name: git ops
trigger:
branch:
- main
event:
- push
depends_on:
- test
steps:
- name: restore ASDF cache
image: meltwater/drone-cache
pull: "always"
environment:
AWS_ACCESS_KEY_ID:
from_secret: ACCESS_KEY_ID
AWS_SECRET_ACCESS_KEY:
from_secret: SECRET_ACCESS_KEY
AWS_PLUGIN_PATH_STYLE: true
settings:
restore: true
endpoint:
from_secret: S3_ENDPOINT
bucket:
from_secret: CACHE_BUCKET
region: us-east-1
path-style: true
cache_key: 'asdf-{{ os }}-{{ arch }}-{{ checksum ".tool-versions" }}'
mount:
- .asdf
- name: restore build cache
image: meltwater/drone-cache
pull: "always"
environment:
AWS_ACCESS_KEY_ID:
from_secret: ACCESS_KEY_ID
AWS_SECRET_ACCESS_KEY:
from_secret: SECRET_ACCESS_KEY
AWS_PLUGIN_PATH_STYLE: true
settings:
restore: true
endpoint:
from_secret: S3_ENDPOINT
bucket:
from_secret: CACHE_BUCKET
region: us-east-1
path-style: true
cache_key: 'elixir-{{ checksum "mix.lock" }}-{{ checksum ".tool-versions" }}'
mount:
- deps
- _build
- .hex
- .mix
- .rebar3
- name: mix git_ops.release
image: code.harton.nz/james/asdf_container:latest
pull: "always"
depends_on:
- restore ASDF cache
- restore build cache
environment:
MIX_ENV: test
HEX_HOME: /drone/src/.hex
MIX_HOME: /drone/src/.mix
REBAR_BASE_DIR: /drone/src/.rebar3
ASDF_DATA_DIR: /drone/src/.asdf
ASDF_DIR: /root/.asdf
DRONE_TOKEN:
from_secret: DRONE_TOKEN
commands:
- git fetch --tags
- . $ASDF_DIR/asdf.sh
- mix git_ops.project_info --format=shell > before.env
- mix git_ops.release --yes --no-major || true
- mix git_ops.project_info --format=shell > after.env
- . ./before.env
- export OLD_APP_VERSION=$${APP_VERSION}
- . ./after.env
- export NEW_APP_VERSION=$${APP_VERSION}
- if [ "v$${OLD_APP_VERSION}" != "v$${NEW_APP_VERSION}" ]; then
- export GIT_URL=$(echo $DRONE_GIT_HTTP_URL | sed -e "s/:\\/\\//:\\/\\/$DRONE_REPO_OWNER:$DRONE_TOKEN@/")
- git push $${GIT_URL} "HEAD:${DRONE_COMMIT_REF}" "refs/tags/v$${NEW_APP_VERSION}"
- fi
---
kind: pipeline
type: docker
name: release
trigger:
ref:
include:
- refs/tags/v**
depends_on:
- test
steps:
- name: restore ASDF cache
image: meltwater/drone-cache
pull: "always"
environment:
AWS_ACCESS_KEY_ID:
from_secret: ACCESS_KEY_ID
AWS_SECRET_ACCESS_KEY:
from_secret: SECRET_ACCESS_KEY
AWS_PLUGIN_PATH_STYLE: true
settings:
restore: true
endpoint:
from_secret: S3_ENDPOINT
bucket:
from_secret: CACHE_BUCKET
region: us-east-1
path-style: true
cache_key: 'asdf-{{ os }}-{{ arch }}-{{ checksum ".tool-versions" }}'
mount:
- .asdf
- name: restore build cache
image: meltwater/drone-cache
pull: "always"
environment:
AWS_ACCESS_KEY_ID:
from_secret: ACCESS_KEY_ID
AWS_SECRET_ACCESS_KEY:
from_secret: SECRET_ACCESS_KEY
AWS_PLUGIN_PATH_STYLE: true
settings:
restore: true
endpoint:
from_secret: S3_ENDPOINT
bucket:
from_secret: CACHE_BUCKET
region: us-east-1
path-style: true
cache_key: 'elixir-{{ checksum "mix.lock" }}-{{ checksum ".tool-versions" }}'
mount:
- deps
- _build
- .hex
- .mix
- .rebar3
- name: build artifacts
image: code.harton.nz/james/asdf_container:latest
pull: "always"
depends_on:
- restore ASDF cache
- restore build cache
environment:
MIX_ENV: test
HEX_HOME: /drone/src/.hex
MIX_HOME: /drone/src/.mix
REBAR_BASE_DIR: /drone/src/.rebar3
ASDF_DATA_DIR: /drone/src/.asdf
ASDF_DIR: /root/.asdf
commands:
- . $ASDF_DIR/asdf.sh
- mix git_ops.project_info --format=shell > app.env
- . ./app.env
- mkdir artifacts
- mix hex.build -o "artifacts/$${APP_NAME}-$${APP_VERSION}-pkg.tar"
- gzip "artifacts/$${APP_NAME}-$${APP_VERSION}-pkg.tar"
- mix docs
- tar zcvf "artifacts/$${APP_NAME}-$${APP_VERSION}-docs.tar.gz" doc/
- git tag -l --format='%(contents:subject)' v$${APP_VERSION} > tag_subject
- git tag -l --format='%(contents:body)' v$${APP_VERSION} > tag_body
- name: gitea release
image: plugins/gitea-release
depends_on:
- build artifacts
settings:
api_key:
from_secret: DRONE_TOKEN
base_url: https://code.harton.nz
files: artifacts/*.tar.gz
checksum: sha256
title: tag_subject
note: tag_body
# - name: hex release
# image: code.harton.nz/james/asdf_container:latest
# pull: "always"
# - name: mix git_ops.release
# image: harton.dev/james/asdf_container:latest
# when:
# branch:
# - main
# event:
# exclude:
# - pull_request
# depends_on:
# - restore ASDF cache
# - restore build cache
# - mix test
# - mix credo
# - mix hex.audit
# - mix format
# - mix spark.formatter
# - mix spark.cheat_sheets
# - mix deps.unlock
# - mix doctor
# - mix git_ops.check_message
# environment:
# MIX_ENV: test
# HEX_HOME: /drone/src/.hex
@ -496,8 +276,118 @@ steps:
# REBAR_BASE_DIR: /drone/src/.rebar3
# ASDF_DATA_DIR: /drone/src/.asdf
# ASDF_DIR: /root/.asdf
# HEX_API_KEY:
# from_secret: HEX_API_KEY
# DRONE_TOKEN:
# from_secret: DRONE_TOKEN
# commands:
# - . $ASDF_DIR/asdf.sh
# - mix hex.publish --yes
# - git fetch --tags
# - asdf mix git_ops.project_info --format=shell > before.env
# - asdf mix git_ops.release --yes --no-major || true
# - asdf mix git_ops.project_info --format=shell > after.env
# - . ./before.env
# - export OLD_APP_VERSION=$${APP_VERSION}
# - . ./after.env
# - export NEW_APP_VERSION=$${APP_VERSION}
# - if [ "v$${OLD_APP_VERSION}" != "v$${NEW_APP_VERSION}" ]; then
# - export GIT_URL=$(echo $DRONE_GIT_HTTP_URL | sed -e "s/:\\/\\//:\\/\\/$DRONE_REPO_OWNER:$DRONE_TOKEN@/")
# - git push $${GIT_URL} "HEAD:${DRONE_COMMIT_REF}" "refs/tags/v$${NEW_APP_VERSION}"
# - fi
- name: build artifacts
image: harton.dev/james/asdf_container:latest
when:
event:
- tag
refs:
include:
- refs/tags/v*
depends_on:
- mix test
- mix credo
- mix hex.audit
- mix format
- mix spark.formatter
- mix spark.cheat_sheets
- mix deps.unlock
- mix doctor
- mix git_ops.check_message
environment:
MIX_ENV: test
HEX_HOME: /drone/src/.hex
MIX_HOME: /drone/src/.mix
REBAR_BASE_DIR: /drone/src/.rebar3
ASDF_DATA_DIR: /drone/src/.asdf
ASDF_DIR: /root/.asdf
commands:
- asdf mix git_ops.project_info --format=shell > app.env
- . ./app.env
- mkdir artifacts
- asdf mix hex.build -o "artifacts/$${APP_NAME}-$${APP_VERSION}-pkg.tar"
- gzip "artifacts/$${APP_NAME}-$${APP_VERSION}-pkg.tar"
- asdf mix docs
- tar zcvf "artifacts/$${APP_NAME}-$${APP_VERSION}-docs.tar.gz" doc/
- git tag -l --format='%(contents:subject)' v$${APP_VERSION} > tag_subject
- git tag -l --format='%(contents:body)' v$${APP_VERSION} > tag_body
- name: gitea release
image: plugins/gitea-release
when:
event:
- tag
refs:
include:
- refs/tags/v*
depends_on:
- build artifacts
settings:
api_key:
from_secret: DRONE_TOKEN
base_url: https://harton.dev
files: artifacts/*.tar.gz
checksum: sha256
title: tag_subject
note: tag_body
- name: docs release
when:
event:
- tag
refs:
include:
- refs/tags/v*
image: minio/mc
environment:
S3_ENDPOINT:
from_secret: S3_ENDPOINT
ACCESS_KEY:
from_secret: ACCESS_KEY_ID
SECRET_KEY:
from_secret: SECRET_ACCESS_KEY
depends_on:
- build artifacts
commands:
- mc alias set store $${S3_ENDPOINT} $${ACCESS_KEY} $${SECRET_KEY}
- mc mb -p store/docs.harton.nz
- mc mirror --overwrite doc/ store/docs.harton.nz/$${DRONE_REPO}/$${DRONE_TAG}
- mc mirror --overwrite doc/ store/docs.harton.nz/$${DRONE_REPO}
- name: hex release
image: harton.dev/james/asdf_container:latest
when:
event:
- tag
refs:
include:
- refs/tags/v*
depends_on:
- build artifacts
environment:
MIX_ENV: test
HEX_HOME: /drone/src/.hex
MIX_HOME: /drone/src/.mix
REBAR_BASE_DIR: /drone/src/.rebar3
ASDF_DATA_DIR: /drone/src/.asdf
ASDF_DIR: /root/.asdf
HEX_API_KEY:
from_secret: HEX_API_KEY
commands:
- asdf mix hex.publish --yes

View file

@ -1,4 +1,20 @@
# Used by "mix format"
[
inputs: ["{mix,.formatter}.exs", "{config,lib,test}/**/*.{ex,exs}"]
spark_locals_without_parens = [
auto_compact?: 1,
auto_file_sync?: 1,
directory: 1,
name: 1,
otp_app: 1
]
[
import_deps: [:ash, :spark],
inputs: [
"*.{ex,exs}",
"{config,lib,test}/**/*.{ex,exs}"
],
plugins: [Spark.Formatter],
locals_without_parens: spark_locals_without_parens,
export: [
locals_without_parens: spark_locals_without_parens
]
]

View file

@ -1,2 +1,2 @@
erlang 26.0.2
elixir 1.15.4
erlang 27.0
elixir 1.17.1

View file

@ -5,7 +5,104 @@ See [Conventional Commits](Https://conventionalcommits.org) for commit guideline
<!-- changelog -->
## [v0.1.0](https://code.harton.nz/james/ash_cubdb/compare/v0.1.0...v0.1.0) (2023-08-07)
## [v0.6.2](https://harton.dev/james/ash_cubdb/compare/v0.6.1-rc.2...v0.6.2) (2024-05-11)
### Bug Fixes:
* deprecation warnings in tests.
### Improvements:
* Relax RC version dependencies.
## [v0.6.1-rc.2](https://harton.dev/james/ash_cubdb/compare/v0.6.1-rc.1...v0.6.1-rc.2) (2024-04-03)
### Improvements:
* Relax RC version dependencies.
## [v0.6.1-rc.1](https://harton.dev/james/ash_cubdb/compare/v0.6.1-rc.0...v0.6.1-rc.1) (2024-04-01)
### Bug Fixes:
* deprecation warnings in tests.
## [v0.6.1-rc.0](https://harton.dev/james/ash_cubdb/compare/v0.6.0...v0.6.1-rc.0) (2024-03-30)
## [v0.6.0](https://harton.dev/james/ash_cubdb/compare/v0.5.0-rc.0...v0.6.0) (2024-03-27)
## [v0.5.0-rc.0](https://harton.dev/james/ash_cubdb/compare/v0.4.1...v0.5.0-rc.0) (2024-03-27)
### Breaking Changes:
* Ash 3.0 support.
## [v0.4.1](https://harton.dev/james/ash_cubdb/compare/v0.4.0...v0.4.1) (2023-10-02)
### Bug Fixes:
* correctly enable filtering and sorting.
## [v0.4.0](https://harton.dev/james/ash_cubdb/compare/v0.3.1...v0.4.0) (2023-10-02)
### Features:
* Support destroying records.
### Bug Fixes:
* honour tenancy when updating.
## [v0.3.1](https://harton.dev/james/ash_cubdb/compare/v0.3.0...v0.3.1) (2023-10-02)
### Bug Fixes:
* enable sorting capability.
## [v0.3.0](https://harton.dev/james/ash_cubdb/compare/v0.2.0...v0.3.0) (2023-10-02)
### Features:
* Support updating existing records.
## [v0.2.0](https://harton.dev/james/ash_cubdb/compare/v0.1.0...v0.2.0) (2023-09-29)
### Features:
* `create` and `read` works.
## [v0.1.0](https://harton.dev/james/ash_cubdb/compare/v0.1.0...v0.1.0) (2023-08-07)

View file

@ -1,24 +1,55 @@
# AshCubDB
[![Build Status](https://drone.harton.nz/api/badges/james/ash_cubdb/status.svg?ref=refs/heads/main)](https://drone.harton.nz/cinder/cinder)
[![Build Status](https://drone.harton.dev/api/badges/james/ash_cubdb/status.svg?ref=refs/heads/main)](https://drone.harton.dev/james/ash_cubdb)
[![Hex.pm](https://img.shields.io/hexpm/v/ash_cubdb.svg)](https://hex.pm/packages/ash_cubdb)
[![Hippocratic License HL3-FULL](https://img.shields.io/static/v1?label=Hippocratic%20License&message=HL3-FULL&labelColor=5e2751&color=bc8c3d)](https://firstdonoharm.dev/version/3/0/full.html)
An [Ash DataLayer](https://ash-hq.org/docs/module/ash/latest/ash-datalayer)
which adds support for [CubDB](https://hex.pm/packages/cubdb).
## Status
AshCubDB is still a work in progress. Feel free to give it a go.
| Feature | Status |
| ----------------------- | ------ |
| Create | ✅ |
| Upsert (by primary key) | ✅ |
| Upsert (by identity) | ❌ |
| Read (all) | ✅ |
| Read (by primary key) | ✅ |
| Read (filters) | ✅ |
| Read (sort) | ✅ |
| Read (distinct sort) | ✅ |
| Read (calculations) | ✅ |
| Read (aggregates) | ❌ |
| Update | ✅ |
| Destroy | ✅ |
| Transactions | ❌ |
## Github Mirror
This repository is mirrored [on Github](https://github.com/jimsynz/ash_cubdb)
from its primary location [on my Forgejo instance](https://harton.dev/james/ash_cubdb).
Feel free to raise issues and open PRs on Github.
## Installation
If [available in Hex](https://hex.pm/docs/publish), the package can be installed
AshCubDB is [available in Hex](https://hex.pm/packages/ash_cubdb), and the package can be installed
by adding `ash_cubdb` to your list of dependencies in `mix.exs`:
```elixir
def deps do
[
{:ash_cubdb, "~> 0.1.0"}
{:ash_cubdb, "~> 0.6.2"}
]
end
```
Documentation for the latest release can be found on
[HexDocs](https://hexdocs.pm/ash_cubdb) and for the `main` branch on
[docs.harton.nz](https://docs.harton.nz/james/ash_cubdb).
## License
This software is licensed under the terms of the

View file

@ -3,7 +3,14 @@ import Config
config :git_ops,
mix_project: Mix.Project.get!(),
changelog_file: "CHANGELOG.md",
repository_url: "https://code.harton.nz/james/ash_cubdb",
repository_url: "https://harton.dev/james/ash_cubdb",
manage_mix_version?: true,
version_tag_prefix: "v",
manage_readme_version: "README.md"
if Mix.env() in ~w[dev test]a do
config :ash_cubdb, ash_domains: [Support.Domain]
config :ash_cubdb, debug_data_layer_capabilities?: true
config :spark, :formatter, remove_parens?: true
end

View file

@ -0,0 +1,104 @@
<!--
This file was generated by Spark. Do not edit it by hand.
-->
# DSL: AshCubDB.DataLayer
A CubDB data layer for Ash.
<!--- ash-hq-hide-start --> <!--- -->
## DSL Documentation
### Index
* cubdb
### Docs
## cubdb
CubDB data layer configuration.
Examples:
```
cubdb do
directory "/opt/storage/my_awesome_resource"
auto_compact? true
auto_file_sync? true
name :my_awesome_resource
end
```
---
* `:directory` - The directory within which to store the CubDB data.
If none is supplied, then one will be automatically generated in the
`priv` directory of the parent OTP application.
* `:otp_app` (`t:atom/0`) - The OTP application in whose `priv` directory data should be stored.
Only used if `directory` is not supplied. When not provided
`Application.get_application/1` will be called for the resource.
* `:auto_compact?` (`t:boolean/0`) - Whether or not to automatically compact the CubDB database.
See [the CubDB documentation](https://hexdocs.pm/cubdb/faq.html#what-is-compaction) for more information. The default value is `true`.
* `:auto_file_sync?` (`t:boolean/0`) - Whether or not to automatically flush the buffer to disk on write.
See [the CubDB documentation](https://hexdocs.pm/cubdb/faq.html#what-does-file-sync-mean) The default value is `true`.
* `:name` (`t:atom/0`) - The name of the CubDB database.
By default this is the name of the resource module, however in some
(rare) circumstances you may wish to specifically name the database.
<!--- ash-hq-hide-stop --> <!--- -->
## cubdb
CubDB data layer configuration.
### Examples
```
cubdb do
directory "/opt/storage/my_awesome_resource"
auto_compact? true
auto_file_sync? true
name :my_awesome_resource
end
```
### Options
| Name | Type | Default | Docs |
|------|------|---------|------|
| [`directory`](#cubdb-directory){: #cubdb-directory } | `nil \| String.t` | | The directory within which to store the CubDB data. If none is supplied, then one will be automatically generated in the `priv` directory of the parent OTP application. |
| [`otp_app`](#cubdb-otp_app){: #cubdb-otp_app } | `atom` | | The OTP application in whose `priv` directory data should be stored. Only used if `directory` is not supplied. When not provided `Application.get_application/1` will be called for the resource. |
| [`auto_compact?`](#cubdb-auto_compact?){: #cubdb-auto_compact? } | `boolean` | `true` | Whether or not to automatically compact the CubDB database. See [the CubDB documentation](https://hexdocs.pm/cubdb/faq.html#what-is-compaction) for more information. |
| [`auto_file_sync?`](#cubdb-auto_file_sync?){: #cubdb-auto_file_sync? } | `boolean` | `true` | Whether or not to automatically flush the buffer to disk on write. See [the CubDB documentation](https://hexdocs.pm/cubdb/faq.html#what-does-file-sync-mean) |
| [`name`](#cubdb-name){: #cubdb-name } | `atom` | | The name of the CubDB database. By default this is the name of the resource module, however in some (rare) circumstances you may wish to specifically name the database. |
<style type="text/css">.spark-required::after { content: "*"; color: red !important; }</style>

View file

@ -1,18 +1,161 @@
defmodule AshCubDB do
@moduledoc """
Documentation for `AshCubDB`.
`AshCubDB` is an [Ash DataLayer](https://ash-hq.org/docs/module/ash/latest/ash-datalayer)
which adds support for persisting Ash resources with [CubDB](https://hex.pm/packages/cubdb).
CubDB is an Elixir-based key value store which supports all Erlang-native
terms. More information can be found in
[the CubDB readme](https://hexdocs.pm/cubdb/readme.html).
"""
alias AshCubDB.{Info, Migration}
@doc """
Hello world.
## Examples
iex> AshCubDB.hello()
:world
Ensure that the CubDB process is running for the specified resource.
"""
def hello do
:world
@spec start(Ash.Resource.t()) :: {:ok, pid} | {:error, any}
def start(resource) do
directory = Info.cubdb_directory!(resource)
auto_compact? = Info.cubdb_auto_compact?(resource)
auto_file_sync? = Info.cubdb_auto_file_sync?(resource)
name = via(resource)
with {:ok, pid} <-
DynamicSupervisor.start_child(
AshCubDB.DynamicSupervisor,
{CubDB, [data_dir: directory, name: name]}
),
:ok <- CubDB.set_auto_compact(pid, auto_compact?),
:ok <- CubDB.set_auto_file_sync(pid, auto_file_sync?),
:ok <- Migration.check(pid, resource) do
{:ok, pid}
else
{:error, {:already_started, pid}} -> {:ok, pid}
{:error, reason} -> {:error, reason}
end
end
@doc """
Stop the CubDB process running for a specific resource.
"""
@spec stop(Ash.Resource.t()) :: :ok
def stop(resource) do
AshCubDB
|> Registry.lookup(resource)
|> Enum.each(&DynamicSupervisor.terminate_child(AshCubDB.DynamicSupervisor, &1))
end
@doc """
Creates a backup of the database into the target directory path.
Wrapper around `CubDB.back_up/2`
"""
@spec back_up(Ash.Resource.t(), Path.t()) :: :ok | {:error, any}
def back_up(resource, target_path) do
case start(resource) do
{:ok, pid} -> CubDB.back_up(pid, target_path)
{:error, reason} -> {:error, reason}
end
end
@doc """
Deletes all entries, resulting in an empty database.
Wrapper around `CubDB.clear/1`
"""
@spec clear(Ash.Resource.t()) :: :ok
def clear(resource) do
case start(resource) do
{:ok, pid} -> CubDB.clear(pid)
_ -> :ok
end
end
@doc """
Runs a database compaction.
Wrapper around `CubDB.compact/1`
"""
@spec compact(Ash.Resource.t()) :: :ok | {:error, any}
def compact(resource) do
case start(resource) do
{:ok, pid} -> CubDB.compact(pid)
{:error, reason} -> {:error, reason}
end
end
@doc """
Returns true if a compaction operation is currently running, false otherwise.
Wrapper around `CubDB.compacting?/1`
"""
@spec compacting?(Ash.Resource.t()) :: boolean
def compacting?(resource) do
case start(resource) do
{:ok, pid} -> CubDB.compacting?(pid)
_ -> false
end
end
@doc """
Returns the path of the current database file.
Wrapper around `CubDB.current_db_file/1`
"""
@spec current_db_file(Ash.Resource.t()) :: String.t()
def current_db_file(resource) do
resource
|> via()
|> CubDB.current_db_file()
end
@doc """
Returns the path of the data directory, as given when the `CubDB` process was started.
Wrapper around `CubDB.data_dir/1`
"""
@spec data_dir(Ash.Resource.t()) :: String.t()
def data_dir(resource) do
resource
|> via()
|> CubDB.data_dir()
end
@doc """
Returns the dirt factor.
Wrapper around `CubDB.dirt_factor/1`
"""
@spec dirt_factor(Ash.Resource.t()) :: float
def dirt_factor(resource) do
resource
|> via()
|> CubDB.dirt_factor()
end
@doc """
Performs an `fsync`, forcing to flush all data that might be buffered by the OS to disk.
Wrapper around `CubDB.file_sync/1`
"""
@spec file_sync(Ash.Resource.t()) :: :ok
def file_sync(resource) do
resource
|> via()
|> CubDB.file_sync()
end
@doc """
Stops a running compaction.
Wrapper around `CubDB.halt_compaction/1`
"""
@spec halt_compaction(Ash.Resource.t()) :: :ok | {:error, :no_compaction_running}
def halt_compaction(resource) do
resource
|> via()
|> CubDB.halt_compaction()
end
defp via(resource), do: {:via, Registry, {AshCubDB.Registry, resource}}
end

View file

@ -1,19 +1,16 @@
defmodule AshCubDB.Application do
# See https://hexdocs.pm/elixir/Application.html
# for more information on OTP Applications
@moduledoc false
use Application
@doc false
@impl true
def start(_type, _args) do
children = [
# Starts a worker by calling: AshCubDB.Worker.start_link(arg)
# {AshCubDB.Worker, arg}
{DynamicSupervisor, strategy: :one_for_one, name: AshCubDB.DynamicSupervisor},
{Registry, keys: :unique, name: AshCubDB.Registry}
]
# See https://hexdocs.pm/elixir/Supervisor.html
# for other strategies and supported options
opts = [strategy: :one_for_one, name: AshCubDB.Supervisor]
Supervisor.start_link(children, opts)
end

View file

@ -0,0 +1,317 @@
defmodule AshCubDB.DataLayer do
  alias AshCubDB.{
    CacheLayoutTransformer,
    ConfigureDirectoryTransformer,
    Dir,
    Dsl,
    Info,
    Query,
    Serde
  }

  alias Ash.{
    Actions.Sort,
    Changeset,
    Error,
    Error.Changes.InvalidAttribute,
    Error.Changes.StaleRecord,
    Error.Invalid.TenantRequired,
    Filter.Runtime,
    Resource
  }

  alias Ecto.Schema.Metadata
  alias Spark.Dsl.Extension
  # `start/1` starts (or looks up) the CubDB process backing a resource.
  import AshCubDB, only: [start: 1]

  @moduledoc """
  A CubDB data layer for Ash.

  <!--- ash-hq-hide-start --> <!--- -->

  ## DSL Documentation

  ### Index

  #{Extension.doc_index(Dsl.sections())}

  ### Docs

  #{Extension.doc(Dsl.sections())}

  <!--- ash-hq-hide-stop --> <!--- -->
  """

  @behaviour Ash.DataLayer

  use Extension,
    sections: Dsl.sections(),
    transformers: [ConfigureDirectoryTransformer, CacheLayoutTransformer]

  @doc false
  @impl true
  # Write capabilities require a writable data directory and reads a
  # readable one (see `AshCubDB.Dir`); the remaining clauses are static
  # feature flags.
  def can?(resource, :create), do: Dir.writable?(resource)
  def can?(resource, :update), do: Dir.writable?(resource)
  def can?(resource, :upsert), do: Dir.writable?(resource)
  def can?(resource, :destroy), do: Dir.writable?(resource)
  def can?(resource, :read), do: Dir.readable?(resource)
  def can?(_, :multitenancy), do: true
  def can?(_, :filter), do: true
  def can?(_, :limit), do: true
  def can?(_, :offset), do: true
  def can?(_, :distinct), do: true
  def can?(_, :distinct_sort), do: true
  def can?(_, {:filter_expr, _}), do: true
  def can?(_, :boolean_filter), do: true
  def can?(_, :sort), do: true
  def can?(_, {:sort, _}), do: true
  def can?(_, :nested_expressions), do: true

  # Catch-all: every other capability is unsupported.  When the
  # `:debug_data_layer_capabilities?` application env is set, dump the
  # refused capability to help diagnose missing features in development.
  def can?(resource, capability) do
    if Application.get_env(:ash_cubdb, :debug_data_layer_capabilities?, false) do
      # credo:disable-for-next-line Credo.Check.Warning.Dbg
      dbg(resource: resource, capability: capability)
    end

    false
  end

  @doc false
  @impl true
  # Insert a new record.  The `{:error, :exists}` branch is mapped onto a
  # "has already been taken" error for each primary-key field — presumably
  # produced by `CubDB.put_new/3` refusing to overwrite an existing key;
  # confirm against the CubDB docs before changing.
  def create(resource, changeset) do
    with :ok <- validate_tenant_configuration(resource, changeset.tenant),
         {:ok, db} <- start(resource),
         {:ok, record} <- Changeset.apply_attributes(changeset),
         {:ok, key, data} <- Serde.serialise(record),
         {:ok, key} <- maybe_wrap_in_tenant(key, changeset),
         :ok <- CubDB.put_new(db, key, data) do
      {:ok, set_loaded(record)}
    else
      {:error, :exists} ->
        errors =
          resource
          |> Resource.Info.primary_key()
          |> Enum.map(
            &InvalidAttribute.exception(
              field: &1,
              message: "has already been taken"
            )
          )

        {:error, errors}

      {:error, reason} ->
        {:error, reason}
    end
  end

  @doc false
  @impl true
  # Choose an upsert strategy:
  #   * any upsert key is nil                  -> plain create
  #   * keys are exactly the stored key tuple  -> direct overwrite (fast path)
  #   * anything else                          -> search-based upsert (unimplemented)
  def upsert(resource, changeset, keys) do
    pkey = Resource.Info.primary_key(resource)
    keys = keys || pkey

    {key_layout, _} = Info.field_layout(resource)

    cond do
      Enum.any?(keys, &is_nil(Changeset.get_attribute(changeset, &1))) ->
        create(resource, changeset)

      Tuple.to_list(key_layout) == Enum.sort(keys) ->
        do_direct_upsert(resource, changeset)

      true ->
        do_search_upsert(resource, changeset, keys)
    end
  end

  @doc false
  @impl true
  # Update an existing record.  `CubDB.has_key?/2` guards against writing a
  # record that has been deleted since it was read; a `false` there becomes
  # a `StaleRecord` error.
  def update(resource, changeset) do
    with :ok <- validate_tenant_configuration(resource, changeset.tenant),
         {:ok, db} <- start(resource),
         {:ok, record} <- Changeset.apply_attributes(changeset),
         {:ok, key, data} <- Serde.serialise(record),
         {:ok, key} <- maybe_wrap_in_tenant(key, changeset),
         true <- CubDB.has_key?(db, key),
         :ok <- CubDB.put(db, key, data) do
      {:ok, set_loaded(record)}
    else
      false -> {:error, StaleRecord.exception(resource: resource)}
      {:error, reason} -> {:error, Ash.Error.to_ash_error(reason)}
    end
  end

  @doc false
  @impl true
  # Delete a record, treating a missing key as stale (already removed).
  def destroy(resource, changeset) do
    with :ok <- validate_tenant_configuration(resource, changeset.tenant),
         {:ok, db} <- start(resource),
         {:ok, key, _data} <- Serde.serialise(changeset.data),
         {:ok, key} <- maybe_wrap_in_tenant(key, changeset),
         true <- CubDB.has_key?(db, key) do
      CubDB.delete(db, key)
    else
      false -> {:error, StaleRecord.exception(resource: resource)}
      {:error, reason} -> {:error, reason}
    end
  end

  @doc false
  @impl true
  # Stream the tenant's records out of CubDB, filter them at runtime, then
  # apply sort / distinct / offset / limit in memory.
  def run_query(query, resource, parent \\ nil) do
    with :ok <- validate_tenant_configuration(resource, query.tenant),
         {:ok, db} <- start(resource),
         {:ok, stream} <- get_records(resource, db, query.tenant),
         {:ok, records} <- filter_matches(stream, query, parent),
         {:ok, records} <- runtime_sort(records, query) do
      {:ok, records}
    else
      {:error, reason} -> {:error, Error.to_ash_error(reason)}
    end
  end

  @doc false
  @impl true
  def resource_to_query(resource, domain), do: %Query{resource: resource, domain: domain}

  @doc false
  @impl true
  def limit(query, limit, _), do: {:ok, %{query | limit: limit}}

  @doc false
  @impl true
  def offset(query, offset, _), do: {:ok, %{query | offset: offset}}

  @doc false
  @impl true
  def add_calculation(query, calculation, _, _),
    do: {:ok, %{query | calculations: [calculation | query.calculations]}}

  @doc false
  @impl true
  def add_aggregate(query, aggregate, _),
    do: {:ok, %{query | aggregates: [aggregate | query.aggregates]}}

  @doc false
  @impl true
  def set_tenant(_resource, query, tenant) do
    {:ok, %{query | tenant: tenant}}
  end

  @doc false
  @impl true
  # Repeated calls are combined into the existing filter rather than
  # replacing it.
  def filter(query, filter, _resource) do
    if query.filter do
      {:ok, %{query | filter: Ash.Filter.add_to_filter!(query.filter, filter)}}
    else
      {:ok, %{query | filter: filter}}
    end
  end

  @doc false
  @impl true
  def sort(query, sort, _resource) do
    {:ok, %{query | sort: sort}}
  end

  @doc false
  @impl true
  def distinct(query, distinct, _resource) do
    {:ok, %{query | distinct: distinct}}
  end

  @impl true
  def distinct_sort(query, distinct_sort, _resource) do
    {:ok, %{query | distinct_sort: distinct_sort}}
  end

  # Mark a record as loaded from the data layer (mirrors what Ecto sets on
  # persisted schemas).
  defp set_loaded(record),
    do: %{record | __meta__: %Metadata{state: :loaded, schema: record.__struct__}}

  # Upsert fast path: the upsert keys are exactly the stored key tuple, so
  # we can overwrite with `CubDB.put/3` without searching.
  defp do_direct_upsert(resource, changeset) do
    with :ok <- validate_tenant_configuration(resource, changeset.tenant),
         {:ok, db} <- start(resource),
         {:ok, record} <- Changeset.apply_attributes(changeset),
         {:ok, key, data} <- Serde.serialise(record),
         {:ok, key} <- maybe_wrap_in_tenant(key, changeset),
         :ok <- CubDB.put(db, key, data) do
      {:ok, set_loaded(record)}
    end
  end

  # Upsert by arbitrary (e.g. identity) keys is not supported yet.
  defp do_search_upsert(_resource, _changeset, _keys) do
    {:error, :not_implemented}
  end

  # Lazily stream the records stored for `resource`, deserialised into
  # structs.  Record keys are tuples (see `Serde`), so filtering on
  # `is_tuple/1` skips non-record entries such as the `:__metadata__` entry
  # written by `AshCubDB.Migration`.  Under context multitenancy keys are
  # stored as `{tenant, key}`; only the requested tenant's records are kept
  # and the tenant wrapper is stripped before deserialisation.
  defp get_records(resource, db, tenant) do
    stream =
      db
      |> CubDB.select()
      |> Stream.filter(&is_tuple(elem(&1, 0)))

    stream =
      if Resource.Info.multitenancy_strategy(resource) == :context do
        stream
        |> Stream.filter(fn {{t, _}, _} -> t == tenant end)
        |> Stream.map(fn {{_, key}, value} -> {key, value} end)
      else
        stream
      end

    stream =
      stream
      |> Stream.map(&Serde.deserialise!(resource, &1))

    {:ok, stream}
  end

  # Under context multitenancy the storage key becomes `{tenant, key}` so
  # tenants can share one database file without colliding.
  defp maybe_wrap_in_tenant(key, changeset) do
    if Resource.Info.multitenancy_strategy(changeset.resource) == :context do
      {:ok, {changeset.tenant, key}}
    else
      {:ok, key}
    end
  end

  # A multitenant, non-global resource must always be given a tenant.
  defp validate_tenant_configuration(resource, tenant) do
    strategy = Resource.Info.multitenancy_strategy(resource)
    global? = Resource.Info.multitenancy_global?(resource)

    case {strategy, global?, tenant} do
      {strategy, false, nil} when not is_nil(strategy) ->
        {:error, TenantRequired.exception(resource: resource)}

      _ ->
        :ok
    end
  end

  # No filter: pass the (possibly lazy) stream straight through.
  defp filter_matches(stream, query, _parent) when is_nil(query.filter), do: {:ok, stream}

  # Materialise the stream and delegate matching to Ash's runtime filter.
  defp filter_matches(stream, query, parent) do
    records =
      stream
      |> Enum.to_list()

    query.domain
    |> Runtime.filter_matches(records, query.filter, parent: parent)
  end

  # In-memory sort/distinct/offset/limit.  NOTE(review): sorting by
  # `distinct_sort || sort` before applying distinct, then re-sorting by
  # `sort`, looks intentional — confirm against `Ash.Actions.Sort` before
  # reordering these steps.
  defp runtime_sort(records, query) when is_list(records) do
    records =
      records
      |> Sort.runtime_sort(query.distinct_sort || query.sort, domain: query.domain)
      |> Sort.runtime_distinct(query.distinct, domain: query.domain)
      |> Sort.runtime_sort(query.sort, domain: query.domain)
      |> Enum.drop(query.offset || 0)
      |> do_limit(query.limit)

    {:ok, records}
  end

  defp runtime_sort(records, query), do: records |> Enum.to_list() |> runtime_sort(query)

  # `:infinity` (the `Query` struct default) means "no limit".
  defp do_limit(records, :infinity), do: records
  defp do_limit(records, limit), do: Enum.take(records, limit)
end

41
lib/ash_cub_db/dir.ex Normal file
View file

@ -0,0 +1,41 @@
defmodule AshCubDB.Dir do
  @moduledoc """
  Utilities for working with the underlying data directory.
  """

  alias AshCubDB.Info

  @doc """
  Is the directory able to be written to by the current user?

  Returns `false` when the resource has no configured directory, or when the
  directory cannot be created or statted.
  """
  @spec writable?(Ash.Resource.t()) :: boolean
  def writable?(resource) do
    with {:ok, dir} <- Info.cubdb_directory(resource),
         {:ok, stat} when stat.access in ~w[read_write write]a <- dir_stat(dir) do
      true
    else
      _ -> false
    end
  end

  @doc """
  Is the directory able to be read from by the current user?
  """
  @spec readable?(Ash.Resource.t()) :: boolean
  def readable?(resource) do
    with {:ok, dir} <- Info.cubdb_directory(resource),
         {:ok, stat} when stat.access in ~w[read read_write]a <- dir_stat(dir) do
      true
    else
      _ -> false
    end
  end

  # Stat the directory, creating it (and any missing parents) when it does
  # not exist yet.
  #
  # Bug fix: previously any `File.stat/1` failure other than `:enoent`
  # (e.g. `{:error, :eacces}`) fell into a `with`/`else` that had no matching
  # clause and raised `WithClauseError`.  Such errors are now returned as
  # `{:error, reason}` so `writable?/1` and `readable?/1` answer `false`
  # instead of crashing.
  defp dir_stat(directory) do
    case File.stat(directory) do
      {:ok, stat} ->
        {:ok, stat}

      {:error, :enoent} ->
        case File.mkdir_p(directory) do
          :ok -> File.stat(directory)
          {:error, error} -> {:error, "Unable to create directory: #{inspect(error)}"}
        end

      {:error, _reason} = error ->
        error
    end
  end
end

79
lib/ash_cub_db/dsl.ex Normal file
View file

@ -0,0 +1,79 @@
defmodule AshCubDB.Dsl do
  @moduledoc false

  alias Spark.Dsl.Section

  # The `cubdb do ... end` DSL section for resources that use
  # `AshCubDB.DataLayer`.  NOTE(review): the `doc` heredocs below are
  # user-facing — Spark uses them to generate the published DSL docs and
  # cheat sheets — so edit them with care.
  @cubdb %Section{
    name: :cubdb,
    describe: """
    CubDB data layer configuration.
    """,
    examples: [
      """
      cubdb do
        directory "/opt/storage/my_awesome_resource"
        auto_compact? true
        auto_file_sync? true
        name :my_awesome_resource
      end
      """
    ],
    schema: [
      directory: [
        # `nil` is explicitly allowed so a default can be derived elsewhere
        # (presumably by `ConfigureDirectoryTransformer`; see the transformer
        # list in `AshCubDB.DataLayer`).
        type: {:or, [nil, :string]},
        required: false,
        doc: """
        The directory within which to store the CubDB data.
        If none is supplied, then one will be automatically generated in the
        `priv` directory of the parent OTP application.
        """
      ],
      otp_app: [
        type: :atom,
        required: false,
        doc: """
        The OTP application in whose `priv` directory data should be stored.
        Only used if `directory` is not supplied. When not provided
        `Application.get_application/1` will be called for the resource.
        """
      ],
      auto_compact?: [
        type: :boolean,
        default: true,
        required: false,
        doc: """
        Whether or not to automatically compact the CubDB database.
        See [the CubDB documentation](https://hexdocs.pm/cubdb/faq.html#what-is-compaction) for more information.
        """
      ],
      auto_file_sync?: [
        type: :boolean,
        default: true,
        required: false,
        doc: """
        Whether or not to automatically flush the buffer to disk on write.
        See [the CubDB documentation](https://hexdocs.pm/cubdb/faq.html#what-does-file-sync-mean)
        """
      ],
      name: [
        type: :atom,
        required: false,
        doc: """
        The name of the CubDB database.
        By default this is the name of the resource module, however in some
        (rare) circumstances you may wish to specifically name the database.
        """
      ]
    ]
  }

  @sections [@cubdb]

  @doc false
  @spec sections :: [Section.t()]
  def sections, do: @sections
end

15
lib/ash_cub_db/info.ex Normal file
View file

@ -0,0 +1,15 @@
defmodule AshCubDB.Info do
  @moduledoc """
  Auto-generated introspection for the AshCubDB DSL.
  """
  use Spark.InfoGenerator, sections: [:cubdb], extension: AshCubDB.DataLayer

  @doc """
  Retrieve the cached field layout for the resource.

  Returns the `{key_layout, data_layout}` tuple persisted under
  `:cubdb_field_layout`, or `nil` when none has been stored.
  """
  @spec field_layout(Ash.Resource.t() | Spark.Dsl.t()) :: nil | {tuple, tuple}
  def field_layout(resource_or_dsl_state) do
    Spark.Dsl.Extension.get_persisted(resource_or_dsl_state, :cubdb_field_layout)
  end
end

View file

@ -0,0 +1,31 @@
defmodule AshCubDB.Migration do
  @moduledoc """
  We store and check metadata when opening a database to ensure that the
  resource and attributes match, and possibly perform migrations.
  """

  alias AshCubDB.Info

  # The single key under which database metadata is stored.
  #
  # Bug fix: the previous implementation fetched `:__metadata_` (one trailing
  # underscore) but wrote `:__metadata__`, so stored metadata was never found
  # — every open re-wrote the metadata and a resource/layout mismatch could
  # never be detected.  Using one attribute for both operations prevents the
  # keys from drifting apart again.
  @metadata_key :__metadata__

  @doc """
  Check that a newly opened database doesn't need to be migrated.

  On first open the current resource and field layout are recorded.  On
  subsequent opens the stored metadata must match the resource and layout
  exactly, otherwise an `{:error, message}` is returned.
  """
  @spec check(GenServer.server(), Ash.Resource.t()) :: :ok | {:error, any}
  def check(db, resource) do
    layout = Info.field_layout(resource)

    case CubDB.fetch(db, @metadata_key) do
      :error ->
        # No metadata yet: first open, so record it.  `CubDB.put/3`
        # returns `:ok`.
        CubDB.put(db, @metadata_key, %{resource: resource, layout: layout})

      {:ok, metadata} when metadata.resource == resource and metadata.layout == layout ->
        :ok

      {:ok, metadata} when metadata.resource != resource ->
        {:error,
         "CubDB database refers to resource `#{metadata.resource}`, but should be `#{inspect(resource)}`."}

      {:ok, _} ->
        # Same resource, different layout: a migration would be required.
        {:error, "CubDB database needs to be migrated."}
    end
  end
end

35
lib/ash_cub_db/query.ex Normal file
View file

@ -0,0 +1,35 @@
defmodule AshCubDB.Query do
  @moduledoc """
  A struct which holds information about a resource query as it is being built.
  """

  alias Ash.{Domain, Filter, Resource}

  # Accumulated query state, created by `resource_to_query/2` in
  # `AshCubDB.DataLayer`, filled in by the data-layer callbacks
  # (`filter/3`, `sort/3`, `limit/3`, `set_tenant/3`, ...) and consumed by
  # `run_query/3`.  Notable defaults:
  #
  #   * `limit: :infinity` — "no limit" (handled by `do_limit/2` in the layer)
  #   * `offset: 0`        — records to drop before applying the limit
  #   * `aggregates`/`calculations` — each addition is prepended to the list
  defstruct aggregates: [],
            domain: nil,
            calculations: [],
            distinct: nil,
            distinct_sort: nil,
            filter: nil,
            limit: :infinity,
            offset: 0,
            relationships: %{},
            resource: nil,
            sort: nil,
            tenant: nil

  @type t :: %__MODULE__{
          aggregates: [Resource.Aggregate.t()],
          domain: Domain.t(),
          calculations: [Resource.Calculation.t()],
          distinct: Ash.Sort.t(),
          distinct_sort: Ash.Sort.t(),
          filter: nil | Filter.t(),
          limit: :infinity | non_neg_integer(),
          offset: non_neg_integer(),
          relationships: %{optional(atom) => Ash.Resource.Relationships.relationship()},
          resource: Ash.Resource.t(),
          sort: nil | Ash.Sort.t(),
          tenant: any
        }
end

100
lib/ash_cub_db/serde.ex Normal file
View file

@ -0,0 +1,100 @@
defmodule AshCubDB.Serde do
  @moduledoc """
  Handle serialising and deserialising of records into CubDB.
  """

  alias Ash.{Resource, Type}
  alias AshCubDB.Info
  alias Ecto.Schema.Metadata

  @doc """
  Serialise the record into key and value tuples for storage in CubDB.

  The key and data tuples follow the field layout cached on the resource by
  `AshCubDB.CacheLayoutTransformer` (see `AshCubDB.Info.field_layout/1`).
  """
  @spec serialise(Resource.record()) :: {:ok, tuple, tuple} | {:error, any}
  def serialise(record) do
    {key_layout, data_layout} =
      record.__struct__
      |> Info.field_layout()

    with {:ok, key} <- serialise_with_layout(record, key_layout),
         {:ok, data} <- serialise_with_layout(record, data_layout) do
      {:ok, key, data}
    end
  end

  @doc false
  @spec deserialise!(Resource.t(), {tuple, tuple}) :: Resource.record() | no_return
  def deserialise!(resource, {key, data}) do
    case deserialise(resource, key, data) do
      {:ok, record} -> record
      {:error, reason} -> raise reason
    end
  end

  @doc """
  Convert the key and data back into a record.
  """
  @spec deserialise(Resource.t(), tuple, tuple) :: {:ok, Resource.record()} | {:error, any}
  def deserialise(resource, key, data) do
    {key_layout, data_layout} = Info.field_layout(resource)

    with {:ok, key_map} <- deserialise_with_layout(resource, key, key_layout),
         {:ok, data_map} <- deserialise_with_layout(resource, data, data_layout) do
      attrs = Map.merge(key_map, data_map)
      record = struct(resource, attrs)

      # Mark the struct as loaded so Ash/Ecto treat it as persisted data.
      {:ok, %{record | __meta__: %Metadata{state: :loaded, schema: resource}}}
    end
  end

  # Dump each attribute named in `layout` into a tuple, in layout order.
  defp serialise_with_layout(record, layout) do
    layout
    |> Tuple.to_list()
    |> Enum.reduce_while({:ok, {}}, fn attr_name, {:ok, result} ->
      # NOTE: rebindings made inside `with` clauses are not visible in the
      # `else` block, so the attribute definition is bound to `attr` while the
      # original name atom stays available as `attr_name`. Previously the
      # `:error` branch interpolated `attr.type` where `attr` was still the
      # name atom, which raised instead of returning an error tuple.
      with {:ok, value} <- fetch_record_attribute(record, attr_name),
           {:ok, attr} <- fetch_attribute_definition(record.__struct__, attr_name),
           {:ok, casted} <- Type.dump_to_native(attr.type, value, attr.constraints) do
        {:cont, {:ok, Tuple.append(result, casted)}}
      else
        :error -> {:halt, {:error, "Failed to dump value of attribute `#{attr_name}`"}}
        {:error, reason} -> {:halt, {:error, reason}}
      end
    end)
  end

  # Cast each stored value back to its attribute's type, producing a map of
  # attribute name to value.
  defp deserialise_with_layout(resource, data, layout) do
    layout
    |> Tuple.to_list()
    |> Enum.zip(Tuple.to_list(data))
    |> Enum.reduce_while({:ok, %{}}, fn {attr_name, value}, {:ok, result} ->
      with {:ok, attr} <- fetch_attribute_definition(resource, attr_name),
           {:ok, value} <- Type.cast_stored(attr.type, value, attr.constraints) do
        {:cont, {:ok, Map.put(result, attr.name, value)}}
      else
        # In `else`, `value` refers to the raw stored value (the fn
        # parameter), not the `with` rebinding — which is what we want here.
        :error -> {:halt, {:error, "Failed to load `#{inspect(value)}`."}}
        {:error, reason} -> {:halt, {:error, reason}}
      end
    end)
  end

  # Read an attribute's current value from the record struct.
  defp fetch_record_attribute(record, attribute_name) do
    case Map.fetch(record, attribute_name) do
      {:ok, value} ->
        {:ok, value}

      :error ->
        {:error,
         "Unable to retrieve attribute `#{attribute_name}` from resource `#{inspect(record.__struct__)}`"}
    end
  end

  # Look up the attribute's definition (type, constraints) on the resource.
  defp fetch_attribute_definition(resource, attribute_name) do
    case Resource.Info.attribute(resource, attribute_name) do
      nil ->
        {:error, "Attribute `#{attribute_name}` not found on resource `#{inspect(resource)}`"}

      attribute ->
        {:ok, attribute}
    end
  end
end

View file

@ -0,0 +1,39 @@
defmodule AshCubDB.CacheLayoutTransformer do
  @moduledoc false
  alias Ash.Resource.Info
  alias Spark.{Dsl, Dsl.Transformer, Error.DslError}
  use Transformer

  @doc false
  @impl true
  @spec after?(module) :: boolean
  def after?(_), do: true

  @doc false
  @impl true
  @spec transform(Dsl.t()) :: {:ok, Dsl.t()} | {:error, DslError.t()}
  def transform(dsl_state) do
    # Persist the {primary_key_names, other_attribute_names} layout so that
    # serialisation can reconstruct field order at runtime.
    key = attribute_name_tuple(dsl_state, & &1.primary_key?)
    attributes = attribute_name_tuple(dsl_state, &(!&1.primary_key?))

    {:ok, Transformer.persist(dsl_state, :cubdb_field_layout, {key, attributes})}
  end

  # Sorted, de-duplicated tuple of the names of attributes matching `predicate`.
  defp attribute_name_tuple(dsl_state, predicate) do
    dsl_state
    |> Info.attributes()
    |> Enum.filter(predicate)
    |> Enum.map(& &1.name)
    |> Enum.sort()
    |> Enum.uniq()
    |> List.to_tuple()
  end
end

View file

@ -0,0 +1,68 @@
defmodule AshCubDB.ConfigureDirectoryTransformer do
  @moduledoc false
  alias Spark.{Dsl, Dsl.Transformer, Error.DslError}
  use Transformer

  @doc false
  @impl true
  @spec transform(Dsl.t()) :: {:ok, Dsl.t()} | {:error, DslError.t()}
  def transform(dsl_state) do
    module = Transformer.get_persisted(dsl_state, :module)

    # Each `with` clause intentionally matches `nil` ("not configured") and
    # falls through to `else` with the first non-nil source of configuration:
    # an explicit directory (binary) or an OTP application (atom).
    with nil <- Transformer.get_option(dsl_state, [:cubdb], :directory),
         nil <- Transformer.get_option(dsl_state, [:cubdb], :otp_app),
         nil <- Application.get_application(module) do
      message = """
      Unable to infer a data storage path for this resource.

      You can either set the `cubdb.directory` DSL option directly, or set the `cubdb.otp_app` option
      to use the application's priv directory for storage.
      """

      {:error, DslError.exception(module: module, path: [:cubdb], message: message)}
    else
      path when is_binary(path) ->
        verify_directory(dsl_state, path)

      otp_app when is_atom(otp_app) ->
        # BUG FIX: the otp_app option must be written into the DSL state
        # *before* `generate_directory/1` is called. Previously the directory
        # was generated from the original state, so when the app was inferred
        # via `Application.get_application/1` (option not yet set),
        # `generate_directory/1` read `nil` and `:code.priv_dir/1` failed.
        dsl_state = Transformer.set_option(dsl_state, [:cubdb], :otp_app, otp_app)
        dsl_state = Transformer.set_option(dsl_state, [:cubdb], :directory, generate_directory(dsl_state))
        {:ok, dsl_state}
    end
  end

  # Derive a storage path inside the OTP application's priv directory, e.g.
  # `priv/cubdb/<underscored resource name>`.
  defp generate_directory(dsl_state) do
    otp_app = Transformer.get_option(dsl_state, [:cubdb], :otp_app)

    short_name =
      dsl_state
      |> Transformer.get_persisted(:module)
      |> Module.split()
      |> List.last()
      |> Macro.underscore()

    otp_app
    |> :code.priv_dir()
    |> Path.join("cubdb")
    |> Path.join(short_name)
  end

  # An explicitly-configured directory must be absolute so the database
  # location doesn't depend on the current working directory.
  defp verify_directory(dsl_state, path) do
    case Path.type(path) do
      :absolute ->
        {:ok, dsl_state}

      _ ->
        {:error,
         DslError.exception(
           module: Transformer.get_persisted(dsl_state, :module),
           path: [:cubdb],
           message: "Directory must be an absolute path"
         )}
    end
  end
end

51
mix.exs
View file

@ -1,7 +1,7 @@
defmodule AshCubDB.MixProject do
use Mix.Project
@version "0.1.0"
@version "0.6.2"
@moduledoc """
A CubDB data layer for `Ash` resources.
@ -13,12 +13,34 @@ defmodule AshCubDB.MixProject do
version: @version,
elixir: "~> 1.15",
start_permanent: Mix.env() == :prod,
consolidate_protocols: Mix.env() != :test,
elixirc_paths: elixirc_paths(Mix.env()),
deps: deps(),
description: @moduledoc,
package: package(),
source_url: "https://code.harton.nz/james/ash_cubdb",
homepage_url: "https://code.harton.nz/james/ash_cubdb",
aliases: aliases()
source_url: "https://harton.dev/james/ash_cubdb",
homepage_url: "https://harton.dev/james/ash_cubdb",
aliases: aliases(),
dialyzer: [plt_add_apps: [:faker, :smokestack]],
docs: [
main: "readme",
extra_section: "Guides",
formatters: ["html"],
filter_modules: ~r/^Elixir.AshCubDB/,
source_url_pattern:
"https://harton.dev/james/ash_cub_db/src/branch/main/%{path}#L%{line}",
extras: [
"README.md",
"CHANGELOG.md",
"documentation/dsls/DSL:-AshCubDB.DataLayer.md"
],
groups_for_extras: [
Tutorials: ~r'documentation/tutorials',
"How To": ~r'documentation/how_to',
Topics: ~r'documentation/topics',
DSLs: ~r'documentation/dsls'
]
]
]
end
@ -27,7 +49,10 @@ defmodule AshCubDB.MixProject do
maintainers: ["James Harton <james@harton.nz>"],
licenses: ["HL3-FULL"],
links: %{
"Source" => "https://code.harton.nz/james/ash_cubdb"
"Source" => "https://harton.dev/james/ash_cubdb",
"GitHub" => "https://github.com/jimsynz/ash_cubdb",
"Changelog" => "https://docs.harton.nz/james/ash_cubdb/changelog.html",
"Sponsor" => "https://github.com/sponsors/jimsynz"
}
]
end
@ -45,21 +70,29 @@ defmodule AshCubDB.MixProject do
opts = [only: ~w[dev test]a, runtime: false]
[
{:ash, "~> 3.0"},
{:cubdb, "~> 2.0"},
{:spark, "~> 2.1"},
{:earmark, ">= 0.0.0"},
{:credo, "~> 1.7", opts},
{:dialyxir, "~> 1.3", opts},
{:doctor, "~> 0.21", opts},
{:earmark, ">= 0.0.0", opts},
{:ex_check, "~> 0.15", opts},
{:ex_check, "~> 0.16", opts},
{:ex_doc, ">= 0.0.0", opts},
{:faker, "~> 0.18", opts},
{:git_ops, "~> 2.6", opts},
{:mix_audit, "~> 2.1", opts}
{:mix_audit, "~> 2.1", opts},
{:smokestack, "~> 0.9.0", opts}
]
end
defp aliases do
[
"spark.formatter": "spark.formatter --extensions=AshCubDB.DataLayer"
"spark.formatter": "spark.formatter --extensions=AshCubDB.DataLayer",
"spark.cheat_sheets": "spark.cheat_sheets --extensions=AshCubDB.DataLayer"
]
end
defp elixirc_paths(env) when env in ~w[dev test]a, do: ~w[lib test/support]
defp elixirc_paths(_), do: ~w[lib]
end

View file

@ -1,24 +1,51 @@
%{
"bunt": {:hex, :bunt, "0.2.1", "e2d4792f7bc0ced7583ab54922808919518d0e57ee162901a16a1b6664ef3b14", [:mix], [], "hexpm", "a330bfb4245239787b15005e66ae6845c9cd524a288f0d141c148b02603777a5"},
"credo": {:hex, :credo, "1.7.0", "6119bee47272e85995598ee04f2ebbed3e947678dee048d10b5feca139435f75", [:mix], [{:bunt, "~> 0.2.1", [hex: :bunt, repo: "hexpm", optional: false]}, {:file_system, "~> 0.2.8", [hex: :file_system, repo: "hexpm", optional: false]}, {:jason, "~> 1.0", [hex: :jason, repo: "hexpm", optional: false]}], "hexpm", "6839fcf63d1f0d1c0f450abc8564a57c43d644077ab96f2934563e68b8a769d7"},
"ash": {:hex, :ash, "3.0.16", "8eaebd5a9f3ee404937ac811a240799613b0619026e097436132d60eaf18ed16", [:mix], [{:comparable, "~> 1.0", [hex: :comparable, repo: "hexpm", optional: false]}, {:decimal, "~> 2.0", [hex: :decimal, repo: "hexpm", optional: false]}, {:ecto, "~> 3.7", [hex: :ecto, repo: "hexpm", optional: false]}, {:ets, "~> 0.8", [hex: :ets, repo: "hexpm", optional: false]}, {:jason, ">= 1.0.0", [hex: :jason, repo: "hexpm", optional: false]}, {:picosat_elixir, "~> 0.2", [hex: :picosat_elixir, repo: "hexpm", optional: true]}, {:plug, ">= 0.0.0", [hex: :plug, repo: "hexpm", optional: true]}, {:reactor, ">= 0.8.1 and < 1.0.0-0", [hex: :reactor, repo: "hexpm", optional: false]}, {:simple_sat, ">= 0.1.1 and < 1.0.0-0", [hex: :simple_sat, repo: "hexpm", optional: true]}, {:spark, ">= 2.1.18 and < 3.0.0-0", [hex: :spark, repo: "hexpm", optional: false]}, {:splode, "~> 0.2", [hex: :splode, repo: "hexpm", optional: false]}, {:stream_data, "~> 1.0", [hex: :stream_data, repo: "hexpm", optional: false]}, {:telemetry, "~> 1.1", [hex: :telemetry, repo: "hexpm", optional: false]}], "hexpm", "36c0d7653f7fb1d13cc03e1cc7ea7f6b9aadd278b9c9375ff5f0636ed0d7a785"},
"bunt": {:hex, :bunt, "1.0.0", "081c2c665f086849e6d57900292b3a161727ab40431219529f13c4ddcf3e7a44", [:mix], [], "hexpm", "dc5f86aa08a5f6fa6b8096f0735c4e76d54ae5c9fa2c143e5a1fc7c1cd9bb6b5"},
"castore": {:hex, :castore, "1.0.7", "b651241514e5f6956028147fe6637f7ac13802537e895a724f90bf3e36ddd1dd", [:mix], [], "hexpm", "da7785a4b0d2a021cd1292a60875a784b6caef71e76bf4917bdee1f390455cf5"},
"comparable": {:hex, :comparable, "1.0.0", "bb669e91cedd14ae9937053e5bcbc3c52bb2f22422611f43b6e38367d94a495f", [:mix], [{:typable, "~> 0.1", [hex: :typable, repo: "hexpm", optional: false]}], "hexpm", "277c11eeb1cd726e7cd41c6c199e7e52fa16ee6830b45ad4cdc62e51f62eb60c"},
"credo": {:hex, :credo, "1.7.7", "771445037228f763f9b2afd612b6aa2fd8e28432a95dbbc60d8e03ce71ba4446", [:mix], [{:bunt, "~> 0.2.1 or ~> 1.0", [hex: :bunt, repo: "hexpm", optional: false]}, {:file_system, "~> 0.2 or ~> 1.0", [hex: :file_system, repo: "hexpm", optional: false]}, {:jason, "~> 1.0", [hex: :jason, repo: "hexpm", optional: false]}], "hexpm", "8bc87496c9aaacdc3f90f01b7b0582467b69b4bd2441fe8aae3109d843cc2f2e"},
"cubdb": {:hex, :cubdb, "2.0.2", "d4253885084dae37a8ff73887d232864eb38ecac962aa08543e686b0183a1d62", [:mix], [], "hexpm", "c99cc8f9e6c4deb98d16cca5ded1928edd22e48b4736b76e8a1a85367d7fe921"},
"decimal": {:hex, :decimal, "2.1.1", "5611dca5d4b2c3dd497dec8f68751f1f1a54755e8ed2a966c2633cf885973ad6", [:mix], [], "hexpm", "53cfe5f497ed0e7771ae1a475575603d77425099ba5faef9394932b35020ffcc"},
"dialyxir": {:hex, :dialyxir, "1.3.0", "fd1672f0922b7648ff9ce7b1b26fcf0ef56dda964a459892ad15f6b4410b5284", [:mix], [{:erlex, ">= 0.2.6", [hex: :erlex, repo: "hexpm", optional: false]}], "hexpm", "00b2a4bcd6aa8db9dcb0b38c1225b7277dca9bc370b6438715667071a304696f"},
"dialyxir": {:hex, :dialyxir, "1.4.3", "edd0124f358f0b9e95bfe53a9fcf806d615d8f838e2202a9f430d59566b6b53b", [:mix], [{:erlex, ">= 0.2.6", [hex: :erlex, repo: "hexpm", optional: false]}], "hexpm", "bf2cfb75cd5c5006bec30141b131663299c661a864ec7fbbc72dfa557487a986"},
"doctor": {:hex, :doctor, "0.21.0", "20ef89355c67778e206225fe74913e96141c4d001cb04efdeba1a2a9704f1ab5", [:mix], [{:decimal, "~> 2.0", [hex: :decimal, repo: "hexpm", optional: false]}], "hexpm", "a227831daa79784eb24cdeedfa403c46a4cb7d0eab0e31232ec654314447e4e0"},
"earmark": {:hex, :earmark, "1.4.39", "acdb2f02c536471029dbcc509fbd6b94b89f40ad7729fb3f68f4b6944843f01d", [:mix], [{:earmark_parser, "~> 1.4.33", [hex: :earmark_parser, repo: "hexpm", optional: false]}], "hexpm", "156c9d8ec3cbeccdbf26216d8247bdeeacc8c76b4d9eee7554be2f1b623ea440"},
"earmark_parser": {:hex, :earmark_parser, "1.4.33", "3c3fd9673bb5dcc9edc28dd90f50c87ce506d1f71b70e3de69aa8154bc695d44", [:mix], [], "hexpm", "2d526833729b59b9fdb85785078697c72ac5e5066350663e5be6a1182da61b8f"},
"earmark": {:hex, :earmark, "1.4.46", "8c7287bd3137e99d26ae4643e5b7ef2129a260e3dcf41f251750cb4563c8fb81", [:mix], [], "hexpm", "798d86db3d79964e759ddc0c077d5eb254968ed426399fbf5a62de2b5ff8910a"},
"earmark_parser": {:hex, :earmark_parser, "1.4.39", "424642f8335b05bb9eb611aa1564c148a8ee35c9c8a8bba6e129d51a3e3c6769", [:mix], [], "hexpm", "06553a88d1f1846da9ef066b87b57c6f605552cfbe40d20bd8d59cc6bde41944"},
"ecto": {:hex, :ecto, "3.11.2", "e1d26be989db350a633667c5cda9c3d115ae779b66da567c68c80cfb26a8c9ee", [:mix], [{:decimal, "~> 2.0", [hex: :decimal, repo: "hexpm", optional: false]}, {:jason, "~> 1.0", [hex: :jason, repo: "hexpm", optional: true]}, {:telemetry, "~> 0.4 or ~> 1.0", [hex: :telemetry, repo: "hexpm", optional: false]}], "hexpm", "3c38bca2c6f8d8023f2145326cc8a80100c3ffe4dcbd9842ff867f7fc6156c65"},
"erlex": {:hex, :erlex, "0.2.6", "c7987d15e899c7a2f34f5420d2a2ea0d659682c06ac607572df55a43753aa12e", [:mix], [], "hexpm", "2ed2e25711feb44d52b17d2780eabf998452f6efda104877a3881c2f8c0c0c75"},
"ex_check": {:hex, :ex_check, "0.15.0", "074b94c02de11c37bba1ca82ae5cc4926e6ccee862e57a485b6ba60fca2d8dc1", [:mix], [], "hexpm", "33848031a0c7e4209c3b4369ce154019788b5219956220c35ca5474299fb6a0e"},
"ex_doc": {:hex, :ex_doc, "0.30.4", "e8395c8e3c007321abb30a334f9f7c0858d80949af298302daf77553468c0c39", [:mix], [{:earmark_parser, "~> 1.4.31", [hex: :earmark_parser, repo: "hexpm", optional: false]}, {:makeup_elixir, "~> 0.14", [hex: :makeup_elixir, repo: "hexpm", optional: false]}, {:makeup_erlang, "~> 0.1", [hex: :makeup_erlang, repo: "hexpm", optional: false]}], "hexpm", "9a19f0c50ffaa02435668f5242f2b2a61d46b541ebf326884505dfd3dd7af5e4"},
"file_system": {:hex, :file_system, "0.2.10", "fb082005a9cd1711c05b5248710f8826b02d7d1784e7c3451f9c1231d4fc162d", [:mix], [], "hexpm", "41195edbfb562a593726eda3b3e8b103a309b733ad25f3d642ba49696bf715dc"},
"ets": {:hex, :ets, "0.9.0", "79c6a6c205436780486f72d84230c6cba2f8a9920456750ddd1e47389107d5fd", [:mix], [], "hexpm", "2861fdfb04bcaeff370f1a5904eec864f0a56dcfebe5921ea9aadf2a481c822b"},
"ex_check": {:hex, :ex_check, "0.16.0", "07615bef493c5b8d12d5119de3914274277299c6483989e52b0f6b8358a26b5f", [:mix], [], "hexpm", "4d809b72a18d405514dda4809257d8e665ae7cf37a7aee3be6b74a34dec310f5"},
"ex_doc": {:hex, :ex_doc, "0.34.1", "9751a0419bc15bc7580c73fde506b17b07f6402a1e5243be9e0f05a68c723368", [:mix], [{:earmark_parser, "~> 1.4.39", [hex: :earmark_parser, repo: "hexpm", optional: false]}, {:makeup_c, ">= 0.1.0", [hex: :makeup_c, repo: "hexpm", optional: true]}, {:makeup_elixir, "~> 0.14 or ~> 1.0", [hex: :makeup_elixir, repo: "hexpm", optional: false]}, {:makeup_erlang, "~> 0.1 or ~> 1.0", [hex: :makeup_erlang, repo: "hexpm", optional: false]}, {:makeup_html, ">= 0.1.0", [hex: :makeup_html, repo: "hexpm", optional: true]}], "hexpm", "d441f1a86a235f59088978eff870de2e815e290e44a8bd976fe5d64470a4c9d2"},
"faker": {:hex, :faker, "0.18.0", "943e479319a22ea4e8e39e8e076b81c02827d9302f3d32726c5bf82f430e6e14", [:mix], [], "hexpm", "bfbdd83958d78e2788e99ec9317c4816e651ad05e24cfd1196ce5db5b3e81797"},
"file_system": {:hex, :file_system, "1.0.0", "b689cc7dcee665f774de94b5a832e578bd7963c8e637ef940cd44327db7de2cd", [:mix], [], "hexpm", "6752092d66aec5a10e662aefeed8ddb9531d79db0bc145bb8c40325ca1d8536d"},
"finch": {:hex, :finch, "0.18.0", "944ac7d34d0bd2ac8998f79f7a811b21d87d911e77a786bc5810adb75632ada4", [:mix], [{:castore, "~> 0.1 or ~> 1.0", [hex: :castore, repo: "hexpm", optional: false]}, {:mime, "~> 1.0 or ~> 2.0", [hex: :mime, repo: "hexpm", optional: false]}, {:mint, "~> 1.3", [hex: :mint, repo: "hexpm", optional: false]}, {:nimble_options, "~> 0.4 or ~> 1.0", [hex: :nimble_options, repo: "hexpm", optional: false]}, {:nimble_pool, "~> 0.2.6 or ~> 1.0", [hex: :nimble_pool, repo: "hexpm", optional: false]}, {:telemetry, "~> 0.4 or ~> 1.0", [hex: :telemetry, repo: "hexpm", optional: false]}], "hexpm", "69f5045b042e531e53edc2574f15e25e735b522c37e2ddb766e15b979e03aa65"},
"git_cli": {:hex, :git_cli, "0.3.0", "a5422f9b95c99483385b976f5d43f7e8233283a47cda13533d7c16131cb14df5", [:mix], [], "hexpm", "78cb952f4c86a41f4d3511f1d3ecb28edb268e3a7df278de2faa1bd4672eaf9b"},
"git_ops": {:hex, :git_ops, "2.6.0", "e0791ee1cf5db03f2c61b7ebd70e2e95cba2bb9b9793011f26609f22c0900087", [:mix], [{:git_cli, "~> 0.2", [hex: :git_cli, repo: "hexpm", optional: false]}, {:nimble_parsec, "~> 1.0", [hex: :nimble_parsec, repo: "hexpm", optional: false]}], "hexpm", "b98fca849b18aaf490f4ac7d1dd8c6c469b0cc3e6632562d366cab095e666ffe"},
"git_ops": {:hex, :git_ops, "2.6.1", "cc7799a68c26cf814d6d1a5121415b4f5bf813de200908f930b27a2f1fe9dad5", [:mix], [{:git_cli, "~> 0.2", [hex: :git_cli, repo: "hexpm", optional: false]}, {:nimble_parsec, "~> 1.0", [hex: :nimble_parsec, repo: "hexpm", optional: false]}], "hexpm", "ce62d07e41fe993ec22c35d5edb11cf333a21ddaead6f5d9868fcb607d42039e"},
"glob_ex": {:hex, :glob_ex, "0.1.7", "eae6b6377147fb712ac45b360e6dbba00346689a87f996672fe07e97d70597b1", [:mix], [], "hexpm", "decc1c21c0c73df3c9c994412716345c1692477b9470e337f628a7e08da0da6a"},
"hpax": {:hex, :hpax, "0.2.0", "5a58219adcb75977b2edce5eb22051de9362f08236220c9e859a47111c194ff5", [:mix], [], "hexpm", "bea06558cdae85bed075e6c036993d43cd54d447f76d8190a8db0dc5893fa2f1"},
"igniter": {:hex, :igniter, "0.2.4", "71fc8a473c07de9bacfaa26862427d695b49d263c2f484a256fdb38fcc3471cc", [:mix], [{:glob_ex, "~> 0.1.7", [hex: :glob_ex, repo: "hexpm", optional: false]}, {:nimble_options, "~> 1.0", [hex: :nimble_options, repo: "hexpm", optional: false]}, {:req, "~> 0.4", [hex: :req, repo: "hexpm", optional: false]}, {:rewrite, "~> 0.9", [hex: :rewrite, repo: "hexpm", optional: false]}, {:sourceror, "~> 1.4", [hex: :sourceror, repo: "hexpm", optional: false]}, {:spitfire, ">= 0.1.3 and < 1.0.0-0", [hex: :spitfire, repo: "hexpm", optional: false]}], "hexpm", "361b9bc44f6e36161076bde02ed75287280b5aa2c7d0ad9bde83d521cc875115"},
"jason": {:hex, :jason, "1.4.1", "af1504e35f629ddcdd6addb3513c3853991f694921b1b9368b0bd32beb9f1b63", [:mix], [{:decimal, "~> 1.0 or ~> 2.0", [hex: :decimal, repo: "hexpm", optional: true]}], "hexpm", "fbb01ecdfd565b56261302f7e1fcc27c4fb8f32d56eab74db621fc154604a7a1"},
"makeup": {:hex, :makeup, "1.1.0", "6b67c8bc2882a6b6a445859952a602afc1a41c2e08379ca057c0f525366fc3ca", [:mix], [{:nimble_parsec, "~> 1.2.2 or ~> 1.3", [hex: :nimble_parsec, repo: "hexpm", optional: false]}], "hexpm", "0a45ed501f4a8897f580eabf99a2e5234ea3e75a4373c8a52824f6e873be57a6"},
"makeup_elixir": {:hex, :makeup_elixir, "0.16.1", "cc9e3ca312f1cfeccc572b37a09980287e243648108384b97ff2b76e505c3555", [:mix], [{:makeup, "~> 1.0", [hex: :makeup, repo: "hexpm", optional: false]}, {:nimble_parsec, "~> 1.2.3 or ~> 1.3", [hex: :nimble_parsec, repo: "hexpm", optional: false]}], "hexpm", "e127a341ad1b209bd80f7bd1620a15693a9908ed780c3b763bccf7d200c767c6"},
"makeup_erlang": {:hex, :makeup_erlang, "0.1.2", "ad87296a092a46e03b7e9b0be7631ddcf64c790fa68a9ef5323b6cbb36affc72", [:mix], [{:makeup, "~> 1.0", [hex: :makeup, repo: "hexpm", optional: false]}], "hexpm", "f3f5a1ca93ce6e092d92b6d9c049bcda58a3b617a8d888f8e7231c85630e8108"},
"mix_audit": {:hex, :mix_audit, "2.1.1", "653aa6d8f291fc4b017aa82bdb79a4017903902ebba57960ef199cbbc8c008a1", [:make, :mix], [{:jason, "~> 1.1", [hex: :jason, repo: "hexpm", optional: false]}, {:yaml_elixir, "~> 2.9", [hex: :yaml_elixir, repo: "hexpm", optional: false]}], "hexpm", "541990c3ab3a7bb8c4aaa2ce2732a4ae160ad6237e5dcd5ad1564f4f85354db1"},
"nimble_parsec": {:hex, :nimble_parsec, "1.3.1", "2c54013ecf170e249e9291ed0a62e5832f70a476c61da16f6aac6dca0189f2af", [:mix], [], "hexpm", "2682e3c0b2eb58d90c6375fc0cc30bc7be06f365bf72608804fb9cffa5e1b167"},
"libgraph": {:hex, :libgraph, "0.16.0", "3936f3eca6ef826e08880230f806bfea13193e49bf153f93edcf0239d4fd1d07", [:mix], [], "hexpm", "41ca92240e8a4138c30a7e06466acc709b0cbb795c643e9e17174a178982d6bf"},
"makeup": {:hex, :makeup, "1.1.2", "9ba8837913bdf757787e71c1581c21f9d2455f4dd04cfca785c70bbfff1a76a3", [], [{:nimble_parsec, "~> 1.2.2 or ~> 1.3", [hex: :nimble_parsec, repo: "hexpm", optional: false]}], "hexpm", "cce1566b81fbcbd21eca8ffe808f33b221f9eee2cbc7a1706fc3da9ff18e6cac"},
"makeup_elixir": {:hex, :makeup_elixir, "0.16.2", "627e84b8e8bf22e60a2579dad15067c755531fea049ae26ef1020cad58fe9578", [:mix], [{:makeup, "~> 1.0", [hex: :makeup, repo: "hexpm", optional: false]}, {:nimble_parsec, "~> 1.2.3 or ~> 1.3", [hex: :nimble_parsec, repo: "hexpm", optional: false]}], "hexpm", "41193978704763f6bbe6cc2758b84909e62984c7752b3784bd3c218bb341706b"},
"makeup_erlang": {:hex, :makeup_erlang, "1.0.0", "6f0eff9c9c489f26b69b61440bf1b238d95badae49adac77973cbacae87e3c2e", [:mix], [{:makeup, "~> 1.0", [hex: :makeup, repo: "hexpm", optional: false]}], "hexpm", "ea7a9307de9d1548d2a72d299058d1fd2339e3d398560a0e46c27dab4891e4d2"},
"mime": {:hex, :mime, "2.0.5", "dc34c8efd439abe6ae0343edbb8556f4d63f178594894720607772a041b04b02", [:mix], [], "hexpm", "da0d64a365c45bc9935cc5c8a7fc5e49a0e0f9932a761c55d6c52b142780a05c"},
"mint": {:hex, :mint, "1.6.1", "065e8a5bc9bbd46a41099dfea3e0656436c5cbcb6e741c80bd2bad5cd872446f", [:mix], [{:castore, "~> 0.1.0 or ~> 1.0", [hex: :castore, repo: "hexpm", optional: true]}, {:hpax, "~> 0.1.1 or ~> 0.2.0", [hex: :hpax, repo: "hexpm", optional: false]}], "hexpm", "4fc518dcc191d02f433393a72a7ba3f6f94b101d094cb6bf532ea54c89423780"},
"mix_audit": {:hex, :mix_audit, "2.1.3", "c70983d5cab5dca923f9a6efe559abfb4ec3f8e87762f02bab00fa4106d17eda", [:make, :mix], [{:jason, "~> 1.1", [hex: :jason, repo: "hexpm", optional: false]}, {:yaml_elixir, "~> 2.9", [hex: :yaml_elixir, repo: "hexpm", optional: false]}], "hexpm", "8c3987100b23099aea2f2df0af4d296701efd031affb08d0746b2be9e35988ec"},
"nimble_options": {:hex, :nimble_options, "1.1.1", "e3a492d54d85fc3fd7c5baf411d9d2852922f66e69476317787a7b2bb000a61b", [:mix], [], "hexpm", "821b2470ca9442c4b6984882fe9bb0389371b8ddec4d45a9504f00a66f650b44"},
"nimble_parsec": {:hex, :nimble_parsec, "1.4.0", "51f9b613ea62cfa97b25ccc2c1b4216e81df970acd8e16e8d1bdc58fef21370d", [:mix], [], "hexpm", "9c565862810fb383e9838c1dd2d7d2c437b3d13b267414ba6af33e50d2d1cf28"},
"nimble_pool": {:hex, :nimble_pool, "1.1.0", "bf9c29fbdcba3564a8b800d1eeb5a3c58f36e1e11d7b7fb2e084a643f645f06b", [:mix], [], "hexpm", "af2e4e6b34197db81f7aad230c1118eac993acc0dae6bc83bac0126d4ae0813a"},
"reactor": {:hex, :reactor, "0.8.4", "344d02ba4a0010763851f4e4aa0ff190ebe7e392e3c27c6cd143dde077b986e7", [], [{:libgraph, "~> 0.16", [hex: :libgraph, repo: "hexpm", optional: false]}, {:spark, "~> 2.0", [hex: :spark, repo: "hexpm", optional: false]}, {:splode, "~> 0.2", [hex: :splode, repo: "hexpm", optional: false]}, {:telemetry, "~> 1.2", [hex: :telemetry, repo: "hexpm", optional: false]}], "hexpm", "49c1fd3c786603cec8140ce941c41c7ea72cc4411860ccdee9876c4ca2204f81"},
"recase": {:hex, :recase, "0.8.0", "ec9500abee5d493d41e3cbfd7d51a4e10957a164570be0c805d5c6661b8cdbae", [:mix], [], "hexpm", "0d4b67b81e7897af77552bd1e6d6148717a4b45ec5c7b014a48b0ba9a28946b5"},
"req": {:hex, :req, "0.5.1", "90584216d064389a4ff2d4279fe2c11ff6c812ab00fa01a9fb9d15457f65ba70", [:mix], [{:brotli, "~> 0.3.1", [hex: :brotli, repo: "hexpm", optional: true]}, {:ezstd, "~> 1.0", [hex: :ezstd, repo: "hexpm", optional: true]}, {:finch, "~> 0.17", [hex: :finch, repo: "hexpm", optional: false]}, {:jason, "~> 1.0", [hex: :jason, repo: "hexpm", optional: false]}, {:mime, "~> 1.6 or ~> 2.0", [hex: :mime, repo: "hexpm", optional: false]}, {:nimble_csv, "~> 1.0", [hex: :nimble_csv, repo: "hexpm", optional: true]}, {:plug, "~> 1.0", [hex: :plug, repo: "hexpm", optional: true]}], "hexpm", "7ea96a1a95388eb0fefa92d89466cdfedba24032794e5c1147d78ec90db7edca"},
"rewrite": {:hex, :rewrite, "0.10.5", "6afadeae0b9d843b27ac6225e88e165884875e0aed333ef4ad3bf36f9c101bed", [], [{:glob_ex, "~> 0.1", [hex: :glob_ex, repo: "hexpm", optional: false]}, {:sourceror, "~> 1.0", [hex: :sourceror, repo: "hexpm", optional: false]}], "hexpm", "51cc347a4269ad3a1e7a2c4122dbac9198302b082f5615964358b4635ebf3d4f"},
"smokestack": {:hex, :smokestack, "0.9.1", "5a7a87bfdf749c9f7053184ab45d7dd4847a63fd0c1596333fa6b3d7c608b687", [:mix], [{:ash, "~> 3.0", [hex: :ash, repo: "hexpm", optional: false]}, {:recase, "~> 0.8", [hex: :recase, repo: "hexpm", optional: false]}, {:spark, "~> 2.1", [hex: :spark, repo: "hexpm", optional: false]}], "hexpm", "2aa540da560b8cbb6e70385c96857db4e78334f63f0dfecb9f64ff3b8f444f07"},
"sourceror": {:hex, :sourceror, "1.4.0", "be87319b1579191e25464005d465713079b3fd7124a3938a1e6cf4def39735a9", [:mix], [], "hexpm", "16751ca55e3895f2228938b703ad399b0b27acfe288eff6c0e629ed3e6ec0358"},
"spark": {:hex, :spark, "2.2.6", "4f160462f45c0be2bccdc4700e7ffc6b2e97b4e38f57eed2349bc9dab4aaa66c", [:mix], [{:igniter, "~> 0.2", [hex: :igniter, repo: "hexpm", optional: false]}, {:jason, "~> 1.4", [hex: :jason, repo: "hexpm", optional: false]}, {:sourceror, "~> 1.2", [hex: :sourceror, repo: "hexpm", optional: false]}], "hexpm", "1e0e012978be808232a502a116d4b99b5059ab3760453438b155ac048f82ce20"},
"spitfire": {:hex, :spitfire, "0.1.3", "7ea0f544005dfbe48e615ed90250c9a271bfe126914012023fd5e4b6b82b7ec7", [:mix], [], "hexpm", "d53b5107bcff526a05c5bb54c95e77b36834550affd5830c9f58760e8c543657"},
"splode": {:hex, :splode, "0.2.4", "71046334c39605095ca4bed5d008372e56454060997da14f9868534c17b84b53", [:mix], [], "hexpm", "ca3b95f0d8d4b482b5357954fec857abd0fa3ea509d623334c1328e7382044c2"},
"stream_data": {:hex, :stream_data, "1.1.1", "fd515ca95619cca83ba08b20f5e814aaf1e5ebff114659dc9731f966c9226246", [:mix], [], "hexpm", "45d0cd46bd06738463fd53f22b70042dbb58c384bb99ef4e7576e7bb7d3b8c8c"},
"telemetry": {:hex, :telemetry, "1.2.1", "68fdfe8d8f05a8428483a97d7aab2f268aaff24b49e0f599faa091f1d4e7f61c", [:rebar3], [], "hexpm", "dad9ce9d8effc621708f99eac538ef1cbe05d6a874dd741de2e689c47feafed5"},
"typable": {:hex, :typable, "0.3.0", "0431e121d124cd26f312123e313d2689b9a5322b15add65d424c07779eaa3ca1", [:mix], [], "hexpm", "880a0797752da1a4c508ac48f94711e04c86156f498065a83d160eef945858f8"},
"yamerl": {:hex, :yamerl, "0.10.0", "4ff81fee2f1f6a46f1700c0d880b24d193ddb74bd14ef42cb0bcf46e81ef2f8e", [:rebar3], [], "hexpm", "346adb2963f1051dc837a2364e4acf6eb7d80097c0f53cbdc3046ec8ec4b4e6e"},
"yaml_elixir": {:hex, :yaml_elixir, "2.9.0", "9a256da867b37b8d2c1ffd5d9de373a4fda77a32a45b452f1708508ba7bbcb53", [:mix], [{:yamerl, "~> 0.10", [hex: :yamerl, repo: "hexpm", optional: false]}], "hexpm", "0cb0e7d4c56f5e99a6253ed1a670ed0e39c13fc45a6da054033928607ac08dfc"},
}

6
renovate.json Normal file
View file

@ -0,0 +1,6 @@
{
"$schema": "https://docs.renovatebot.com/renovate-schema.json",
"extends": [
"local>renovate/renovate"
]
}

View file

@ -0,0 +1,233 @@
defmodule AshCubDB.DataLayerTest do
  @moduledoc false
  use ExUnit.Case, async: true

  alias Ash.{Error.Query.NotFound, Query}
  alias AshCubDB.Info
  alias Support.{Author, Domain, Post}
  import Support.Factory
  require Query

  setup do
    # Each resource has its own CubDB database; wipe both between tests.
    on_exit(fn ->
      AshCubDB.clear(Post)
      AshCubDB.clear(Author)
    end)
  end

  describe "transformer" do
    test "it correctly infers the data directory" do
      assert {:ok, path} = Info.cubdb_directory(Post)
      assert path =~ ~r/ash_cubdb\/priv\/cubdb\/post$/
    end
  end

  describe "create" do
    test "it creates a record" do
      params = params!(Post)

      assert {:ok, post} = Post.create(params)
      assert [{key, value}] = dump(Post)
      assert key == {Ecto.UUID.dump!(post.id)}
      assert value == {nil, post.body, post.title}
    end

    test "it honours context multitenancy" do
      insert!(Author, count: 3)

      assert {:ok, author} =
               Author
               |> params!()
               |> Author.create(tenant: :tenant)

      keys =
        dump(Author)
        |> Enum.map(&elem(&1, 0))

      assert {:tenant, {Ecto.UUID.dump!(author.id)}} in keys
      assert Enum.count(keys, &(elem(&1, 0) == nil)) == 3
    end

    test "it doesn't allow IDs to conflict" do
      uuid = Ash.UUID.generate()

      params =
        params!(Post)
        |> Map.put(:id, uuid)

      assert {:ok, %{id: ^uuid}} = Post.create(params)
      assert {:error, invalid} = Post.create(params)
      assert Exception.message(invalid) =~ "id: has already been taken"
    end
  end

  describe "upsert" do
    test "it creates a record" do
      params = params!(Post)

      assert {:ok, post} = Post.create(params, upsert?: true)
      assert [{key, value}] = dump(Post)
      assert key == {Ecto.UUID.dump!(post.id)}
      assert value == {nil, post.body, post.title}
    end

    test "it updates an existing record" do
      params = params!(Post)

      assert {:ok, post} = Post.create(params)

      params =
        params
        |> Map.put(:title, Faker.Lorem.sentence())
        |> Map.put(:id, post.id)

      assert {:ok, updated} = Post.create(params, upsert?: true)
      assert updated.id == post.id
      assert updated.title == params[:title]
      assert updated.title != post.title
    end
  end

  describe "read" do
    test "non-tenant scoped read" do
      expected = insert!(Post, count: 3)

      assert {:ok, actual} = Post.read()
      assert Enum.all?(actual, &is_struct(&1, Post))

      for post <- expected do
        assert post.id in Enum.map(actual, & &1.id)
      end
    end

    test "tenant scoped read" do
      insert!(Author, count: 3)

      expected =
        Post
        |> params!(count: 3)
        |> Enum.map(&Post.create!(&1, tenant: :wat))

      assert {:ok, actual} = Post.read(tenant: :wat)

      expected_ids = expected |> Enum.map(& &1.id) |> Enum.sort()
      actual_ids = actual |> Enum.map(& &1.id) |> Enum.sort()

      assert expected_ids == actual_ids
    end

    test "filters work" do
      expected = insert!(Author, attrs: %{name: "Marty McFly"})
      insert!(Author, count: 3)

      [actual] =
        Author
        |> Query.filter(name: "Marty McFly")
        |> Ash.read!()

      assert expected.id == actual.id
    end

    test "sorting" do
      insert!(Author, attrs: %{name: "Alice"})
      insert!(Author, attrs: %{name: "Mallory"})
      insert!(Author, attrs: %{name: "Bob"})

      sorted =
        Author
        |> Query.sort(name: :desc)
        |> Ash.read!()

      assert Enum.map(sorted, &to_string(&1.name)) == ["Mallory", "Bob", "Alice"]
    end

    test "limit" do
      insert!(Author, count: 3)

      assert [_] =
               Author
               |> Query.limit(1)
               |> Ash.read!()
    end

    test "offset" do
      # BUG FIX: this previously used the non-bang `insert/2`, silently
      # ignoring the returned tagged tuple; every other call site uses
      # `insert!`, which raises if the factory fails.
      insert!(Author, count: 3)

      assert [_, _] =
               Author
               |> Query.offset(1)
               |> Ash.read!()
    end

    test "distinct" do
      author = insert!(Author)
      insert!(Author, count: 3, attrs: %{name: author.name})

      assert [selected] =
               Author
               |> Query.distinct(:name)
               |> Ash.read!()

      assert selected.name == author.name
    end

    test "distinct sort" do
      post = insert!(Post, attrs: %{body: "Alice is cool"})
      insert!(Post, attrs: %{title: post.title, body: "Bob is cool"})
      insert!(Post, attrs: %{title: post.title, body: "Mallory is cool"})

      assert [selected] =
               Post
               |> Query.distinct(:title)
               |> Query.distinct_sort(body: :desc)
               |> Ash.read!()

      assert selected.title == post.title
      assert selected.body == "Mallory is cool"
    end
  end

  describe "update" do
    test "records can be updated" do
      post = insert!(Post)
      params = Post |> params!() |> Map.take([:title])

      assert {:ok, updated} = Post.update(post, params)
      assert updated.id == post.id
      assert updated.title == params.title

      assert {:ok, updated} = Post.get(post.id)
      assert updated.id == post.id
      assert updated.title == params.title
    end
  end

  describe "destroy" do
    test "records can be destroyed" do
      post = insert!(Post)

      assert :ok = Post.destroy(post)
      assert {:error, %NotFound{}} = Post.get(post.id)
    end
  end

  describe "calculations" do
    test "can be loaded" do
      post = insert!(Post)
      {:ok, post} = Post.get(post.id, load: :all_text)
      assert post.all_text == post.title <> post.body
    end
  end

  # Read every raw {key, value} pair out of the resource's CubDB database,
  # skipping the metadata entry written by AshCubDB.Migration.
  defp dump(resource) do
    resource
    |> via()
    |> CubDB.select()
    |> Enum.reject(&(elem(&1, 0) == :__metadata__))
    |> Enum.to_list()
  end

  # Databases are registered per-resource in AshCubDB.Registry.
  defp via(resource), do: {:via, Registry, {AshCubDB.Registry, resource}}
end

View file

@ -1,8 +1,4 @@
defmodule AshCubDBTest do
use ExUnit.Case
doctest AshCubDB
test "greets the world" do
assert AshCubDB.hello() == :world
end
end

33
test/support/author.ex Normal file
View file

@ -0,0 +1,33 @@
defmodule Support.Author do
  @moduledoc false
  use Ash.Resource, data_layer: AshCubDB.DataLayer, domain: Support.Domain

  # Storage path is inferred from the app's priv directory via `otp_app`.
  cubdb do
    otp_app :ash_cubdb
  end

  # Context-based multitenancy; `global? true` lets the resource also be
  # queried without a tenant.
  multitenancy do
    strategy :context
    global? true
  end

  attributes do
    uuid_primary_key :id
    attribute :name, :ci_string, public?: true
  end

  relationships do
    has_many :posts, Support.Post
  end

  actions do
    default_accept :*
    defaults ~w[create read]a
  end

  # Code interface used by the data layer tests.
  code_interface do
    define :create
    define :read
  end
end

9
test/support/domain.ex Normal file
View file

@ -0,0 +1,9 @@
defmodule Support.Domain do
  @moduledoc false
  use Ash.Domain

  # Test-only domain grouping the two support resources.
  resources do
    resource Support.Author
    resource Support.Post
  end
end

13
test/support/factory.ex Normal file
View file

@ -0,0 +1,13 @@
defmodule Support.Factory do
  @moduledoc false
  use Smokestack

  # Factories generating random attribute params for the support resources;
  # used via `params!/1,2` and `insert!/1,2` in the tests.
  factory Support.Post do
    attribute(:title, &Faker.Lorem.sentence/0)
    attribute(:body, &Faker.Lorem.paragraph/0)
  end

  factory Support.Author do
    attribute(:name, &Faker.Person.name/0)
  end
end

35
test/support/post.ex Normal file
View file

@ -0,0 +1,35 @@
defmodule Support.Post do
  @moduledoc false
  use Ash.Resource, data_layer: AshCubDB.DataLayer, domain: Support.Domain

  # Storage path is inferred from the app's priv directory via `otp_app`.
  cubdb do
    otp_app :ash_cubdb
  end

  attributes do
    # `writable?: true` allows tests to supply an explicit id (e.g. for the
    # upsert and conflict tests).
    uuid_primary_key :id, writable?: true, public?: true
    attribute :title, :string, public?: true
    attribute :body, :string, public?: true
  end

  actions do
    default_accept [:id, :title, :body]
    defaults ~w[create read update destroy]a
  end

  calculations do
    calculate :all_text, :string, expr(title <> body)
  end

  relationships do
    belongs_to :author, Support.Author
  end

  # Code interface used by the data layer tests.
  code_interface do
    define :create
    define :read
    define :update
    define :get, action: :read, get_by: [:id]
    define :destroy
  end
end