mirror of
https://github.com/team-alembic/ash_gen_server.git
synced 2024-09-19 21:02:53 +12:00
feat: first pass at a genserver backed Ash datalayer.
This commit is contained in:
commit
6683251713
40 changed files with 1779 additions and 0 deletions
85
.devcontainer/Dockerfile
Normal file
85
.devcontainer/Dockerfile
Normal file
|
@ -0,0 +1,85 @@
|
||||||
|
FROM ubuntu:latest
|
||||||
|
|
||||||
|
ENV DEBIAN_FRONTEND="noninteractive"
|
||||||
|
|
||||||
|
# Install basic dependencies
|
||||||
|
RUN apt-get -q -y update && \
|
||||||
|
apt-get -q -y --no-install-recommends install autoconf automake \
|
||||||
|
bash build-essential bzip2 ca-certificates curl dpkg-dev file \
|
||||||
|
g++ gcc git-core imagemagick libbz2-dev libc6-dev libdb-dev libevent-dev \
|
||||||
|
libffi-dev libgdbm-dev libglib2.0-dev libgmp-dev libjpeg-dev libkrb5-dev \
|
||||||
|
liblzma-dev libmagickcore-dev libmagickwand-dev libmaxminddb-dev \
|
||||||
|
libncurses-dev libncurses5-dev libncursesw5-dev libpng-dev libpq-dev \
|
||||||
|
libreadline-dev libsctp-dev libsqlite3-dev libssl-dev libtool libwebp-dev \
|
||||||
|
libxml2-dev libxslt-dev libyaml-dev locales make make mercurial patch python3 \
|
||||||
|
unixodbc-dev unzip wget xz-utils zlib1g-dev zsh gnupg inotify-tools less \
|
||||||
|
postgresql-client ssh direnv && apt-get -q -y clean
|
||||||
|
|
||||||
|
RUN locale-gen en_AU.UTF-8
|
||||||
|
ENV LANG en_AU.UTF-8
|
||||||
|
ENV LANGUAGE en_AU:en
|
||||||
|
ENV LC_ALL en_AU.UTF-8
|
||||||
|
|
||||||
|
ARG USERNAME=vscode
|
||||||
|
ARG USER_UID=1000
|
||||||
|
ARG USER_GID=$USER_UID
|
||||||
|
RUN groupadd --gid ${USER_GID} ${USERNAME}
|
||||||
|
RUN useradd --shell /usr/bin/zsh --uid ${USER_UID} --gid ${USER_GID} -m ${USERNAME}
|
||||||
|
RUN mkdir /workspace && chown ${USERNAME}:${USERNAME} /workspace
|
||||||
|
RUN touch /entrypoint.sh && chown ${USERNAME}:${USERNAME} /entrypoint.sh
|
||||||
|
RUN mkdir -p /var/tmp/history && chown -R ${USERNAME}:${USERNAME} /var/tmp/history
|
||||||
|
RUN mkdir /storage && chown ${USERNAME}:${USERNAME} /storage
|
||||||
|
|
||||||
|
USER ${USERNAME}
|
||||||
|
ENV HOME=/home/${USERNAME}
|
||||||
|
|
||||||
|
WORKDIR $HOME
|
||||||
|
|
||||||
|
# Install oh-my-zsh
|
||||||
|
RUN sh -c "$(curl -fsSL https://raw.github.com/ohmyzsh/ohmyzsh/master/tools/install.sh)"
|
||||||
|
|
||||||
|
RUN echo '\neval "$(direnv hook zsh)"' >> ~/.zshrc
|
||||||
|
|
||||||
|
# Install ASDF
|
||||||
|
ARG ASDF_VERSION=0.10.2
|
||||||
|
RUN git clone https://github.com/asdf-vm/asdf.git ~/.asdf --branch v${ASDF_VERSION} && \
|
||||||
|
echo '\nsource $HOME/.asdf/asdf.sh' >> ~/.bashrc && \
|
||||||
|
echo '\nsource $HOME/.asdf/asdf.sh' >> ~/.zshrc
|
||||||
|
|
||||||
|
WORKDIR /workspace/
|
||||||
|
RUN mkdir _build deps .elixir_ls
|
||||||
|
|
||||||
|
# Install all the tools specified in the tool versions file.
|
||||||
|
COPY .tool-versions /workspace/
|
||||||
|
RUN /bin/bash -c 'source ~/.asdf/asdf.sh && \
|
||||||
|
cat .tool-versions | cut -d \ -f 1 | xargs -n 1 asdf plugin add && \
|
||||||
|
asdf install && \
|
||||||
|
cat .tool-versions | xargs -n 2 asdf global'
|
||||||
|
|
||||||
|
# Elixir and Erlang setup (we just assume that Elixir was in the .tool-versions file)
|
||||||
|
ARG HEX_API_KEY
|
||||||
|
RUN /bin/bash -c 'source ~/.asdf/asdf.sh && \
|
||||||
|
mix local.rebar --force && \
|
||||||
|
mix local.hex --force'
|
||||||
|
ENV ERL_AFLAGS="-kernel shell_history enabled -kernel shell_history_path '\"/var/tmp/history/erlang.history\"'"
|
||||||
|
|
||||||
|
# Generate an entrypoint.sh
|
||||||
|
RUN echo '#!/bin/bash' > /entrypoint.sh && \
|
||||||
|
echo 'source ~/.asdf/asdf.sh' >> /entrypoint.sh && \
|
||||||
|
echo 'eval "$(direnv hook bash)"' >> /entrypoint.sh && \
|
||||||
|
echo 'exec "$@"' >> /entrypoint.sh && \
|
||||||
|
chmod 755 /entrypoint.sh
|
||||||
|
|
||||||
|
ENTRYPOINT [ "/entrypoint.sh" ]
|
||||||
|
|
||||||
|
# Prodigious use of asterisk to allow for files which may not exist.
|
||||||
|
COPY .env* .formatter.exs .tool-versions* bin* config* lib* priv* rel* test* CHANGELOG.md* mix.* /workspace/
|
||||||
|
|
||||||
|
RUN /bin/bash -c 'if [ -e .envrc ]; then /usr/bin/direnv allow; fi'
|
||||||
|
|
||||||
|
# Run mix deps.get
|
||||||
|
RUN /bin/bash -c 'source ~/.asdf/asdf.sh && \
|
||||||
|
mix deps.get'
|
||||||
|
|
||||||
|
CMD ["zsh"]
|
||||||
|
|
37
.devcontainer/devcontainer.json
Normal file
37
.devcontainer/devcontainer.json
Normal file
|
@ -0,0 +1,37 @@
|
||||||
|
// For format details, see https://aka.ms/devcontainer.json. For config options, see the README at:
|
||||||
|
// https://github.com/microsoft/vscode-dev-containers/tree/v0.241.1/containers/elixir-phoenix-postgres
|
||||||
|
{
|
||||||
|
"name": "ASDF, Elixir and Postgres",
|
||||||
|
"dockerComposeFile": "docker-compose.yml",
|
||||||
|
"service": "app",
|
||||||
|
"workspaceFolder": "/workspace",
|
||||||
|
// Configure tool-specific properties.
|
||||||
|
"customizations": {
|
||||||
|
// Configure properties specific to VS Code.
|
||||||
|
"vscode": {
|
||||||
|
// Add the IDs of extensions you want installed when the container is created.
|
||||||
|
"extensions": [
|
||||||
|
"jakebecker.elixir-ls",
|
||||||
|
"msaraiva.surface",
|
||||||
|
"ue.alphabetical-sorter",
|
||||||
|
"wmaurer.change-case",
|
||||||
|
"Rubymaniac.vscode-direnv",
|
||||||
|
"RoyalMist.vscode-eex-format",
|
||||||
|
"iampeterbanjo.elixirlinter",
|
||||||
|
"pgourlain.erlang",
|
||||||
|
"szTheory.erlang-formatter"
|
||||||
|
]
|
||||||
|
}
|
||||||
|
},
|
||||||
|
// Use 'forwardPorts' to make a list of ports inside the container available locally.
|
||||||
|
// This can be used to network with other containers or with the host.
|
||||||
|
"forwardPorts": [
|
||||||
|
4000,
|
||||||
|
4001,
|
||||||
|
5432
|
||||||
|
],
|
||||||
|
// Use 'postCreateCommand' to run commands after the container is created.
|
||||||
|
// "postCreateCommand": "mix deps.get"
|
||||||
|
// Uncomment to connect as a non-root user. See https://aka.ms/vscode-remote/containers/non-root.
|
||||||
|
"remoteUser": "vscode"
|
||||||
|
}
|
39
.devcontainer/docker-compose.yml
Normal file
39
.devcontainer/docker-compose.yml
Normal file
|
@ -0,0 +1,39 @@
|
||||||
|
version: "3.8"
|
||||||
|
|
||||||
|
volumes:
|
||||||
|
apt-cache: {}
|
||||||
|
history: {}
|
||||||
|
app-deps: {}
|
||||||
|
app-build: {}
|
||||||
|
app-elixir-ls: {}
|
||||||
|
app-asdf: {}
|
||||||
|
app-storage: {}
|
||||||
|
|
||||||
|
services:
|
||||||
|
app:
|
||||||
|
environment:
|
||||||
|
LOGGER_LEVEL: 1
|
||||||
|
HISTFILE: /var/tmp/history/shell.history
|
||||||
|
GIT_AUTHOR_EMAIL:
|
||||||
|
GIT_COMMITTER_EMAIL:
|
||||||
|
PORT: 400
|
||||||
|
build:
|
||||||
|
context: ../
|
||||||
|
dockerfile: .devcontainer/Dockerfile
|
||||||
|
args:
|
||||||
|
HEX_API_KEY:
|
||||||
|
|
||||||
|
|
||||||
|
volumes:
|
||||||
|
- ..:/workspace:cached
|
||||||
|
- "apt-cache:/var/cache/apt:rw"
|
||||||
|
- "history:/var/tmp/history:rw"
|
||||||
|
- "app-asdf:/home/vscode/.asdf:rw"
|
||||||
|
- "app-deps:/workspace/deps:rw"
|
||||||
|
- "app-build:/workspace/_build:rw"
|
||||||
|
- "app-elixir-ls:/workspace/.elixir_ls:rw"
|
||||||
|
- "app-storage:/storage:rw"
|
||||||
|
# Runs app on the same network as the database container, allows "forwardPorts" in devcontainer.json function.
|
||||||
|
|
||||||
|
# Overrides default command so things don't shut down after the process ends.
|
||||||
|
command: sleep infinity
|
17
.doctor.exs
Normal file
17
.doctor.exs
Normal file
|
@ -0,0 +1,17 @@
|
||||||
|
%Doctor.Config{
|
||||||
|
ignore_modules: [
|
||||||
|
Inspect.TimeTravel.Character,
|
||||||
|
Inspect.TimeTravel.Machine
|
||||||
|
],
|
||||||
|
ignore_paths: [],
|
||||||
|
min_module_doc_coverage: 40,
|
||||||
|
min_module_spec_coverage: 0,
|
||||||
|
min_overall_doc_coverage: 50,
|
||||||
|
min_overall_spec_coverage: 0,
|
||||||
|
moduledoc_required: true,
|
||||||
|
exception_moduledoc_required: true,
|
||||||
|
raise: false,
|
||||||
|
reporter: Doctor.Reporters.Full,
|
||||||
|
struct_type_spec_required: true,
|
||||||
|
umbrella: false
|
||||||
|
}
|
8
.formatter.exs
Normal file
8
.formatter.exs
Normal file
|
@ -0,0 +1,8 @@
|
||||||
|
[
|
||||||
|
import_deps: [:ash],
|
||||||
|
inputs: [
|
||||||
|
"*.{ex,exs}",
|
||||||
|
"{config,lib,test}/**/*.{ex,exs}"
|
||||||
|
],
|
||||||
|
plugins: [Ash.ResourceFormatter]
|
||||||
|
]
|
9
.github/dependabot.yml
vendored
Normal file
9
.github/dependabot.yml
vendored
Normal file
|
@ -0,0 +1,9 @@
|
||||||
|
version: 2
|
||||||
|
updates:
|
||||||
|
- package-ecosystem: docker
|
||||||
|
directory: "/.devcontainer"
|
||||||
|
schedule:
|
||||||
|
interval: "daily"
|
||||||
|
- package-ecosystem: mix
|
||||||
|
schedule:
|
||||||
|
interval: "daily"
|
36
.github/workflows/mix_credo.yml
vendored
Normal file
36
.github/workflows/mix_credo.yml
vendored
Normal file
|
@ -0,0 +1,36 @@
|
||||||
|
name: mix credo --strict
|
||||||
|
|
||||||
|
on:
|
||||||
|
pull_request:
|
||||||
|
branches:
|
||||||
|
- main
|
||||||
|
|
||||||
|
jobs:
|
||||||
|
test:
|
||||||
|
runs-on: ubuntu-latest
|
||||||
|
env:
|
||||||
|
MIX_ENV: test
|
||||||
|
strategy:
|
||||||
|
matrix:
|
||||||
|
elixir: [1.13.4]
|
||||||
|
otp: [25.0.3]
|
||||||
|
|
||||||
|
steps:
|
||||||
|
- uses: actions/checkout@v2
|
||||||
|
- uses: erlef/setup-beam@v1
|
||||||
|
with:
|
||||||
|
elixir-version: ${{ matrix.elixir }}
|
||||||
|
otp-version: ${{ matrix.otp }}
|
||||||
|
- name: Retrieve Mix Dependencies Cache
|
||||||
|
uses: actions/cache@v1
|
||||||
|
id: mix-cache
|
||||||
|
with:
|
||||||
|
path: deps
|
||||||
|
key: ${{ runner.os }}-${{ matrix.otp }}-${{ matrix.elixir }}-mix-${{ hashFiles(format('{0}{1}', github.workspace, '/mix.lock')) }}
|
||||||
|
- name: Install Mix Dependencies
|
||||||
|
if: steps.mix-cache.outputs.cache-hit != 'true'
|
||||||
|
run: |
|
||||||
|
mix local.rebar --force
|
||||||
|
mix local.hex --force
|
||||||
|
mix deps.get
|
||||||
|
- run: mix credo --strict
|
36
.github/workflows/mix_doctor.yml
vendored
Normal file
36
.github/workflows/mix_doctor.yml
vendored
Normal file
|
@ -0,0 +1,36 @@
|
||||||
|
name: mix doctor
|
||||||
|
|
||||||
|
on:
|
||||||
|
pull_request:
|
||||||
|
branches:
|
||||||
|
- main
|
||||||
|
|
||||||
|
jobs:
|
||||||
|
test:
|
||||||
|
runs-on: ubuntu-latest
|
||||||
|
env:
|
||||||
|
MIX_ENV: test
|
||||||
|
strategy:
|
||||||
|
matrix:
|
||||||
|
elixir: [1.13.4]
|
||||||
|
otp: [25.0.3]
|
||||||
|
|
||||||
|
steps:
|
||||||
|
- uses: actions/checkout@v2
|
||||||
|
- uses: erlef/setup-beam@v1
|
||||||
|
with:
|
||||||
|
elixir-version: ${{ matrix.elixir }}
|
||||||
|
otp-version: ${{ matrix.otp }}
|
||||||
|
- name: Retrieve Mix Dependencies Cache
|
||||||
|
uses: actions/cache@v1
|
||||||
|
id: mix-cache
|
||||||
|
with:
|
||||||
|
path: deps
|
||||||
|
key: ${{ runner.os }}-${{ matrix.otp }}-${{ matrix.elixir }}-mix-${{ hashFiles(format('{0}{1}', github.workspace, '/mix.lock')) }}
|
||||||
|
- name: Install Mix Dependencies
|
||||||
|
if: steps.mix-cache.outputs.cache-hit != 'true'
|
||||||
|
run: |
|
||||||
|
mix local.rebar --force
|
||||||
|
mix local.hex --force
|
||||||
|
mix deps.get
|
||||||
|
- run: mix doctor --full --raise
|
37
.github/workflows/mix_format.yml
vendored
Normal file
37
.github/workflows/mix_format.yml
vendored
Normal file
|
@ -0,0 +1,37 @@
|
||||||
|
name: mix format --check-formatted
|
||||||
|
|
||||||
|
on:
|
||||||
|
pull_request:
|
||||||
|
branches:
|
||||||
|
- main
|
||||||
|
|
||||||
|
jobs:
|
||||||
|
test:
|
||||||
|
runs-on: ubuntu-latest
|
||||||
|
env:
|
||||||
|
MIX_ENV: test
|
||||||
|
strategy:
|
||||||
|
matrix:
|
||||||
|
elixir: [1.13.4]
|
||||||
|
otp: [25.0.3]
|
||||||
|
|
||||||
|
steps:
|
||||||
|
- uses: actions/checkout@v2
|
||||||
|
- uses: erlef/setup-beam@v1
|
||||||
|
with:
|
||||||
|
elixir-version: ${{ matrix.elixir }}
|
||||||
|
otp-version: ${{ matrix.otp }}
|
||||||
|
- name: Retrieve Mix Dependencies Cache
|
||||||
|
uses: actions/cache@v1
|
||||||
|
id: mix-cache
|
||||||
|
with:
|
||||||
|
path: deps
|
||||||
|
key: ${{ runner.os }}-${{ matrix.otp }}-${{ matrix.elixir }}-mix-${{ hashFiles(format('{0}{1}', github.workspace, '/mix.lock')) }}
|
||||||
|
- name: Install Mix Dependencies
|
||||||
|
if: steps.mix-cache.outputs.cache-hit != 'true'
|
||||||
|
run: |
|
||||||
|
mix local.rebar --force
|
||||||
|
mix local.hex --force
|
||||||
|
mix deps.get
|
||||||
|
- name: Check Formatting
|
||||||
|
run: mix format --check-formatted
|
82
.github/workflows/mix_git_ops.yml
vendored
Normal file
82
.github/workflows/mix_git_ops.yml
vendored
Normal file
|
@ -0,0 +1,82 @@
|
||||||
|
name: mix git_ops.release
|
||||||
|
on:
|
||||||
|
push:
|
||||||
|
branches:
|
||||||
|
- main
|
||||||
|
|
||||||
|
jobs:
|
||||||
|
maybe_release:
|
||||||
|
runs-on: ubuntu-latest
|
||||||
|
|
||||||
|
steps:
|
||||||
|
- uses: actions/checkout@v2
|
||||||
|
- uses: erlef/setup-beam@v1
|
||||||
|
with:
|
||||||
|
elixir-version: "1.13.4"
|
||||||
|
otp-version: "25.0.3"
|
||||||
|
- name: Retrieve Mix Dependencies Cache
|
||||||
|
uses: actions/cache@v1
|
||||||
|
id: mix-cache
|
||||||
|
with:
|
||||||
|
path: deps
|
||||||
|
key: ${{ runner.os }}-${{ matrix.otp }}-${{ matrix.elixir }}-mix-${{ hashFiles(format('{0}{1}', github.workspace, '/mix.lock')) }}
|
||||||
|
- name: Install Mix Dependencies
|
||||||
|
if: steps.mix-cache.outputs.cache-hit != 'true'
|
||||||
|
run: |
|
||||||
|
mix local.rebar --force
|
||||||
|
mix local.hex --force
|
||||||
|
mix deps.get
|
||||||
|
- name: Configure git author
|
||||||
|
run: git config user.email '25500012+github-actions[bot]@users.noreply.github.com' ; git config user.name 'github-actions'
|
||||||
|
- name: Retrieving the current app version
|
||||||
|
id: original-app-info
|
||||||
|
run: MIX_ENV=test mix git_ops.project_info --format github-actions
|
||||||
|
- name: Run mix git_ops.release
|
||||||
|
run: MIX_ENV=test mix git_ops.release --yes || true
|
||||||
|
- name: Checking for new app version
|
||||||
|
id: app-info
|
||||||
|
run: MIX_ENV=test mix git_ops.project_info --format github-actions
|
||||||
|
- name: Building hex package
|
||||||
|
if: ${{ steps.original-app-info.outputs.app_version != steps.app-info.outputs.app_version }}
|
||||||
|
run: mix hex.build -o ${{ steps.app-info.outputs.app_name }}-${{ steps.app-info.outputs.app_version }}.tar
|
||||||
|
- name: Building hex docs
|
||||||
|
if: ${{ steps.original-app-info.outputs.app_version != steps.app-info.outputs.app_version }}
|
||||||
|
run: MIX_ENV=test mix docs && tar zcvf ${{ steps.app-info.outputs.app_name }}-${{ steps.app-info.outputs.app_version }}-docs.tar.gz doc/
|
||||||
|
- name: Pushing new tag
|
||||||
|
if: ${{ steps.original-app-info.outputs.app_version != steps.app-info.outputs.app_version }}
|
||||||
|
run: git push "https://${{ github.actor }}:${{ github.token }}@github.com/${{ github.repository }}" HEAD:${{ github.ref }} refs/tags/v${{ steps.app-info.outputs.app_version }}
|
||||||
|
- name: Creating new Github release
|
||||||
|
if: ${{ steps.original-app-info.outputs.app_version != steps.app-info.outputs.app_version }}
|
||||||
|
id: create-release
|
||||||
|
uses: actions/create-release@v1
|
||||||
|
env:
|
||||||
|
GITHUB_TOKEN: ${{ github.token }}
|
||||||
|
with:
|
||||||
|
tag_name: v${{ steps.app-info.outputs.app_version }}
|
||||||
|
release_name: Release ${{ steps.app-info.outputs.app_name }} ${{ steps.app-info.outputs.app_version }}
|
||||||
|
body: Autogenerated by git_ops release.
|
||||||
|
- name: Uploading hex package as release artifact
|
||||||
|
if: ${{ steps.original-app-info.outputs.app_version != steps.app-info.outputs.app_version }}
|
||||||
|
id: upload-package
|
||||||
|
uses: actions/upload-release-asset@v1
|
||||||
|
env:
|
||||||
|
GITHUB_TOKEN: ${{ github.token }}
|
||||||
|
with:
|
||||||
|
upload_url: ${{ steps.create-release.outputs.upload_url }}
|
||||||
|
asset_path: ${{ steps.app-info.outputs.app_name }}-${{ steps.app-info.outputs.app_version }}.tar
|
||||||
|
asset_name: ${{ steps.app-info.outputs.app_name }}-${{ steps.app-info.outputs.app_version }}.tar
|
||||||
|
asset_content_type: application/x-tar
|
||||||
|
- name: Uploading documentation as release artifact
|
||||||
|
if: ${{ steps.original-app-info.outputs.app_version != steps.app-info.outputs.app_version }}
|
||||||
|
id: upload-docs
|
||||||
|
uses: actions/upload-release-asset@v1
|
||||||
|
env:
|
||||||
|
GITHUB_TOKEN: ${{ github.token }}
|
||||||
|
with:
|
||||||
|
upload_url: ${{ steps.create-release.outputs.upload_url }}
|
||||||
|
asset_path: ${{ steps.app-info.outputs.app_name }}-${{ steps.app-info.outputs.app_version }}-docs.tar.gz
|
||||||
|
asset_name: ${{ steps.app-info.outputs.app_name }}-${{ steps.app-info.outputs.app_version }}-docs.tar.gz
|
||||||
|
asset_content_type: application/gzip
|
||||||
|
# - name: Publishing hex package
|
||||||
|
# if: ${{ steps.original-app-info.outputs.app_version != steps.app-info.outputs.app_version }}
|
||||||
|
# run: mix hex.publish --yes
|
35
.github/workflows/mix_hex_audit.yml
vendored
Normal file
35
.github/workflows/mix_hex_audit.yml
vendored
Normal file
|
@ -0,0 +1,35 @@
|
||||||
|
name: mix hex.audit
|
||||||
|
on:
|
||||||
|
pull_request:
|
||||||
|
branches:
|
||||||
|
- main
|
||||||
|
|
||||||
|
jobs:
|
||||||
|
test:
|
||||||
|
runs-on: ubuntu-latest
|
||||||
|
env:
|
||||||
|
MIX_ENV: test
|
||||||
|
strategy:
|
||||||
|
matrix:
|
||||||
|
elixir: [1.13.4]
|
||||||
|
otp: [25.0.3]
|
||||||
|
|
||||||
|
steps:
|
||||||
|
- uses: actions/checkout@v2
|
||||||
|
- uses: erlef/setup-beam@v1
|
||||||
|
with:
|
||||||
|
elixir-version: ${{ matrix.elixir }}
|
||||||
|
otp-version: ${{ matrix.otp }}
|
||||||
|
- name: Retrieve Mix Dependencies Cache
|
||||||
|
uses: actions/cache@v1
|
||||||
|
id: mix-cache
|
||||||
|
with:
|
||||||
|
path: deps
|
||||||
|
key: ${{ runner.os }}-${{ matrix.otp }}-${{ matrix.elixir }}-mix-${{ hashFiles(format('{0}{1}', github.workspace, '/mix.lock')) }}
|
||||||
|
- name: Install Mix Dependencies
|
||||||
|
if: steps.mix-cache.outputs.cache-hit != 'true'
|
||||||
|
run: |
|
||||||
|
mix local.rebar --force
|
||||||
|
mix local.hex --force
|
||||||
|
mix deps.get
|
||||||
|
- run: mix hex.audit
|
54
.github/workflows/mix_test.yml
vendored
Normal file
54
.github/workflows/mix_test.yml
vendored
Normal file
|
@ -0,0 +1,54 @@
|
||||||
|
name: mix test
|
||||||
|
|
||||||
|
on:
|
||||||
|
pull_request:
|
||||||
|
branches:
|
||||||
|
- main
|
||||||
|
|
||||||
|
jobs:
|
||||||
|
test:
|
||||||
|
runs-on: ubuntu-latest
|
||||||
|
env:
|
||||||
|
MIX_ENV: test
|
||||||
|
strategy:
|
||||||
|
matrix:
|
||||||
|
elixir: [1.13.4]
|
||||||
|
otp: [25.0.3]
|
||||||
|
|
||||||
|
services:
|
||||||
|
postgres:
|
||||||
|
image: postgres
|
||||||
|
env:
|
||||||
|
POSTGRES_HOST_AUTH_METHOD: trust
|
||||||
|
options: >-
|
||||||
|
--health-cmd pg_isready
|
||||||
|
--health-interval 10s
|
||||||
|
--health-timeout 5s
|
||||||
|
--health-retries 5
|
||||||
|
ports:
|
||||||
|
- 5432:5432
|
||||||
|
|
||||||
|
steps:
|
||||||
|
- uses: actions/checkout@v2
|
||||||
|
- uses: erlef/setup-beam@v1
|
||||||
|
with:
|
||||||
|
elixir-version: ${{ matrix.elixir }}
|
||||||
|
otp-version: ${{ matrix.otp }}
|
||||||
|
- name: Retrieve Mix Dependencies Cache
|
||||||
|
uses: actions/cache@v1
|
||||||
|
id: mix-cache
|
||||||
|
with:
|
||||||
|
path: deps
|
||||||
|
key: ${{ runner.os }}-${{ matrix.otp }}-${{ matrix.elixir }}-mix-${{ hashFiles(format('{0}{1}', github.workspace, '/mix.lock')) }}
|
||||||
|
- name: Install Mix Dependencies
|
||||||
|
if: steps.mix-cache.outputs.cache-hit != 'true'
|
||||||
|
run: |
|
||||||
|
mix local.rebar --force
|
||||||
|
mix local.hex --force
|
||||||
|
mix deps.get
|
||||||
|
- run: mix test
|
||||||
|
env:
|
||||||
|
MIX_ENV: test
|
||||||
|
PGUSER: postgres
|
||||||
|
PGPASS: postgres
|
||||||
|
PGHOST: postgres
|
26
.gitignore
vendored
Normal file
26
.gitignore
vendored
Normal file
|
@ -0,0 +1,26 @@
|
||||||
|
# The directory Mix will write compiled artifacts to.
|
||||||
|
/_build/
|
||||||
|
|
||||||
|
# If you run "mix test --cover", coverage assets end up here.
|
||||||
|
/cover/
|
||||||
|
|
||||||
|
# The directory Mix downloads your dependencies sources to.
|
||||||
|
/deps/
|
||||||
|
|
||||||
|
# Where third-party dependencies like ExDoc output generated docs.
|
||||||
|
/doc/
|
||||||
|
|
||||||
|
# Ignore .fetch files in case you like to edit your project deps locally.
|
||||||
|
/.fetch
|
||||||
|
|
||||||
|
# If the VM crashes, it generates a dump, let's ignore it too.
|
||||||
|
erl_crash.dump
|
||||||
|
|
||||||
|
# Also ignore archive artifacts (built via "mix archive.build").
|
||||||
|
*.ez
|
||||||
|
|
||||||
|
# Ignore package tarball (built via "mix hex.build").
|
||||||
|
ash_gen_server-*.tar
|
||||||
|
|
||||||
|
# Temporary files, for example, from tests.
|
||||||
|
/tmp/
|
2
.tool-versions
Normal file
2
.tool-versions
Normal file
|
@ -0,0 +1,2 @@
|
||||||
|
elixir 1.13.4-otp-25
|
||||||
|
erlang 25.0.3
|
21
README.md
Normal file
21
README.md
Normal file
|
@ -0,0 +1,21 @@
|
||||||
|
# AshGenServer
|
||||||
|
|
||||||
|
**TODO: Add description**
|
||||||
|
|
||||||
|
## Installation
|
||||||
|
|
||||||
|
If [available in Hex](https://hex.pm/docs/publish), the package can be installed
|
||||||
|
by adding `ash_gen_server` to your list of dependencies in `mix.exs`:
|
||||||
|
|
||||||
|
```elixir
|
||||||
|
def deps do
|
||||||
|
[
|
||||||
|
{:ash_gen_server, "~> 0.1.0"}
|
||||||
|
]
|
||||||
|
end
|
||||||
|
```
|
||||||
|
|
||||||
|
Documentation can be generated with [ExDoc](https://github.com/elixir-lang/ex_doc)
|
||||||
|
and published on [HexDocs](https://hexdocs.pm). Once published, the docs can
|
||||||
|
be found at <https://hexdocs.pm/ash_gen_server>.
|
||||||
|
|
11
config/config.exs
Normal file
11
config/config.exs
Normal file
|
@ -0,0 +1,11 @@
|
||||||
|
import Config
|
||||||
|
|
||||||
|
config :ash_gen_server, runtime: config_env() != :test
|
||||||
|
|
||||||
|
config :git_ops,
|
||||||
|
mix_project: Mix.Project.get!(),
|
||||||
|
changelog_file: "CHANGELOG.md",
|
||||||
|
repository_url: "https://github.com/team-alembic/ash_gen_server",
|
||||||
|
manage_mix_version: true,
|
||||||
|
manage_readme_version: "README.md",
|
||||||
|
version_tag_prefix: "v"
|
18
lib/ash_gen_server.ex
Normal file
18
lib/ash_gen_server.ex
Normal file
|
@ -0,0 +1,18 @@
|
||||||
|
defmodule AshGenServer do
|
||||||
|
@moduledoc """
|
||||||
|
Documentation for `AshGenServer`.
|
||||||
|
"""
|
||||||
|
|
||||||
|
@doc """
|
||||||
|
Hello world.
|
||||||
|
|
||||||
|
## Examples
|
||||||
|
|
||||||
|
iex> AshGenServer.hello()
|
||||||
|
:world
|
||||||
|
|
||||||
|
"""
|
||||||
|
def hello do
|
||||||
|
:world
|
||||||
|
end
|
||||||
|
end
|
19
lib/ash_gen_server/application.ex
Normal file
19
lib/ash_gen_server/application.ex
Normal file
|
@ -0,0 +1,19 @@
|
||||||
|
defmodule AshGenServer.Application do
|
||||||
|
# See https://hexdocs.pm/elixir/Application.html
|
||||||
|
# for more information on OTP Applications
|
||||||
|
@moduledoc false
|
||||||
|
|
||||||
|
use Application
|
||||||
|
|
||||||
|
@impl true
|
||||||
|
def start(_type, _args) do
|
||||||
|
children()
|
||||||
|
|> Supervisor.start_link(strategy: :one_for_all, name: __MODULE__)
|
||||||
|
end
|
||||||
|
|
||||||
|
defp children do
|
||||||
|
if Application.get_env(:ash_gen_server, :runtime, true),
|
||||||
|
do: [AshGenServer.Supervisor, AshGenServer.Registry],
|
||||||
|
else: []
|
||||||
|
end
|
||||||
|
end
|
174
lib/ash_gen_server/data_layer.ex
Normal file
174
lib/ash_gen_server/data_layer.ex
Normal file
|
@ -0,0 +1,174 @@
|
||||||
|
defmodule AshGenServer.DataLayer do
|
||||||
|
@moduledoc """
|
||||||
|
An Ash Datalayer backed by individual GenServers.
|
||||||
|
|
||||||
|
You probably don't actually want this, except in very specific circumstances.
|
||||||
|
If you merely want in-memory storage of resources then take a look at
|
||||||
|
`Ash.DataLayer.Ets`.
|
||||||
|
|
||||||
|
## Caveats:
|
||||||
|
* When a resource using this datalayer is created it spawns an instance of
|
||||||
|
`AshGenServer.Server` and performs all operations on the data within it.
|
||||||
|
This means that your actions must pay the price of a `GenServer.call/3` to
|
||||||
|
read or modify the data.
|
||||||
|
* When destroying a resource it's process is terminated and it's internal
|
||||||
|
state is lost.
|
||||||
|
* If, for some reason, the `AshGenServer.Server` process crashes or exits for
|
||||||
|
an abnormal reason the supervisor will restart it **with the changeset used
|
||||||
|
by the `create` action** - this means that any updates performed since
|
||||||
|
creation will be lost.
|
||||||
|
* Any resource using this data source **must** have at least one primary key
|
||||||
|
field.
|
||||||
|
* Retrieving a resource by primary key is an optimised case, but any other
|
||||||
|
queries will pay the price of having to query every `AshGenServer.Server`
|
||||||
|
process in sequence.
|
||||||
|
"""
|
||||||
|
|
||||||
|
use Ash.Dsl.Extension, transformers: [], sections: []
|
||||||
|
alias Ash.{Actions, Api, Changeset, DataLayer, Filter, Resource, Sort}
|
||||||
|
alias AshGenServer.{Query, Registry, Server, Supervisor}
|
||||||
|
@behaviour Ash.DataLayer
|
||||||
|
|
||||||
|
@doc false
|
||||||
|
@impl true
|
||||||
|
@spec can?(Resource.t(), DataLayer.feature()) :: boolean
|
||||||
|
def can?(_, :composite_primary_key), do: true
|
||||||
|
def can?(_, :create), do: true
|
||||||
|
def can?(_, :read), do: true
|
||||||
|
def can?(_, :update), do: true
|
||||||
|
def can?(_, :destroy), do: true
|
||||||
|
def can?(_, :filter), do: true
|
||||||
|
def can?(_, :boolean_filter), do: true
|
||||||
|
def can?(_, :nested_expressions), do: true
|
||||||
|
def can?(_, {:filter_expr, _}), do: true
|
||||||
|
def can?(_, _), do: false
|
||||||
|
|
||||||
|
@doc false
|
||||||
|
@impl true
|
||||||
|
@spec resource_to_query(Resource.t(), Api.t()) :: Query.t()
|
||||||
|
def resource_to_query(resource, api), do: %Query{resource: resource, api: api}
|
||||||
|
|
||||||
|
@doc false
|
||||||
|
@impl true
|
||||||
|
@spec filter(Query.t(), Filter.t(), Resource.t()) :: Query.t()
|
||||||
|
def filter(%{resource: resource, filter: nil} = query, filter, resource),
|
||||||
|
do: {:ok, %{query | filter: filter}}
|
||||||
|
|
||||||
|
def filter(%{resource: resource} = query, filter, resource) do
|
||||||
|
with {:ok, filter} <- Filter.add_to_filter(query.filter, filter),
|
||||||
|
do: {:ok, %{query | filter: filter}}
|
||||||
|
end
|
||||||
|
|
||||||
|
@doc false
|
||||||
|
@impl true
|
||||||
|
@spec limit(Query.t(), limit, Resource.t()) :: Query.t() when limit: non_neg_integer()
|
||||||
|
def limit(%{resource: resource} = query, limit, resource), do: {:ok, %{query | limit: limit}}
|
||||||
|
|
||||||
|
@doc false
|
||||||
|
@impl true
|
||||||
|
@spec offset(Query.t(), offset, Resource.t()) :: Query.t() when offset: non_neg_integer()
|
||||||
|
def offset(%{resource: resource} = query, offset, resource),
|
||||||
|
do: {:ok, %{query | offset: offset}}
|
||||||
|
|
||||||
|
@doc false
|
||||||
|
@impl true
|
||||||
|
@spec sort(Query.t(), Sort.t(), Resource.t()) :: Query.t()
|
||||||
|
def sort(%{resource: resource} = query, sort, resource), do: {:ok, %{query | sort: sort}}
|
||||||
|
|
||||||
|
@doc false
|
||||||
|
@impl true
|
||||||
|
@spec run_query(Query.t(), Resource.t()) :: {:ok, Enum.t(Resource.t())} | {:error, any}
|
||||||
|
def run_query(%Query{resource: resource, filter: nil} = query, resource),
|
||||||
|
do: do_slow_query(query, resource)
|
||||||
|
|
||||||
|
# attempt to detect and accellerate the case of `get(primary_key)`
|
||||||
|
def run_query(
|
||||||
|
%Query{resource: resource, filter: filter} = query,
|
||||||
|
resource
|
||||||
|
) do
|
||||||
|
primary_key_fields =
|
||||||
|
resource
|
||||||
|
|> Resource.Info.primary_key()
|
||||||
|
|> MapSet.new()
|
||||||
|
|
||||||
|
primary_key_preds =
|
||||||
|
primary_key_fields
|
||||||
|
|> Stream.map(&{&1, Filter.find_simple_equality_predicate(filter, &1)})
|
||||||
|
|> Stream.filter(&elem(&1, 1))
|
||||||
|
|> Enum.into(%{})
|
||||||
|
|
||||||
|
primary_search_keys = primary_key_preds |> Map.keys() |> MapSet.new()
|
||||||
|
|
||||||
|
if MapSet.equal?(primary_search_keys, primary_key_fields) do
|
||||||
|
with {:ok, pid} <-
|
||||||
|
Registry.find_server_by_resource_key({resource, primary_key_preds}),
|
||||||
|
{:ok, data} <- Server.get(pid) do
|
||||||
|
{:ok, [data]}
|
||||||
|
else
|
||||||
|
{:error, :not_found} -> {:ok, []}
|
||||||
|
{:error, reason} -> {:error, reason}
|
||||||
|
end
|
||||||
|
else
|
||||||
|
do_slow_query(query, resource)
|
||||||
|
end
|
||||||
|
end
|
||||||
|
|
||||||
|
defp do_slow_query(query, resource) do
|
||||||
|
result =
|
||||||
|
resource
|
||||||
|
|> Registry.find_servers_by_resource()
|
||||||
|
|> Stream.map(&elem(&1, 1))
|
||||||
|
|> Stream.map(&Server.get/1)
|
||||||
|
|> Stream.filter(&is_tuple/1)
|
||||||
|
|> Stream.filter(&(elem(&1, 0) == :ok))
|
||||||
|
|> Stream.map(&elem(&1, 1))
|
||||||
|
|> maybe_apply(query.filter, fn stream ->
|
||||||
|
Stream.filter(stream, &Filter.Runtime.matches?(query.api, &1, query.filter))
|
||||||
|
end)
|
||||||
|
|> maybe_apply(query.sort, &Actions.Sort.runtime_sort(Enum.to_list(&1), query.sort))
|
||||||
|
|> maybe_apply(query.offset, &Stream.drop(&1, query.offset))
|
||||||
|
|> maybe_apply(query.limit, &Stream.take(&1, query.limit))
|
||||||
|
|
||||||
|
{:ok, result}
|
||||||
|
end
|
||||||
|
|
||||||
|
defp maybe_apply(stream, nil, _), do: stream
|
||||||
|
defp maybe_apply(stream, _, callback), do: callback.(stream)
|
||||||
|
|
||||||
|
@doc false
|
||||||
|
@impl true
|
||||||
|
@spec create(Resource.t(), Changeset.t()) :: {:ok, Resource.t()} | {:error, any}
|
||||||
|
def create(resource, changeset) do
|
||||||
|
with {:ok, pid} <- Supervisor.start_server(resource, changeset),
|
||||||
|
do: Server.get(pid)
|
||||||
|
end
|
||||||
|
|
||||||
|
@doc false
|
||||||
|
@impl true
|
||||||
|
@spec update(Resource.t(), Changeset.t()) :: {:ok, Resource.t()} | {:error, any}
|
||||||
|
def update(resource, changeset) do
|
||||||
|
resource_key = resource_key_from_resource_and_changeset(resource, changeset)
|
||||||
|
|
||||||
|
with {:ok, pid} <- Registry.find_server_by_resource_key(resource_key),
|
||||||
|
do: Server.update(pid, resource, changeset)
|
||||||
|
end
|
||||||
|
|
||||||
|
@doc false
|
||||||
|
@impl true
|
||||||
|
@spec destroy(Resource.t(), Changeset.t()) :: :ok | {:error, any}
|
||||||
|
def destroy(resource, changeset) do
|
||||||
|
resource_key = resource_key_from_resource_and_changeset(resource, changeset)
|
||||||
|
|
||||||
|
Supervisor.stop_server(resource_key)
|
||||||
|
end
|
||||||
|
|
||||||
|
defp resource_key_from_resource_and_changeset(resource, changeset) do
|
||||||
|
{resource, primary_key_from_resource_and_changeset(resource, changeset)}
|
||||||
|
end
|
||||||
|
|
||||||
|
defp primary_key_from_resource_and_changeset(resource, changeset) do
|
||||||
|
resource
|
||||||
|
|> Resource.Info.primary_key()
|
||||||
|
|> Enum.into(%{}, &{&1, Changeset.get_attribute(changeset, &1)})
|
||||||
|
end
|
||||||
|
end
|
15
lib/ash_gen_server/query.ex
Normal file
15
lib/ash_gen_server/query.ex
Normal file
|
@ -0,0 +1,15 @@
|
||||||
|
defmodule AshGenServer.Query do
|
||||||
|
@moduledoc """
|
||||||
|
A struct which holds information about a resource query as it is being built.
|
||||||
|
"""
|
||||||
|
defstruct [:resource, :filter, :api, :limit, :offset, :sort]
|
||||||
|
|
||||||
|
@type t :: %__MODULE__{
|
||||||
|
resource: Ash.Resource.t(),
|
||||||
|
filter: Ash.Filter.t(),
|
||||||
|
api: Ash.Api.t(),
|
||||||
|
limit: non_neg_integer(),
|
||||||
|
offset: non_neg_integer(),
|
||||||
|
sort: Ash.Sort.t()
|
||||||
|
}
|
||||||
|
end
|
78
lib/ash_gen_server/registry.ex
Normal file
78
lib/ash_gen_server/registry.ex
Normal file
|
@ -0,0 +1,78 @@
|
||||||
|
defmodule AshGenServer.Registry do
  @moduledoc """
  A `Registry` which keeps track of the resources which are currently in use in
  the system.
  """

  @typedoc """
  A composite key containing the resource module and the primary key(s).

  This is the key that's actually stored in the Registry.
  """
  @type resource_key :: {Ash.resource(), primary_key}

  @typedoc """
  A map containing the primary key field(s) and value(s) for a resource.
  """
  @type primary_key :: %{required(atom) => any}

  @doc false
  @spec child_spec(keyword) :: Supervisor.child_spec()
  def child_spec(opts) do
    %{id: {Registry, __MODULE__}, start: {__MODULE__, :start_link, [opts]}}
  end

  @doc false
  @spec start_link(keyword) :: {:ok, pid} | {:error, any}
  def start_link(_opts) do
    # Unique keys: at most one server process per resource key.
    Registry.start_link(keys: :unique, name: __MODULE__)
  end

  @doc """
  Register the calling process with the provided `resource_key`.
  """
  @spec register(resource_key) :: {:ok, pid} | {:error, {:already_registered, pid}}
  def register(resource_key) do
    Registry.register(__MODULE__, resource_key, nil)
  end

  @doc """
  Unregister the calling process from the provided `resource_key`.
  """
  @spec unregister(resource_key) :: :ok
  def unregister(resource_key) do
    Registry.unregister(__MODULE__, resource_key)
  end

  @doc """
  Attempt to find a process registered to the provided `resource_key`.
  """
  @spec find_server_by_resource_key(resource_key) :: {:ok, pid} | {:error, :not_found}
  def find_server_by_resource_key(resource_key) do
    # Keys are unique, so a hit is exactly one `{pid, value}` entry.
    case Registry.lookup(__MODULE__, resource_key) do
      [{pid, _value}] -> {:ok, pid}
      _ -> {:error, :not_found}
    end
  end

  @doc """
  Find all the processes registered to the provided resource.
  """
  @spec find_servers_by_resource(Ash.resource()) :: [{primary_key, pid}]
  def find_servers_by_resource(resource) do
    # Match-spec: entries are `{{resource, primary_key}, pid, value}`; select
    # the primary key and pid of every entry whose resource module matches.
    match_spec = [{{{resource, :"$1"}, :"$2", :_}, [], [{{:"$1", :"$2"}}]}]

    Registry.select(__MODULE__, match_spec)
  end

  @doc """
  Find all servers currently active.
  """
  @spec find_servers :: [{resource_key, pid}]
  def find_servers do
    # Select every `{resource_key, pid}` pair regardless of resource.
    match_spec = [{{:"$1", :"$2", :_}, [], [{{:"$1", :"$2"}}]}]

    Registry.select(__MODULE__, match_spec)
  end
end
|
99
lib/ash_gen_server/server.ex
Normal file
99
lib/ash_gen_server/server.ex
Normal file
|
@ -0,0 +1,99 @@
|
||||||
|
defmodule AshGenServer.Server do
  @moduledoc """
  The main resource server.

  This module is a `GenServer` which can create, read and update a single
  resource stored within its state by applying changesets.
  """
  use GenServer

  alias Ash.{Changeset, Resource}
  alias AshGenServer.Registry

  defstruct [:primary_key, :resource, :record]

  @type t :: %__MODULE__{
          primary_key: Registry.primary_key(),
          resource: Resource.t(),
          record: Resource.record()
        }

  # -- Client API --

  @doc false
  @spec start_link(list) :: GenServer.on_start()
  def start_link(args), do: GenServer.start_link(__MODULE__, args)

  @doc """
  Retrieve the current state of the stored record.
  """
  @spec get(GenServer.server()) :: {:ok, Resource.record()} | no_return
  def get(server), do: GenServer.call(server, :get)

  @doc """
  Update the stored record by applying the provided changeset.
  """
  @spec update(GenServer.server(), Resource.t(), Changeset.t()) ::
          {:ok, Resource.record()} | {:error, any}
  def update(server, resource, changeset),
    do: GenServer.call(server, {:update, resource, changeset})

  # -- Server callbacks --

  @doc false
  @impl true
  @spec init(list) :: {:ok, t} | {:error, any}
  def init([resource, changeset]) do
    primary_key = primary_key_from_resource_and_changeset(resource, changeset)

    # Claim the registry key first so a duplicate create stops this process
    # before any state is built, then materialise the record.
    with {:ok, _registry_pid} <- Registry.register({resource, primary_key}),
         {:ok, record} <- Changeset.apply_attributes(changeset) do
      {:ok,
       %__MODULE__{
         primary_key: primary_key,
         resource: resource,
         record: unload_relationships(resource, record)
       }}
    else
      {:error, {:already_registered, _pid}} -> {:stop, :already_exists}
      {:error, reason} -> {:error, reason}
    end
  end

  @doc false
  @impl true
  @spec handle_call(:get | {:update, Resource.t(), Changeset.t()}, GenServer.from(), t) ::
          {:reply, {:ok, Resource.record()} | {:error, any}, t}
  def handle_call(:get, _from, state), do: {:reply, {:ok, state.record}, state}

  def handle_call({:update, resource, changeset}, _from, state) when state.resource == resource do
    case Changeset.apply_attributes(changeset) do
      {:ok, new_record} ->
        {:reply, {:ok, new_record}, store_record(state, new_record)}

      {:error, reason} ->
        {:reply, {:error, reason}, state}
    end
  end

  # A changeset for any other resource module cannot be applied to this server.
  def handle_call({:update, resource, _changeset}, _from, state),
    do: {:reply, {:error, {:incorrect_resource, resource}}, state}

  # Store the updated record in the state, moving the registry registration
  # when the update changed the record's primary key.
  defp store_record(state, new_record) do
    new_primary_key = Map.take(new_record, Resource.Info.primary_key(state.resource))

    if new_primary_key == state.primary_key do
      %{state | record: new_record}
    else
      Registry.unregister({state.resource, state.primary_key})
      # NOTE(review): a clash on re-registration is silently ignored here,
      # matching the original behaviour — confirm whether it should be handled.
      Registry.register({state.resource, new_primary_key})
      %{state | record: new_record, primary_key: new_primary_key}
    end
  end

  # Build the primary-key map by reading each key attribute from the changeset.
  defp primary_key_from_resource_and_changeset(resource, changeset) do
    resource
    |> Resource.Info.primary_key()
    |> Map.new(&{&1, Changeset.get_attribute(changeset, &1)})
  end

  # Reset every relationship on the record to the default value found on a
  # freshly-built struct, so no loaded relationship data is retained in state.
  defp unload_relationships(resource, record) do
    blank = struct(resource)

    resource
    |> Resource.Info.relationships()
    |> Enum.reduce(record, fn relationship, acc ->
      Map.put(acc, relationship.name, Map.get(blank, relationship.name))
    end)
  end
end
|
36
lib/ash_gen_server/supervisor.ex
Normal file
36
lib/ash_gen_server/supervisor.ex
Normal file
|
@ -0,0 +1,36 @@
|
||||||
|
defmodule AshGenServer.Supervisor do
  @moduledoc """
  A DynamicSupervisor which supervises the individual resource processes.
  """
  use DynamicSupervisor

  alias Ash.{Changeset, Resource}
  alias AshGenServer.Registry

  @doc false
  @spec start_link(list) :: Supervisor.on_start()
  def start_link(args), do: DynamicSupervisor.start_link(__MODULE__, args, name: __MODULE__)

  @doc false
  @impl true
  def init(_), do: DynamicSupervisor.init(strategy: :one_for_one)

  @doc """
  Given a resource and a changeset, spawn a new server.
  """
  @spec start_server(Resource.t(), Changeset.t()) :: DynamicSupervisor.on_start_child()
  def start_server(resource, changeset) do
    DynamicSupervisor.start_child(
      __MODULE__,
      {AshGenServer.Server, [resource, changeset]}
    )
  end

  @doc """
  Terminate the resource's server process and remove it from the supervision
  tree.
  """
  # The previous spec claimed only `:ok`, but both the registry lookup
  # fall-through and `DynamicSupervisor.terminate_child/2` can return
  # `{:error, :not_found}`.
  @spec stop_server(Registry.resource_key()) :: :ok | {:error, :not_found}
  def stop_server(resource_key) do
    with {:ok, pid} <- Registry.find_server_by_resource_key(resource_key),
         do: DynamicSupervisor.terminate_child(__MODULE__, pid)
  end
end
|
55
mix.exs
Normal file
55
mix.exs
Normal file
|
@ -0,0 +1,55 @@
|
||||||
|
defmodule AshGenServer.MixProject do
  use Mix.Project

  @version "0.1.0"
  @source_url "https://github.com/team-alembic/ash_gen_server"

  def project do
    [
      app: :ash_gen_server,
      version: @version,
      elixir: "~> 1.13",
      start_permanent: Mix.env() == :prod,
      deps: deps(),
      package: package(),
      elixirc_paths: elixirc_paths(Mix.env()),
      consolidate_protocols: Mix.env() != :test
    ]
  end

  def package do
    [
      maintainers: ["James Harton <james.harton@alembic.com.au>"],
      licenses: ["MIT"],
      links: %{"Source" => @source_url},
      source_url: @source_url
    ]
  end

  # Run "mix help compile.app" to learn about applications.
  def application do
    [
      extra_applications: [:logger],
      mod: {AshGenServer.Application, []}
    ]
  end

  # Compile the test-support fixtures in both dev and test environments.
  defp elixirc_paths(env) when env in ~w[dev test]a, do: ["lib", "test/support"]
  defp elixirc_paths(_env), do: ["lib"]

  # Run "mix help deps" to learn about dependencies.
  defp deps do
    [
      {:ash, "~> 1.53"},
      {:credo, "~> 1.6", only: [:dev, :test]},
      {:ex_doc, ">= 0.0.0", only: [:dev, :test]},
      {:doctor, "~> 0.18", only: [:dev, :test]},
      {:git_ops, "~> 2.4", only: [:dev, :test], runtime: false}
    ]
  end
end
|
39
mix.lock
Normal file
39
mix.lock
Normal file
|
@ -0,0 +1,39 @@
|
||||||
|
%{
|
||||||
|
"ash": {:hex, :ash, "1.53.2", "d50e0e753c8a69a4a481c250d7da34cb16f05362c58de5642db20b34d5a29c42", [:mix], [{:comparable, "~> 1.0", [hex: :comparable, repo: "hexpm", optional: false]}, {:decimal, "~> 2.0", [hex: :decimal, repo: "hexpm", optional: false]}, {:earmark, "~> 1.4", [hex: :earmark, repo: "hexpm", optional: true]}, {:ecto, "~> 3.7", [hex: :ecto, repo: "hexpm", optional: false]}, {:ets, "~> 0.8.0", [hex: :ets, repo: "hexpm", optional: false]}, {:jason, ">= 1.0.0", [hex: :jason, repo: "hexpm", optional: false]}, {:nimble_options, "~> 0.3.5", [hex: :nimble_options, repo: "hexpm", optional: false]}, {:picosat_elixir, "~> 0.2", [hex: :picosat_elixir, repo: "hexpm", optional: false]}, {:sourceror, "~> 0.9", [hex: :sourceror, repo: "hexpm", optional: false]}, {:stream_data, "~> 0.5.0", [hex: :stream_data, repo: "hexpm", optional: false]}, {:timex, ">= 3.0.0", [hex: :timex, repo: "hexpm", optional: false]}], "hexpm", "a19e7080afaa1dcaef63acfab9a6dc20d040e4a6e62bdf06e58e0b6adf41dd71"},
|
||||||
|
"bunt": {:hex, :bunt, "0.2.0", "951c6e801e8b1d2cbe58ebbd3e616a869061ddadcc4863d0a2182541acae9a38", [:mix], [], "hexpm", "7af5c7e09fe1d40f76c8e4f9dd2be7cebd83909f31fee7cd0e9eadc567da8353"},
|
||||||
|
"certifi": {:hex, :certifi, "2.9.0", "6f2a475689dd47f19fb74334859d460a2dc4e3252a3324bd2111b8f0429e7e21", [:rebar3], [], "hexpm", "266da46bdb06d6c6d35fde799bcb28d36d985d424ad7c08b5bb48f5b5cdd4641"},
|
||||||
|
"combine": {:hex, :combine, "0.10.0", "eff8224eeb56498a2af13011d142c5e7997a80c8f5b97c499f84c841032e429f", [:mix], [], "hexpm", "1b1dbc1790073076580d0d1d64e42eae2366583e7aecd455d1215b0d16f2451b"},
|
||||||
|
"comparable": {:hex, :comparable, "1.0.0", "bb669e91cedd14ae9937053e5bcbc3c52bb2f22422611f43b6e38367d94a495f", [:mix], [{:typable, "~> 0.1", [hex: :typable, repo: "hexpm", optional: false]}], "hexpm", "277c11eeb1cd726e7cd41c6c199e7e52fa16ee6830b45ad4cdc62e51f62eb60c"},
|
||||||
|
"credo": {:hex, :credo, "1.6.6", "f51f8d45db1af3b2e2f7bee3e6d3c871737bda4a91bff00c5eec276517d1a19c", [:mix], [{:bunt, "~> 0.2.0", [hex: :bunt, repo: "hexpm", optional: false]}, {:file_system, "~> 0.2.8", [hex: :file_system, repo: "hexpm", optional: false]}, {:jason, "~> 1.0", [hex: :jason, repo: "hexpm", optional: false]}], "hexpm", "625520ce0984ee0f9f1f198165cd46fa73c1e59a17ebc520038b8fce056a5bdc"},
|
||||||
|
"decimal": {:hex, :decimal, "2.0.0", "a78296e617b0f5dd4c6caf57c714431347912ffb1d0842e998e9792b5642d697", [:mix], [], "hexpm", "34666e9c55dea81013e77d9d87370fe6cb6291d1ef32f46a1600230b1d44f577"},
|
||||||
|
"doctor": {:hex, :doctor, "0.19.0", "f7974836bc85756b38b99de46cc2c6ba36741f21d8eabcbef78f6806ca6769ed", [:mix], [{:decimal, "~> 2.0", [hex: :decimal, repo: "hexpm", optional: false]}], "hexpm", "504f17473dc6b39618e693c5198d85e274b056b73eb4a4605431aec0f42f0023"},
|
||||||
|
"earmark_parser": {:hex, :earmark_parser, "1.4.26", "f4291134583f373c7d8755566122908eb9662df4c4b63caa66a0eabe06569b0a", [:mix], [], "hexpm", "48d460899f8a0c52c5470676611c01f64f3337bad0b26ddab43648428d94aabc"},
|
||||||
|
"ecto": {:hex, :ecto, "3.8.4", "e06b8b87e62b27fea17fd2ff6041572ddd10339fd16cdf58446e402c6c90a74b", [:mix], [{:decimal, "~> 1.6 or ~> 2.0", [hex: :decimal, repo: "hexpm", optional: false]}, {:jason, "~> 1.0", [hex: :jason, repo: "hexpm", optional: true]}, {:telemetry, "~> 0.4 or ~> 1.0", [hex: :telemetry, repo: "hexpm", optional: false]}], "hexpm", "f9244288b8d42db40515463a008cf3f4e0e564bb9c249fe87bf28a6d79fe82d4"},
|
||||||
|
"elixir_make": {:hex, :elixir_make, "0.6.3", "bc07d53221216838d79e03a8019d0839786703129599e9619f4ab74c8c096eac", [:mix], [], "hexpm", "f5cbd651c5678bcaabdbb7857658ee106b12509cd976c2c2fca99688e1daf716"},
|
||||||
|
"ets": {:hex, :ets, "0.8.1", "8ff9bcda5682b98493f8878fc9dbd990e48d566cba8cce59f7c2a78130da29ea", [:mix], [], "hexpm", "6be41b50adb5bc5c43626f25ea2d0af1f4a242fb3fad8d53f0c67c20b78915cc"},
|
||||||
|
"ex_doc": {:hex, :ex_doc, "0.28.4", "001a0ea6beac2f810f1abc3dbf4b123e9593eaa5f00dd13ded024eae7c523298", [:mix], [{:earmark_parser, "~> 1.4.19", [hex: :earmark_parser, repo: "hexpm", optional: false]}, {:makeup_elixir, "~> 0.14", [hex: :makeup_elixir, repo: "hexpm", optional: false]}, {:makeup_erlang, "~> 0.1", [hex: :makeup_erlang, repo: "hexpm", optional: false]}], "hexpm", "bf85d003dd34911d89c8ddb8bda1a958af3471a274a4c2150a9c01c78ac3f8ed"},
|
||||||
|
"file_system": {:hex, :file_system, "0.2.10", "fb082005a9cd1711c05b5248710f8826b02d7d1784e7c3451f9c1231d4fc162d", [:mix], [], "hexpm", "41195edbfb562a593726eda3b3e8b103a309b733ad25f3d642ba49696bf715dc"},
|
||||||
|
"gettext": {:hex, :gettext, "0.20.0", "75ad71de05f2ef56991dbae224d35c68b098dd0e26918def5bb45591d5c8d429", [:mix], [], "hexpm", "1c03b177435e93a47441d7f681a7040bd2a816ece9e2666d1c9001035121eb3d"},
|
||||||
|
"git_cli": {:hex, :git_cli, "0.3.0", "a5422f9b95c99483385b976f5d43f7e8233283a47cda13533d7c16131cb14df5", [:mix], [], "hexpm", "78cb952f4c86a41f4d3511f1d3ecb28edb268e3a7df278de2faa1bd4672eaf9b"},
|
||||||
|
"git_ops": {:hex, :git_ops, "2.4.5", "185a724dfde3745edd22f7571d59c47a835cf54ded67e9ccbc951920b7eec4c2", [:mix], [{:git_cli, "~> 0.2", [hex: :git_cli, repo: "hexpm", optional: false]}, {:nimble_parsec, "~> 1.0", [hex: :nimble_parsec, repo: "hexpm", optional: false]}], "hexpm", "e323a5b01ad53bc8c19c3a444be3e61ed7803ecd2e95530446ae9327d0143ecc"},
|
||||||
|
"hackney": {:hex, :hackney, "1.18.1", "f48bf88f521f2a229fc7bae88cf4f85adc9cd9bcf23b5dc8eb6a1788c662c4f6", [:rebar3], [{:certifi, "~>2.9.0", [hex: :certifi, repo: "hexpm", optional: false]}, {:idna, "~>6.1.0", [hex: :idna, repo: "hexpm", optional: false]}, {:metrics, "~>1.0.0", [hex: :metrics, repo: "hexpm", optional: false]}, {:mimerl, "~>1.1", [hex: :mimerl, repo: "hexpm", optional: false]}, {:parse_trans, "3.3.1", [hex: :parse_trans, repo: "hexpm", optional: false]}, {:ssl_verify_fun, "~>1.1.0", [hex: :ssl_verify_fun, repo: "hexpm", optional: false]}, {:unicode_util_compat, "~>0.7.0", [hex: :unicode_util_compat, repo: "hexpm", optional: false]}], "hexpm", "a4ecdaff44297e9b5894ae499e9a070ea1888c84afdd1fd9b7b2bc384950128e"},
|
||||||
|
"idna": {:hex, :idna, "6.1.1", "8a63070e9f7d0c62eb9d9fcb360a7de382448200fbbd1b106cc96d3d8099df8d", [:rebar3], [{:unicode_util_compat, "~>0.7.0", [hex: :unicode_util_compat, repo: "hexpm", optional: false]}], "hexpm", "92376eb7894412ed19ac475e4a86f7b413c1b9fbb5bd16dccd57934157944cea"},
|
||||||
|
"jason": {:hex, :jason, "1.3.0", "fa6b82a934feb176263ad2df0dbd91bf633d4a46ebfdffea0c8ae82953714946", [:mix], [{:decimal, "~> 1.0 or ~> 2.0", [hex: :decimal, repo: "hexpm", optional: true]}], "hexpm", "53fc1f51255390e0ec7e50f9cb41e751c260d065dcba2bf0d08dc51a4002c2ac"},
|
||||||
|
"makeup": {:hex, :makeup, "1.1.0", "6b67c8bc2882a6b6a445859952a602afc1a41c2e08379ca057c0f525366fc3ca", [:mix], [{:nimble_parsec, "~> 1.2.2 or ~> 1.3", [hex: :nimble_parsec, repo: "hexpm", optional: false]}], "hexpm", "0a45ed501f4a8897f580eabf99a2e5234ea3e75a4373c8a52824f6e873be57a6"},
|
||||||
|
"makeup_elixir": {:hex, :makeup_elixir, "0.16.0", "f8c570a0d33f8039513fbccaf7108c5d750f47d8defd44088371191b76492b0b", [:mix], [{:makeup, "~> 1.0", [hex: :makeup, repo: "hexpm", optional: false]}, {:nimble_parsec, "~> 1.2.3", [hex: :nimble_parsec, repo: "hexpm", optional: false]}], "hexpm", "28b2cbdc13960a46ae9a8858c4bebdec3c9a6d7b4b9e7f4ed1502f8159f338e7"},
|
||||||
|
"makeup_erlang": {:hex, :makeup_erlang, "0.1.1", "3fcb7f09eb9d98dc4d208f49cc955a34218fc41ff6b84df7c75b3e6e533cc65f", [:mix], [{:makeup, "~> 1.0", [hex: :makeup, repo: "hexpm", optional: false]}], "hexpm", "174d0809e98a4ef0b3309256cbf97101c6ec01c4ab0b23e926a9e17df2077cbb"},
|
||||||
|
"metrics": {:hex, :metrics, "1.0.1", "25f094dea2cda98213cecc3aeff09e940299d950904393b2a29d191c346a8486", [:rebar3], [], "hexpm", "69b09adddc4f74a40716ae54d140f93beb0fb8978d8636eaded0c31b6f099f16"},
|
||||||
|
"mimerl": {:hex, :mimerl, "1.2.0", "67e2d3f571088d5cfd3e550c383094b47159f3eee8ffa08e64106cdf5e981be3", [:rebar3], [], "hexpm", "f278585650aa581986264638ebf698f8bb19df297f66ad91b18910dfc6e19323"},
|
||||||
|
"nimble_options": {:hex, :nimble_options, "0.3.7", "1e52dd7673d36138b1a5dede183b5d86dff175dc46d104a8e98e396b85b04670", [:mix], [], "hexpm", "2086907e6665c6b6579be54ef5001928df5231f355f71ed258f80a55e9f63633"},
|
||||||
|
"nimble_parsec": {:hex, :nimble_parsec, "1.2.3", "244836e6e3f1200c7f30cb56733fd808744eca61fd182f731eac4af635cc6d0b", [:mix], [], "hexpm", "c8d789e39b9131acf7b99291e93dae60ab48ef14a7ee9d58c6964f59efb570b0"},
|
||||||
|
"parse_trans": {:hex, :parse_trans, "3.3.1", "16328ab840cc09919bd10dab29e431da3af9e9e7e7e6f0089dd5a2d2820011d8", [:rebar3], [], "hexpm", "07cd9577885f56362d414e8c4c4e6bdf10d43a8767abb92d24cbe8b24c54888b"},
|
||||||
|
"picosat_elixir": {:hex, :picosat_elixir, "0.2.1", "407dcb90755167fd9e3311b60565ff32ed0d234010363406c07cdb4175b95bc5", [:make, :mix], [{:elixir_make, "~> 0.6", [hex: :elixir_make, repo: "hexpm", optional: false]}], "hexpm", "68f4bdb2ac3b594209e54625d3d58c9e2e98b90f2ec8e03235f66e88c9eda5fe"},
|
||||||
|
"sourceror": {:hex, :sourceror, "0.11.1", "1b80efe84330beefb6b3da95b75c1e1cdefe9dc785bf4c5064fae251a8af615c", [:mix], [], "hexpm", "22b6828ee5572f6cec75cc6357f3ca6c730a02954cef0302c428b3dba31e5e74"},
|
||||||
|
"ssl_verify_fun": {:hex, :ssl_verify_fun, "1.1.6", "cf344f5692c82d2cd7554f5ec8fd961548d4fd09e7d22f5b62482e5aeaebd4b0", [:make, :mix, :rebar3], [], "hexpm", "bdb0d2471f453c88ff3908e7686f86f9be327d065cc1ec16fa4540197ea04680"},
|
||||||
|
"stream_data": {:hex, :stream_data, "0.5.0", "b27641e58941685c75b353577dc602c9d2c12292dd84babf506c2033cd97893e", [:mix], [], "hexpm", "012bd2eec069ada4db3411f9115ccafa38540a3c78c4c0349f151fc761b9e271"},
|
||||||
|
"telemetry": {:hex, :telemetry, "1.1.0", "a589817034a27eab11144ad24d5c0f9fab1f58173274b1e9bae7074af9cbee51", [:rebar3], [], "hexpm", "b727b2a1f75614774cff2d7565b64d0dfa5bd52ba517f16543e6fc7efcc0df48"},
|
||||||
|
"timex": {:hex, :timex, "3.7.9", "790cdfc4acfce434e442f98c02ea6d84d0239073bfd668968f82ac63e9a6788d", [:mix], [{:combine, "~> 0.10", [hex: :combine, repo: "hexpm", optional: false]}, {:gettext, "~> 0.10", [hex: :gettext, repo: "hexpm", optional: false]}, {:tzdata, "~> 1.1", [hex: :tzdata, repo: "hexpm", optional: false]}], "hexpm", "64691582e5bb87130f721fc709acfb70f24405833998fabf35be968984860ce1"},
|
||||||
|
"typable": {:hex, :typable, "0.3.0", "0431e121d124cd26f312123e313d2689b9a5322b15add65d424c07779eaa3ca1", [:mix], [], "hexpm", "880a0797752da1a4c508ac48f94711e04c86156f498065a83d160eef945858f8"},
|
||||||
|
"tzdata": {:hex, :tzdata, "1.1.1", "20c8043476dfda8504952d00adac41c6eda23912278add38edc140ae0c5bcc46", [:mix], [{:hackney, "~> 1.17", [hex: :hackney, repo: "hexpm", optional: false]}], "hexpm", "a69cec8352eafcd2e198dea28a34113b60fdc6cb57eb5ad65c10292a6ba89787"},
|
||||||
|
"unicode_util_compat": {:hex, :unicode_util_compat, "0.7.0", "bc84380c9ab48177092f43ac89e4dfa2c6d62b40b8bd132b1059ecc7232f9a78", [:rebar3], [], "hexpm", "25eee6d67df61960cf6a794239566599b09e17e668d3700247bc498638152521"},
|
||||||
|
}
|
119
test/api_integration_test.exs
Normal file
119
test/api_integration_test.exs
Normal file
|
@ -0,0 +1,119 @@
|
||||||
|
defmodule AshGenServer.ApiIntegrationTest do
  @moduledoc """
  Integrates the entire API to ensure that it works as advertised.
  """

  use AshGenServer.RuntimeCase
  alias Ash.Changeset

  test "it's the plot of Back to the Future" do
    einstein = create_character!("Einstein Brown", "Einie", 1985)
    doc = create_character!("Emmet Lathrop Brown", "Doc", 1985)
    marty = create_character!("Martin Seamus McFly", "Marty", 1985)
    jennifer = create_character!("Jennifer Jane Parker", "Jennifer", 1985)

    # The machine starts out unpowered.
    assert {:ok, %{has_power?: false} = delorean} =
             TimeTravel.Machine
             |> Changeset.for_create(:create, %{
               name: "OUTATIME",
               model: "DMC-12",
               manufacturer: "Delorean Motor Company",
               power_source: "Plutonium"
             })
             |> TimeTravel.create()

    # Each trip consumes the charge and moves its occupants to the target year.
    delorean = delorean |> charge!() |> travel!([einstein], 1985)
    assert {:ok, %{current_year: 1985}} = TimeTravel.reload(einstein)

    delorean = delorean |> charge!() |> travel!([marty], 1955)
    assert {:ok, %{current_year: 1955} = marty} = TimeTravel.reload(marty)

    delorean = delorean |> retrofit!("Lightning") |> charge!() |> travel!([marty], 1985)
    assert {:ok, %{current_year: 1985} = marty} = TimeTravel.reload(marty)

    delorean
    |> retrofit!("Mr Fusion")
    |> charge!()
    |> travel!([marty, doc, jennifer], 2015)

    assert {:ok, %{current_year: 2015}} = TimeTravel.reload(marty)
    assert {:ok, %{current_year: 2015}} = TimeTravel.reload(doc)
    assert {:ok, %{current_year: 2015}} = TimeTravel.reload(jennifer)
  end

  # Create a character via the public API, asserting success.
  defp create_character!(name, nickname, year) do
    assert {:ok, character} =
             TimeTravel.Character
             |> Changeset.for_create(:create, %{
               name: name,
               nickname: nickname,
               current_year: year
             })
             |> TimeTravel.create()

    character
  end

  # Charge the machine, asserting it has power afterwards.
  defp charge!(machine) do
    assert {:ok, %{has_power?: true} = charged} =
             machine
             |> Changeset.for_update(:charge)
             |> TimeTravel.update()

    charged
  end

  # Travel the machine (and its occupants) to the target year, asserting the
  # charge was consumed.
  defp travel!(machine, occupants, target_year) do
    assert {:ok, %{has_power?: false} = arrived} =
             machine
             |> Changeset.for_update(:travel_in_time, %{
               occupants: occupants,
               target_year: target_year
             })
             |> TimeTravel.update()

    arrived
  end

  # Swap the machine's power source, asserting success.
  defp retrofit!(machine, power_source) do
    assert {:ok, retrofitted} =
             machine
             |> Changeset.for_update(:retrofit, %{power_source: power_source})
             |> TimeTravel.update()

    retrofitted
  end
end
|
268
test/ash_gen_server/data_layer_test.exs
Normal file
268
test/ash_gen_server/data_layer_test.exs
Normal file
|
@ -0,0 +1,268 @@
|
||||||
|
defmodule AshGenServer.DataLayerTest do
|
||||||
|
@moduledoc false
|
||||||
|
use AshGenServer.RuntimeCase
|
||||||
|
|
||||||
|
alias Ash.{Changeset, Filter}
|
||||||
|
alias AshGenServer.{DataLayer, Query, Registry, Server}
|
||||||
|
|
||||||
|
describe "create/2" do
|
||||||
|
test "it spawns and registers a new process" do
|
||||||
|
changeset =
|
||||||
|
TimeTravel.Character
|
||||||
|
|> Changeset.for_create(:create, %{
|
||||||
|
name: "Martin Seamus McFly",
|
||||||
|
nickname: "Marty",
|
||||||
|
current_year: 1985
|
||||||
|
})
|
||||||
|
|
||||||
|
assert {:ok, %TimeTravel.Character{id: id}} =
|
||||||
|
DataLayer.create(TimeTravel.Character, changeset)
|
||||||
|
|
||||||
|
assert [{{TimeTravel.Character, %{id: ^id}}, pid}] = Registry.find_servers()
|
||||||
|
|
||||||
|
assert {:ok, %{nickname: "Marty"}} = Server.get(pid)
|
||||||
|
end
|
||||||
|
end
|
||||||
|
|
||||||
|
describe "update/2" do
|
||||||
|
test "when a resource exists, it updates it" do
|
||||||
|
assert {:ok, doc} =
|
||||||
|
TimeTravel.Character
|
||||||
|
|> Changeset.for_create(:create, %{
|
||||||
|
name: "Emmet Lathrop Brown",
|
||||||
|
nickname: "Doc",
|
||||||
|
current_year: 1985
|
||||||
|
})
|
||||||
|
|> TimeTravel.create()
|
||||||
|
|
||||||
|
changeset = doc |> Changeset.for_update(:travel_in_time, %{target_year: 1955})
|
||||||
|
|
||||||
|
assert {:ok, %{current_year: 1955}} = DataLayer.update(TimeTravel.Character, changeset)
|
||||||
|
|
||||||
|
{:ok, pid} = Registry.find_server_by_resource_key({TimeTravel.Character, %{id: doc.id}})
|
||||||
|
assert {:ok, %{current_year: 1955}} = Server.get(pid)
|
||||||
|
end
|
||||||
|
|
||||||
|
test "when a resource doesn't exist, it returns an error" do
|
||||||
|
changeset =
|
||||||
|
%TimeTravel.Character{
|
||||||
|
id: Ecto.UUID.generate(),
|
||||||
|
name: "Emmet Lathrop Brown",
|
||||||
|
nickname: "Doc",
|
||||||
|
current_year: 1985
|
||||||
|
}
|
||||||
|
|> Changeset.for_update(:travel_in_time, %{target_year: 1955})
|
||||||
|
|
||||||
|
assert {:error, :not_found} = DataLayer.update(TimeTravel.Character, changeset)
|
||||||
|
end
|
||||||
|
end
|
||||||
|
|
||||||
|
describe "destroy/2" do
|
||||||
|
test "when a resource exists, it stops the process" do
|
||||||
|
assert {:ok, doc} =
|
||||||
|
TimeTravel.Character
|
||||||
|
|> Changeset.for_create(:create, %{
|
||||||
|
name: "Emmet Lathrop Brown",
|
||||||
|
nickname: "Doc",
|
||||||
|
current_year: 1985
|
||||||
|
})
|
||||||
|
|> TimeTravel.create()
|
||||||
|
|
||||||
|
changeset = doc |> Changeset.for_destroy(:destroy)
|
||||||
|
{:ok, pid} = Registry.find_server_by_resource_key({TimeTravel.Character, %{id: doc.id}})
|
||||||
|
|
||||||
|
assert :ok = DataLayer.destroy(TimeTravel.Character, changeset)
|
||||||
|
|
||||||
|
refute Process.alive?(pid)
|
||||||
|
end
|
||||||
|
|
||||||
|
test "when a resource doesn't exist, it returns an error" do
|
||||||
|
changeset =
|
||||||
|
%TimeTravel.Character{
|
||||||
|
id: Ecto.UUID.generate(),
|
||||||
|
name: "Emmet Lathrop Brown",
|
||||||
|
nickname: "Doc",
|
||||||
|
current_year: 1985
|
||||||
|
}
|
||||||
|
|> Changeset.for_destroy(:destroy)
|
||||||
|
|
||||||
|
assert {:error, :not_found} = DataLayer.destroy(TimeTravel.Character, changeset)
|
||||||
|
end
|
||||||
|
end
|
||||||
|
|
||||||
|
describe "resource_to_query/2" do
|
||||||
|
test "it returns a new empty query" do
|
||||||
|
assert %Query{
|
||||||
|
resource: TimeTravel.Machine,
|
||||||
|
api: TimeTravel,
|
||||||
|
filter: nil,
|
||||||
|
limit: nil,
|
||||||
|
offset: nil,
|
||||||
|
sort: nil
|
||||||
|
} = DataLayer.resource_to_query(TimeTravel.Machine, TimeTravel)
|
||||||
|
end
|
||||||
|
end
|
||||||
|
|
||||||
|
describe "filter/3" do
|
||||||
|
test "when the existing query contains no filter, it adds it" do
|
||||||
|
query = DataLayer.resource_to_query(TimeTravel.Machine, TimeTravel)
|
||||||
|
|
||||||
|
filter =
|
||||||
|
TimeTravel.Machine
|
||||||
|
|> Ash.Query.filter(name: "OUTATIME")
|
||||||
|
|> Map.fetch!(:filter)
|
||||||
|
|
||||||
|
assert {:ok, %{filter: ^filter}} = DataLayer.filter(query, filter, TimeTravel.Machine)
|
||||||
|
end
|
||||||
|
|
||||||
|
test "when the existing query contains a filter, it combines them" do
|
||||||
|
query =
|
||||||
|
TimeTravel.Machine
|
||||||
|
|> DataLayer.resource_to_query(TimeTravel)
|
||||||
|
|
||||||
|
filter =
|
||||||
|
TimeTravel.Machine
|
||||||
|
|> Ash.Query.filter(name: "OUTATIME")
|
||||||
|
|> Map.fetch!(:filter)
|
||||||
|
|
||||||
|
{:ok, query} = DataLayer.filter(query, filter, TimeTravel.Machine)
|
||||||
|
|
||||||
|
filter =
|
||||||
|
TimeTravel.Machine
|
||||||
|
|> Ash.Query.filter(model: "DMC-12")
|
||||||
|
|> Map.fetch!(:filter)
|
||||||
|
|
||||||
|
assert {:ok, %{filter: filter}} = DataLayer.filter(query, filter, TimeTravel.Machine)
|
||||||
|
|
||||||
|
assert "OUTATIME" = Filter.find_simple_equality_predicate(filter, :name)
|
||||||
|
assert "DMC-12" = Filter.find_simple_equality_predicate(filter, :model)
|
||||||
|
end
|
||||||
|
end
|
||||||
|
|
||||||
|
describe "limit/3" do
|
||||||
|
test "it adds the limit to the query" do
|
||||||
|
query =
|
||||||
|
TimeTravel.Machine
|
||||||
|
|> DataLayer.resource_to_query(TimeTravel)
|
||||||
|
|
||||||
|
assert {:ok, %{limit: 13}} = DataLayer.limit(query, 13, TimeTravel.Machine)
|
||||||
|
end
|
||||||
|
end
|
||||||
|
|
||||||
|
describe "offset/3" do
|
||||||
|
test "it adds the offset to the query" do
|
||||||
|
query =
|
||||||
|
TimeTravel.Machine
|
||||||
|
|> DataLayer.resource_to_query(TimeTravel)
|
||||||
|
|
||||||
|
assert {:ok, %{offset: 13}} = DataLayer.offset(query, 13, TimeTravel.Machine)
|
||||||
|
end
|
||||||
|
end
|
||||||
|
|
||||||
|
describe "sort/3" do
|
||||||
|
test "it adds the sort to the query" do
|
||||||
|
query =
|
||||||
|
TimeTravel.Machine
|
||||||
|
|> DataLayer.resource_to_query(TimeTravel)
|
||||||
|
|
||||||
|
assert {:ok, %{sort: [:name]}} = DataLayer.sort(query, [:name], TimeTravel.Machine)
|
||||||
|
end
|
||||||
|
end
|
||||||
|
|
||||||
|
describe "run_query/2" do
|
||||||
|
setup :with_character_fixtures
|
||||||
|
|
||||||
|
test "when retrieving an existing record by the primary key, it works", %{doc: doc} do
|
||||||
|
filter = Ash.Query.filter(TimeTravel.Character, id: doc.id).filter
|
||||||
|
|
||||||
|
{:ok, query} =
|
||||||
|
TimeTravel.Character
|
||||||
|
|> DataLayer.resource_to_query(TimeTravel)
|
||||||
|
|> DataLayer.filter(filter, TimeTravel.Character)
|
||||||
|
|
||||||
|
assert {:ok, [^doc]} = DataLayer.run_query(query, TimeTravel.Character)
|
||||||
|
end
|
||||||
|
|
||||||
|
test "when retrieving a non-existant record by the primary key, it returns an empty list" do
|
||||||
|
filter = Ash.Query.filter(TimeTravel.Character, id: Ecto.UUID.generate()).filter
|
||||||
|
|
||||||
|
{:ok, query} =
|
||||||
|
TimeTravel.Character
|
||||||
|
|> DataLayer.resource_to_query(TimeTravel)
|
||||||
|
|> DataLayer.filter(filter, TimeTravel.Character)
|
||||||
|
|
||||||
|
assert {:ok, []} = DataLayer.run_query(query, TimeTravel.Character)
|
||||||
|
end
|
||||||
|
|
||||||
|
test "it can filter by arbitrary fields" do
|
||||||
|
filter = Ash.Query.filter(TimeTravel.Character, nickname: "Marty").filter
|
||||||
|
|
||||||
|
{:ok, query} =
|
||||||
|
TimeTravel.Character
|
||||||
|
|> DataLayer.resource_to_query(TimeTravel)
|
||||||
|
|> DataLayer.filter(filter, TimeTravel.Character)
|
||||||
|
|
||||||
|
assert {:ok, result} = DataLayer.run_query(query, TimeTravel.Character)
|
||||||
|
assert ["Marty"] = Enum.map(result, & &1.nickname)
|
||||||
|
end
|
||||||
|
|
||||||
|
test "it can sort by arbitrary fields" do
|
||||||
|
{:ok, query} =
|
||||||
|
TimeTravel.Character
|
||||||
|
|> DataLayer.resource_to_query(TimeTravel)
|
||||||
|
|> DataLayer.sort([name: :desc], TimeTravel.Character)
|
||||||
|
|
||||||
|
assert {:ok, result} = DataLayer.run_query(query, TimeTravel.Character)
|
||||||
|
|
||||||
|
assert ["Marty", "Doc"] = Enum.map(result, & &1.nickname)
|
||||||
|
end
|
||||||
|
|
||||||
|
test "it can offset an arbitrary number of results" do
|
||||||
|
{:ok, query} =
|
||||||
|
TimeTravel.Character
|
||||||
|
|> DataLayer.resource_to_query(TimeTravel)
|
||||||
|
|> DataLayer.sort([name: :asc], TimeTravel.Character)
|
||||||
|
|
||||||
|
{:ok, query} = DataLayer.offset(query, 1, TimeTravel.Character)
|
||||||
|
|
||||||
|
assert {:ok, result} = DataLayer.run_query(query, TimeTravel.Character)
|
||||||
|
|
||||||
|
assert ["Marty"] = Enum.map(result, & &1.nickname)
|
||||||
|
end
|
||||||
|
|
||||||
|
test "it can limit to an arbitrary number of results" do
|
||||||
|
{:ok, query} =
|
||||||
|
TimeTravel.Character
|
||||||
|
|> DataLayer.resource_to_query(TimeTravel)
|
||||||
|
|> DataLayer.sort([name: :asc], TimeTravel.Character)
|
||||||
|
|
||||||
|
{:ok, query} = DataLayer.limit(query, 1, TimeTravel.Character)
|
||||||
|
|
||||||
|
assert {:ok, result} = DataLayer.run_query(query, TimeTravel.Character)
|
||||||
|
|
||||||
|
assert ["Doc"] = Enum.map(result, & &1.nickname)
|
||||||
|
end
|
||||||
|
end
|
||||||
|
|
||||||
|
# Seeds the two well-known characters and exposes them in the test context
# as `:doc` and `:marty`.
def with_character_fixtures(_) do
  {:ok, doc} = create_character("Emmet Lathrop Brown", "Doc", 1985)
  {:ok, marty} = create_character("Martin Seamus McFly", "Marty", 1985)

  {:ok, doc: doc, marty: marty}
end

# Creates and persists a single character record through the API.
defp create_character(name, nickname, current_year) do
  TimeTravel.Character
  |> Changeset.for_create(:create, %{
    name: name,
    nickname: nickname,
    current_year: current_year
  })
  |> TimeTravel.create()
end
|
||||||
|
end
|
77
test/ash_gen_server/registry_test.exs
Normal file
77
test/ash_gen_server/registry_test.exs
Normal file
|
@ -0,0 +1,77 @@
|
||||||
|
defmodule AshGenServer.RegistryTest do
  @moduledoc false
  use AshGenServer.RuntimeCase
  alias AshGenServer.Registry

  describe "register/1" do
    test "it can register a process" do
      resource_key = {__MODULE__, %{id: Ecto.UUID.generate()}}
      this_pid = self()

      assert {:ok, _} = Registry.register(resource_key)
      assert {:ok, ^this_pid} = Registry.find_server_by_resource_key(resource_key)
    end
  end

  describe "unregister/1" do
    test "it can unregister a process" do
      resource_key = {__MODULE__, %{id: Ecto.UUID.generate()}}

      assert {:ok, _} = Registry.register(resource_key)
      assert :ok = Registry.unregister(resource_key)

      # Once unregistered, the key can no longer be resolved to a pid.
      assert {:error, :not_found} = Registry.find_server_by_resource_key(resource_key)
    end
  end

  describe "find_server_by_resource_key/1" do
    test "when given the key of a registered process, it returns the pid" do
      resource_key = {__MODULE__, %{id: Ecto.UUID.generate()}}
      this_pid = self()

      assert {:ok, _} = Registry.register(resource_key)
      assert {:ok, ^this_pid} = Registry.find_server_by_resource_key(resource_key)
    end

    test "when given the key of a non-registered process, it returns a not found error" do
      resource_key = {__MODULE__, %{id: Ecto.UUID.generate()}}
      assert {:error, :not_found} = Registry.find_server_by_resource_key(resource_key)
    end
  end

  describe "find_servers_by_resource/1" do
    test "when there are processes registered for the resource, it returns a list of them" do
      # Random count keeps the test from over-fitting a single fixture size.
      resource_count = :rand.uniform(7) + 2
      primary_keys = for _ <- 1..resource_count, do: %{id: Ecto.UUID.generate()}

      Enum.each(primary_keys, fn primary_key ->
        Registry.register({__MODULE__, primary_key})
      end)

      result = Registry.find_servers_by_resource(__MODULE__)
      assert Enum.count(result) == resource_count

      Enum.each(primary_keys, fn primary_key ->
        assert {primary_key, self()} in result
      end)
    end
  end

  describe "find_servers/0" do
    test "it returns a list of all registered processes" do
      resource_count = :rand.uniform(7) + 2
      primary_keys = for _ <- 1..resource_count, do: %{id: Ecto.UUID.generate()}

      Enum.each(primary_keys, fn primary_key ->
        Registry.register({__MODULE__, primary_key})
      end)

      result = Registry.find_servers()
      assert Enum.count(result) == resource_count

      # `find_servers/0` returns full `{key, pid}` entries, key included.
      Enum.each(primary_keys, fn primary_key ->
        assert {{__MODULE__, primary_key}, self()} in result
      end)
    end
  end
end
|
41
test/ash_gen_server/server_test.exs
Normal file
41
test/ash_gen_server/server_test.exs
Normal file
|
@ -0,0 +1,41 @@
|
||||||
|
defmodule AshGenServer.ServerTest do
  @moduledoc false
  use AshGenServer.RuntimeCase
  alias Ash.Changeset
  alias AshGenServer.{Registry, Server}

  describe "init/1" do
    test "it registers itself using the primary key provided" do
      biff_changeset =
        Changeset.for_create(TimeTravel.Character, :create, %{
          name: "Biff Tannen",
          current_year: 2015
        })

      biff_id = Changeset.get_attribute(biff_changeset, :id)

      # `init/1` runs in the test process, so the registration is ours.
      Server.init([TimeTravel.Character, biff_changeset])

      this_pid = self()

      assert {:ok, ^this_pid} =
               Registry.find_server_by_resource_key({TimeTravel.Character, %{id: biff_id}})
    end

    test "it returns the correct state" do
      biff_changeset =
        Changeset.for_create(TimeTravel.Character, :create, %{
          name: "Biff Tannen",
          current_year: 2015
        })

      biff_id = Changeset.get_attribute(biff_changeset, :id)

      assert {:ok, state} = Server.init([TimeTravel.Character, biff_changeset])

      # The state carries the key, the resource module and the built record.
      assert state.primary_key == %{id: biff_id}
      assert state.resource == TimeTravel.Character
      assert %TimeTravel.Character{} = state.record
      assert state.record.id == biff_id
      assert state.record.name == "Biff Tannen"
      assert state.record.current_year == 2015
      refute state.record.nickname
    end
  end
end
|
8
test/ash_gen_server_test.exs
Normal file
8
test/ash_gen_server_test.exs
Normal file
|
@ -0,0 +1,8 @@
|
||||||
|
defmodule AshGenServerTest do
  @moduledoc false
  use ExUnit.Case
  doctest AshGenServer

  # Smoke test for the generated project skeleton.
  test "greets the world" do
    assert :world == AshGenServer.hello()
  end
end
|
35
test/support/runtime_case.ex
Normal file
35
test/support/runtime_case.ex
Normal file
|
@ -0,0 +1,35 @@
|
||||||
|
defmodule AshGenServer.RuntimeCase do
  @moduledoc false
  use ExUnit.CaseTemplate

  alias AshGenServer.{Registry, Supervisor}

  # Starts a fresh registry and supervisor per test and tears them down
  # afterwards, so tests never see each other's registered servers.
  setup do
    {:ok, registry_pid} = Registry.start_link([])
    {:ok, supervisor_pid} = Supervisor.start_link([])

    on_exit(fn ->
      await_exit(registry_pid)
      await_exit(supervisor_pid)
    end)

    :ok
  end

  using do
    quote do
      require Ash.Query
    end
  end

  # Forcibly terminates `pid` and blocks until its `:DOWN` notification
  # arrives; returns `:ok`, or `:error` after 5s.
  #
  # Uses `:kill` rather than `:normal`: per the `Process.exit/2` docs a
  # `:normal` exit signal does not terminate a process that is not trapping
  # exits, and a trapping supervisor just converts it into an ignored
  # `{:EXIT, _, :normal}` message — so the previous implementation left the
  # processes alive and sat out the full 5s timeout on every cleanup.
  defp await_exit(pid) do
    ref = Process.monitor(pid)
    Process.exit(pid, :kill)

    receive do
      {:DOWN, ^ref, :process, ^pid, _reason} -> :ok
    after
      5000 -> :error
    end
  end
end
|
10
test/support/time_travel.ex
Normal file
10
test/support/time_travel.ex
Normal file
|
@ -0,0 +1,10 @@
|
||||||
|
defmodule TimeTravel do
  @moduledoc """
  An example Ash API that has GenServer backed resources.

  Exists only to exercise `AshGenServer.DataLayer` from the test suite.
  """
  use Ash.Api, otp_app: :ash_gen_server

  # Resources are listed indirectly via the registry module.
  resources do
    registry TimeTravel.Registry
  end
end
|
31
test/support/time_travel/character.ex
Normal file
31
test/support/time_travel/character.ex
Normal file
|
@ -0,0 +1,31 @@
|
||||||
|
defmodule TimeTravel.Character do
  @moduledoc false
  use Ash.Resource, data_layer: AshGenServer.DataLayer

  actions do
    create :create

    read :read do
      primary? true
    end

    destroy :destroy

    # Sets `current_year` from the `target_year` argument via
    # `TimeTravel.CharacterTravelChange`.
    update :travel_in_time do
      argument :target_year, :integer, allow_nil?: false

      change TimeTravel.CharacterTravelChange
    end
  end

  attributes do
    uuid_primary_key :id

    attribute :name, :string, allow_nil?: false
    attribute :nickname, :string
    attribute :current_year, :integer, allow_nil?: false

    create_timestamp :created_at
    update_timestamp :updated_at
  end
end
|
13
test/support/time_travel/character_travel_change.ex
Normal file
13
test/support/time_travel/character_travel_change.ex
Normal file
|
@ -0,0 +1,13 @@
|
||||||
|
defmodule TimeTravel.CharacterTravelChange do
  @moduledoc false
  use Ash.Resource.Change
  alias Ash.Changeset

  # Copies the `:target_year` action argument onto the record's
  # `:current_year` attribute.
  @impl true
  def change(changeset, _opts, _context) do
    target_year = Changeset.get_argument(changeset, :target_year)
    Changeset.change_attribute(changeset, :current_year, target_year)
  end
end
|
42
test/support/time_travel/machine.ex
Normal file
42
test/support/time_travel/machine.ex
Normal file
|
@ -0,0 +1,42 @@
|
||||||
|
defmodule TimeTravel.Machine do
  @moduledoc false
  use Ash.Resource, data_layer: AshGenServer.DataLayer

  actions do
    create :create

    read :read do
      primary? true
    end

    destroy :destroy

    # Moves the machine and its occupants to `target_year`; implemented by
    # `TimeTravel.MachineTravelChange`.
    update :travel_in_time do
      # NOTE(review): `{:array, TimeTravel.Character}` uses a resource module
      # directly as an argument type — confirm the Ash version accepts
      # resources as types here.
      argument :occupants, {:array, TimeTravel.Character}
      argument :target_year, :integer, allow_nil?: false
      change TimeTravel.MachineTravelChange
    end

    # Swaps the machine's power source (see `TimeTravel.MachineRetrofitChange`).
    update :retrofit do
      argument :power_source, :string, allow_nil?: false
      change TimeTravel.MachineRetrofitChange
    end

    # Tops up the machine so `has_power?` becomes true.
    update :charge do
      change TimeTravel.MachineChargeChange
    end
  end

  attributes do
    uuid_primary_key :id

    attribute :name, :string, allow_nil?: false
    attribute :model, :string
    attribute :manufacturer, :string
    attribute :power_source, :string
    attribute :has_power?, :boolean, allow_nil?: false, default: false

    create_timestamp :created_at
    update_timestamp :updated_at
  end
end
|
11
test/support/time_travel/machine_charge_change.ex
Normal file
11
test/support/time_travel/machine_charge_change.ex
Normal file
|
@ -0,0 +1,11 @@
|
||||||
|
defmodule TimeTravel.MachineChargeChange do
  @moduledoc false
  use Ash.Resource.Change
  alias Ash.Changeset

  # Marks the machine as charged by flipping `:has_power?` to true.
  @impl true
  def change(changeset, _opts, _context) do
    Changeset.change_attribute(changeset, :has_power?, true)
  end
end
|
13
test/support/time_travel/machine_retrofit_change.ex
Normal file
13
test/support/time_travel/machine_retrofit_change.ex
Normal file
|
@ -0,0 +1,13 @@
|
||||||
|
defmodule TimeTravel.MachineRetrofitChange do
  @moduledoc false
  use Ash.Resource.Change
  alias Ash.Changeset

  # Copies the `:power_source` action argument onto the record's
  # `:power_source` attribute.
  @impl true
  def change(changeset, _opts, _context) do
    new_power_source = Changeset.get_argument(changeset, :power_source)
    Changeset.change_attribute(changeset, :power_source, new_power_source)
  end
end
|
33
test/support/time_travel/machine_travel_change.ex
Normal file
33
test/support/time_travel/machine_travel_change.ex
Normal file
|
@ -0,0 +1,33 @@
|
||||||
|
defmodule TimeTravel.MachineTravelChange do
  @moduledoc false
  use Ash.Resource.Change
  alias Ash.Changeset

  # Sends the machine (and its occupants) to `:target_year`.
  #
  # Travelling drains the machine: `has_power?` is reset to false, and an
  # after-action hook updates every occupant's year via their own
  # `:travel_in_time` action, halting on the first occupant that fails.
  # Without power, the change is rejected with an error.
  @impl true
  def change(changeset, _opts, _context) do
    has_power? = Changeset.get_attribute(changeset, :has_power?)

    if has_power? do
      occupants = Changeset.get_argument(changeset, :occupants)
      target_year = Changeset.get_argument(changeset, :target_year)

      changeset
      |> Changeset.change_attribute(:has_power?, false)
      # Occupants are updated only after the machine itself persists
      # successfully; the hook threads `{:ok, machine}` through and stops
      # at the first occupant update that errors.
      |> Changeset.after_action(fn _changeset, machine ->
        occupants
        |> Enum.reduce_while({:ok, machine}, fn occupant, {:ok, machine} ->
          occupant
          |> Changeset.for_update(:travel_in_time, %{target_year: target_year})
          |> TimeTravel.update()
          |> case do
            {:ok, _} -> {:cont, {:ok, machine}}
            {:error, reason} -> {:halt, {:error, reason}}
          end
        end)
      end)
    else
      # NOTE(review): `add_error` is called as (changeset, :action, message);
      # confirm this matches this Ash version's `Ash.Changeset.add_error`
      # signature (some versions take the error term as a single argument).
      changeset
      |> Changeset.add_error(:action, "Power source needs topping up")
    end
  end
end
|
9
test/support/time_travel/registry.ex
Normal file
9
test/support/time_travel/registry.ex
Normal file
|
@ -0,0 +1,9 @@
|
||||||
|
defmodule TimeTravel.Registry do
  @moduledoc false
  use Ash.Registry, extensions: [Ash.Registry.ResourceValidations]

  # The resources exposed through the `TimeTravel` API.
  entries do
    entry TimeTravel.Character
    entry TimeTravel.Machine
  end
end
|
1
test/test_helper.exs
Normal file
1
test/test_helper.exs
Normal file
|
@ -0,0 +1 @@
|
||||||
|
# Boots the ExUnit runner before the test suite executes.
ExUnit.start()
|
Loading…
Reference in a new issue