feat: snapshot-based migration generator

Zach Daniel 2020-09-10 20:26:47 -04:00 committed by GitHub
parent 60c18dd149
commit 24b743d31b
25 changed files with 1765 additions and 93 deletions
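
In short: the generator snapshots every Postgres-backed resource to JSON, diffs those snapshots against the previous ones on disk, and writes an Ecto migration for the difference. A minimal sketch of calling it directly (MyApp.Api is a placeholder; the options mirror the %AshPostgres.MigrationGenerator{} struct defaults introduced below):

# Illustrative only — MyApp.Api stands in for one of your Ash APIs.
AshPostgres.MigrationGenerator.generate(MyApp.Api,
  snapshot_path: "priv/resource_snapshots", # default snapshot location
  migration_path: nil,                      # nil falls back to priv/<repo>/migrations
  quiet: false,
  format: true
)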

View file

@@ -87,7 +87,7 @@
# If you don't want TODO comments to cause `mix credo` to fail, just
# set this value to 0 (zero).
#
- {Credo.Check.Design.TagTODO, [exit_status: 2]},
+ {Credo.Check.Design.TagTODO, false},
{Credo.Check.Design.TagFIXME, []},
#

View file

@@ -1,6 +1,6 @@
# THIS FILE IS AUTOGENERATED USING `mix ash.formatter`
# DONT MODIFY IT BY HAND
- locals_without_parens = [repo: 1, table: 1]
+ locals_without_parens = [migrate?: 1, repo: 1, table: 1]
[
inputs: ["{mix,.formatter}.exs", "{config,lib,test}/**/*.{ex,exs}"],

.gitignore
View file

@@ -22,3 +22,6 @@ erl_crash.dump
# Ignore package tarball (built via "mix hex.build").
ash_postgres-*.tar
+ test_migration_path
+ test_snapshots_path

View file

@@ -20,7 +20,7 @@ if Mix.env() == :test do
config :ash_postgres, AshPostgres.TestRepo,
username: "postgres",
database: "postgres",
database: "ash_postgres_test",
hostname: "localhost",
pool: Ecto.Adapters.SQL.Sandbox

View file

@@ -18,4 +18,9 @@ defmodule AshPostgres do
def table(resource) do
Extension.get_opt(resource, [:postgres], :table, nil, true)
end
@doc "Whether or not the resource should be included when generating migrations"
def migrate?(resource) do
Extension.get_opt(resource, [:postgres], :migrate?, nil, true)
end
end
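
For illustration, a resource can opt out of the generator with the new migrate? option (module and table names here are hypothetical):

defmodule MyApp.AuditLog do
  # Hypothetical resource whose table is managed by hand.
  use Ash.Resource, data_layer: AshPostgres.DataLayer

  postgres do
    table "audit_logs"
    repo MyApp.Repo
    # Skipped by mix ash_postgres.generate_migrations
    migrate? false
  end
end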

View file

@@ -36,6 +36,12 @@ defmodule AshPostgres.DataLayer do
doc:
"The repo that will be used to fetch your data. See the `AshPostgres.Repo` documentation for more"
],
+ migrate?: [
+ type: :boolean,
+ default: true,
+ doc:
+ "Whether or not to include this resource in the generated migrations with `mix ash_postgres.generate_migrations`"
+ ],
table: [
type: :string,
required: true,
@@ -44,7 +50,6 @@
]
}
- alias Ash.DataLayer.Delegate
alias Ash.Filter
alias Ash.Filter.{Expression, Not, Predicate}
alias Ash.Filter.Predicate.{Eq, GreaterThan, In, IsNil, LessThan}
@@ -91,14 +96,12 @@
def can?(_, :upsert), do: true
def can?(resource, {:join, other_resource}) do
- other_resource = Delegate.get_delegated(other_resource)
data_layer = Ash.Resource.data_layer(resource)
other_data_layer = Ash.Resource.data_layer(other_resource)
data_layer == other_data_layer and repo(data_layer) == repo(other_data_layer)
end
def can?(resource, {:lateral_join, other_resource}) do
- other_resource = Delegate.get_delegated(other_resource)
data_layer = Ash.Resource.data_layer(resource)
other_data_layer = Ash.Resource.data_layer(other_resource)
data_layer == other_data_layer and repo(data_layer) == repo(other_data_layer)
@@ -1121,6 +1124,6 @@ defmodule AshPostgres.DataLayer do
end
defp maybe_get_resource_query(resource) do
- {table(Delegate.get_delegated(resource)), resource}
+ {table(resource), resource}
end
end

View file

@@ -0,0 +1,906 @@
defmodule AshPostgres.MigrationGenerator do
@moduledoc "Generates migrations based on resource snapshots"
@default_snapshot_path "priv/resource_snapshots"
import Mix.Generator
alias AshPostgres.MigrationGenerator.{Operation, Phase}
defstruct snapshot_path: @default_snapshot_path, migration_path: nil, quiet: false, format: true
def generate(apis, opts \\ []) do
apis = List.wrap(apis)
opts = struct(__MODULE__, opts)
snapshots =
apis
|> Enum.flat_map(&Ash.Api.resources/1)
|> Enum.filter(&(Ash.Resource.data_layer(&1) == AshPostgres.DataLayer))
|> Enum.filter(&AshPostgres.migrate?/1)
|> Enum.map(&get_snapshot/1)
snapshots
|> Enum.group_by(& &1.repo)
|> Enum.each(fn {repo, snapshots} ->
deduped = deduplicate_snapshots(snapshots, opts)
snapshots = Enum.map(deduped, &elem(&1, 0))
deduped
|> fetch_operations()
|> Enum.uniq()
|> case do
[] ->
Mix.shell().info(
"No changes detected, so no migrations or snapshots have been created."
)
:ok
operations ->
operations
|> sort_operations()
|> streamline()
|> group_into_phases()
|> build_up_and_down()
|> write_migration(snapshots, repo, opts)
end
end)
end
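# Several resources may share one table (exercised by the "multiple schemas
# apply to the same table" test below). Group snapshots by table and merge
# them into a single snapshot per table, paired with the existing snapshot
# from disk (nil on the first run).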
defp deduplicate_snapshots(snapshots, opts) do
snapshots
|> Enum.group_by(fn snapshot ->
snapshot.table
end)
|> Enum.map(fn {_table, [snapshot | _] = snapshots} ->
existing_snapshot = get_existing_snapshot(snapshot, opts)
{primary_key, identities} = merge_primary_keys(existing_snapshot, snapshots)
attributes = Enum.flat_map(snapshots, & &1.attributes)
snapshot_identities =
snapshots
|> Enum.map(& &1.identities)
|> Enum.concat()
new_snapshot = %{
snapshot
| attributes: merge_attributes(attributes, snapshot.table),
identities: snapshot_identities
}
all_identities =
new_snapshot.identities
|> Kernel.++(identities)
|> Enum.sort_by(& &1.name)
|> Enum.uniq_by(fn identity ->
Enum.sort(identity.keys)
end)
new_snapshot = %{new_snapshot | identities: all_identities}
{
%{
new_snapshot
| attributes:
Enum.map(new_snapshot.attributes, fn attribute ->
if attribute.name in primary_key do
%{attribute | primary_key?: true}
else
%{attribute | primary_key?: false}
end
end)
},
existing_snapshot
}
end)
end
defp merge_attributes(attributes, table) do
attributes
|> Enum.group_by(& &1.name)
|> Enum.map(fn
{_name, [attribute]} ->
attribute
{name, attributes} ->
%{
name: name,
type: merge_types(Enum.map(attributes, & &1.type), name, table),
default: merge_defaults(Enum.map(attributes, & &1.default)),
allow_nil?: Enum.any?(attributes, & &1.allow_nil?),
references: merge_references(Enum.map(attributes, & &1.references), name, table),
primary_key?: false
}
end)
end
defp merge_references(references, name, table) do
references
|> Enum.reject(&is_nil/1)
|> Enum.uniq()
|> case do
[] ->
nil
[reference] ->
reference
references ->
conflicting_table_field_names =
Enum.map_join(references, "\n", fn reference ->
"* #{reference.table}.#{reference.destination_field}"
end)
raise "Conflicting references for `#{table}.#{name}`:\n#{conflicting_table_field_names}"
end
end
defp merge_types(types, name, table) do
types
|> Enum.uniq()
|> case do
[type] ->
type
types ->
raise "Conflicting types for table `#{table}.#{name}`: #{inspect(types)}"
end
end
defp merge_defaults(defaults) do
defaults
|> Enum.uniq()
|> case do
[default] -> default
_ -> nil
end
end
defp merge_primary_keys(nil, [snapshot | _] = snapshots) do
snapshots
|> Enum.map(&pkey_names(&1.attributes))
|> Enum.uniq()
|> case do
[pkey_names] ->
{pkey_names, []}
unique_primary_keys ->
unique_primary_key_names =
unique_primary_keys
|> Enum.with_index()
|> Enum.map_join("\n", fn {pkey, index} ->
"#{index}: #{inspect(pkey)}"
end)
message = """
Which primary key should be used for the table `#{snapshot.table}` (enter the number)?
#{unique_primary_key_names}
"""
choice =
message
|> Mix.shell().prompt()
|> String.to_integer()
identities =
unique_primary_keys
|> List.delete_at(choice)
|> Enum.map(fn pkey_names ->
pkey_name_string = Enum.join(pkey_names, "_")
name = snapshot.table <> "_" <> pkey_name_string
%{
keys: pkey_names,
name: name
}
end)
primary_key = Enum.sort(Enum.at(unique_primary_keys, choice))
identities =
Enum.reject(identities, fn identity ->
Enum.sort(identity.keys) == primary_key
end)
{primary_key, identities}
end
end
defp merge_primary_keys(existing_snapshot, snapshots) do
pkey_names = pkey_names(existing_snapshot.attributes)
one_pkey_exists? =
Enum.any?(snapshots, fn snapshot ->
pkey_names(snapshot.attributes) == pkey_names
end)
if one_pkey_exists? do
identities =
snapshots
|> Enum.map(&pkey_names(&1.attributes))
|> Enum.uniq()
|> Enum.reject(&(&1 == pkey_names))
|> Enum.map(fn pkey_names ->
pkey_name_string = Enum.join(pkey_names, "_")
name = existing_snapshot.table <> "_" <> pkey_name_string
%{
keys: pkey_names,
name: name
}
end)
{pkey_names, identities}
else
merge_primary_keys(nil, snapshots)
end
end
defp pkey_names(attributes) do
attributes
|> Enum.filter(& &1.primary_key?)
|> Enum.map(& &1.name)
|> Enum.sort()
end
defp write_migration({up, down}, snapshots, repo, opts) do
repo_name = repo |> Module.split() |> List.last() |> Macro.underscore()
Enum.each(snapshots, fn snapshot ->
snapshot_binary = snapshot_to_binary(snapshot)
snapshot_file =
opts.snapshot_path
|> Path.join(repo_name)
|> Path.join(snapshot.table <> ".json")
File.mkdir_p(Path.dirname(snapshot_file))
File.write!(snapshot_file, snapshot_binary, [])
end)
migration_path =
if opts.migration_path do
opts.migration_path
else
"priv/"
|> Path.join(repo_name)
|> Path.join("migrations")
end
count =
migration_path
|> Path.join("*_migrate_resources*")
|> Path.wildcard()
|> Enum.count()
|> Kernel.+(1)
migration_name = "#{timestamp()}_migrate_resources#{count}"
migration_file =
migration_path
|> Path.join(migration_name <> ".exs")
module_name = Module.concat([repo, Migrations, Macro.camelize("migrate_resources#{count}")])
contents = """
defmodule #{inspect(module_name)} do
@moduledoc \"\"\"
Updates resources based on their most recent snapshots.
This file was autogenerated with `mix ash_postgres.generate_migrations`
\"\"\"
use Ecto.Migration
def up() do
#{up}
end
def down() do
#{down}
end
end
"""
create_file(migration_file, format(contents, opts))
end
defp build_up_and_down(phases) do
up =
Enum.map_join(phases, "\n", fn phase ->
phase.__struct__.up(phase) <> "\n"
end)
down =
phases
|> Enum.reverse()
|> Enum.map_join("\n", fn phase ->
phase.__struct__.down(phase) <> "\n"
end)
{up, down}
end
defp format(string, opts) do
if opts.format do
Code.format_string!(string)
else
string
end
end
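# Collapse an AddAttribute immediately followed by an AlterAttribute that only
# adds references to the same column into one AddAttribute, so the generated
# migration declares the column and its reference in a single line.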
defp streamline(ops, acc \\ [])
defp streamline([], acc), do: Enum.reverse(acc)
defp streamline(
[
%Operation.AddAttribute{
attribute: %{
name: name
},
table: table
} = add,
%AshPostgres.MigrationGenerator.Operation.AlterAttribute{
new_attribute: %{
name: name,
references: references
},
old_attribute: %{
name: name
},
table: table
}
| rest
],
acc
)
when not is_nil(references) do
new_attribute = Map.put(add.attribute, :references, references)
streamline(
rest,
[%{add | attribute: new_attribute} | acc]
)
end
defp streamline([first | rest], acc) do
streamline(rest, [first | acc])
end
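# Group the sorted operations into phases: a CreateTable starts a Phase.Create,
# attribute operations on the same table join the current phase, and any other
# operation becomes a standalone Phase.Alter for its table.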
defp group_into_phases(ops, current \\ nil, acc \\ [])
defp group_into_phases([], nil, acc), do: Enum.reverse(acc)
defp group_into_phases([], phase, acc) do
phase = %{phase | operations: Enum.reverse(phase.operations)}
Enum.reverse([phase | acc])
end
defp group_into_phases([%Operation.CreateTable{table: table} | rest], nil, acc) do
group_into_phases(rest, %Phase.Create{table: table}, acc)
end
defp group_into_phases(
[%Operation.AddAttribute{table: table} = op | rest],
%{table: table} = phase,
acc
) do
group_into_phases(rest, %{phase | operations: [op | phase.operations]}, acc)
end
defp group_into_phases(
[%Operation.AlterAttribute{table: table} = op | rest],
%{table: table} = phase,
acc
) do
group_into_phases(rest, %{phase | operations: [op | phase.operations]}, acc)
end
defp group_into_phases(
[%Operation.RenameAttribute{table: table} = op | rest],
%{table: table} = phase,
acc
) do
group_into_phases(rest, %{phase | operations: [op | phase.operations]}, acc)
end
defp group_into_phases(
[%Operation.RemoveAttribute{table: table} = op | rest],
%{table: table} = phase,
acc
) do
group_into_phases(rest, %{phase | operations: [op | phase.operations]}, acc)
end
defp group_into_phases([operation | rest], nil, acc) do
group_into_phases(rest, nil, [
%Phase.Alter{operations: [operation], table: operation.table} | acc
])
end
defp group_into_phases(operations, phase, acc) do
phase = %{phase | operations: Enum.reverse(phase.operations)}
group_into_phases(operations, nil, [phase | acc])
end
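# Insertion sort using the `after?/2` partial order below, so that (for example)
# a table is created before its attributes are added, and unique indexes are
# added after the attributes they cover.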
defp sort_operations(ops, acc \\ [])
defp sort_operations([], acc), do: acc
defp sort_operations([op | rest], []), do: sort_operations(rest, [op])
defp sort_operations([op | rest], acc) do
acc = Enum.reverse(acc)
after_index = Enum.find_index(acc, &after?(op, &1))
new_acc =
if after_index do
acc
|> List.insert_at(after_index, op)
|> Enum.reverse()
else
[op | Enum.reverse(acc)]
end
sort_operations(rest, new_acc)
end
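# `after?(a, b)` — must operation `a` run after operation `b`?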
defp after?(
%Operation.AddUniqueIndex{identity: %{keys: keys}, table: table},
%Operation.AddAttribute{table: table, attribute: %{name: name}}
) do
name in keys
end
defp after?(
%Operation.AddUniqueIndex{identity: %{keys: keys}, table: table},
%Operation.AlterAttribute{table: table, new_attribute: %{name: name}}
) do
name in keys
end
defp after?(
%Operation.AddUniqueIndex{identity: %{keys: keys}, table: table},
%Operation.RenameAttribute{table: table, new_attribute: %{name: name}}
) do
name in keys
end
defp after?(
%Operation.RemoveUniqueIndex{identity: %{keys: keys}, table: table},
%Operation.RemoveAttribute{table: table, attribute: %{name: name}}
) do
name in keys
end
defp after?(
%Operation.RemoveUniqueIndex{identity: %{keys: keys}, table: table},
%Operation.RenameAttribute{table: table, old_attribute: %{name: name}}
) do
name in keys
end
defp after?(%Operation.AddAttribute{table: table}, %Operation.CreateTable{table: table}) do
true
end
defp after?(
%Operation.AddAttribute{
attribute: %{
references: %{table: table, destination_field: name}
}
},
%Operation.AddAttribute{table: table, attribute: %{name: name}}
),
do: true
defp after?(
%Operation.AddAttribute{
table: table,
attribute: %{
primary_key?: false
}
},
%Operation.AddAttribute{table: table, attribute: %{primary_key?: true}}
),
do: true
defp after?(
%Operation.AddAttribute{
table: table,
attribute: %{
primary_key?: true
}
},
%Operation.RemoveAttribute{table: table, attribute: %{primary_key?: true}}
),
do: true
defp after?(
%Operation.AlterAttribute{
table: table,
new_attribute: %{primary_key?: false},
old_attribute: %{primary_key?: true}
},
%Operation.AddAttribute{
table: table,
attribute: %{
primary_key?: true
}
}
),
do: true
defp after?(
%Operation.RemoveAttribute{attribute: %{name: name}, table: table},
%Operation.AlterAttribute{
old_attribute: %{references: %{table: table, destination_field: name}}
}
),
do: true
defp after?(
%Operation.AlterAttribute{
new_attribute: %{
references: %{table: table, destination_field: name}
}
},
%Operation.AddAttribute{table: table, attribute: %{name: name}}
),
do: true
defp after?(%Operation.AddUniqueIndex{table: table}, %Operation.CreateTable{table: table}) do
true
end
defp after?(%Operation.AlterAttribute{new_attribute: %{references: references}}, _)
when not is_nil(references),
do: true
defp after?(_, _), do: false
defp fetch_operations(snapshots) do
Enum.flat_map(snapshots, fn {snapshot, existing_snapshot} ->
do_fetch_operations(snapshot, existing_snapshot)
end)
end
defp do_fetch_operations(snapshot, existing_snapshot, acc \\ [])
defp do_fetch_operations(snapshot, nil, acc) do
empty_snapshot = %{
attributes: [],
identities: [],
table: snapshot.table,
repo: snapshot.repo
}
do_fetch_operations(snapshot, empty_snapshot, [
%Operation.CreateTable{table: snapshot.table} | acc
])
end
defp do_fetch_operations(snapshot, old_snapshot, acc) do
attribute_operations = attribute_operations(snapshot, old_snapshot)
unique_indexes_to_remove =
old_snapshot.identities
|> Enum.reject(fn old_identity ->
Enum.find(snapshot.identities, fn identity ->
Enum.sort(old_identity.keys) == Enum.sort(identity.keys)
end)
end)
|> Enum.map(fn identity ->
%Operation.RemoveUniqueIndex{identity: identity, table: snapshot.table}
end)
unique_indexes_to_add =
snapshot.identities
|> Enum.reject(fn identity ->
Enum.find(old_snapshot.identities, fn old_identity ->
Enum.sort(old_identity.keys) == Enum.sort(identity.keys)
end)
end)
|> Enum.map(fn identity ->
%Operation.AddUniqueIndex{identity: identity, table: snapshot.table}
end)
attribute_operations ++ unique_indexes_to_add ++ unique_indexes_to_remove ++ acc
end
defp attribute_operations(snapshot, old_snapshot) do
attributes_to_add =
Enum.reject(snapshot.attributes, fn attribute ->
Enum.find(old_snapshot.attributes, &(&1.name == attribute.name))
end)
attributes_to_remove =
Enum.reject(old_snapshot.attributes, fn attribute ->
Enum.find(snapshot.attributes, &(&1.name == attribute.name))
end)
{attributes_to_add, attributes_to_remove, attributes_to_rename} =
resolve_renames(attributes_to_add, attributes_to_remove)
attributes_to_alter =
snapshot.attributes
|> Enum.map(fn attribute ->
{attribute,
Enum.find(old_snapshot.attributes, &(&1.name == attribute.name && &1 != attribute))}
end)
|> Enum.filter(&elem(&1, 1))
rename_attribute_events =
Enum.map(attributes_to_rename, fn {new, old} ->
%Operation.RenameAttribute{new_attribute: new, old_attribute: old, table: snapshot.table}
end)
add_attribute_events =
Enum.flat_map(attributes_to_add, fn attribute ->
if attribute.references do
[
%Operation.AddAttribute{
attribute: Map.delete(attribute, :references),
table: snapshot.table
},
%Operation.AlterAttribute{
old_attribute: Map.delete(attribute, :references),
new_attribute: attribute,
table: snapshot.table
}
]
else
[
%Operation.AddAttribute{
attribute: attribute,
table: snapshot.table
}
]
end
end)
alter_attribute_events =
Enum.flat_map(attributes_to_alter, fn {new_attribute, old_attribute} ->
if new_attribute.references do
[
%Operation.AlterAttribute{
new_attribute: Map.delete(new_attribute, :references),
old_attribute: old_attribute,
table: snapshot.table
},
%Operation.AlterAttribute{
new_attribute: new_attribute,
old_attribute: Map.delete(new_attribute, :references),
table: snapshot.table
}
]
else
[
%Operation.AlterAttribute{
new_attribute: new_attribute,
old_attribute: old_attribute,
table: snapshot.table
}
]
end
end)
remove_attribute_events =
Enum.map(attributes_to_remove, fn attribute ->
%Operation.RemoveAttribute{attribute: attribute, table: snapshot.table}
end)
add_attribute_events ++
alter_attribute_events ++ remove_attribute_events ++ rename_attribute_events
end
def get_existing_snapshot(snapshot, opts) do
repo_name = snapshot.repo |> Module.split() |> List.last() |> Macro.underscore()
folder = Path.join(opts.snapshot_path, repo_name)
file = Path.join(folder, snapshot.table <> ".json")
if File.exists?(file) do
existing_snapshot =
file
|> File.read!()
|> load_snapshot()
existing_snapshot
end
end
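# Interactively decide whether an added/removed attribute pair is really a
# rename, asking through Mix.shell() (the tests drive this with
# {:mix_shell_input, ...} messages).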
defp resolve_renames(adding, []), do: {adding, [], []}
defp resolve_renames([adding], [removing]) do
if Mix.shell().yes?("Are you renaming :#{removing.name} to :#{adding.name}?") do
{[], [], [{adding, removing}]}
else
{[adding], [removing], []}
end
end
defp resolve_renames(adding, [removing | rest]) do
{new_adding, new_removing, new_renames} =
if Mix.shell().yes?("Are you renaming :#{removing.name}?") do
new_attribute = get_new_attribute(adding)
{adding -- [new_attribute], [], [{new_attribute, removing}]}
else
{adding, [removing], []}
end
{rest_adding, rest_removing, rest_renames} = resolve_renames(new_adding, rest)
{new_adding ++ rest_adding, new_removing ++ rest_removing, rest_renames ++ new_renames}
end
defp get_new_attribute(adding, tries \\ 3)
defp get_new_attribute(_adding, 0) do
raise "Could not get matching name after 3 attempts."
end
defp get_new_attribute(adding, tries) do
name =
Mix.shell().prompt(
"What are you renaming it to?: #{Enum.map_join(adding, ", ", & &1.name)}"
)
case Enum.find(adding, &(to_string(&1.name) == name)) do
nil -> get_new_attribute(adding, tries - 1)
new_attribute -> new_attribute
end
end
defp timestamp do
{{y, m, d}, {hh, mm, ss}} = :calendar.universal_time()
"#{y}#{pad(m)}#{pad(d)}#{pad(hh)}#{pad(mm)}#{pad(ss)}"
end
defp pad(i) when i < 10, do: <<?0, ?0 + i>>
defp pad(i), do: to_string(i)
def get_snapshot(resource) do
snapshot = %{
attributes: attributes(resource),
identities: identities(resource),
table: AshPostgres.table(resource),
repo: AshPostgres.repo(resource)
}
hash =
:sha256
|> :crypto.hash(inspect(snapshot))
|> Base.encode16()
Map.put(snapshot, :hash, hash)
end
def attributes(resource) do
repo = AshPostgres.repo(resource)
resource
|> Ash.Resource.attributes()
|> Enum.sort_by(& &1.name)
|> Enum.map(&Map.take(&1, [:name, :type, :default, :allow_nil?, :primary_key?]))
|> Enum.map(fn attribute ->
default = default(attribute, repo)
attribute
|> Map.put(:default, default)
|> Map.update!(:type, fn type ->
type
|> Ash.Type.storage_type()
|> migration_type()
end)
end)
|> Enum.map(fn attribute ->
references = find_reference(resource, attribute)
Map.put(attribute, :references, references)
end)
end
defp find_reference(resource, attribute) do
Enum.find_value(Ash.Resource.relationships(resource), fn relationship ->
if attribute.name == relationship.source_field && relationship.type == :belongs_to &&
foreign_key?(relationship) do
%{
destination_field: relationship.destination_field,
table: AshPostgres.table(relationship.destination)
}
end
end)
end
defp migration_type(:string), do: :text
defp migration_type(:integer), do: :integer
defp migration_type(:boolean), do: :boolean
defp migration_type(:binary_id), do: :binary_id
defp migration_type(other), do: raise("No migration_type set up for #{other}")
defp foreign_key?(relationship) do
Ash.Resource.data_layer(relationship.source) == AshPostgres.DataLayer &&
AshPostgres.repo(relationship.source) == AshPostgres.repo(relationship.destination)
end
defp identities(resource) do
resource
|> Ash.Resource.identities()
|> Enum.filter(fn identity ->
Enum.all?(identity.keys, fn key ->
Ash.Resource.attribute(resource, key)
end)
end)
|> Enum.sort_by(& &1.name)
|> Enum.map(&Map.take(&1, [:name, :keys]))
end
if :erlang.function_exported(Ash, :uuid, 0) do
@uuid_functions [&Ash.uuid/0, &Ecto.UUID.generate/0]
else
@uuid_functions [&Ecto.UUID.generate/0]
end
defp default(%{default: default}, repo) when is_function(default) do
cond do
default in @uuid_functions && "uuid-ossp" in (repo.config()[:installed_extensions] || []) ->
~S[fragment("uuid_generate_v4()")]
default == (&DateTime.utc_now/0) ->
~S[fragment("now()")]
true ->
"nil"
end
end
defp default(%{default: {_, _, _}}, _), do: "nil"
defp default(%{default: value, type: type}, _) do
case Ash.Type.dump_to_native(type, value) do
{:ok, value} -> inspect(value)
_ -> "nil"
end
end
defp snapshot_to_binary(snapshot) do
Jason.encode!(snapshot, pretty: true)
end
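# `keys: :atoms!` only converts keys to already-existing atoms, which holds
# here because snapshot keys are atoms this module itself references.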
defp load_snapshot(json) do
json
|> Jason.decode!(keys: :atoms!)
|> Map.update!(:identities, fn identities ->
Enum.map(identities, &load_identity/1)
end)
|> Map.update!(:attributes, fn attributes ->
Enum.map(attributes, &load_attribute/1)
end)
|> Map.update!(:repo, &String.to_atom/1)
end
defp load_attribute(attribute) do
attribute
|> Map.update!(:type, &String.to_atom/1)
|> Map.update!(:name, &String.to_atom/1)
|> Map.update!(:references, fn
nil ->
nil
references ->
Map.update!(references, :destination_field, &String.to_atom/1)
end)
end
defp load_identity(identity) do
identity
|> Map.update!(:name, &String.to_atom/1)
|> Map.update!(:keys, fn keys ->
Enum.map(keys, &String.to_atom/1)
end)
end
end

View file

@@ -0,0 +1,146 @@
defmodule AshPostgres.MigrationGenerator.Operation do
@moduledoc false
defmodule CreateTable do
@moduledoc false
defstruct [:table]
end
defmodule AddAttribute do
@moduledoc false
defstruct [:attribute, :table]
def up(%{
attribute:
%{references: %{table: table, destination_field: destination_field}} = attribute
}) do
"add #{inspect(attribute.name)}, references(#{inspect(table)}, type: #{
inspect(attribute.type)
}, column: #{inspect(destination_field)}), default: #{attribute.default}, primary_key: #{
attribute.primary_key?
}"
end
def up(%{attribute: attribute}) do
"add #{inspect(attribute.name)}, #{inspect(attribute.type)}, null: #{attribute.allow_nil?}, default: #{
attribute.default
}, primary_key: #{attribute.primary_key?}"
end
def down(%{attribute: attribute}) do
"remove #{inspect(attribute.name)}"
end
end
defmodule AlterAttribute do
@moduledoc false
defstruct [:old_attribute, :new_attribute, :table]
def up(%{
new_attribute:
%{references: %{table: table, destination_field: destination_field}} = attribute
}) do
"modify #{inspect(attribute.name)}, references(#{inspect(table)}, type: #{
inspect(attribute.type)
}, column: #{inspect(destination_field)}), default: #{attribute.default}, primary_key: #{
attribute.primary_key?
}"
end
def up(%{new_attribute: attribute}) do
"modify #{inspect(attribute.name)}, #{inspect(attribute.type)}, null: #{
attribute.allow_nil?
}, default: #{attribute.default}, primary_key: #{attribute.primary_key?}"
end
def down(%{
old_attribute:
%{references: %{table: table, destination_field: destination_field}} = attribute
}) do
"modify #{inspect(attribute.name)}, references(#{inspect(table)}, type: #{
inspect(attribute.type)
}, column: #{inspect(destination_field)}), default: #{attribute.default}, primary_key: #{
attribute.primary_key?
}"
end
def down(%{old_attribute: attribute}) do
"modify #{inspect(attribute.name)}, #{inspect(attribute.type)}, null: #{
attribute.allow_nil?
}, default: #{attribute.default}, primary_key: #{attribute.primary_key?}"
end
end
defmodule RenameAttribute do
@moduledoc false
defstruct [:old_attribute, :new_attribute, :table]
def up(%{old_attribute: old_attribute, new_attribute: new_attribute, table: table}) do
"rename table(:#{table}), #{inspect(old_attribute.name)}, to: #{inspect(new_attribute.name)}"
end
def down(%{new_attribute: old_attribute, old_attribute: new_attribute, table: table}) do
"rename table(:#{table}), #{inspect(old_attribute.name)}, to: #{inspect(new_attribute.name)}"
end
end
defmodule RemoveAttribute do
@moduledoc false
defstruct [:attribute, :table]
def up(%{attribute: attribute}) do
"remove #{inspect(attribute.name)}"
end
def down(%{
attribute:
%{references: %{table: table, destination_field: destination_field}} = attribute
}) do
"add #{inspect(attribute.name)}, references(#{inspect(table)}, type: #{
inspect(attribute.type)
}, column: #{inspect(destination_field)}), default: #{attribute.default}, primary_key: #{
attribute.primary_key?
}"
end
def down(%{attribute: attribute}) do
"add #{inspect(attribute.name)}, #{inspect(attribute.type)}, null: #{attribute.allow_nil?}, default: #{
attribute.default
}, primary_key: #{attribute.primary_key?}"
end
end
defmodule AddUniqueIndex do
@moduledoc false
defstruct [:identity, :table]
def up(%{identity: %{name: name, keys: keys}, table: table}) do
"create unique_index(:#{table}, [#{Enum.map_join(keys, ",", &inspect/1)}], name: \"#{table}_#{
name
}_unique_index\")"
end
def down(%{identity: %{name: name, keys: keys}, table: table}) do
"drop unique_index(:#{table}, [#{Enum.map_join(keys, ",", &inspect/1)}], name: \"#{table}_#{
name
}_unique_index\")"
end
end
defmodule RemoveUniqueIndex do
@moduledoc false
defstruct [:identity, :table]
def up(%{identity: %{name: name, keys: keys}, table: table}) do
"drop unique_index(:#{table}, [#{Enum.map_join(keys, ",", &inspect/1)}], name: \"#{table}_#{
name
}_unique_index\")"
end
def down(%{identity: %{name: name, keys: keys}, table: table}) do
"create unique_index(:#{table}, [#{Enum.map_join(keys, ",", &inspect/1)}], name: \"#{table}_#{
name
}_unique_index\")"
end
end
end
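
For reference, these `up/1` and `down/1` functions render the literal lines embedded in the migration; e.g. AddUniqueIndex.up/1 produces a string like the one asserted in the tests below:

create unique_index(:posts, [:title], name: "posts_title_unique_index")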

View file

@@ -0,0 +1,43 @@
defmodule AshPostgres.MigrationGenerator.Phase do
@moduledoc false
defmodule Create do
@moduledoc false
defstruct [:table, operations: []]
def up(%{table: table, operations: operations}) do
"create table(:#{table}, primary_key: false) do\n" <>
Enum.map_join(operations, "\n", fn operation -> operation.__struct__.up(operation) end) <>
"\nend"
end
def down(%{table: table}) do
"drop table(#{inspect(table)})"
end
end
defmodule Alter do
@moduledoc false
defstruct [:table, operations: []]
def up(%{table: table, operations: operations}) do
body =
Enum.map_join(operations, "\n", fn operation -> operation.__struct__.up(operation) end)
"alter table(#{inspect(table)}) do\n" <>
body <>
"\nend"
end
def down(%{table: table, operations: operations}) do
body =
operations
|> Enum.reverse()
|> Enum.map_join("\n", fn operation -> operation.__struct__.down(operation) end)
"alter table(:#{table}) do\n" <>
body <>
"\nend"
end
end
end

View file

@@ -0,0 +1,71 @@
defmodule Mix.Tasks.AshPostgres.GenerateMigrations do
@description "Generates migrations, and stores a snapshot of your resources"
@moduledoc @description
use Mix.Task
@shortdoc @description
def run(args) do
{opts, _} =
OptionParser.parse!(args,
strict: [
apis: :string,
snapshot_path: :string,
migration_path: :string,
quiet: :boolean,
format: :boolean
]
)
apps =
if apps_paths = Mix.Project.apps_paths() do
apps_paths |> Map.keys() |> Enum.sort()
else
[Mix.Project.config()[:app]]
end
configured_apis = Enum.flat_map(apps, &Application.get_env(&1, :ash_apis, []))
apis =
opts[:apis]
|> Kernel.||("")
|> String.split(",")
|> Enum.flat_map(fn
"" ->
[]
api ->
[Module.concat([api])]
end)
|> Kernel.++(configured_apis)
|> Enum.map(&ensure_compiled(&1, args))
if apis == [] do
raise "must supply the --apis argument, or set `config :my_app, apis: [...]` in config"
end
AshPostgres.MigrationGenerator.generate(apis, opts)
end
defp ensure_compiled(api, args) do
if Code.ensure_loaded?(Mix.Tasks.App.Config) do
Mix.Task.run("app.config", args)
else
Mix.Task.run("loadpaths", args)
"--no-compile" not in args && Mix.Task.run("compile", args)
end
case Code.ensure_compiled(api) do
{:module, _} ->
api
|> Ash.Api.resources()
|> Enum.each(&Code.ensure_compiled/1)
# TODO: We shouldn't need to make sure that the resources are compiled
api
{:error, error} ->
Mix.raise("Could not load #{inspect(api)}, error: #{inspect(error)}. ")
end
end
end
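
A sketch of invoking the task from the command line (flag spellings follow the OptionParser switches above; MyApp.Api is a placeholder):

mix ash_postgres.generate_migrations --apis MyApp.Api --snapshot-path priv/resource_snapshots --migration-path priv/repo/migrations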

View file

@@ -23,7 +23,7 @@ defmodule AshPostgres.MixProject do
"coveralls.github": :test
],
dialyzer: [
- plt_add_apps: [:ecto, :ash]
+ plt_add_apps: [:ecto, :ash, :mix]
],
docs: docs(),
aliases: aliases(),
@@ -65,7 +65,7 @@ defmodule AshPostgres.MixProject do
[
{:ecto_sql, "~> 3.4"},
{:postgrex, ">= 0.0.0"},
{:ash, ash_version("~> 1.13")},
{:ash, ash_version("~> 1.13.2")},
{:git_ops, "~> 2.0.1", only: :dev},
{:ex_doc, "~> 0.22", only: :dev, runtime: false},
{:ex_check, "~> 0.11.0", only: :dev},
@@ -87,7 +87,8 @@
defp aliases do
[
sobelow: "sobelow --skip -i Config.Secrets",
sobelow:
"sobelow --skip -i Config.Secrets --ignore-files lib/migration_generator/migration_generator.ex",
credo: "credo --strict",
"ash.formatter": "ash.formatter --extensions AshPostgres.DataLayer"
]

View file

@@ -1,5 +1,5 @@
%{
"ash": {:hex, :ash, "1.13.0", "7821561e4529628610ceac2b0ea0886b8bf0800ef4443cc50c806de7552ec7c3", [:mix], [{:ecto, "~> 3.4", [hex: :ecto, repo: "hexpm", optional: false]}, {:ets, "~> 0.8.0", [hex: :ets, repo: "hexpm", optional: false]}, {:nimble_options, "~> 0.3.0", [hex: :nimble_options, repo: "hexpm", optional: false]}, {:picosat_elixir, "~> 0.1.4", [hex: :picosat_elixir, repo: "hexpm", optional: false]}], "hexpm", "be9302effcebcb2fbc1f79a133e8ee789678c8b6938dbd6355630d98cc88b687"},
"ash": {:hex, :ash, "1.13.2", "e3f0f2d831e69f956f78e69501cff39e3701566e6f00bf21c01ad8c172eeac0c", [:mix], [{:ecto, "~> 3.4", [hex: :ecto, repo: "hexpm", optional: false]}, {:ets, "~> 0.8.0", [hex: :ets, repo: "hexpm", optional: false]}, {:nimble_options, "~> 0.3.0", [hex: :nimble_options, repo: "hexpm", optional: false]}, {:picosat_elixir, "~> 0.1.4", [hex: :picosat_elixir, repo: "hexpm", optional: false]}], "hexpm", "e3d44a9f123d126ced1614ba69a62af092210de78ddb64be6dc8849a850be158"},
"bunt": {:hex, :bunt, "0.2.0", "951c6e801e8b1d2cbe58ebbd3e616a869061ddadcc4863d0a2182541acae9a38", [:mix], [], "hexpm", "7af5c7e09fe1d40f76c8e4f9dd2be7cebd83909f31fee7cd0e9eadc567da8353"},
"certifi": {:hex, :certifi, "2.5.2", "b7cfeae9d2ed395695dd8201c57a2d019c0c43ecaf8b8bcb9320b40d6662f340", [:rebar3], [{:parse_trans, "~>3.3", [hex: :parse_trans, repo: "hexpm", optional: false]}], "hexpm", "3b3b5f36493004ac3455966991eaf6e768ce9884693d9968055aeeeb1e575040"},
"connection": {:hex, :connection, "1.0.4", "a1cae72211f0eef17705aaededacac3eb30e6625b04a6117c1b2db6ace7d5976", [:mix], [], "hexpm", "4a0850c9be22a43af9920a71ab17c051f5f7d45c209e40269a1938832510e4d9"},
@@ -11,7 +11,7 @@
"earmark": {:hex, :earmark, "1.4.5", "62ffd3bd7722fb7a7b1ecd2419ea0b458c356e7168c1f5d65caf09b4fbdd13c8", [:mix], [], "hexpm", "b7d0e6263d83dc27141a523467799a685965bf8b13b6743413f19a7079843f4f"},
"ecto": {:hex, :ecto, "3.4.6", "08f7afad3257d6eb8613309af31037e16c36808dfda5a3cd0cb4e9738db030e4", [:mix], [{:decimal, "~> 1.6 or ~> 2.0", [hex: :decimal, repo: "hexpm", optional: false]}, {:jason, "~> 1.0", [hex: :jason, repo: "hexpm", optional: true]}, {:telemetry, "~> 0.4", [hex: :telemetry, repo: "hexpm", optional: false]}], "hexpm", "6f13a9e2a62e75c2dcfc7207bfc65645ab387af8360db4c89fee8b5a4bf3f70b"},
"ecto_sql": {:hex, :ecto_sql, "3.4.4", "d28bac2d420f708993baed522054870086fd45016a9d09bb2cd521b9c48d32ea", [:mix], [{:db_connection, "~> 2.2", [hex: :db_connection, repo: "hexpm", optional: false]}, {:ecto, "~> 3.4.3", [hex: :ecto, repo: "hexpm", optional: false]}, {:myxql, "~> 0.3.0 or ~> 0.4.0", [hex: :myxql, repo: "hexpm", optional: true]}, {:postgrex, "~> 0.15.0", [hex: :postgrex, repo: "hexpm", optional: true]}, {:tds, "~> 2.1.0", [hex: :tds, repo: "hexpm", optional: true]}, {:telemetry, "~> 0.4.0", [hex: :telemetry, repo: "hexpm", optional: false]}], "hexpm", "edb49af715dd72f213b66adfd0f668a43c17ed510b5d9ac7528569b23af57fe8"},
"elixir_make": {:hex, :elixir_make, "0.6.0", "38349f3e29aff4864352084fc736fa7fa0f2995a819a737554f7ebd28b85aaab", [:mix], [], "hexpm", "d522695b93b7f0b4c0fcb2dfe73a6b905b1c301226a5a55cb42e5b14d509e050"},
"elixir_make": {:hex, :elixir_make, "0.6.1", "8faa29a5597faba999aeeb72bbb9c91694ef8068f0131192fb199f98d32994ef", [:mix], [], "hexpm", "35d33270680f8d839a4003c3e9f43afb595310a592405a00afc12de4c7f55a18"},
"erlex": {:hex, :erlex, "0.2.6", "c7987d15e899c7a2f34f5420d2a2ea0d659682c06ac607572df55a43753aa12e", [:mix], [], "hexpm", "2ed2e25711feb44d52b17d2780eabf998452f6efda104877a3881c2f8c0c0c75"},
"ets": {:hex, :ets, "0.8.1", "8ff9bcda5682b98493f8878fc9dbd990e48d566cba8cce59f7c2a78130da29ea", [:mix], [], "hexpm", "6be41b50adb5bc5c43626f25ea2d0af1f4a242fb3fad8d53f0c67c20b78915cc"},
"ex_check": {:hex, :ex_check, "0.11.0", "6d878d9ae30d19168157bcbf346b527825284e14e77a07ec0492b19cf0036479", [:mix], [], "hexpm", "d41894aa6193f089a05e3abb43ca457e289619fcfbbdd7b60d070b7a62b26832"},

View file

@@ -0,0 +1,35 @@
{
"attributes": [
{
"allow_nil?": true,
"default": "fragment(\"uuid_generate_v4()\")",
"name": "id",
"primary_key?": true,
"references": null,
"type": "binary_id"
},
{
"allow_nil?": true,
"default": "nil",
"name": "post_id",
"primary_key?": false,
"references": {
"destination_field": "id",
"table": "posts"
},
"type": "binary_id"
},
{
"allow_nil?": true,
"default": "nil",
"name": "title",
"primary_key?": false,
"references": null,
"type": "text"
}
],
"hash": "308BC45BF8556BAD592F7C197D6EACD0A44835BD4108AD0987A4B1B2FC1D2608",
"identities": [],
"repo": "Elixir.AshPostgres.TestRepo",
"table": "comments"
}

View file

@@ -0,0 +1,40 @@
{
"attributes": [
{
"allow_nil?": true,
"default": "fragment(\"uuid_generate_v4()\")",
"name": "id",
"primary_key?": true,
"references": null,
"type": "binary_id"
},
{
"allow_nil?": true,
"default": "nil",
"name": "public",
"primary_key?": false,
"references": null,
"type": "boolean"
},
{
"allow_nil?": true,
"default": "nil",
"name": "score",
"primary_key?": false,
"references": null,
"type": "integer"
},
{
"allow_nil?": true,
"default": "nil",
"name": "title",
"primary_key?": false,
"references": null,
"type": "text"
}
],
"hash": "4FC884968613A0BE533B06EB9633207611530A717FD30F665FCC40EFC9057B3C",
"identities": [],
"repo": "Elixir.AshPostgres.TestRepo",
"table": "posts"
}

View file

@@ -1,11 +0,0 @@
defmodule AshPostgres.TestRepo.Migrations.AddPosts do
use Ecto.Migration
def change do
create table(:posts) do
add(:title, :string)
add(:score, :integer)
add(:public, :boolean)
end
end
end

View file

@@ -1,10 +0,0 @@
defmodule AshPostgres.TestRepo.Migrations.AddComments do
use Ecto.Migration
def change do
create table(:comments) do
add(:title, :string)
add(:post_id, references(:posts))
end
end
end

View file

@@ -0,0 +1,7 @@
defmodule AshPostgres.TestRepo.Migrations.AddExtensions do
use Ecto.Migration
def change do
execute("CREATE EXTENSION \"uuid-ossp\";", "DROP EXTENSION \"uuid-ossp\"")
end
end

View file

@@ -0,0 +1,34 @@
defmodule AshPostgres.TestRepo.Migrations.MigrateResources1 do
@moduledoc """
Updates resources based on their most recent snapshots.
This file was autogenerated with `mix ash_postgres.generate_migrations`
"""
use Ecto.Migration
def up() do
create table(:posts, primary_key: false) do
add(:id, :binary_id, null: true, default: fragment("uuid_generate_v4()"), primary_key: true)
add(:title, :text, null: true, default: nil, primary_key: false)
add(:score, :integer, null: true, default: nil, primary_key: false)
add(:public, :boolean, null: true, default: nil, primary_key: false)
end
create table(:comments, primary_key: false) do
add(:id, :binary_id, null: true, default: fragment("uuid_generate_v4()"), primary_key: true)
add(:title, :text, null: true, default: nil, primary_key: false)
add(:post_id, references("posts", type: :binary_id, column: :id),
default: nil,
primary_key: false
)
end
end
def down() do
drop(table("comments"))
drop(table("posts"))
end
end

View file

@@ -1,64 +1,6 @@
defmodule AshPostgres.FilterTest do
- use AshPostgres.RepoCase
- defmodule Post do
- use Ash.Resource,
- data_layer: AshPostgres.DataLayer
- postgres do
- table "posts"
- repo AshPostgres.TestRepo
- end
- actions do
- read(:read)
- create(:create)
- end
- attributes do
- attribute(:id, :uuid, primary_key?: true, default: &Ecto.UUID.generate/0)
- attribute(:title, :string)
- attribute(:score, :integer)
- attribute(:public, :boolean)
- end
- relationships do
- has_many(:comments, AshPostgres.FilterTest.Comment, destination_field: :post_id)
- end
- end
- defmodule Comment do
- use Ash.Resource,
- data_layer: AshPostgres.DataLayer
- postgres do
- table "comments"
- repo AshPostgres.TestRepo
- end
- actions do
- read(:read)
- create(:create)
- end
- attributes do
- attribute(:id, :uuid, primary_key?: true, default: &Ecto.UUID.generate/0)
- attribute(:title, :string)
- end
- relationships do
- belongs_to(:post, Post)
- end
- end
- defmodule Api do
- use Ash.Api
- resources do
- resource(Post)
- resource(Comment)
- end
- end
+ use AshPostgres.RepoCase, async: false
+ alias AshPostgres.Test.{Api, Comment, Post}
describe "with no filter applied" do
test "with no data" do

View file

@@ -0,0 +1,394 @@
defmodule AshPostgres.MigrationGeneratorTest do
use AshPostgres.RepoCase, async: false
defmacrop defposts(mod \\ Post, do: body) do
quote do
Code.compiler_options(ignore_module_conflict: true)
defmodule unquote(mod) do
use Ash.Resource,
data_layer: AshPostgres.DataLayer
postgres do
table "posts"
repo(AshPostgres.TestRepo)
end
actions do
read(:read)
create(:create)
end
unquote(body)
end
Code.compiler_options(ignore_module_conflict: false)
end
end
defmacrop defapi(resources) do
quote do
Code.compiler_options(ignore_module_conflict: true)
defmodule Api do
use Ash.Api
resources do
for resource <- unquote(resources) do
resource(resource)
end
end
end
Code.compiler_options(ignore_module_conflict: false)
end
end
describe "creating initial snapshots" do
setup do
on_exit(fn ->
"test_snapshots_path/**/*.json"
|> Path.wildcard()
|> Enum.each(&File.rm!/1)
"test_snapshots_path/*"
|> Path.wildcard()
|> Enum.each(&File.rmdir!/1)
"test_migration_path/**/*.exs"
|> Path.wildcard()
|> Enum.each(&File.rm!/1)
"test_migration_path/*"
|> Path.wildcard()
|> Enum.each(&File.rmdir!/1)
if File.exists?("test_snapshots_path") do
File.rmdir("test_snapshots_path")
end
if File.exists?("test_migration_path") do
File.rmdir("test_migration_path")
end
end)
defposts do
resource do
identities do
identity(:title, [:title])
end
end
attributes do
attribute(:id, :uuid, primary_key?: true, default: &Ecto.UUID.generate/0)
attribute(:title, :string)
end
end
defapi([Post])
Mix.shell(Mix.Shell.Process)
AshPostgres.MigrationGenerator.generate(Api,
snapshot_path: "test_snapshots_path",
migration_path: "test_migration_path",
quiet: true,
format: false
)
:ok
end
test "it creates a snapshot for each resource" do
assert File.exists?(Path.join(["test_snapshots_path", "test_repo", "posts.json"]))
end
test "the snapshots can be loaded" do
assert File.exists?(Path.join(["test_snapshots_path", "test_repo", "posts.json"]))
end
test "the snapshots contain valid json" do
assert File.read!(Path.join(["test_snapshots_path", "test_repo", "posts.json"]))
|> Jason.decode!(keys: :atoms!)
end
test "the migration creates the table" do
assert [file] = Path.wildcard("test_migration_path/*_migrate_resources*.exs")
assert File.read!(file) =~ "create table(:posts, primary_key: false) do"
end
test "the migration adds the id, with its default" do
assert [file] = Path.wildcard("test_migration_path/*_migrate_resources*.exs")
assert File.read!(file) =~
~S[add :id, :binary_id, null: true, default: fragment("uuid_generate_v4()"), primary_key: true]
end
test "the migration adds other attributes" do
assert [file] = Path.wildcard("test_migration_path/*_migrate_resources*.exs")
assert File.read!(file) =~
~S[add :title, :text, null: true, default: nil, primary_key: false]
end
test "the migration creates unique_indexes based on the identities of the resource" do
assert [file] = Path.wildcard("test_migration_path/*_migrate_resources*.exs")
assert File.read!(file) =~
~S{create unique_index(:posts, [:title], name: "posts_title_unique_index")}
end
end
describe "creating follow up migrations" do
setup do
on_exit(fn ->
"test_snapshots_path/**/*.json"
|> Path.wildcard()
|> Enum.each(&File.rm!/1)
"test_snapshots_path/*"
|> Path.wildcard()
|> Enum.each(&File.rmdir!/1)
"test_migration_path/**/*.exs"
|> Path.wildcard()
|> Enum.each(&File.rm!/1)
"test_migration_path/*"
|> Path.wildcard()
|> Enum.each(&File.rmdir!/1)
if File.exists?("test_snapshots_path") do
File.rmdir("test_snapshots_path")
end
if File.exists?("test_migration_path") do
File.rmdir("test_migration_path")
end
end)
defposts do
resource do
identities do
identity(:title, [:title])
end
end
attributes do
attribute(:id, :uuid, primary_key?: true, default: &Ecto.UUID.generate/0)
attribute(:title, :string)
end
end
defapi([Post])
Mix.shell(Mix.Shell.Process)
AshPostgres.MigrationGenerator.generate(Api,
snapshot_path: "test_snapshots_path",
migration_path: "test_migration_path",
quiet: true,
format: false
)
:ok
end
test "when adding a field, it adds the field" do
defposts do
resource do
identities do
identity(:title, [:title])
end
end
attributes do
attribute(:id, :uuid, primary_key?: true, default: &Ecto.UUID.generate/0)
attribute(:title, :string)
attribute(:name, :string, allow_nil?: false)
end
end
defapi([Post])
AshPostgres.MigrationGenerator.generate(Api,
snapshot_path: "test_snapshots_path",
migration_path: "test_migration_path",
quiet: true,
format: false
)
assert [_file1, file2] =
Enum.sort(Path.wildcard("test_migration_path/*_migrate_resources*.exs"))
assert File.read!(file2) =~
~S[add :name, :text, null: false, default: nil, primary_key: false]
end
test "when renaming a field, it asks if you are renaming it, and renames it if you are" do
defposts do
attributes do
attribute(:id, :uuid, primary_key?: true, default: &Ecto.UUID.generate/0)
attribute(:name, :string, allow_nil?: false)
end
end
defapi([Post])
send(self(), {:mix_shell_input, :yes?, true})
AshPostgres.MigrationGenerator.generate(Api,
snapshot_path: "test_snapshots_path",
migration_path: "test_migration_path",
quiet: true,
format: false
)
assert [_file1, file2] =
Enum.sort(Path.wildcard("test_migration_path/*_migrate_resources*.exs"))
assert File.read!(file2) =~ ~S[rename table(:posts), :title, to: :name]
end
test "when renaming a field, it asks if you are renaming it, and adds it if you aren't" do
defposts do
attributes do
attribute(:id, :uuid, primary_key?: true, default: &Ecto.UUID.generate/0)
attribute(:name, :string, allow_nil?: false)
end
end
defapi([Post])
send(self(), {:mix_shell_input, :yes?, false})
AshPostgres.MigrationGenerator.generate(Api,
snapshot_path: "test_snapshots_path",
migration_path: "test_migration_path",
quiet: true,
format: false
)
assert [_file1, file2] =
Enum.sort(Path.wildcard("test_migration_path/*_migrate_resources*.exs"))
assert File.read!(file2) =~
~S[add :name, :text, null: false, default: nil, primary_key: false]
end
test "when renaming a field, it asks which field you are renaming it to, and renames it if you are" do
defposts do
attributes do
attribute(:id, :uuid, primary_key?: true, default: &Ecto.UUID.generate/0)
attribute(:name, :string, allow_nil?: false)
attribute(:subject, :string, allow_nil?: false)
end
end
defapi([Post])
send(self(), {:mix_shell_input, :yes?, true})
send(self(), {:mix_shell_input, :prompt, "subject"})
AshPostgres.MigrationGenerator.generate(Api,
snapshot_path: "test_snapshots_path",
migration_path: "test_migration_path",
quiet: true,
format: false
)
assert [_file1, file2] =
Enum.sort(Path.wildcard("test_migration_path/*_migrate_resources*.exs"))
assert File.read!(file2) =~ ~S[rename table(:posts), :title, to: :subject]
end
test "when renaming a field, it asks which field you are renaming it to, and adds it if you arent" do
defposts do
attributes do
attribute(:id, :uuid, primary_key?: true, default: &Ecto.UUID.generate/0)
attribute(:name, :string, allow_nil?: false)
attribute(:subject, :string, allow_nil?: false)
end
end
defapi([Post])
send(self(), {:mix_shell_input, :yes?, false})
AshPostgres.MigrationGenerator.generate(Api,
snapshot_path: "test_snapshots_path",
migration_path: "test_migration_path",
quiet: true,
format: false
)
assert [_file1, file2] =
Enum.sort(Path.wildcard("test_migration_path/*_migrate_resources*.exs"))
assert File.read!(file2) =~
~S[add :subject, :text, null: false, default: nil, primary_key: false]
end
test "when changing the primary key, it changes properly" do
defposts do
attributes do
attribute(:id, :uuid, primary_key?: false, default: &Ecto.UUID.generate/0)
attribute(:guid, :uuid, primary_key?: true, default: &Ecto.UUID.generate/0)
attribute(:title, :string)
end
end
defapi([Post])
AshPostgres.MigrationGenerator.generate(Api,
snapshot_path: "test_snapshots_path",
migration_path: "test_migration_path",
quiet: true,
format: false
)
assert [_file1, file2] =
Enum.sort(Path.wildcard("test_migration_path/*_migrate_resources*.exs"))
assert File.read!(file2) =~
~S[add :guid, :binary_id, null: true, default: fragment("uuid_generate_v4()"), primary_key: true]
end
test "when multiple schemas apply to the same table, all attributes are added" do
defposts do
attributes do
attribute(:id, :uuid, primary_key?: true, default: &Ecto.UUID.generate/0)
attribute(:title, :string)
attribute(:foobar, :string)
end
end
defposts Post2 do
attributes do
attribute(:id, :uuid, primary_key?: true, default: &Ecto.UUID.generate/0)
attribute(:name, :string)
end
end
defapi([Post, Post2])
AshPostgres.MigrationGenerator.generate(Api,
snapshot_path: "test_snapshots_path",
migration_path: "test_migration_path",
quiet: true,
format: false
)
assert [_file1, file2] =
Enum.sort(Path.wildcard("test_migration_path/*_migrate_resources*.exs"))
assert File.read!(file2) =~
~S[add :foobar, :text, null: true, default: nil, primary_key: false]
assert File.read!(file2) =~
~S[add :name, :text, null: true, default: nil, primary_key: false]
end
end
end

test/support/api.ex
View file

@@ -0,0 +1,9 @@
defmodule AshPostgres.Test.Api do
@moduledoc false
use Ash.Api
resources do
resource(AshPostgres.Test.Post)
resource(AshPostgres.Test.Comment)
end
end

View file

@@ -0,0 +1,24 @@
defmodule AshPostgres.Test.Comment do
@moduledoc false
use Ash.Resource,
data_layer: AshPostgres.DataLayer
postgres do
table "comments"
repo AshPostgres.TestRepo
end
actions do
read(:read)
create(:create)
end
attributes do
attribute(:id, :uuid, primary_key?: true, default: &Ecto.UUID.generate/0)
attribute(:title, :string)
end
relationships do
belongs_to(:post, AshPostgres.Test.Post)
end
end

View file

@@ -0,0 +1,26 @@
defmodule AshPostgres.Test.Post do
@moduledoc false
use Ash.Resource,
data_layer: AshPostgres.DataLayer
postgres do
table "posts"
repo AshPostgres.TestRepo
end
actions do
read(:read)
create(:create)
end
attributes do
attribute(:id, :uuid, primary_key?: true, default: &Ecto.UUID.generate/0)
attribute(:title, :string)
attribute(:score, :integer)
attribute(:public, :boolean)
end
relationships do
has_many(:comments, AshPostgres.Test.Comment, destination_field: :post_id)
end
end

View file

@@ -2,4 +2,8 @@ defmodule AshPostgres.TestRepo do
@moduledoc false
use AshPostgres.Repo,
otp_app: :ash_postgres
+ def installed_extensions do
+ ["uuid-ossp"]
+ end
end