2020-09-11 12:26:47 +12:00
|
|
|
defmodule AshPostgres.MigrationGenerator do
|
2022-08-24 11:56:46 +12:00
|
|
|
@moduledoc false
|
2020-09-11 12:26:47 +12:00
|
|
|
|
2022-04-19 16:06:02 +12:00
|
|
|
require Logger
|
|
|
|
|
2020-09-11 12:26:47 +12:00
|
|
|
import Mix.Generator
|
|
|
|
|
|
|
|
alias AshPostgres.MigrationGenerator.{Operation, Phase}
|
|
|
|
|
2022-12-14 07:31:57 +13:00
|
|
|
# Options accepted by the migration generator. Built from the user-supplied
# keyword list via `opts/1` (`struct(__MODULE__, opts)`).
defstruct snapshot_path: nil,
          # where generated migrations are written (nil => default priv path)
          migration_path: nil,
          # optional explicit migration name (otherwise "migrate_resourcesN")
          name: nil,
          # where tenant (context-multitenant) migrations are written
          tenant_migration_path: nil,
          quiet: false,
          current_snapshots: nil,
          answers: [],
          # when true, never prompt; ambiguity raises instead
          no_shell?: false,
          # run the generated code through the formatter
          format: true,
          # print instead of writing files
          dry_run: false,
          # exit non-zero if any migration would be generated (implies dry_run)
          check: false,
          drop_columns: false
|
2020-09-11 12:26:47 +12:00
|
|
|
|
2024-03-28 09:52:28 +13:00
|
|
|
# Generates migrations (and snapshots) for all AshPostgres-backed resources
# in the given domain(s).
#
# Steps, in order:
#   1. collect snapshots for every resource whose data layer is
#      AshPostgres.DataLayer and that has `migrate?` enabled
#   2. split into tenant (context-multitenant) and global snapshots; tenant
#      snapshots flagged `global` are also added to the global set with their
#      multitenancy config blanked out
#   3. generate extension migrations, then tenant migrations, then global ones
def generate(domains, opts \\ []) do
  domains = List.wrap(domains)
  opts = opts(opts)

  all_resources = Enum.uniq(Enum.flat_map(domains, &Ash.Domain.Info.resources/1))

  {tenant_snapshots, snapshots} =
    all_resources
    |> Enum.filter(fn resource ->
      Ash.DataLayer.data_layer(resource) == AshPostgres.DataLayer &&
        AshPostgres.DataLayer.Info.migrate?(resource)
    end)
    |> Enum.flat_map(&get_snapshots(&1, all_resources))
    |> Enum.split_with(&(&1.multitenancy.strategy == :context))

  # Globally-visible tenant resources also get a table in the public schema.
  tenant_snapshots_to_include_in_global =
    tenant_snapshots
    |> Enum.filter(& &1.multitenancy.global)
    |> Enum.map(&Map.put(&1, :multitenancy, %{strategy: nil, attribute: nil, global: nil}))

  snapshots = snapshots ++ tenant_snapshots_to_include_in_global

  repos =
    (snapshots ++ tenant_snapshots)
    |> Enum.map(& &1.repo)
    |> Enum.uniq()

  Mix.shell().info("\nExtension Migrations: ")
  create_extension_migrations(repos, opts)
  Mix.shell().info("\nGenerating Tenant Migrations: ")
  create_migrations(tenant_snapshots, opts, true, snapshots)
  Mix.shell().info("\nGenerating Migrations:")
  create_migrations(snapshots, opts, false)
end
|
|
|
|
|
2021-04-05 08:09:11 +12:00
|
|
|
@doc """
A work in progress utility for getting snapshots.

Does not support everything supported by the migration generator.
"""
def take_snapshots(domain, repo, only_resources \\ nil) do
  resources = domain |> Ash.Domain.Info.resources() |> Enum.uniq()

  candidates =
    Enum.filter(resources, fn resource ->
      # Short-circuit order matters: only inspect the repo of resources that
      # actually use the AshPostgres data layer.
      Ash.DataLayer.data_layer(resource) == AshPostgres.DataLayer &&
        AshPostgres.DataLayer.Info.repo(resource, :mutate) == repo &&
        (is_nil(only_resources) || resource in only_resources)
    end)

  Enum.flat_map(candidates, &get_snapshots(&1, resources))
end
|
|
|
|
|
|
|
|
@doc """
A work in progress utility for getting operations between snapshots.

Does not support everything supported by the migration generator.
"""
def get_operations_from_snapshots(old_snapshots, new_snapshots, opts \\ []) do
  # Force non-interactive mode: any ambiguity raises instead of prompting.
  opts = %{opts(opts) | no_shell?: true}

  old_snapshots =
    old_snapshots
    |> Enum.map(&sanitize_snapshot/1)

  new_snapshots
  |> deduplicate_snapshots(opts, [], old_snapshots)
  |> fetch_operations(opts)
  # fetch_operations returns {snapshot, operations} pairs; keep operations only.
  |> Enum.flat_map(&elem(&1, 1))
  |> Enum.uniq()
  |> organize_operations()
end
|
|
|
|
|
2023-04-12 09:41:53 +12:00
|
|
|
# Fills in `references.primary_key?` for attributes whose reference config has
# no value for it yet, by looking the referenced table up in `snapshots`.
defp add_references_primary_key(snapshot, snapshots) do
  updated_attributes =
    Enum.map(snapshot.attributes, fn
      %{references: references} = attribute when not is_nil(references) ->
        case Map.get(references, :primary_key?) do
          nil ->
            resolved = find_references_primary_key(references, snapshots)
            %{attribute | references: Map.put(references, :primary_key?, resolved)}

          _already_set ->
            attribute
        end

      attribute ->
        attribute
    end)

  %{snapshot | attributes: updated_attributes}
end
|
|
|
|
|
|
|
|
# Returns true when the referenced table's destination attribute is a primary
# key in any of the given snapshots; false otherwise (including nil inputs).
defp find_references_primary_key(references, snapshots) do
  Enum.any?(snapshots, fn snapshot ->
    snapshot && references && snapshot.table == references.table &&
      Enum.any?(snapshot.attributes, fn attribute ->
        attribute.source == references.destination_attribute && attribute.primary_key?
      end)
  end)
end
|
|
|
|
|
2021-04-05 08:09:11 +12:00
|
|
|
# Builds the option struct from a keyword list; `--check` implies `--dry-run`.
defp opts(opts) do
  config = struct(__MODULE__, opts)

  case config do
    %{check: true} -> %{config | dry_run: true}
    _ -> config
  end
end
|
|
|
|
|
2022-12-14 07:31:57 +13:00
|
|
|
# Returns the directory where resource snapshots are stored.
# An explicitly configured :snapshot_path always wins.
defp snapshot_path(%{snapshot_path: snapshot_path}, _) when not is_nil(snapshot_path),
  do: snapshot_path

defp snapshot_path(_config, repo) do
  # Copied from ecto's mix task, thanks Ecto ❤️
  config = repo.config()

  app = Keyword.fetch!(config, :otp_app)
  # Falls back to the current working directory when the repo's app is the
  # top-level project (not listed in deps_paths).
  Path.join([Mix.Project.deps_paths()[app] || File.cwd!(), "priv", "resource_snapshots"])
end
|
|
|
|
|
2021-03-03 06:33:24 +13:00
|
|
|
# For each repo, compares `repo.installed_extensions/0` against the stored
# extensions snapshot and generates a single migration that installs any
# missing extensions (including the special "ash-functions" pseudo-extension
# and versioned extension modules). Updates the snapshot file unless dry_run.
defp create_extension_migrations(repos, opts) do
  for repo <- repos do
    snapshot_path = snapshot_path(opts, repo)
    repo_name = repo_name(repo)

    # Older versions stored the snapshot directly under the snapshot path;
    # migrate it into the per-repo directory.
    legacy_snapshot_file = Path.join(snapshot_path, "extensions.json")

    snapshot_file =
      snapshot_path
      |> Path.join(repo_name)
      |> Path.join("extensions.json")

    unless opts.dry_run do
      # Best effort: File.rename/2 returns {:error, :enoent} (ignored) when
      # there is no legacy file.
      File.rename(legacy_snapshot_file, snapshot_file)
    end

    installed_extensions =
      if File.exists?(snapshot_file) do
        snapshot_file
        |> File.read!()
        |> Jason.decode!(keys: :atoms!)
      else
        []
      end

    # The snapshot is either a legacy bare list of names, or a map with an
    # :installed key (and possibly :ash_functions_version).
    {extensions_snapshot, installed_extensions} =
      case installed_extensions do
        installed when is_list(installed) ->
          {%{
             installed: installed
           }, installed}

        other ->
          {other, other.installed}
      end

    # Normalize requested extensions to {unique_name, extension} pairs.
    # Extension modules expose {name, version, up_fn, down_fn}.
    requesteds =
      repo.installed_extensions()
      |> Enum.map(fn
        extension_module when is_atom(extension_module) ->
          {ext_name, version, _up_fn, _down_fn} = extension = extension_module.extension()

          {"#{ext_name}_v#{version}", extension}

        extension_name ->
          {extension_name, extension_name}
      end)

    to_install =
      requesteds
      |> Enum.reject(fn
        {"ash-functions", _} ->
          # ash-functions is "installed" when the snapshot records the latest
          # version of the generated SQL functions.
          extensions_snapshot[:ash_functions_version] ==
            AshPostgres.MigrationGenerator.AshFunctions.latest_version()

        {name, _} ->
          Enum.member?(installed_extensions, name)
      end)
      |> Enum.map(fn {_name, extension} -> extension end)

    if Enum.empty?(to_install) do
      Mix.shell().info("No extensions to install")
      :ok
    else
      # Pick a migration module/file name describing what is being installed.
      {module, migration_name} =
        case to_install do
          [{ext_name, version, _up_fn, _down_fn}] ->
            {"install_#{ext_name}_v#{version}_#{timestamp(true)}",
             "#{timestamp(true)}_install_#{ext_name}_v#{version}_extension"}

          ["ash-functions" = single] ->
            {"install_#{single}_extension_#{AshPostgres.MigrationGenerator.AshFunctions.latest_version()}_#{timestamp(true)}",
             "#{timestamp(true)}_install_#{single}_extension_#{AshPostgres.MigrationGenerator.AshFunctions.latest_version()}"}

          multiple ->
            {"install_#{Enum.count(multiple)}_extensions_#{timestamp(true)}",
             "#{timestamp(true)}_install_#{Enum.count(multiple)}_extensions"}
        end

      migration_file =
        opts
        |> migration_path(repo)
        |> Path.join(migration_name <> ".exs")

      sanitized_module =
        module
        |> String.replace("-", "_")
        |> Macro.camelize()

      module_name = Module.concat([repo, Migrations, sanitized_module])

      install =
        Enum.map_join(to_install, "\n", fn
          "ash-functions" ->
            AshPostgres.MigrationGenerator.AshFunctions.install(
              extensions_snapshot[:ash_functions_version]
            )

          {ext_name, _version, up_fn, _down_fn} when is_function(up_fn, 1) ->
            # Recover the currently-installed version from a snapshot entry of
            # the form "<ext_name>_v<integer>"; defaults to 0.
            current_version =
              Enum.find_value(extensions_snapshot[:installed] || [], 0, fn name ->
                with ["", "_v" <> version] <- String.split(name, to_string(ext_name)),
                     {integer, ""} <- Integer.parse(version) do
                  integer
                else
                  _ -> nil
                end
              end)

            up_fn.(current_version)

          extension ->
            "execute(\"CREATE EXTENSION IF NOT EXISTS \\\"#{extension}\\\"\")"
        end)

      uninstall =
        Enum.map_join(to_install, "\n", fn
          "ash-functions" ->
            AshPostgres.MigrationGenerator.AshFunctions.drop(
              extensions_snapshot[:ash_functions_version]
            )

          {_ext_name, version, _up_fn, down_fn} when is_function(down_fn, 1) ->
            down_fn.(version)

          extension ->
            # DROP is left commented out; uninstalling extensions is opt-in.
            "# execute(\"DROP EXTENSION IF EXISTS \\\"#{extension}\\\"\")"
        end)

      contents = """
      defmodule #{inspect(module_name)} do
        @moduledoc \"\"\"
        Installs any extensions that are mentioned in the repo's `installed_extensions/0` callback

        This file was autogenerated with `mix ash_postgres.generate_migrations`
        \"\"\"

        use Ecto.Migration

        def up do
          #{install}
        end

        def down do
          # Uncomment this if you actually want to uninstall the extensions
          # when this migration is rolled back:
          #{uninstall}
        end
      end
      """

      # The new snapshot records everything that was requested (not just what
      # was newly installed).
      installed = Enum.map(requesteds, fn {name, _extension} -> name end)

      snapshot_contents =
        Jason.encode!(
          %{
            installed: installed
          }
          |> set_ash_functions(installed),
          pretty: true
        )

      contents = format(migration_file, contents, opts)

      if opts.dry_run do
        Mix.shell().info(snapshot_contents)
        Mix.shell().info(contents)
      else
        create_file(snapshot_file, snapshot_contents, force: true)
        create_file(migration_file, contents)
      end
    end
  end
end
|
|
|
|
|
2023-07-13 07:16:28 +12:00
|
|
|
# When "ash-functions" is among the installed extensions, records the latest
# generated-functions version in the snapshot; otherwise returns it unchanged.
defp set_ash_functions(snapshot, installed_extensions) do
  ash_functions? = Enum.member?(installed_extensions, "ash-functions")

  if ash_functions? do
    Map.put(
      snapshot,
      :ash_functions_version,
      AshPostgres.MigrationGenerator.AshFunctions.latest_version()
    )
  else
    snapshot
  end
end
|
|
|
|
|
2023-04-12 09:41:53 +12:00
|
|
|
# Generates migrations per repo: deduplicates snapshots, diffs them against
# existing snapshots to get operations, and writes one migration per group
# (concurrent custom indexes are split into their own migration). Exits with
# status 1 under --check if anything would be generated.
defp create_migrations(snapshots, opts, tenant?, non_tenant_snapshots \\ []) do
  snapshots
  |> Enum.group_by(& &1.repo)
  |> Enum.each(fn {repo, snapshots} ->
    deduped = deduplicate_snapshots(snapshots, opts, non_tenant_snapshots)

    # {snapshot, operations} pairs
    snapshots_with_operations =
      deduped
      |> fetch_operations(opts)
      |> Enum.map(&add_order_to_operations/1)

    snapshots = Enum.map(snapshots_with_operations, &elem(&1, 0))

    snapshots_with_operations
    |> Enum.flat_map(&elem(&1, 1))
    |> Enum.uniq()
    |> case do
      [] ->
        tenant_str =
          if tenant? do
            "tenant "
          else
            ""
          end

        Mix.shell().info(
          "No #{tenant_str}changes detected, so no migrations or snapshots have been created."
        )

        :ok

      operations ->
        if opts.check do
          IO.puts("""
          Migrations would have been generated, but the --check flag was provided.

          To see what migration would have been generated, run with the `--dry-run`
          option instead. To generate those migrations, run without either flag.
          """)

          exit({:shutdown, 1})
        end

        operations
        |> split_into_migrations()
        |> Enum.each(fn operations ->
          # Concurrent index creation cannot run inside a transaction.
          run_without_transaction? =
            Enum.any?(operations, fn
              %Operation.AddCustomIndex{index: %{concurrently: true}} ->
                true

              _ ->
                false
            end)

          operations
          |> organize_operations
          |> build_up_and_down()
          |> write_migration!(repo, opts, tenant?, run_without_transaction?)
        end)

        create_new_snapshot(snapshots, repo_name(repo), opts, tenant?)
    end
  end)
end
|
|
|
|
|
2022-11-26 08:06:22 +13:00
|
|
|
# Separates concurrently-created custom indexes into their own migration,
# since they must run outside a transaction. Returns one or two operation
# lists, regular operations first.
defp split_into_migrations(operations) do
  {concurrent_indexes, regular_ops} =
    Enum.split_with(operations, fn
      %Operation.AddCustomIndex{index: %{concurrently: true}} -> true
      _ -> false
    end)

  case concurrent_indexes do
    [] -> [regular_ops]
    _ -> [regular_ops, concurrent_indexes]
  end
end
|
|
|
|
|
2021-04-29 09:23:24 +12:00
|
|
|
# Annotates each operation's attribute with its positional order in the
# snapshot's attribute list.
defp add_order_to_operations({snapshot, operations}) do
  {snapshot, Enum.map(operations, &add_order_to_operation(&1, snapshot.attributes))}
end
|
|
|
|
|
|
|
|
# Stamps the operation's attribute with its index in `attributes` (matched by
# :source). Operations without an attribute pass through untouched.
defp add_order_to_operation(%{attribute: attribute} = op, attributes) do
  position = Enum.find_index(attributes, &(&1.source == attribute.source))

  %{op | attribute: Map.put(attribute, :order, position)}
end

defp add_order_to_operation(%{new_attribute: attribute} = op, attributes) do
  position = Enum.find_index(attributes, &(&1.source == attribute.source))

  %{op | new_attribute: Map.put(attribute, :order, position)}
end

defp add_order_to_operation(op, _attributes), do: op
|
|
|
|
|
2021-04-05 08:09:11 +12:00
|
|
|
# Orders operations, merges redundant ones, groups them into phases
# (create/alter table blocks), and drops empty/fully-commented phases.
defp organize_operations([]), do: []

defp organize_operations(operations) do
  operations
  |> sort_operations()
  |> streamline()
  |> group_into_phases()
  |> clean_phases()
end
|
|
|
|
|
2023-07-15 08:24:57 +12:00
|
|
|
# Drops phases with no operations; marks a phase commented when every one of
# its operations is commented. Bare operations pass through unchanged.
defp clean_phases(phases) do
  Enum.flat_map(phases, fn
    %{operations: []} ->
      []

    %{operations: operations} = phase ->
      all_commented? = Enum.all?(operations, &match?(%{commented?: true}, &1))

      if all_commented? do
        [%{phase | commented?: true}]
      else
        [phase]
      end

    other ->
      [other]
  end)
end
|
|
|
|
|
2023-04-12 09:41:53 +12:00
|
|
|
# Merges multiple snapshots that target the same {table, schema} into one
# combined snapshot, pairing each with its previously-stored snapshot (if
# any). Returns a list of {merged_snapshot, existing_snapshot} tuples.
defp deduplicate_snapshots(snapshots, opts, non_tenant_snapshots, existing_snapshots \\ []) do
  grouped =
    snapshots
    |> Enum.group_by(fn snapshot ->
      {snapshot.table, snapshot.schema}
    end)

  # Look up the stored snapshot for each group. In no_shell? mode the caller
  # supplies existing snapshots directly; otherwise read them from disk.
  old_snapshots =
    Map.new(grouped, fn {key, [snapshot | _]} ->
      old_snapshot =
        if opts.no_shell? do
          Enum.find(existing_snapshots, &(&1.table == snapshot.table))
        else
          get_existing_snapshot(snapshot, opts)
        end

      {
        key,
        old_snapshot
      }
    end)

  old_non_tenant_snapshots =
    non_tenant_snapshots
    |> Enum.uniq_by(&{&1.table, &1.schema})
    |> Enum.map(fn snapshot ->
      if opts.no_shell? do
        Enum.find(
          existing_snapshots,
          &(&1.table == snapshot.table && &1.schema == snapshot.schema)
        )
      else
        get_existing_snapshot(snapshot, opts)
      end
    end)

  old_snapshots_list = Map.values(old_snapshots) ++ old_non_tenant_snapshots

  # Backfill references.primary_key? on old snapshots (older snapshot files
  # did not record it).
  old_snapshots =
    Map.new(old_snapshots, fn {key, old_snapshot} ->
      if old_snapshot do
        {key, add_references_primary_key(old_snapshot, old_snapshots_list)}
      else
        {key, old_snapshot}
      end
    end)

  grouped
  |> Enum.map(fn {key, [snapshot | _] = snapshots} ->
    existing_snapshot = old_snapshots[key]

    {primary_key, identities} = merge_primary_keys(existing_snapshot, snapshots, opts)

    attributes = Enum.flat_map(snapshots, & &1.attributes)

    # Used by merge_attributes to decide nullability: an attribute missing
    # from some resources with create actions must be nullable.
    count_with_create =
      snapshots
      |> Enum.count(& &1.has_create_action)

    new_snapshot = %{
      snapshot
      | attributes: merge_attributes(attributes, snapshot.table, count_with_create),
        identities: snapshots |> Enum.flat_map(& &1.identities) |> Enum.uniq(),
        custom_indexes: snapshots |> Enum.flat_map(& &1.custom_indexes) |> Enum.uniq(),
        custom_statements: snapshots |> Enum.flat_map(& &1.custom_statements) |> Enum.uniq()
    }

    all_identities =
      new_snapshot.identities
      |> Kernel.++(identities)
      |> Enum.sort_by(& &1.name)
      # We sort the identities by whether an identity with a matching name exists
      # in the existing snapshot, so that we prefer identities that currently
      # exist over new ones (the later uniq_by keeps the first occurrence).
      |> Enum.sort_by(fn identity ->
        existing_snapshot
        |> Kernel.||(%{})
        |> Map.get(:identities, [])
        |> Enum.any?(fn existing_identity ->
          existing_identity.name == identity.name
        end)
        |> Kernel.!()
      end)
      |> Enum.uniq_by(fn identity ->
        {Enum.sort(identity.keys), identity.base_filter}
      end)

    new_snapshot = %{new_snapshot | identities: all_identities}

    {
      %{
        new_snapshot
        | attributes:
            Enum.map(new_snapshot.attributes, fn attribute ->
              if attribute.source in primary_key do
                %{attribute | primary_key?: true}
              else
                %{attribute | primary_key?: false}
              end
            end)
      },
      existing_snapshot
    }
  end)
end
|
|
|
|
|
2021-03-03 05:38:12 +13:00
|
|
|
# Merges attribute definitions that share a :source across multiple resource
# snapshots for the same table into a single column definition.
# `count` is the number of snapshots with a create action: an attribute that
# appears in fewer than that many snapshots must allow nil.
defp merge_attributes(attributes, table, count) do
  attributes
  # Remember original position so merged columns keep a stable order.
  |> Enum.with_index()
  |> Enum.map(fn {attr, i} -> Map.put(attr, :order, i) end)
  |> Enum.group_by(& &1.source)
  |> Enum.map(fn {source, attributes} ->
    # Largest declared size wins (nil sizes are ignored).
    size =
      attributes
      |> Enum.map(& &1.size)
      |> Enum.filter(& &1)
      |> case do
        [] ->
          nil

        sizes ->
          Enum.max(sizes)
      end

    %{
      source: source,
      type: merge_types(Enum.map(attributes, & &1.type), source, table),
      size: size,
      default: merge_defaults(Enum.map(attributes, & &1.default)),
      allow_nil?: Enum.any?(attributes, & &1.allow_nil?) || Enum.count(attributes) < count,
      generated?: Enum.any?(attributes, & &1.generated?),
      references: merge_references(Enum.map(attributes, & &1.references), source, table),
      # Primary keys are decided later by merge_primary_keys.
      primary_key?: false,
      order: attributes |> Enum.map(& &1.order) |> Enum.min()
    }
  end)
  |> Enum.sort(&(&1.order < &2.order))
  |> Enum.map(&Map.drop(&1, [:order]))
end
|
|
|
|
|
|
|
|
# Merges the reference configurations declared for the same column across
# snapshots. Returns nil when no snapshot declares a reference; raises (via
# merge_uniq!) on conflicting values for any field.
defp merge_references(references, name, table) do
  references
  |> Enum.reject(&is_nil/1)
  |> Enum.uniq()
  |> case do
    [] ->
      nil

    references ->
      %{
        destination_attribute: merge_uniq!(references, table, :destination_attribute, name),
        deferrable: merge_uniq!(references, table, :deferrable, name),
        destination_attribute_default:
          merge_uniq!(references, table, :destination_attribute_default, name),
        destination_attribute_generated:
          merge_uniq!(references, table, :destination_attribute_generated, name),
        multitenancy: merge_uniq!(references, table, :multitenancy, name),
        primary_key?: merge_uniq!(references, table, :primary_key?, name),
        on_delete: merge_uniq!(references, table, :on_delete, name),
        on_update: merge_uniq!(references, table, :on_update, name),
        # match_with is declared as a keyword list; stored as a map.
        match_with: merge_uniq!(references, table, :match_with, name) |> to_map(),
        match_type: merge_uniq!(references, table, :match_type, name),
        name: merge_uniq!(references, table, :name, name),
        table: merge_uniq!(references, table, :table, name),
        schema: merge_uniq!(references, table, :schema, name)
      }
  end
end
|
|
|
|
|
2023-11-21 10:52:50 +13:00
|
|
|
# Converts a keyword list to a map, passing nil through unchanged.
defp to_map(nil), do: nil
defp to_map(kw_list) when is_list(kw_list), do: Enum.into(kw_list, %{})
|
|
|
|
|
2021-04-01 19:19:30 +13:00
|
|
|
# Extracts `field` from each reference config and requires that all non-nil
# values agree. Returns nil when no value is set, the single agreed value
# otherwise, and raises on conflicts.
defp merge_uniq!(references, table, field, attribute) do
  distinct_values =
    references
    |> Enum.map(&Map.get(&1, field))
    |> Enum.reject(&is_nil/1)
    |> Enum.uniq()

  case distinct_values do
    [] ->
      nil

    [value] ->
      value

    values ->
      values = Enum.map_join(values, "\n", &" * #{inspect(&1)}")

      raise """
      Conflicting configurations for references for #{table}.#{attribute}:

      Values:

      #{values}
      """
  end
end
|
|
|
|
|
|
|
|
# Requires all snapshots to agree on a column's type; raises otherwise.
defp merge_types(types, name, table) do
  case Enum.uniq(types) do
    [type] ->
      type

    types ->
      raise "Conflicting types for table `#{table}.#{name}`: #{inspect(types)}"
  end
end
|
|
|
|
|
|
|
|
# Uses the common default when all snapshots agree; otherwise falls back to
# the literal string "nil" (rendered as-is into the migration).
defp merge_defaults(defaults) do
  case Enum.uniq(defaults) do
    [default] -> default
    _ -> "nil"
  end
end
|
|
|
|
|
2021-04-05 08:09:11 +12:00
|
|
|
# Resolves the primary key for a table with no existing snapshot.
#
# If every snapshot for the table agrees on the primary key, use it and
# produce no extra identities. Otherwise ask the user which key wins and turn
# the losing key sets into identities. Returns {primary_key_names, identities}.
# Raises in no_shell? mode when the ambiguity cannot be resolved.
defp merge_primary_keys(nil, [snapshot | _] = snapshots, opts) do
  snapshots
  |> Enum.map(&pkey_names(&1.attributes))
  |> Enum.uniq()
  |> case do
    [pkey_names] ->
      {pkey_names, []}

    unique_primary_keys ->
      unique_primary_key_names =
        unique_primary_keys
        |> Enum.with_index()
        |> Enum.map_join("\n", fn {pkey, index} ->
          "#{index}: #{inspect(pkey)}"
        end)

      choice =
        if opts.no_shell? do
          raise "Unimplemented: cannot resolve primary key ambiguity without shell input"
        else
          message = """
          Which primary key should be used for the table `#{snapshot.table}` (enter the number)?

          #{unique_primary_key_names}
          """

          opts
          |> prompt(message)
          # Fix: `Mix.shell().prompt/1` returns the entered line including the
          # trailing newline; `String.to_integer/1` raises on "1\n", so trim
          # the input first.
          |> String.trim()
          |> String.to_integer()
        end

      identities =
        unique_primary_keys
        |> List.delete_at(choice)
        |> Enum.map(fn pkey_names ->
          pkey_name_string = Enum.join(pkey_names, "_")
          name = snapshot.table <> "_" <> pkey_name_string

          %{
            keys: pkey_names,
            name: name
          }
        end)

      primary_key = Enum.sort(Enum.at(unique_primary_keys, choice))

      # Don't create an identity that duplicates the chosen primary key.
      identities =
        Enum.reject(identities, fn identity ->
          Enum.sort(identity.keys) == primary_key
        end)

      {primary_key, identities}
  end
end
|
|
|
|
|
2021-04-05 08:09:11 +12:00
|
|
|
# Resolves the primary key when an existing snapshot is present: if any new
# snapshot still uses the stored primary key, keep it and convert the other
# key sets into identities; otherwise fall back to interactive resolution.
defp merge_primary_keys(existing_snapshot, snapshots, opts) do
  pkey_names = pkey_names(existing_snapshot.attributes)

  one_pkey_exists? =
    Enum.any?(snapshots, fn snapshot ->
      pkey_names(snapshot.attributes) == pkey_names
    end)

  if one_pkey_exists? do
    identities =
      snapshots
      |> Enum.map(&pkey_names(&1.attributes))
      |> Enum.uniq()
      |> Enum.reject(&(&1 == pkey_names))
      |> Enum.map(fn pkey_names ->
        pkey_name_string = Enum.join(pkey_names, "_")
        name = existing_snapshot.table <> "_" <> pkey_name_string

        %{
          keys: pkey_names,
          name: name
        }
      end)

    {pkey_names, identities}
  else
    # No snapshot matches the stored key anymore: resolve as if fresh.
    merge_primary_keys(nil, snapshots, opts)
  end
end
|
|
|
|
|
2024-04-03 16:03:57 +13:00
|
|
|
# Asks a yes/no question; under --check every question is answered "yes"
# without touching the shell.
defp yes?(opts, message) do
  case opts do
    %{check: true} -> true
    _ -> Mix.shell().yes?(message)
  end
end
|
|
|
|
|
|
|
|
# Prompts for input; under --check returns a canned "response" without
# touching the shell.
defp prompt(opts, message) do
  case opts do
    %{check: true} -> "response"
    _ -> Mix.shell().prompt(message)
  end
end
|
|
|
|
|
2020-09-11 12:26:47 +12:00
|
|
|
# Returns the sorted sources of all primary-key attributes.
defp pkey_names(attributes) do
  attributes
  |> Enum.flat_map(fn attribute ->
    if attribute.primary_key?, do: [attribute.source], else: []
  end)
  |> Enum.sort()
end
|
|
|
|
|
2021-03-03 06:33:24 +13:00
|
|
|
# Returns the directory migrations are written to for a repo:
# "<base>/<repo_name>/migrations" (or "tenant_migrations"), where <base> is
# the configured path or the repo app's priv directory.
defp migration_path(opts, repo, tenant? \\ false) do
  repo_name = repo_name(repo)
  # Copied from ecto's mix task, thanks Ecto ❤️
  config = repo.config()
  app = Keyword.fetch!(config, :otp_app)

  if tenant? do
    if opts.tenant_migration_path do
      opts.tenant_migration_path
    else
      Path.join([Mix.Project.deps_paths()[app] || File.cwd!(), "priv"])
    end
    |> Path.join(repo_name)
    |> Path.join("tenant_migrations")
  else
    if opts.migration_path do
      opts.migration_path
    else
      Path.join([Mix.Project.deps_paths()[app] || File.cwd!(), "priv"])
    end
    |> Path.join(repo_name)
    |> Path.join("migrations")
  end
end
|
|
|
|
|
|
|
|
# Derives a directory-friendly name from a repo module,
# e.g. MyApp.CustomRepo -> "custom_repo".
defp repo_name(repo) do
  repo
  |> Module.split()
  |> List.last()
  |> Macro.underscore()
end
|
|
|
|
|
2022-11-26 08:06:22 +13:00
|
|
|
# Renders and writes a single migration file from the generated up/down code.
# The name is either the user-provided --name or "migrate_resourcesN" with N
# one past the highest existing counter. Raises with the numbered source if
# formatting the generated code fails.
defp write_migration!({up, down}, repo, opts, tenant?, run_without_transaction?) do
  migration_path = migration_path(opts, repo, tenant?)

  {migration_name, last_part} =
    if opts.name do
      {"#{timestamp(true)}_#{opts.name}", "#{opts.name}"}
    else
      # Find the highest existing migrate_resources counter and add 1.
      count =
        migration_path
        |> Path.join("*_migrate_resources*")
        |> Path.wildcard()
        |> Enum.map(fn path ->
          path
          |> Path.basename()
          |> String.split("_migrate_resources", parts: 2)
          |> Enum.at(1)
          |> Integer.parse()
          |> case do
            {integer, _} ->
              integer

            _ ->
              0
          end
        end)
        |> Enum.max(fn -> 0 end)
        |> Kernel.+(1)

      {"#{timestamp(true)}_migrate_resources#{count}", "migrate_resources#{count}"}
    end

  migration_file =
    migration_path
    |> Path.join(migration_name <> ".exs")

  module_name =
    if tenant? do
      Module.concat([repo, TenantMigrations, Macro.camelize(last_part)])
    else
      Module.concat([repo, Migrations, Macro.camelize(last_part)])
    end

  # Concurrent index creation must run outside a transaction/migration lock;
  # nil (interpolated as "") when not needed.
  module_attributes =
    if run_without_transaction? do
      """
      @disable_ddl_transaction true
      @disable_migration_lock true
      """
    end

  contents = """
  defmodule #{inspect(module_name)} do
    @moduledoc \"\"\"
    Updates resources based on their most recent snapshots.

    This file was autogenerated with `mix ash_postgres.generate_migrations`
    \"\"\"

    use Ecto.Migration

    #{module_attributes}

    def up do
      #{up}
    end

    def down do
      #{down}
    end
  end
  """

  try do
    contents = format(migration_file, contents, opts)

    if opts.dry_run do
      Mix.shell().info(contents)
    else
      create_file(migration_file, contents)
    end
  rescue
    exception ->
      reraise(
        """
        Exception while formatting generated code:
        #{Exception.format(:error, exception, __STACKTRACE__)}

        Code:

        #{add_line_numbers(contents)}

        To generate it unformatted anyway, but manually fix it, use the `--no-format` option.
        """,
        __STACKTRACE__
      )
  end
end
|
|
|
|
|
|
|
|
# Prefixes each line of `contents` with a left-aligned line number, used when
# showing generated code that failed to format.
#
# Numbers are 1-based to match how compilers and the formatter report line
# numbers (the previous 0-based numbering was off by one relative to error
# messages users would be cross-referencing).
defp add_line_numbers(contents) do
  lines = String.split(contents, "\n")

  # Width of the widest line number, so the `|` gutter stays aligned.
  digits = String.length(to_string(Enum.count(lines)))

  lines
  |> Enum.with_index(1)
  |> Enum.map_join("\n", fn {line, index} ->
    "#{String.pad_trailing(to_string(index), digits, " ")} | #{line}"
  end)
end
|
|
|
|
|
|
|
|
# Persists each snapshot as a timestamped JSON file under the repo's snapshot
# folder (a `tenants/` subfolder is used for context-multitenant resources).
# Also migrates the legacy layout where a single `<table>.json` file lived at
# the folder root into `<table>/initial.json`.
#
# No-op when `opts.dry_run` is set.
defp create_new_snapshot(snapshots, repo_name, opts, tenant?) do
  unless opts.dry_run do
    Enum.each(snapshots, fn snapshot ->
      snapshot_binary = snapshot_to_binary(snapshot)

      snapshot_folder =
        if tenant? do
          opts
          |> snapshot_path(snapshot.repo)
          |> Path.join(repo_name)
          |> Path.join("tenants")
        else
          opts
          |> snapshot_path(snapshot.repo)
          |> Path.join(repo_name)
        end

      # Schema-qualified tables get their own `<schema>.<table>` folder so
      # identically-named tables in different schemas don't collide.
      snapshot_file =
        if snapshot.schema do
          Path.join(snapshot_folder, "#{snapshot.schema}.#{snapshot.table}/#{timestamp()}.json")
        else
          Path.join(snapshot_folder, "#{snapshot.table}/#{timestamp()}.json")
        end

      # Fail fast with a clear error if the directory can't be created —
      # previously the result was ignored and the write below raised a
      # confusing :enoent instead.
      File.mkdir_p!(Path.dirname(snapshot_file))
      File.write!(snapshot_file, snapshot_binary, [])

      # Legacy single-file snapshot location (pre folder-per-table layout).
      old_snapshot_file = Path.join(snapshot_folder, "#{snapshot.table}.json")

      if File.exists?(old_snapshot_file) do
        # Best-effort migration of the legacy file; the result is
        # intentionally ignored so a failed rename doesn't abort generation.
        new_snapshot_file = Path.join(snapshot_folder, "#{snapshot.table}/initial.json")
        File.rename(old_snapshot_file, new_snapshot_file)
      end
    end)
  end
end
|
|
|
|
|
2021-04-05 08:09:11 +12:00
|
|
|
@doc false
# Renders every phase into its `up`/`down` SQL/Ecto DSL text and returns the
# concatenated `{up, down}` bodies. The down body runs the phases in reverse
# order. Phases flagged as commented are emitted as comments.
def build_up_and_down(phases) do
  up_body =
    Enum.map_join(phases, "\n", fn phase ->
      rendered = phase.__struct__.up(phase) <> "\n"
      maybe_comment(rendered, phase)
    end)

  down_body =
    phases
    |> Enum.reverse()
    |> Enum.map_join("\n", fn phase ->
      rendered = phase.__struct__.down(phase) <> "\n"
      maybe_comment(rendered, phase)
    end)

  {up_body, down_body}
end
|
|
|
|
|
2020-11-25 12:11:02 +13:00
|
|
|
# When the phase is marked `commented?`, turns every line of `text` into an
# Elixir comment (lines that already start with "#" are left untouched).
# Otherwise returns the text unchanged.
defp maybe_comment(text, %{commented?: true}) do
  text
  |> String.split("\n")
  |> Enum.map_join("\n", fn
    "#" <> _ = already_commented -> already_commented
    line -> "# #{line}"
  end)
end

defp maybe_comment(text, _), do: text
|
|
|
|
|
2024-03-12 15:08:25 +13:00
|
|
|
defp format(path, string, opts) do
|
2020-09-11 12:26:47 +12:00
|
|
|
if opts.format do
|
2024-03-12 15:08:25 +13:00
|
|
|
{func, _} = Mix.Tasks.Format.formatter_for_file(path)
|
|
|
|
func.(string)
|
2020-09-11 12:26:47 +12:00
|
|
|
else
|
|
|
|
string
|
|
|
|
end
|
2020-12-01 19:54:20 +13:00
|
|
|
rescue
|
|
|
|
exception ->
|
|
|
|
IO.puts("""
|
|
|
|
Exception while formatting:
|
|
|
|
|
|
|
|
#{inspect(exception)}
|
|
|
|
|
|
|
|
#{inspect(string)}
|
|
|
|
""")
|
|
|
|
|
|
|
|
reraise exception, __STACKTRACE__
|
2020-09-11 12:26:47 +12:00
|
|
|
end
|
|
|
|
|
|
|
|
# Collapses redundant operation pairs: when an `AddAttribute` is followed
# (within the same table/schema, and before any custom statement) by an
# `AlterAttribute` on the same column that only adds references, the two are
# merged so the column is created with its foreign key in a single step.
defp streamline(ops, acc \\ [])
defp streamline([], acc), do: Enum.reverse(acc)

defp streamline(
       [
         %Operation.AddAttribute{
           attribute: %{
             source: name
           },
           schema: schema,
           table: table
         } = add
         | rest
       ],
       acc
     ) do
  rest
  # Only look ahead within the run of operations on the same table/schema;
  # custom statements act as a barrier (their effects are opaque, so we must
  # not reorder/merge across them).
  |> Enum.take_while(fn
    %custom{} when custom in [Operation.AddCustomStatement, Operation.RemoveCustomStatement] ->
      false

    op ->
      op.table == table && op.schema == schema
  end)
  |> Enum.with_index()
  # Find an alter of this very column whose new version carries references.
  |> Enum.find(fn
    {%Operation.AlterAttribute{
       new_attribute: %{source: ^name, references: references},
       old_attribute: %{source: ^name}
     }, _}
    when not is_nil(references) ->
      true

    _ ->
      false
  end)
  |> case do
    nil ->
      streamline(rest, [add | acc])

    {alter, index} ->
      # Fold the alter's references into the add, and drop the now-redundant
      # alter (the index is valid in `rest` because `take_while` only ever
      # yields a prefix of `rest`).
      new_attribute = Map.put(add.attribute, :references, alter.new_attribute.references)
      streamline(List.delete_at(rest, index), [%{add | attribute: new_attribute} | acc])
  end
end

# Any other operation passes through unchanged.
defp streamline([first | rest], acc) do
  streamline(rest, [first | acc])
end
|
|
|
|
|
|
|
|
# Groups a sorted operation list into phases: a `Phase.Create` per new table,
# `Phase.Alter` runs for consecutive attribute changes on the same
# table/schema, and standalone (`no_phase: true`) operations passed through
# as-is. `current` is the phase being accumulated (operations kept reversed
# until the phase is closed); `acc` is the reversed list of finished phases.
defp group_into_phases(ops, current \\ nil, acc \\ [])

defp group_into_phases([], nil, acc), do: Enum.reverse(acc)

# Input exhausted with an open phase: close it (restoring operation order)
# and finish.
defp group_into_phases([], phase, acc) do
  phase = %{phase | operations: Enum.reverse(phase.operations)}
  Enum.reverse([phase | acc])
end

# A CreateTable starts a fresh Create phase (only valid with no open phase —
# sorting guarantees the previous phase was closed first).
defp group_into_phases(
       [
         %Operation.CreateTable{table: table, schema: schema, multitenancy: multitenancy} | rest
       ],
       nil,
       acc
     ) do
  group_into_phases(
    rest,
    %Phase.Create{table: table, schema: schema, multitenancy: multitenancy},
    acc
  )
end

# AddAttribute joins any open phase (Create or Alter) for the same
# table/schema.
defp group_into_phases(
       [%Operation.AddAttribute{table: table, schema: schema} = op | rest],
       %{table: table, schema: schema} = phase,
       acc
     ) do
  group_into_phases(rest, %{phase | operations: [op | phase.operations]}, acc)
end

# Alter/Rename only join an *Alter* phase for the same table/schema (they
# cannot appear inside a `create table` block).
defp group_into_phases(
       [%Operation.AlterAttribute{table: table, schema: schema} = op | rest],
       %Phase.Alter{table: table, schema: schema} = phase,
       acc
     ) do
  group_into_phases(rest, %{phase | operations: [op | phase.operations]}, acc)
end

defp group_into_phases(
       [%Operation.RenameAttribute{table: table, schema: schema} = op | rest],
       %Phase.Alter{table: table, schema: schema} = phase,
       acc
     ) do
  group_into_phases(rest, %{phase | operations: [op | phase.operations]}, acc)
end

# RemoveAttribute joins any open phase for the same table/schema.
defp group_into_phases(
       [%Operation.RemoveAttribute{table: table, schema: schema} = op | rest],
       %{table: table, schema: schema} = phase,
       acc
     ) do
  group_into_phases(rest, %{phase | operations: [op | phase.operations]}, acc)
end

# Operations flagged `no_phase` (e.g. index/constraint ops) are emitted
# standalone, outside any alter/create block.
defp group_into_phases([%{no_phase: true} = op | rest], nil, acc) do
  group_into_phases(rest, nil, [op | acc])
end

# Any other operation with no open phase opens a new Alter phase for its
# table/schema.
defp group_into_phases([operation | rest], nil, acc) do
  phase = %Phase.Alter{
    operations: [operation],
    multitenancy: operation.multitenancy,
    table: operation.table,
    schema: operation.schema
  }

  group_into_phases(rest, phase, acc)
end

# Fallback: the next operation doesn't fit the open phase — close the phase
# and retry the same operation list with no current phase.
defp group_into_phases(operations, phase, acc) do
  phase = %{phase | operations: Enum.reverse(phase.operations)}
  group_into_phases(operations, nil, [phase | acc])
end
|
|
|
|
|
2023-05-02 02:51:09 +12:00
|
|
|
# Insertion-sorts operations using the pairwise `after?/2` predicate: each
# operation is inserted immediately after the *last* already-placed operation
# it must come after (found by scanning the accumulator in reverse), or at the
# front when it must come after none of them.
#
# NOTE(review): `acc` is held in reverse order between recursive calls and
# re-reversed on each step; the final `[]` clause returns `acc` as-is, which
# yields the intended forward order — confirm against callers before touching.
defp sort_operations(ops, acc \\ [])
defp sort_operations([], acc), do: acc

defp sort_operations([op | rest], []), do: sort_operations(rest, [op])

defp sort_operations([op | rest], acc) do
  # Flip to reverse-placement order so `find_index` locates the most recently
  # placed operation that `op` must follow.
  acc = Enum.reverse(acc)

  after_index = Enum.find_index(acc, &after?(op, &1))

  new_acc =
    if after_index do
      acc
      |> List.insert_at(after_index, op)
      |> Enum.reverse()
    else
      # No ordering constraint found: place `op` before everything placed so
      # far (front of the forward-order list).
      [op | Enum.reverse(acc)]
    end

  sort_operations(rest, new_acc)
end
|
2023-04-12 09:41:53 +12:00
|
|
|
|
2023-07-15 08:24:57 +12:00
|
|
|
# Pairwise ordering predicate used by `sort_operations/2`:
# `after?(a, b)` returns true when operation `a` must be placed after
# operation `b` in the generated migration. Clause order is load-bearing —
# earlier clauses win, so reordering them changes migration output.

# Deferrability: "down" alters sort last overall; "up" alters sort after
# everything else.
defp after?(_, %Operation.AlterDeferrability{direction: :down}), do: true
defp after?(%Operation.AlterDeferrability{direction: :up}, _), do: true

# A primary key must be removed only after its foreign keys are dropped.
defp after?(
       %Operation.RemovePrimaryKey{},
       %Operation.DropForeignKey{}
     ),
     do: true

defp after?(
       %Operation.DropForeignKey{},
       %Operation.RemovePrimaryKey{}
     ),
     do: false

# Apart from the FK special case above, RemovePrimaryKey sorts before
# everything; its "down" counterpart sorts after everything.
defp after?(%Operation.RemovePrimaryKey{}, _), do: false
defp after?(_, %Operation.RemovePrimaryKey{}), do: true
defp after?(%Operation.RemovePrimaryKeyDown{}, _), do: true
defp after?(_, %Operation.RemovePrimaryKeyDown{}), do: false

# Custom statements bracket the rest: additions run last, and nothing is
# allowed to sort after a removal.
defp after?(
       %Operation.AddCustomStatement{},
       _
     ),
     do: true

defp after?(
       _,
       %Operation.RemoveCustomStatement{}
     ),
     do: true

# Attributes on the same table keep their declared order.
defp after?(
       %Operation.AddAttribute{attribute: %{order: l}, table: table, schema: schema},
       %Operation.AddAttribute{attribute: %{order: r}, table: table, schema: schema}
     ),
     do: l > r

# Index renames/additions happen after other work on the same table.
defp after?(
       %Operation.RenameUniqueIndex{
         table: table,
         schema: schema
       },
       %{table: table, schema: schema}
     ) do
  true
end

defp after?(
       %Operation.AddUniqueIndex{
         table: table,
         schema: schema
       },
       %{table: table, schema: schema}
     ) do
  true
end

# A check constraint referencing a column (or the tenant attribute) must be
# added after that column exists.
defp after?(
       %Operation.AddCheckConstraint{
         constraint: %{attribute: attribute_or_attributes},
         table: table,
         multitenancy: multitenancy,
         schema: schema
       },
       %Operation.AddAttribute{table: table, attribute: %{source: source}, schema: schema}
     ) do
  source in List.wrap(attribute_or_attributes) ||
    (multitenancy.attribute && multitenancy.attribute in List.wrap(attribute_or_attributes))
end

# Custom indexes go after column additions on the same table.
defp after?(
       %Operation.AddCustomIndex{
         table: table,
         schema: schema
       },
       %Operation.AddAttribute{table: table, schema: schema}
     ) do
  true
end

# Concurrent index builds go after non-concurrent ones (they must run outside
# the DDL transaction, at the end).
defp after?(
       %Operation.AddCustomIndex{
         table: table,
         schema: schema,
         index: %{
           concurrently: true
         }
       },
       %Operation.AddCustomIndex{
         table: table,
         schema: schema,
         index: %{
           concurrently: false
         }
       }
     ) do
  true
end

# Re-adding a constraint with the same name must follow its removal (and
# never the other way around).
defp after?(
       %Operation.AddCheckConstraint{table: table, schema: schema, constraint: %{name: name}},
       %Operation.RemoveCheckConstraint{
         table: table,
         schema: schema,
         constraint: %{
           name: name
         }
       }
     ),
     do: true

defp after?(
       %Operation.RemoveCheckConstraint{
         table: table,
         schema: schema,
         constraint: %{
           name: name
         }
       },
       %Operation.AddCheckConstraint{table: table, schema: schema, constraint: %{name: name}}
     ),
     do: false

# Check constraints follow alters/renames of the columns they reference.
defp after?(
       %Operation.AddCheckConstraint{
         constraint: %{attribute: attribute_or_attributes},
         table: table,
         schema: schema
       },
       %Operation.AlterAttribute{table: table, new_attribute: %{source: source}, schema: schema}
     ) do
  source in List.wrap(attribute_or_attributes)
end

defp after?(
       %Operation.AddCheckConstraint{
         constraint: %{attribute: attribute_or_attributes},
         table: table,
         schema: schema
       },
       %Operation.RenameAttribute{
         table: table,
         new_attribute: %{source: source},
         schema: schema
       }
     ) do
  source in List.wrap(attribute_or_attributes)
end

# Unique index removal precedes (re-)addition, but follows everything else on
# the same table.
defp after?(
       %Operation.RemoveUniqueIndex{table: table, schema: schema},
       %Operation.AddUniqueIndex{table: table, schema: schema}
     ) do
  false
end

defp after?(
       %Operation.RemoveUniqueIndex{table: table, schema: schema},
       %{table: table, schema: schema}
     ) do
  true
end

# A constraint referencing a column must be removed after the column itself
# is removed/renamed away (the constraint op references the old name).
defp after?(
       %Operation.RemoveCheckConstraint{
         constraint: %{attribute: attributes},
         table: table,
         schema: schema
       },
       %Operation.RemoveAttribute{table: table, attribute: %{source: source}, schema: schema}
     ) do
  source in List.wrap(attributes)
end

defp after?(
       %Operation.RemoveCheckConstraint{
         constraint: %{attribute: attributes},
         table: table,
         schema: schema
       },
       %Operation.RenameAttribute{
         table: table,
         old_attribute: %{source: source},
         schema: schema
       }
     ) do
  source in List.wrap(attributes)
end

# Altering a column follows dropping its FK in the up direction; in the down
# direction the FK drop comes after the alter.
defp after?(%Operation.AlterAttribute{table: table, schema: schema}, %Operation.DropForeignKey{
       table: table,
       schema: schema,
       direction: :up
     }),
     do: true

defp after?(
       %Operation.AlterAttribute{table: table, schema: schema},
       %Operation.DropForeignKey{
         table: table,
         schema: schema,
         direction: :down
       }
     ),
     do: false

defp after?(
       %Operation.DropForeignKey{
         table: table,
         schema: schema,
         direction: :down
       },
       %Operation.AlterAttribute{table: table, schema: schema}
     ),
     do: true

# Columns are added after their table is created.
defp after?(%Operation.AddAttribute{table: table, schema: schema}, %Operation.CreateTable{
       table: table,
       schema: schema
     }) do
  true
end

# A column referencing another table's column is added after that target
# column exists.
defp after?(
       %Operation.AddAttribute{
         attribute: %{
           references: %{table: table, destination_attribute: name}
         }
       },
       %Operation.AddAttribute{table: table, attribute: %{source: name}}
     ),
     do: true

# Non-primary-key columns come after primary-key columns on the same table.
defp after?(
       %Operation.AddAttribute{
         table: table,
         schema: schema,
         attribute: %{
           primary_key?: false
         }
       },
       %Operation.AddAttribute{schema: schema, table: table, attribute: %{primary_key?: true}}
     ),
     do: true

# Adding a new PK column follows removing/demoting the old one.
defp after?(
       %Operation.AddAttribute{
         table: table,
         schema: schema,
         attribute: %{
           primary_key?: true
         }
       },
       %Operation.RemoveAttribute{
         schema: schema,
         table: table,
         attribute: %{primary_key?: true}
       }
     ),
     do: true

defp after?(
       %Operation.AddAttribute{
         table: table,
         schema: schema,
         attribute: %{
           primary_key?: true
         }
       },
       %Operation.AlterAttribute{
         schema: schema,
         table: table,
         new_attribute: %{primary_key?: false},
         old_attribute: %{primary_key?: true}
       }
     ),
     do: true

# NOTE(review): this clause is an exact duplicate of the one above and can
# never match; kept byte-for-byte pending an upstream cleanup.
defp after?(
       %Operation.AddAttribute{
         table: table,
         schema: schema,
         attribute: %{
           primary_key?: true
         }
       },
       %Operation.AlterAttribute{
         schema: schema,
         table: table,
         new_attribute: %{primary_key?: false},
         old_attribute: %{primary_key?: true}
       }
     ),
     do: true

# Removing the old PK column waits until another column has been promoted.
defp after?(
       %Operation.RemoveAttribute{
         schema: schema,
         table: table,
         attribute: %{primary_key?: true}
       },
       %Operation.AlterAttribute{
         table: table,
         schema: schema,
         new_attribute: %{
           primary_key?: true
         },
         old_attribute: %{
           primary_key?: false
         }
       }
     ),
     do: true

# Demoting a PK column follows promoting its replacement.
defp after?(
       %Operation.AlterAttribute{
         schema: schema,
         table: table,
         new_attribute: %{primary_key?: false},
         old_attribute: %{
           primary_key?: true
         }
       },
       %Operation.AlterAttribute{
         table: table,
         schema: schema,
         new_attribute: %{
           primary_key?: true
         },
         old_attribute: %{
           primary_key?: false
         }
       }
     ),
     do: true

defp after?(
       %Operation.AlterAttribute{
         schema: schema,
         table: table,
         new_attribute: %{primary_key?: false},
         old_attribute: %{
           primary_key?: true
         }
       },
       %Operation.AddAttribute{
         table: table,
         schema: schema,
         attribute: %{
           primary_key?: true
         }
       }
     ),
     do: false

# NOTE(review): shadowed by the clause above (same patterns, opposite
# result) — unreachable; kept byte-for-byte pending an upstream cleanup.
defp after?(
       %Operation.AlterAttribute{
         table: table,
         schema: schema,
         new_attribute: %{primary_key?: false},
         old_attribute: %{primary_key?: true}
       },
       %Operation.AddAttribute{
         table: table,
         schema: schema,
         attribute: %{
           primary_key?: true
         }
       }
     ),
     do: true

# Altering a column to reference a key that is part of a unique index on the
# target table must happen after that index exists.
defp after?(
       %Operation.AlterAttribute{
         new_attribute: %{
           references: %{destination_attribute: destination_attribute, table: table}
         }
       },
       %Operation.AddUniqueIndex{identity: %{keys: keys}, table: table}
     ) do
  destination_attribute in keys
end

# An alter that points a reference at a column follows the alter of that
# column on the target table (and not vice versa).
defp after?(
       %Operation.AlterAttribute{
         new_attribute: %{references: %{table: table, destination_attribute: source}}
       },
       %Operation.AlterAttribute{
         new_attribute: %{
           source: source
         },
         table: table
       }
     ) do
  true
end

defp after?(
       %Operation.AlterAttribute{
         new_attribute: %{
           source: source
         },
         table: table
       },
       %Operation.AlterAttribute{
         new_attribute: %{references: %{table: table, destination_attribute: source}}
       }
     ) do
  false
end

# Removing a referenced column follows un-referencing alters on referrers.
defp after?(
       %Operation.RemoveAttribute{attribute: %{source: source}, table: table},
       %Operation.AlterAttribute{
         old_attribute: %{
           references: %{table: table, destination_attribute: source}
         }
       }
     ),
     do: true

# Pointing a reference at a column follows adding that column.
defp after?(
       %Operation.AlterAttribute{
         new_attribute: %{
           references: %{table: table, destination_attribute: name}
         }
       },
       %Operation.AddAttribute{table: table, attribute: %{source: name}}
     ),
     do: true

# Constraints follow table creation.
defp after?(%Operation.AddCheckConstraint{table: table, schema: schema}, %Operation.CreateTable{
       table: table,
       schema: schema
     }) do
  true
end

# Any alter introducing references goes after other work on the same table.
defp after?(
       %Operation.AlterAttribute{new_attribute: %{references: references}, table: table},
       %{table: table}
     )
     when not is_nil(references),
     do: true

# Check-constraint ops otherwise sort after everything; all remaining pairs
# have no ordering constraint.
defp after?(%Operation.AddCheckConstraint{}, _), do: true
defp after?(%Operation.RemoveCheckConstraint{}, _), do: true

defp after?(_, _), do: false
|
|
|
|
|
2020-11-20 16:09:26 +13:00
|
|
|
# For each `{new_snapshot, existing_snapshot}` pair, computes the migration
# operations that take the old state to the new one, dropping pairs that need
# no changes. Returns `[{snapshot, operations}]`.
defp fetch_operations(snapshots, opts) do
  for {snapshot, existing_snapshot} <- snapshots,
      operations = do_fetch_operations(snapshot, existing_snapshot, opts),
      operations != [] do
    {snapshot, operations}
  end
end
|
|
|
|
|
2020-11-20 16:09:26 +13:00
|
|
|
# Diffs a new snapshot against an existing one, accumulating operations in
# `acc`. (The final catch-all clause lives below this excerpt.)
defp do_fetch_operations(snapshot, existing_snapshot, opts, acc \\ [])

# A schema change is treated as a brand-new table: restart the diff against
# "no existing snapshot" (only valid as the very first step, hence `[]`).
defp do_fetch_operations(
       %{schema: new_schema} = snapshot,
       %{schema: old_schema},
       opts,
       []
     )
     when new_schema != old_schema do
  do_fetch_operations(snapshot, nil, opts, [])
end

# No existing snapshot: synthesize an empty one so the generic diff clause
# emits Add* operations for everything, and prepend the CreateTable op.
defp do_fetch_operations(snapshot, nil, opts, acc) do
  empty_snapshot = %{
    attributes: [],
    identities: [],
    schema: nil,
    custom_indexes: [],
    custom_statements: [],
    check_constraints: [],
    table: snapshot.table,
    repo: snapshot.repo,
    base_filter: nil,
    # Marks this as a synthesized "nothing exists yet" snapshot.
    empty?: true,
    multitenancy: %{
      attribute: nil,
      strategy: nil,
      global: nil
    }
  }

  do_fetch_operations(snapshot, empty_snapshot, opts, [
    %Operation.CreateTable{
      table: snapshot.table,
      schema: snapshot.schema,
      multitenancy: snapshot.multitenancy,
      old_multitenancy: empty_snapshot.multitenancy
    }
    | acc
  ])
end
|
|
|
|
|
2020-11-20 16:09:26 +13:00
|
|
|
defp do_fetch_operations(snapshot, old_snapshot, opts, acc) do
|
|
|
|
attribute_operations = attribute_operations(snapshot, old_snapshot, opts)
|
2024-02-05 04:12:39 +13:00
|
|
|
pkey_operations = pkey_operations(snapshot, old_snapshot, attribute_operations, opts)
|
2020-09-11 12:26:47 +12:00
|
|
|
|
2020-11-25 12:11:02 +13:00
|
|
|
rewrite_all_identities? = changing_multitenancy_affects_identities?(snapshot, old_snapshot)
|
|
|
|
|
2022-07-22 05:34:38 +12:00
|
|
|
custom_statements_to_add =
|
|
|
|
snapshot.custom_statements
|
|
|
|
|> Enum.reject(fn statement ->
|
|
|
|
Enum.any?(old_snapshot.custom_statements, &(&1.name == statement.name))
|
|
|
|
end)
|
|
|
|
|> Enum.map(&%Operation.AddCustomStatement{statement: &1, table: snapshot.table})
|
|
|
|
|
|
|
|
custom_statements_to_remove =
|
|
|
|
old_snapshot.custom_statements
|
|
|
|
|> Enum.reject(fn old_statement ->
|
|
|
|
Enum.any?(snapshot.custom_statements, &(&1.name == old_statement.name))
|
|
|
|
end)
|
|
|
|
|> Enum.map(&%Operation.RemoveCustomStatement{statement: &1, table: snapshot.table})
|
|
|
|
|
|
|
|
custom_statements_to_alter =
|
|
|
|
snapshot.custom_statements
|
|
|
|
|> Enum.flat_map(fn statement ->
|
|
|
|
old_statement = Enum.find(old_snapshot.custom_statements, &(&1.name == statement.name))
|
|
|
|
|
|
|
|
if old_statement &&
|
|
|
|
(old_statement.code? != statement.code? ||
|
|
|
|
old_statement.up != statement.up || old_statement.down != statement.down) do
|
|
|
|
[
|
|
|
|
%Operation.RemoveCustomStatement{statement: old_statement, table: snapshot.table},
|
|
|
|
%Operation.AddCustomStatement{statement: statement, table: snapshot.table}
|
|
|
|
]
|
|
|
|
else
|
|
|
|
[]
|
|
|
|
end
|
|
|
|
end)
|
|
|
|
|
2021-09-21 08:38:36 +12:00
|
|
|
custom_indexes_to_add =
|
|
|
|
Enum.filter(snapshot.custom_indexes, fn index ->
|
2024-05-16 17:14:38 +12:00
|
|
|
(rewrite_all_identities? && !index.all_tenants?) ||
|
|
|
|
!Enum.find(old_snapshot.custom_indexes, fn old_custom_index ->
|
|
|
|
indexes_match?(snapshot.table, old_custom_index, index)
|
|
|
|
end)
|
2021-09-21 08:38:36 +12:00
|
|
|
end)
|
|
|
|
|> Enum.map(fn custom_index ->
|
|
|
|
%Operation.AddCustomIndex{
|
|
|
|
index: custom_index,
|
2022-05-14 09:41:30 +12:00
|
|
|
table: snapshot.table,
|
|
|
|
schema: snapshot.schema,
|
|
|
|
multitenancy: snapshot.multitenancy,
|
|
|
|
base_filter: snapshot.base_filter
|
2021-09-21 08:38:36 +12:00
|
|
|
}
|
|
|
|
end)
|
|
|
|
|
|
|
|
custom_indexes_to_remove =
|
|
|
|
Enum.filter(old_snapshot.custom_indexes, fn old_custom_index ->
|
2024-01-13 04:11:16 +13:00
|
|
|
(rewrite_all_identities? && !old_custom_index.all_tenants?) ||
|
2021-09-21 08:38:36 +12:00
|
|
|
!Enum.find(snapshot.custom_indexes, fn index ->
|
2023-06-06 17:33:50 +12:00
|
|
|
indexes_match?(snapshot.table, old_custom_index, index)
|
2021-09-21 08:38:36 +12:00
|
|
|
end)
|
|
|
|
end)
|
|
|
|
|> Enum.map(fn custom_index ->
|
|
|
|
%Operation.RemoveCustomIndex{
|
|
|
|
index: custom_index,
|
2022-05-14 09:41:30 +12:00
|
|
|
table: old_snapshot.table,
|
|
|
|
schema: old_snapshot.schema,
|
|
|
|
multitenancy: old_snapshot.multitenancy,
|
|
|
|
base_filter: old_snapshot.base_filter
|
2021-09-21 08:38:36 +12:00
|
|
|
}
|
|
|
|
end)
|
|
|
|
|
2020-09-11 12:26:47 +12:00
|
|
|
unique_indexes_to_remove =
|
2020-11-25 12:11:02 +13:00
|
|
|
if rewrite_all_identities? do
|
2024-01-13 04:11:16 +13:00
|
|
|
Enum.reject(old_snapshot.identities, & &1.all_tenants?)
|
2020-11-25 12:11:02 +13:00
|
|
|
else
|
|
|
|
Enum.reject(old_snapshot.identities, fn old_identity ->
|
|
|
|
Enum.find(snapshot.identities, fn identity ->
|
2021-04-28 09:16:56 +12:00
|
|
|
identity.name == old_identity.name &&
|
|
|
|
Enum.sort(old_identity.keys) == Enum.sort(identity.keys) &&
|
2024-01-13 04:11:16 +13:00
|
|
|
old_identity.base_filter == identity.base_filter &&
|
|
|
|
old_identity.all_tenants? == identity.all_tenants?
|
2020-11-25 12:11:02 +13:00
|
|
|
end)
|
2020-09-11 12:26:47 +12:00
|
|
|
end)
|
2020-11-25 12:11:02 +13:00
|
|
|
end
|
2020-09-11 12:26:47 +12:00
|
|
|
|> Enum.map(fn identity ->
|
2022-05-14 09:41:30 +12:00
|
|
|
%Operation.RemoveUniqueIndex{
|
|
|
|
identity: identity,
|
|
|
|
table: snapshot.table,
|
|
|
|
schema: snapshot.schema
|
|
|
|
}
|
2020-09-11 12:26:47 +12:00
|
|
|
end)
|
|
|
|
|
2021-04-28 09:16:56 +12:00
|
|
|
unique_indexes_to_rename =
|
|
|
|
if rewrite_all_identities? do
|
2024-01-13 04:11:16 +13:00
|
|
|
snapshot.identities
|
|
|
|
|> Enum.filter(& &1.all_tenants?)
|
|
|
|
|> Enum.map(fn identity ->
|
|
|
|
Enum.find_value(old_snapshot.identities, fn old_identity ->
|
|
|
|
if old_identity.name == identity.name &&
|
|
|
|
old_identity.index_name != identity.index_name do
|
|
|
|
{old_identity, identity}
|
|
|
|
end
|
|
|
|
end)
|
|
|
|
end)
|
|
|
|
|> Enum.filter(& &1)
|
2021-04-28 09:16:56 +12:00
|
|
|
else
|
|
|
|
snapshot.identities
|
|
|
|
|> Enum.map(fn identity ->
|
|
|
|
Enum.find_value(old_snapshot.identities, fn old_identity ->
|
|
|
|
if old_identity.name == identity.name &&
|
|
|
|
old_identity.index_name != identity.index_name do
|
|
|
|
{old_identity, identity}
|
|
|
|
end
|
|
|
|
end)
|
|
|
|
end)
|
|
|
|
|> Enum.filter(& &1)
|
|
|
|
end
|
|
|
|
|> Enum.map(fn {old_identity, new_identity} ->
|
|
|
|
%Operation.RenameUniqueIndex{
|
|
|
|
old_identity: old_identity,
|
|
|
|
new_identity: new_identity,
|
2022-05-14 09:41:30 +12:00
|
|
|
schema: snapshot.schema,
|
2021-04-28 09:16:56 +12:00
|
|
|
table: snapshot.table
|
|
|
|
}
|
|
|
|
end)
|
|
|
|
|
2020-09-11 12:26:47 +12:00
|
|
|
unique_indexes_to_add =
|
2020-11-25 12:11:02 +13:00
|
|
|
if rewrite_all_identities? do
|
|
|
|
snapshot.identities
|
2024-01-13 04:11:16 +13:00
|
|
|
|> Enum.reject(fn identity ->
|
|
|
|
if identity.all_tenants? do
|
|
|
|
Enum.find(old_snapshot.identities, fn old_identity ->
|
|
|
|
old_identity.name == identity.name &&
|
|
|
|
Enum.sort(old_identity.keys) == Enum.sort(identity.keys) &&
|
|
|
|
old_identity.base_filter == identity.base_filter &&
|
2024-05-24 17:14:55 +12:00
|
|
|
old_identity.all_tenants? == identity.all_tenants? &&
|
|
|
|
old_identity.nils_distinct? == identity.nils_distinct? &&
|
|
|
|
old_identity.where == identity.where
|
2024-01-13 04:11:16 +13:00
|
|
|
end)
|
|
|
|
else
|
|
|
|
false
|
|
|
|
end
|
|
|
|
end)
|
2020-11-25 12:11:02 +13:00
|
|
|
else
|
|
|
|
Enum.reject(snapshot.identities, fn identity ->
|
|
|
|
Enum.find(old_snapshot.identities, fn old_identity ->
|
2021-04-28 09:16:56 +12:00
|
|
|
old_identity.name == identity.name &&
|
|
|
|
Enum.sort(old_identity.keys) == Enum.sort(identity.keys) &&
|
2024-01-13 04:11:16 +13:00
|
|
|
old_identity.base_filter == identity.base_filter &&
|
2024-05-24 17:14:55 +12:00
|
|
|
old_identity.all_tenants? == identity.all_tenants? &&
|
|
|
|
old_identity.nils_distinct? == identity.nils_distinct? &&
|
|
|
|
old_identity.where == identity.where
|
2020-11-25 12:11:02 +13:00
|
|
|
end)
|
2020-09-11 12:26:47 +12:00
|
|
|
end)
|
2020-11-25 12:11:02 +13:00
|
|
|
end
|
2020-09-11 12:26:47 +12:00
|
|
|
|> Enum.map(fn identity ->
|
2020-09-20 10:08:09 +12:00
|
|
|
%Operation.AddUniqueIndex{
|
|
|
|
identity: identity,
|
2022-05-14 09:41:30 +12:00
|
|
|
schema: snapshot.schema,
|
2020-09-20 10:08:09 +12:00
|
|
|
table: snapshot.table
|
|
|
|
}
|
2020-09-11 12:26:47 +12:00
|
|
|
end)
|
|
|
|
|
2021-04-20 06:26:41 +12:00
|
|
|
constraints_to_add =
|
|
|
|
snapshot.check_constraints
|
|
|
|
|> Enum.reject(fn constraint ->
|
|
|
|
Enum.find(old_snapshot.check_constraints, fn old_constraint ->
|
|
|
|
old_constraint.check == constraint.check && old_constraint.name == constraint.name
|
|
|
|
end)
|
|
|
|
end)
|
|
|
|
|> Enum.map(fn constraint ->
|
|
|
|
%Operation.AddCheckConstraint{
|
|
|
|
constraint: constraint,
|
2022-05-14 09:41:30 +12:00
|
|
|
table: snapshot.table,
|
|
|
|
schema: snapshot.schema
|
2021-04-20 06:26:41 +12:00
|
|
|
}
|
|
|
|
end)
|
|
|
|
|
|
|
|
constraints_to_remove =
|
|
|
|
old_snapshot.check_constraints
|
|
|
|
|> Enum.reject(fn old_constraint ->
|
|
|
|
Enum.find(snapshot.check_constraints, fn constraint ->
|
|
|
|
old_constraint.check == constraint.check && old_constraint.name == constraint.name
|
|
|
|
end)
|
|
|
|
end)
|
|
|
|
|> Enum.map(fn old_constraint ->
|
|
|
|
%Operation.RemoveCheckConstraint{
|
|
|
|
constraint: old_constraint,
|
2022-05-14 09:41:30 +12:00
|
|
|
table: old_snapshot.table,
|
|
|
|
schema: old_snapshot.schema
|
2021-04-20 06:26:41 +12:00
|
|
|
}
|
|
|
|
end)
|
|
|
|
|
|
|
|
[
|
2023-04-12 09:41:53 +12:00
|
|
|
pkey_operations,
|
2021-04-20 06:26:41 +12:00
|
|
|
unique_indexes_to_remove,
|
|
|
|
attribute_operations,
|
|
|
|
unique_indexes_to_add,
|
2021-04-28 09:16:56 +12:00
|
|
|
unique_indexes_to_rename,
|
2021-04-20 06:26:41 +12:00
|
|
|
constraints_to_remove,
|
2022-07-22 16:45:49 +12:00
|
|
|
constraints_to_add,
|
2021-09-21 08:38:36 +12:00
|
|
|
custom_indexes_to_add,
|
|
|
|
custom_indexes_to_remove,
|
2022-07-22 05:34:38 +12:00
|
|
|
custom_statements_to_add,
|
|
|
|
custom_statements_to_remove,
|
|
|
|
custom_statements_to_alter,
|
2021-04-20 06:26:41 +12:00
|
|
|
acc
|
|
|
|
]
|
2020-10-29 15:26:45 +13:00
|
|
|
|> Enum.concat()
|
|
|
|
|> Enum.map(&Map.put(&1, :multitenancy, snapshot.multitenancy))
|
|
|
|
|> Enum.map(&Map.put(&1, :old_multitenancy, old_snapshot.multitenancy))
|
2020-09-11 12:26:47 +12:00
|
|
|
end
|
|
|
|
|
2023-06-06 17:33:50 +12:00
|
|
|
defp indexes_match?(table, left, right) do
|
2022-11-26 08:06:22 +13:00
|
|
|
left =
|
2023-06-06 17:33:50 +12:00
|
|
|
left
|
|
|
|
|> Map.update!(:fields, fn fields ->
|
2022-11-26 08:06:22 +13:00
|
|
|
Enum.map(fields, &to_string/1)
|
|
|
|
end)
|
2023-06-06 17:33:50 +12:00
|
|
|
|> add_custom_index_name(table)
|
2024-01-28 04:56:11 +13:00
|
|
|
|> Map.delete(:error_fields)
|
2022-11-26 08:06:22 +13:00
|
|
|
|
|
|
|
right =
|
2023-06-06 17:33:50 +12:00
|
|
|
right
|
|
|
|
|> Map.update!(:fields, fn fields ->
|
2022-11-26 08:06:22 +13:00
|
|
|
Enum.map(fields, &to_string/1)
|
|
|
|
end)
|
2023-06-06 17:33:50 +12:00
|
|
|
|> add_custom_index_name(table)
|
2024-01-28 04:56:11 +13:00
|
|
|
|> Map.delete(:error_fields)
|
2022-11-26 08:06:22 +13:00
|
|
|
|
|
|
|
left == right
|
|
|
|
end
|
|
|
|
|
2023-06-06 17:33:50 +12:00
|
|
|
defp add_custom_index_name(custom_index, table) do
|
|
|
|
custom_index
|
|
|
|
|> Map.put_new_lazy(:name, fn ->
|
|
|
|
AshPostgres.CustomIndex.name(table, %{fields: custom_index.fields})
|
|
|
|
end)
|
|
|
|
|> Map.update!(
|
|
|
|
:name,
|
|
|
|
&(&1 || AshPostgres.CustomIndex.name(table, %{fields: custom_index.fields}))
|
|
|
|
)
|
|
|
|
end
|
|
|
|
|
2024-02-05 04:12:39 +13:00
|
|
|
  # Builds the operations required to drop (and, in `down`, restore) the
  # table's composite primary key when the set of primary-key columns is
  # changing between snapshots.
  #
  # Returns a (possibly empty) list of `Operation.RemovePrimaryKey` /
  # `Operation.RemovePrimaryKeyDown` structs derived from the already-computed
  # `attribute_operations`.
  defp pkey_operations(snapshot, old_snapshot, attribute_operations, opts) do
    if old_snapshot[:empty?] do
      # Brand-new table: the pkey is created with the table, nothing to drop.
      []
    else
      # The existing pkey must be dropped up-front when any attribute toggles
      # its primary_key? flag, or a new primary-key attribute is added.
      must_drop_pkey? =
        Enum.any?(
          attribute_operations,
          fn
            %Operation.AlterAttribute{
              old_attribute: %{primary_key?: old_primary_key},
              new_attribute: %{primary_key?: new_primary_key}
            }
            when old_primary_key != new_primary_key ->
              true

            %Operation.AddAttribute{
              attribute: %{primary_key?: true}
            } ->
              true

            _ ->
              false
          end
        )

      # The `down` migration also needs to drop the re-created pkey whenever
      # the up-migration ends up with a primary-key column it introduced/altered.
      drop_in_down? =
        Enum.any?(attribute_operations, fn
          %Operation.AlterAttribute{
            new_attribute: %{primary_key?: true}
          } ->
            true

          %Operation.AddAttribute{
            attribute: %{primary_key?: true}
          } ->
            true

          _ ->
            false
        end)

      # When the pkey column removal itself is emitted commented-out (because
      # drop_columns is off), the matching down-drop must be commented too.
      drop_in_down_commented? =
        Enum.any?(attribute_operations, fn
          %Operation.RemoveAttribute{
            commented?: true,
            attribute: %{primary_key?: true}
          } ->
            true

          _ ->
            false
        end)

      # `&&` leaves `false` placeholders for unneeded ops; filter strips them.
      [
        must_drop_pkey? &&
          %Operation.RemovePrimaryKey{schema: snapshot.schema, table: snapshot.table},
        must_drop_pkey? && drop_in_down? &&
          %Operation.RemovePrimaryKeyDown{
            commented?: !opts.drop_columns && drop_in_down_commented?,
            schema: snapshot.schema,
            table: snapshot.table
          }
      ]
      |> Enum.filter(& &1)
    end
  end
|
|
|
|
|
2020-11-20 16:09:26 +13:00
|
|
|
  # Diffs the attribute lists of the new and old snapshots and produces the
  # attribute-level migration operations: adds, alters, removes and renames,
  # including the foreign-key drop/re-add and deferrability bookkeeping that
  # referenced attributes require.
  defp attribute_operations(snapshot, old_snapshot, opts) do
    # Present in the new snapshot but not the old one (matched by source column).
    attributes_to_add =
      Enum.reject(snapshot.attributes, fn attribute ->
        Enum.find(old_snapshot.attributes, &(&1.source == attribute.source))
      end)

    # Present in the old snapshot but not the new one.
    attributes_to_remove =
      Enum.reject(old_snapshot.attributes, fn attribute ->
        Enum.find(snapshot.attributes, &(&1.source == attribute.source))
      end)

    # Interactively (or via preset answers) reclassify add+remove pairs as renames.
    {attributes_to_add, attributes_to_remove, attributes_to_rename} =
      resolve_renames(snapshot.table, attributes_to_add, attributes_to_remove, opts)

    # Attributes present in both snapshots whose definition changed. A rename
    # maps the old source onto the new one first so renamed columns are
    # compared against their old definition rather than treated as changed.
    attributes_to_alter =
      snapshot.attributes
      |> Enum.map(fn attribute ->
        {attribute,
         Enum.find(
           old_snapshot.attributes,
           fn old_attribute ->
             source_match =
               Enum.find_value(attributes_to_rename, old_attribute.source, fn {new, old} ->
                 if old.source == old_attribute.source do
                   new.source
                 end
               end)

             source_match ==
               attribute.source &&
               attributes_unequal?(
                 old_attribute,
                 attribute,
                 snapshot.repo,
                 old_snapshot,
                 snapshot
               )
           end
         )}
      end)
      |> Enum.filter(&elem(&1, 1))

    rename_attribute_events =
      Enum.map(attributes_to_rename, fn {new, old} ->
        %Operation.RenameAttribute{
          new_attribute: new,
          old_attribute: old,
          table: snapshot.table,
          schema: snapshot.schema
        }
      end)

    add_attribute_events =
      Enum.flat_map(attributes_to_add, fn attribute ->
        if attribute.references do
          # Deferrable FKs need explicit up/down deferrability statements.
          reference_ops =
            if attribute.references.deferrable do
              [
                %Operation.AlterDeferrability{
                  table: snapshot.table,
                  schema: snapshot.schema,
                  references: attribute.references,
                  direction: :up
                },
                %Operation.AlterDeferrability{
                  table: snapshot.table,
                  schema: snapshot.schema,
                  references: Map.get(attribute, :references),
                  direction: :down
                }
              ]
            else
              []
            end

          # Add the column without its reference first, then alter it to attach
          # the FK; the DropForeignKey handles the down migration.
          [
            %Operation.AddAttribute{
              attribute: Map.delete(attribute, :references),
              schema: snapshot.schema,
              table: snapshot.table
            },
            %Operation.AlterAttribute{
              old_attribute: Map.delete(attribute, :references),
              new_attribute: attribute,
              schema: snapshot.schema,
              table: snapshot.table
            },
            %Operation.DropForeignKey{
              attribute: attribute,
              table: snapshot.table,
              schema: snapshot.schema,
              multitenancy: Map.get(attribute, :multitenancy),
              direction: :down
            }
          ] ++ reference_ops
        else
          [
            %Operation.AddAttribute{
              attribute: attribute,
              table: snapshot.table,
              schema: snapshot.schema
            }
          ]
        end
      end)

    alter_attribute_events =
      Enum.flat_map(attributes_to_alter, fn {new_attribute, old_attribute} ->
        # When deferrability changed, emit paired up/down statements.
        deferrable_ops =
          if differently_deferrable?(new_attribute, old_attribute) do
            [
              %Operation.AlterDeferrability{
                table: snapshot.table,
                schema: snapshot.schema,
                references: new_attribute.references,
                direction: :up
              },
              %Operation.AlterDeferrability{
                table: snapshot.table,
                schema: snapshot.schema,
                references: Map.get(old_attribute, :references),
                direction: :down
              }
            ]
          else
            []
          end

        if has_reference?(old_snapshot.multitenancy, old_attribute) and
             Map.get(old_attribute, :references) != Map.get(new_attribute, :references) do
          # Re-creating the FK resets deferrability, so re-apply it unless
          # deferrable_ops above already does.
          redo_deferrability =
            if differently_deferrable?(new_attribute, old_attribute) do
              []
            else
              [
                %Operation.AlterDeferrability{
                  table: snapshot.table,
                  schema: snapshot.schema,
                  references: new_attribute.references,
                  direction: :up
                }
              ]
            end

          # Drop the old FK before altering the column, then alter.
          old_and_alter =
            [
              %Operation.DropForeignKey{
                attribute: old_attribute,
                table: snapshot.table,
                schema: snapshot.schema,
                multitenancy: old_snapshot.multitenancy,
                direction: :up
              },
              %Operation.AlterAttribute{
                new_attribute: new_attribute,
                old_attribute: old_attribute,
                schema: snapshot.schema,
                table: snapshot.table
              }
            ] ++ redo_deferrability

          if has_reference?(snapshot.multitenancy, new_attribute) do
            # The new FK must also be droppable in the down migration.
            reference_ops = [
              %Operation.DropForeignKey{
                attribute: new_attribute,
                table: snapshot.table,
                schema: snapshot.schema,
                multitenancy: snapshot.multitenancy,
                direction: :down
              }
            ]

            old_and_alter ++
              reference_ops
          else
            old_and_alter
          end
        else
          # References did not change: plain column alter, references stripped
          # so they are not re-emitted.
          [
            %Operation.AlterAttribute{
              new_attribute: Map.delete(new_attribute, :references),
              old_attribute: Map.delete(old_attribute, :references),
              schema: snapshot.schema,
              table: snapshot.table
            }
          ]
        end
        |> Enum.concat(deferrable_ops)
      end)

    remove_attribute_events =
      Enum.map(attributes_to_remove, fn attribute ->
        %Operation.RemoveAttribute{
          attribute: attribute,
          table: snapshot.table,
          schema: snapshot.schema,
          # Removals are emitted commented-out unless drop_columns was opted into.
          commented?: !opts.drop_columns
        }
      end)

    add_attribute_events ++
      alter_attribute_events ++ remove_attribute_events ++ rename_attribute_events
  end
|
|
|
|
|
2023-07-15 08:24:57 +12:00
|
|
|
defp differently_deferrable?(%{references: %{deferrable: left}}, %{
|
|
|
|
references: %{deferrable: right}
|
|
|
|
})
|
|
|
|
when left != right do
|
|
|
|
true
|
|
|
|
end
|
|
|
|
|
|
|
|
defp differently_deferrable?(%{references: %{deferrable: same}}, %{
|
|
|
|
references: %{deferrable: same}
|
|
|
|
}) do
|
|
|
|
false
|
|
|
|
end
|
|
|
|
|
|
|
|
defp differently_deferrable?(%{references: %{deferrable: left}}, _) when left != false, do: true
|
|
|
|
|
|
|
|
defp differently_deferrable?(_, %{references: %{deferrable: right}}) when right != false,
|
|
|
|
do: true
|
|
|
|
|
|
|
|
defp differently_deferrable?(_, _), do: false
|
|
|
|
|
2022-02-15 11:44:17 +13:00
|
|
|
# This exists to handle the fact that the remapping of the key name -> source caused attributes
|
|
|
|
# to be considered unequal. We ignore things that only differ in that way using this function.
|
2023-04-12 09:41:53 +12:00
|
|
|
defp attributes_unequal?(left, right, repo, _old_snapshot, _new_snapshot) do
|
|
|
|
left = clean_for_equality(left, repo)
|
2022-02-15 11:44:17 +13:00
|
|
|
|
2023-04-12 09:41:53 +12:00
|
|
|
right = clean_for_equality(right, repo)
|
2022-02-15 11:44:17 +13:00
|
|
|
|
2022-11-21 20:42:26 +13:00
|
|
|
left != right
|
2022-02-15 11:44:17 +13:00
|
|
|
end
|
|
|
|
|
2023-04-12 09:41:53 +12:00
|
|
|
  # Normalizes an attribute map for comparison: mirrors :name/:source into
  # each other as strings, fills in the reference schema and ignore? defaults,
  # and strips fields that should not cause a spurious diff.
  defp clean_for_equality(attribute, repo) do
    cond do
      # Source wins: copy it over name, stringify both.
      attribute[:source] ->
        Map.put(attribute, :name, attribute[:source])
        |> Map.update!(:source, &to_string/1)
        |> Map.update!(:name, &to_string/1)

      # Older snapshots may only carry :name; derive :source from it.
      attribute[:name] ->
        attribute
        |> Map.put(:source, attribute[:name])
        |> Map.update!(:source, &to_string/1)
        |> Map.update!(:name, &to_string/1)

      true ->
        attribute
    end
    |> add_schema(repo)
    |> add_ignore()
    |> then(fn
      # only :integer cares about `destination_attribute_generated`
      # so we clean it here to avoid generating unnecessary snapshots
      # during the transitionary period of adding it
      %{type: type, references: references} = attribute
      when not is_nil(references) and type != :integer ->
        Map.update!(attribute, :references, &Map.delete(&1, :destination_attribute_generated))

      attribute ->
        attribute
    end)
  end
|
|
|
|
|
|
|
|
defp add_ignore(%{references: references} = attribute) when is_map(references) do
|
|
|
|
%{attribute | references: Map.put_new(references, :ignore?, false)}
|
|
|
|
end
|
|
|
|
|
|
|
|
defp add_ignore(attribute) do
|
|
|
|
attribute
|
2022-05-24 16:38:13 +12:00
|
|
|
end
|
|
|
|
|
|
|
|
defp add_schema(%{references: references} = attribute, repo) when is_map(references) do
|
|
|
|
schema = Map.get(references, :schema) || repo.config()[:default_prefix] || "public"
|
|
|
|
|
|
|
|
%{
|
|
|
|
attribute
|
|
|
|
| references: Map.put(references, :schema, schema)
|
|
|
|
}
|
|
|
|
end
|
|
|
|
|
|
|
|
defp add_schema(attribute, _) do
|
|
|
|
attribute
|
2022-02-15 11:44:17 +13:00
|
|
|
end
|
|
|
|
|
2020-11-25 12:11:02 +13:00
|
|
|
def changing_multitenancy_affects_identities?(snapshot, old_snapshot) do
|
2021-09-21 08:38:36 +12:00
|
|
|
snapshot.multitenancy != old_snapshot.multitenancy ||
|
|
|
|
snapshot.base_filter != old_snapshot.base_filter
|
2020-11-25 12:11:02 +13:00
|
|
|
end
|
|
|
|
|
2020-11-18 12:35:57 +13:00
|
|
|
def has_reference?(multitenancy, attribute) do
|
|
|
|
not is_nil(Map.get(attribute, :references)) and
|
|
|
|
!(attribute.references.multitenancy &&
|
|
|
|
attribute.references.multitenancy.strategy == :context &&
|
|
|
|
(is_nil(multitenancy) || multitenancy.strategy == :attribute))
|
|
|
|
end
|
|
|
|
|
2020-09-11 12:26:47 +12:00
|
|
|
  # Loads the most recent stored snapshot for the given resource snapshot,
  # or nil (via get_old_snapshot/2) when none exists.
  #
  # Snapshots live under <snapshot_path>/<repo_name>[/tenants]/<table>/ as
  # <timestamp>.json files; the highest timestamp wins. Falls back to the
  # legacy flat-file layout when the folder is missing or empty.
  def get_existing_snapshot(snapshot, opts) do
    repo_name = snapshot.repo |> Module.split() |> List.last() |> Macro.underscore()

    # Context-multitenant resources store their snapshots under "tenants".
    folder =
      if snapshot.multitenancy.strategy == :context do
        opts
        |> snapshot_path(snapshot.repo)
        |> Path.join(repo_name)
        |> Path.join("tenants")
      else
        opts
        |> snapshot_path(snapshot.repo)
        |> Path.join(repo_name)
      end

    # Prefer the "<schema>.<table>" directory when a schema is set and that
    # directory exists; otherwise use the plain table directory.
    snapshot_folder =
      if snapshot.schema do
        schema_dir = Path.join(folder, "#{snapshot.schema}.#{snapshot.table}")

        if File.dir?(schema_dir) do
          schema_dir
        else
          Path.join(folder, snapshot.table)
        end
      else
        Path.join(folder, snapshot.table)
      end

    if File.exists?(snapshot_folder) do
      snapshot_folder
      |> File.ls!()
      |> Enum.filter(&String.ends_with?(&1, ".json"))
      |> Enum.map(&String.trim_trailing(&1, ".json"))
      |> Enum.map(&Integer.parse/1)
      # Keep only filenames that are purely integer timestamps.
      |> Enum.filter(fn
        {_int, remaining} ->
          remaining == ""

        :error ->
          false
      end)
      |> Enum.map(&elem(&1, 0))
      |> case do
        [] ->
          get_old_snapshot(folder, snapshot)

        timestamps ->
          # The latest timestamped file is the current snapshot.
          timestamp = Enum.max(timestamps)
          snapshot_file = Path.join(snapshot_folder, "#{timestamp}.json")

          snapshot_file
          |> File.read!()
          |> load_snapshot()
      end
    else
      get_old_snapshot(folder, snapshot)
    end
  end
|
2020-09-11 12:26:47 +12:00
|
|
|
|
2021-01-07 18:37:41 +13:00
|
|
|
  # Legacy-layout fallback: loads a snapshot stored as a single
  # "<schema>.<table>.json" or "<table>.json" file directly in `folder`.
  # Returns the parsed snapshot or nil when no such file exists.
  defp get_old_snapshot(folder, snapshot) do
    # Try the schema-qualified file first, when the resource has a schema.
    schema_file =
      if snapshot.schema do
        old_snapshot_file = Path.join(folder, "#{snapshot.schema}.#{snapshot.table}.json")

        if File.exists?(old_snapshot_file) do
          old_snapshot_file
          |> File.read!()
          |> load_snapshot()
        end
      end

    if schema_file do
      schema_file
    else
      old_snapshot_file = Path.join(folder, "#{snapshot.table}.json")

      # This is adapter code for the old version, where migrations were stored in a flat directory
      if File.exists?(old_snapshot_file) do
        old_snapshot_file
        |> File.read!()
        |> load_snapshot()
      end
    end
  end
|
|
|
|
|
2021-04-05 08:09:11 +12:00
|
|
|
  # Reclassifies add/remove attribute pairs as renames by asking the user
  # (or consulting preset answers). Returns {to_add, to_remove, renames}
  # where renames is a list of {new_attribute, old_attribute} tuples.

  # Nothing removed: every addition is a genuine addition.
  defp resolve_renames(_table, adding, [], _opts), do: {adding, [], []}

  # Nothing added: every removal is a genuine removal.
  defp resolve_renames(_table, [], removing, _opts), do: {[], removing, []}

  # Exactly one of each: a single yes/no question decides.
  defp resolve_renames(table, [adding], [removing], opts) do
    if renaming_to?(table, removing.source, adding.source, opts) do
      {[], [], [{adding, removing}]}
    else
      {[adding], [removing], []}
    end
  end

  # General case: process one removal at a time, recursing on the rest.
  defp resolve_renames(table, adding, [removing | rest], opts) do
    {new_adding, new_removing, new_renames} =
      if renaming?(table, removing, opts) do
        new_attribute =
          if opts.no_shell? do
            # A rename target can only be chosen interactively.
            raise "Unimplemented: Cannot get new_attribute without the shell!"
          else
            get_new_attribute(adding, opts)
          end

        # The chosen addition is consumed by the rename.
        {Enum.reject(adding, &(&1.source == new_attribute.source)), [],
         [{new_attribute, removing}]}
      else
        {adding, [removing], []}
      end

    {rest_adding, rest_removing, rest_renames} = resolve_renames(table, new_adding, rest, opts)

    {rest_adding, new_removing ++ rest_removing, rest_renames ++ new_renames}
  end
|
|
|
|
|
2021-04-05 08:09:11 +12:00
|
|
|
defp renaming_to?(table, removing, adding, opts) do
|
|
|
|
if opts.no_shell? do
|
2021-09-14 04:58:23 +12:00
|
|
|
raise "Unimplemented: cannot determine: Are you renaming #{table}.#{removing} to #{table}.#{adding}? without shell input"
|
2021-04-05 08:09:11 +12:00
|
|
|
else
|
2024-04-03 16:03:57 +13:00
|
|
|
yes?(opts, "Are you renaming #{table}.#{removing} to #{table}.#{adding}?")
|
2021-04-05 08:09:11 +12:00
|
|
|
end
|
|
|
|
end
|
|
|
|
|
|
|
|
defp renaming?(table, removing, opts) do
|
|
|
|
if opts.no_shell? do
|
2022-02-15 05:39:50 +13:00
|
|
|
raise "Unimplemented: cannot determine: Are you renaming #{table}.#{removing.source}? without shell input"
|
2021-04-05 08:09:11 +12:00
|
|
|
else
|
2024-04-03 16:03:57 +13:00
|
|
|
yes?(opts, "Are you renaming #{table}.#{removing.source}?")
|
2021-04-05 08:09:11 +12:00
|
|
|
end
|
|
|
|
end
|
|
|
|
|
2024-04-03 16:03:57 +13:00
|
|
|
defp get_new_attribute(adding, opts, tries \\ 3)
|
2020-09-11 12:26:47 +12:00
|
|
|
|
2024-04-03 16:03:57 +13:00
|
|
|
defp get_new_attribute(_adding, _opts, 0) do
|
2020-09-11 12:26:47 +12:00
|
|
|
raise "Could not get matching name after 3 attempts."
|
|
|
|
end
|
|
|
|
|
2024-04-03 16:03:57 +13:00
|
|
|
defp get_new_attribute(adding, opts, tries) do
|
2020-09-11 12:26:47 +12:00
|
|
|
name =
|
2024-04-03 16:03:57 +13:00
|
|
|
prompt(
|
|
|
|
opts,
|
2022-02-15 05:39:50 +13:00
|
|
|
"What are you renaming it to?: #{Enum.map_join(adding, ", ", & &1.source)}"
|
2020-09-11 12:26:47 +12:00
|
|
|
)
|
|
|
|
|
2021-02-09 09:29:52 +13:00
|
|
|
name =
|
|
|
|
if name do
|
|
|
|
String.trim(name)
|
|
|
|
else
|
|
|
|
nil
|
|
|
|
end
|
|
|
|
|
2022-02-15 05:39:50 +13:00
|
|
|
case Enum.find(adding, &(to_string(&1.source) == name)) do
|
2024-04-03 16:03:57 +13:00
|
|
|
nil -> get_new_attribute(adding, opts, tries - 1)
|
2020-09-11 12:26:47 +12:00
|
|
|
new_attribute -> new_attribute
|
|
|
|
end
|
|
|
|
end
|
|
|
|
|
2021-03-03 06:33:24 +13:00
|
|
|
defp timestamp(require_unique? \\ false) do
|
|
|
|
# Alright, this is silly I know. But migration ids need to be unique
|
|
|
|
# and "synthesizing" that behavior is significantly more annoying than
|
|
|
|
# just waiting a bit, ensuring the migration versions are unique.
|
|
|
|
if require_unique?, do: :timer.sleep(1500)
|
2020-09-11 12:26:47 +12:00
|
|
|
{{y, m, d}, {hh, mm, ss}} = :calendar.universal_time()
|
|
|
|
"#{y}#{pad(m)}#{pad(d)}#{pad(hh)}#{pad(mm)}#{pad(ss)}"
|
|
|
|
end
|
|
|
|
|
|
|
|
defp pad(i) when i < 10, do: <<?0, ?0 + i>>
|
|
|
|
defp pad(i), do: to_string(i)
|
|
|
|
|
2021-02-01 10:39:59 +13:00
|
|
|
  # Builds the snapshot(s) for a resource.
  #
  # A normal resource yields a single snapshot of its own table. A polymorphic
  # resource yields one snapshot per table it is used through: every non-
  # belongs_to relationship in the domain that targets it and configures a
  # table in its context, with identities and reference attributes rewritten
  # for that specific usage.
  def get_snapshots(resource, all_resources) do
    Code.ensure_compiled!(AshPostgres.DataLayer.Info.repo(resource, :mutate))

    if AshPostgres.DataLayer.Info.polymorphic?(resource) do
      all_resources
      |> Enum.flat_map(&Ash.Resource.Info.relationships/1)
      |> Enum.filter(&(&1.destination == resource))
      |> Enum.reject(&(&1.type == :belongs_to))
      |> Enum.filter(& &1.context[:data_layer][:table])
      |> Enum.uniq()
      |> Enum.map(fn relationship ->
        resource
        |> do_snapshot(
          relationship.context[:data_layer][:table],
          relationship.context[:data_layer][:schema]
        )
        # Identity index names must be unique per concrete table.
        |> Map.update!(:identities, fn identities ->
          identity_index_names = AshPostgres.DataLayer.Info.identity_index_names(resource)

          Enum.map(identities, fn identity ->
            Map.put(
              identity,
              :index_name,
              identity_index_names[identity.name] ||
                "#{relationship.context[:data_layer][:table]}_#{identity.name}_index"
            )
          end)
        end)
        # The attribute joining back to the source resource becomes a FK
        # pointing at the relationship's source table.
        |> Map.update!(:attributes, fn attributes ->
          Enum.map(attributes, fn attribute ->
            destination_attribute_source =
              relationship.destination
              |> Ash.Resource.Info.attribute(relationship.destination_attribute)
              |> Map.get(:source)

            if attribute.source == destination_attribute_source do
              source_attribute =
                Ash.Resource.Info.attribute(relationship.source, relationship.source_attribute)

              Map.put(attribute, :references, %{
                destination_attribute: source_attribute.source,
                destination_attribute_default:
                  default(
                    source_attribute,
                    relationship.destination,
                    AshPostgres.DataLayer.Info.repo(relationship.destination, :mutate)
                  ),
                deferrable: false,
                destination_attribute_generated: source_attribute.generated?,
                multitenancy: multitenancy(relationship.source),
                table: AshPostgres.DataLayer.Info.table(relationship.source),
                schema: AshPostgres.DataLayer.Info.schema(relationship.source),
                on_delete: AshPostgres.DataLayer.Info.polymorphic_on_delete(relationship.source),
                on_update: AshPostgres.DataLayer.Info.polymorphic_on_update(relationship.source),
                primary_key?: source_attribute.primary_key?,
                name:
                  AshPostgres.DataLayer.Info.polymorphic_name(relationship.source) ||
                    "#{relationship.context[:data_layer][:table]}_#{destination_attribute_source}_fkey"
              })
            else
              attribute
            end
          end)
        end)
      end)
    else
      [do_snapshot(resource, AshPostgres.DataLayer.Info.table(resource))]
    end
  end
|
|
|
|
|
2022-05-14 09:41:30 +12:00
|
|
|
  # Builds the snapshot map for `resource` at the given table/schema: the
  # full structural description the generator diffs (attributes, identities,
  # constraints, custom indexes/statements, tenancy, base filter).
  #
  # A hex-encoded SHA-256 of the snapshot is attached so unchanged resources
  # can be detected without a deep comparison.
  defp do_snapshot(resource, table, schema \\ nil) do
    snapshot = %{
      attributes: attributes(resource, table),
      identities: identities(resource),
      table: table || AshPostgres.DataLayer.Info.table(resource),
      schema: schema || AshPostgres.DataLayer.Info.schema(resource),
      check_constraints: check_constraints(resource),
      custom_indexes: custom_indexes(resource),
      custom_statements: custom_statements(resource),
      repo: AshPostgres.DataLayer.Info.repo(resource, :mutate),
      multitenancy: multitenancy(resource),
      base_filter: AshPostgres.DataLayer.Info.base_filter_sql(resource),
      has_create_action: has_create_action?(resource)
    }

    hash =
      :sha256
      |> :crypto.hash(inspect(snapshot))
      |> Base.encode16()

    Map.put(snapshot, :hash, hash)
  end
|
|
|
|
|
2021-03-30 02:26:58 +13:00
|
|
|
defp has_create_action?(resource) do
|
|
|
|
resource
|
|
|
|
|> Ash.Resource.Info.actions()
|
2024-04-02 06:58:44 +13:00
|
|
|
|> Enum.any?(&(&1.type == :create && !&1.manual))
|
2021-03-30 02:26:58 +13:00
|
|
|
end
|
|
|
|
|
2021-04-20 06:26:41 +12:00
|
|
|
  # Collects the resource's check constraints (only those with a `check`
  # expression configured) as plain maps for the snapshot.
  #
  # Raises when the resource has a base filter but no `base_filter_sql`,
  # since the constraint SQL cannot be generated in that case.
  defp check_constraints(resource) do
    resource
    |> AshPostgres.DataLayer.Info.check_constraints()
    |> Enum.filter(& &1.check)
    |> case do
      [] ->
        []

      constraints ->
        base_filter = Ash.Resource.Info.base_filter(resource)

        if base_filter && !AshPostgres.DataLayer.Info.base_filter_sql(resource) do
          raise """
          Cannot create a check constraint for a resource with a base filter without also configuring `base_filter_sql`.

          You must provide the `base_filter_sql` option, or manually create add the check constraint to your migrations.
          """
        end

        constraints
    end
    |> Enum.map(fn constraint ->
      # Resolve each referenced attribute to its column (source) name.
      attributes =
        constraint.attribute
        |> List.wrap()
        |> Enum.map(fn attribute ->
          attr =
            resource
            |> Ash.Resource.Info.attribute(attribute)

          attr.source || attr.name
        end)

      %{
        name: constraint.name,
        attribute: attributes,
        check: constraint.check,
        base_filter: AshPostgres.DataLayer.Info.base_filter_sql(resource)
      }
    end)
  end
|
|
|
|
|
2021-09-21 08:38:36 +12:00
|
|
|
defp custom_indexes(resource) do
|
|
|
|
resource
|
2022-08-24 11:56:46 +12:00
|
|
|
|> AshPostgres.DataLayer.Info.custom_indexes()
|
2021-09-21 08:38:36 +12:00
|
|
|
|> Enum.map(fn custom_index ->
|
2023-04-27 14:23:43 +12:00
|
|
|
Map.take(custom_index, AshPostgres.CustomIndex.fields())
|
2021-09-21 08:38:36 +12:00
|
|
|
end)
|
|
|
|
end
|
|
|
|
|
2022-07-22 05:34:38 +12:00
|
|
|
defp custom_statements(resource) do
|
|
|
|
resource
|
2022-08-24 11:56:46 +12:00
|
|
|
|> AshPostgres.DataLayer.Info.custom_statements()
|
2022-07-22 05:34:38 +12:00
|
|
|
|> Enum.map(fn custom_statement ->
|
2023-04-27 14:23:43 +12:00
|
|
|
Map.take(custom_statement, AshPostgres.Statement.fields())
|
2022-07-22 05:34:38 +12:00
|
|
|
end)
|
|
|
|
end
|
|
|
|
|
2020-10-29 15:26:45 +13:00
|
|
|
defp multitenancy(resource) do
|
2021-02-23 17:53:18 +13:00
|
|
|
strategy = Ash.Resource.Info.multitenancy_strategy(resource)
|
|
|
|
attribute = Ash.Resource.Info.multitenancy_attribute(resource)
|
|
|
|
global = Ash.Resource.Info.multitenancy_global?(resource)
|
2020-10-29 15:26:45 +13:00
|
|
|
|
|
|
|
%{
|
|
|
|
strategy: strategy,
|
|
|
|
attribute: attribute,
|
|
|
|
global: global
|
|
|
|
}
|
|
|
|
end
|
|
|
|
|
2021-09-22 07:10:25 +12:00
|
|
|
  # Snapshots the resource's attributes for the given table: resolves each
  # attribute's default, migration type/size and column source, and attaches
  # any foreign-key reference discovered via the resource's relationships.
  defp attributes(resource, table) do
    repo = AshPostgres.DataLayer.Info.repo(resource, :mutate)
    ignored = AshPostgres.DataLayer.Info.migration_ignore_attributes(resource) || []

    resource
    |> Ash.Resource.Info.attributes()
    |> Enum.reject(&(&1.name in ignored))
    # Only the migration-relevant fields go into the snapshot.
    |> Enum.map(
      &Map.take(&1, [
        :name,
        :source,
        :type,
        :default,
        :allow_nil?,
        :generated?,
        :primary_key?,
        :constraints
      ])
    )
    |> Enum.map(fn attribute ->
      default = default(attribute, resource, repo)

      # Explicit per-attribute override wins over the derived migration type.
      type =
        AshPostgres.DataLayer.Info.migration_types(resource)[attribute.name] ||
          migration_type(attribute.type, attribute.constraints)

      # The repo may optionally rewrite types (e.g. custom column types).
      type =
        if :erlang.function_exported(repo, :override_migration_type, 1) do
          repo.override_migration_type(type)
        else
          type
        end

      # Split sized types like {:varchar, n} into the base type and its size.
      {type, size} =
        case type do
          {:varchar, size} ->
            {:varchar, size}

          {:binary, size} ->
            {:binary, size}

          {other, size} when is_atom(other) and is_integer(size) ->
            {other, size}

          other ->
            {other, nil}
        end

      attribute
      |> Map.put(:default, default)
      |> Map.put(:size, size)
      |> Map.put(:type, type)
      |> Map.put(:source, attribute.source || attribute.name)
      # :name is folded into :source; :constraints were only needed for typing.
      |> Map.drop([:name, :constraints])
    end)
    |> Enum.map(fn attribute ->
      references = find_reference(resource, table, attribute)

      Map.put(attribute, :references, references)
    end)
  end
|
|
|
|
|
2021-09-22 07:10:25 +12:00
|
|
|
# Finds the foreign-key reference metadata for `attribute`, if any.
#
# Scans the resource's relationships for a `:belongs_to` whose source attribute
# maps to this column and whose source/destination share a repo
# (`foreign_key?/1`). Returns the reference map used in snapshots, or `nil`
# when no matching relationship exists or the reference is configured with
# `ignore?: true`.
defp find_reference(resource, table, attribute) do
  Enum.find_value(Ash.Resource.Info.relationships(resource), fn relationship ->
    # Resolve the column name backing the relationship's source attribute.
    source_attribute_name =
      relationship.source
      |> Ash.Resource.Info.attribute(relationship.source_attribute)
      |> then(fn attribute ->
        attribute.source || attribute.name
      end)

    if attribute.source == source_attribute_name && relationship.type == :belongs_to &&
         foreign_key?(relationship) do
      configured_reference =
        configured_reference(resource, table, attribute.source || attribute.name, relationship)

      # `ignore?: true` suppresses foreign key generation entirely; `unless`
      # then yields `nil`, which `Enum.find_value/2` skips.
      unless Map.get(configured_reference, :ignore?) do
        destination_attribute =
          Ash.Resource.Info.attribute(
            relationship.destination,
            relationship.destination_attribute
          )

        destination_attribute_source =
          destination_attribute.source || destination_attribute.name

        %{
          destination_attribute: destination_attribute_source,
          deferrable: configured_reference.deferrable,
          multitenancy: multitenancy(relationship.destination),
          on_delete: configured_reference.on_delete,
          on_update: configured_reference.on_update,
          match_with: configured_reference.match_with,
          match_type: configured_reference.match_type,
          name: configured_reference.name,
          primary_key?: destination_attribute.primary_key?,
          # Schema precedence: per-call context, then destination resource
          # config, then the destination repo's `:default_prefix`.
          schema:
            relationship.context[:data_layer][:schema] ||
              AshPostgres.DataLayer.Info.schema(relationship.destination) ||
              AshPostgres.DataLayer.Info.repo(relationship.destination, :mutate).config()[
                :default_prefix
              ],
          table:
            relationship.context[:data_layer][:table] ||
              AshPostgres.DataLayer.Info.table(relationship.destination)
        }
      end
    end
  end)
end
|
|
|
|
|
2021-09-22 07:10:25 +12:00
|
|
|
# Returns the user-configured `references` entry for `relationship`, or a map
# of defaults when none is declared. Always fills in `:name` (defaulting to
# the conventional `<table>_<attribute>_fkey`) and `:primary_key?` based on
# the destination attribute.
defp configured_reference(resource, table, attribute, relationship) do
  ref =
    resource
    |> AshPostgres.DataLayer.Info.references()
    |> Enum.find(&(&1.relationship == relationship.name))
    # No explicit `references` entry: fall back to a default reference config.
    |> Kernel.||(%{
      on_delete: nil,
      on_update: nil,
      match_with: nil,
      match_type: nil,
      deferrable: false,
      schema:
        relationship.context[:data_layer][:schema] ||
          AshPostgres.DataLayer.Info.schema(relationship.destination) ||
          AshPostgres.DataLayer.Info.repo(relationship.destination, :mutate).config()[
            :default_prefix
          ],
      name: nil,
      ignore?: false
    })

  ref
  |> Map.put(:name, ref.name || "#{table}_#{attribute}_fkey")
  |> Map.put(
    :primary_key?,
    Ash.Resource.Info.attribute(
      relationship.destination,
      relationship.destination_attribute
    ).primary_key?
  )
end
|
|
|
|
|
2023-04-08 02:56:06 +12:00
|
|
|
# Public wrapper around the private `migration_type/2` resolution, so other
# modules can map an Ash type + constraints to a migration column type.
def get_migration_type(type, constraints) do
  migration_type(type, constraints)
end
|
|
|
|
|
2023-04-08 02:12:32 +12:00
|
|
|
# Maps an Ash type (plus constraints) to the Ecto migration column type.
# Arrays recurse on the element type; a few common Ash types get hard-coded
# Postgres types; everything else falls through to the type's storage type.
defp migration_type({:array, type}, constraints),
  do: {:array, migration_type(type, constraints)}

defp migration_type(Ash.Type.CiString, _), do: :citext
defp migration_type(Ash.Type.UUID, _), do: :uuid
defp migration_type(Ash.Type.Integer, _), do: :bigint

defp migration_type(other, constraints) do
  # Resolve aliases/shorthand (e.g. `:string`) to the actual type module first.
  type = Ash.Type.get_type(other)

  migration_type_from_storage_type(Ash.Type.storage_type(type, constraints))
end
|
2023-04-08 02:12:32 +12:00
|
|
|
|
2021-01-22 09:32:26 +13:00
|
|
|
# Translates an Ash storage type into the Postgres column type used in
# migrations; strings become `text`/`citext`, anything else passes through.
defp migration_type_from_storage_type(storage_type) do
  case storage_type do
    :string -> :text
    :ci_string -> :citext
    other -> other
  end
end
|
2020-09-11 12:26:47 +12:00
|
|
|
|
|
|
|
# True when a real foreign key can be emitted for `relationship`: the source
# must use the AshPostgres data layer, and both ends must share the same
# mutation repo (cross-repo references cannot be enforced by Postgres).
defp foreign_key?(relationship) do
  source_repo = AshPostgres.DataLayer.Info.repo(relationship.source, :mutate)
  destination_repo = AshPostgres.DataLayer.Info.repo(relationship.destination, :mutate)

  Ash.DataLayer.data_layer(relationship.source) == AshPostgres.DataLayer &&
    source_repo == destination_repo
end
|
|
|
|
|
|
|
|
# Builds the unique-index (identity) entries of a snapshot for `resource`.
#
# Rejects identities listed in `skip_unique_indexes`, resolves identity keys
# to column names (or to SQL for calculations), resolves `where` clauses to
# SQL, and attaches the index name (custom or conventional). Raises when a
# base filter, calculation key, or where clause has no SQL equivalent
# configured, since the index could not be expressed in the database.
defp identities(resource) do
  identity_index_names = AshPostgres.DataLayer.Info.identity_index_names(resource)

  resource
  |> Ash.Resource.Info.identities()
  |> case do
    [] ->
      []

    identities ->
      base_filter = Ash.Resource.Info.base_filter(resource)

      # A base filter becomes part of each unique index's WHERE clause, so it
      # must have a SQL representation.
      if base_filter && !AshPostgres.DataLayer.Info.base_filter_sql(resource) do
        raise """
        Cannot create a unique index for a resource with a base filter without also configuring `base_filter_sql`.

        You must provide the `base_filter_sql` option, or skip unique indexes with `skip_unique_indexes`"
        """
      end

      identities
  end
  |> Enum.reject(fn identity ->
    identity.name in AshPostgres.DataLayer.Info.skip_unique_indexes(resource)
  end)
  |> Enum.sort_by(& &1.name)
  |> Enum.map(fn %{keys: keys} = identity ->
    %{
      identity
      | keys:
          Enum.map(keys, fn key ->
            # Keys may be plain attributes (use the column name) or
            # calculations (must have a configured SQL expression).
            case Ash.Resource.Info.field(resource, key) do
              %Ash.Resource.Attribute{} = attribute ->
                to_string(attribute.source || attribute.name)

              %Ash.Resource.Calculation{} ->
                AshPostgres.DataLayer.Info.calculation_to_sql(resource, key) ||
                  raise "Must define an entry for :#{key} in `postgres.calculations_to_sql`, or skip this identity with `postgres.skip_unique_indexes`"
            end
          end)
          # Sorted so snapshot diffs are stable regardless of key order.
          |> Enum.sort(),
        where:
          if identity.where do
            AshPostgres.DataLayer.Info.identity_where_to_sql(resource, identity.name) ||
              raise(
                "Must provide an entry for :#{identity.name} in `postgres.identity_wheres_to_sql`, or skip this identity with `postgres.skip_unique_indexes`"
              )
          end
    }
  end)
  |> Enum.map(&Map.take(&1, [:name, :keys, :where, :all_tenants?, :nils_distinct?]))
  |> Enum.map(fn identity ->
    Map.put(
      identity,
      :index_name,
      identity_index_names[identity.name] ||
        "#{AshPostgres.DataLayer.Info.table(resource)}_#{identity.name}_index"
    )
  end)
  |> Enum.map(&Map.put(&1, :base_filter, AshPostgres.DataLayer.Info.base_filter_sql(resource)))
end
|
|
|
|
|
2021-02-25 07:59:49 +13:00
|
|
|
# Function captures recognized as "generate a UUID" defaults, translated to
# `gen_random_uuid()` in migrations.
@uuid_functions [&Ash.UUID.generate/0, &Ecto.UUID.generate/0]

# Computes the string of Elixir code to embed as a column's migration default.
#
# A `migration_defaults` entry on the resource always wins. Otherwise:
# recognized zero-arity function defaults are translated to SQL fragments;
# MFA tuples and `nil` become "nil" (no database default); empty list/map
# literals become "[]"/"%{}"; any other literal value is rendered via the
# type's `value_to_postgres_default/3` or `EctoMigrationDefault`.
defp default(%{name: name, default: default}, resource, _repo) when is_function(default) do
  configured_default(resource, name) ||
    cond do
      default in @uuid_functions ->
        ~S[fragment("gen_random_uuid()")]

      default == (&DateTime.utc_now/0) ->
        ~S[fragment("(now() AT TIME ZONE 'utc')")]

      default == (&Date.utc_today/0) ->
        ~S[fragment("CURRENT_DATE")]

      # Arbitrary functions can't run in the database; apply the default in
      # the application layer only.
      true ->
        "nil"
    end
end

# MFA-style defaults are applied at runtime, not in the database.
defp default(%{name: name, default: {_, _, _}}, resource, _),
  do: configured_default(resource, name) || "nil"

defp default(%{name: name, default: nil}, resource, _),
  do: configured_default(resource, name) || "nil"

defp default(%{name: name, default: []}, resource, _),
  do: configured_default(resource, name) || "[]"

# Guard (not a head pattern) because `%{}` in a pattern matches ANY map.
defp default(%{name: name, default: default}, resource, _) when default == %{},
  do: configured_default(resource, name) || "%{}"

defp default(%{name: name, default: value, type: type} = attr, resource, _) do
  case configured_default(resource, name) do
    nil ->
      case migration_default(type, Map.get(attr, :constraints, []), value) do
        {:ok, default} ->
          default

        :error ->
          EctoMigrationDefault.to_default(value)
      end

    default ->
      default
  end
end
|
|
|
|
|
|
|
|
# Asks the (resolved, compiled) Ash type module to render `value` as a
# Postgres default. Returns `{:ok, default}` from the type's optional
# `value_to_postgres_default/3` callback, or `:error` when the type does not
# implement it.
defp migration_default(type, constraints, value) do
  type_module =
    type
    |> unwrap_type()
    |> Ash.Type.get_type()
    |> Code.ensure_compiled!()

  if function_exported?(type_module, :value_to_postgres_default, 3) do
    type_module.value_to_postgres_default(type_module, constraints, value)
  else
    :error
  end
end
|
|
|
|
|
|
|
|
# Strips any number of `{:array, _}` wrappers, returning the innermost type.
defp unwrap_type(type) do
  case type do
    {:array, inner} -> unwrap_type(inner)
    other -> other
  end
end
|
2022-11-21 20:38:00 +13:00
|
|
|
|
|
|
|
# Looks up a user-supplied migration default for `attribute` from the
# resource's `migration_defaults` configuration; `nil` when unset.
defp configured_default(resource, attribute) do
  defaults = AshPostgres.DataLayer.Info.migration_defaults(resource)
  defaults[attribute]
end
|
2020-09-11 12:26:47 +12:00
|
|
|
|
|
|
|
# Serializes a snapshot to pretty-printed JSON. Attributes are normalized via
# `attribute_to_binary/1`, and custom-index fields are tagged with their type
# ("atom" vs "string") so the distinction survives the JSON round-trip.
defp snapshot_to_binary(snapshot) do
  snapshot
  |> Map.update!(:attributes, fn attributes -> Enum.map(attributes, &attribute_to_binary/1) end)
  |> Map.update!(:custom_indexes, fn indexes ->
    Enum.map(indexes, fn index ->
      %{index | fields: Enum.map(index.fields, &tag_index_field/1)}
    end)
  end)
  |> Jason.encode!(pretty: true)
end

# Wraps a custom-index field so its original type can be restored on load.
defp tag_index_field(field) when is_atom(field), do: %{type: "atom", value: field}
defp tag_index_field(field) when is_binary(field), do: %{type: "string", value: field}
|
|
|
|
|
2024-05-17 12:18:17 +12:00
|
|
|
# Prepares a single attribute map for JSON serialization: encodes the
# `on_delete` reference option and flattens sized types into list form.
defp attribute_to_binary(attribute) do
  size = attribute[:size]

  attribute
  |> Map.update!(:references, fn references ->
    # `nil` (no reference) passes through; `if` returns nil for that branch.
    if references do
      Map.update!(references, :on_delete, &(&1 && references_on_delete_to_binary(&1)))
    end
  end)
  |> Map.update!(:type, &sanitize_type(&1, size))
end
|
|
|
|
|
|
|
|
# Encodes an `on_delete` option for JSON: `{:nilify, columns}` tuples become
# a two-element list (JSON has no tuples); plain atoms pass through.
defp references_on_delete_to_binary({:nilify, columns}), do: [:nilify, columns]
defp references_on_delete_to_binary(value) when is_atom(value), do: value
|
|
|
|
|
2021-11-10 22:18:36 +13:00
|
|
|
# Converts a migration type into a JSON-serializable shape: tuples become
# lists, sized types carry their size. Clause order matters — the generic
# `atom + integer size` clause must come after the varchar/binary clauses.
defp sanitize_type({:array, type}, size) do
  ["array", sanitize_type(type, size)]
end

defp sanitize_type(:varchar, size) when not is_nil(size) do
  ["varchar", size]
end

defp sanitize_type(:binary, size) when not is_nil(size) do
  ["binary", size]
end

defp sanitize_type(type, size) when is_atom(type) and is_integer(size) do
  [sanitize_type(type, nil), size]
end

# Unsized types serialize as-is.
defp sanitize_type(type, _) do
  type
end
|
|
|
|
|
2022-05-24 16:38:13 +12:00
|
|
|
# Decodes a snapshot JSON string (keys become existing atoms) and normalizes
# it to the current snapshot shape via `sanitize_snapshot/1`.
defp load_snapshot(json) do
  decoded = Jason.decode!(json, keys: :atoms!)
  sanitize_snapshot(decoded)
end
|
|
|
|
|
2022-05-24 16:38:13 +12:00
|
|
|
# Upgrades a decoded snapshot to the current expected shape: fills in keys
# added by newer versions (`Map.put_new`) and normalizes string values from
# JSON back into atoms/structured data for each section.
defp sanitize_snapshot(snapshot) do
  snapshot
  |> Map.put_new(:has_create_action, true)
  |> Map.put_new(:schema, nil)
  |> Map.update!(:identities, fn identities ->
    Enum.map(identities, &load_identity(&1, snapshot.table))
  end)
  |> Map.update!(:attributes, fn attributes ->
    Enum.map(attributes, fn attribute ->
      attribute = load_attribute(attribute, snapshot.table)

      # Older snapshots stored the reference option as `:ignore`; rename to
      # the current `:ignore?` key.
      if is_map(Map.get(attribute, :references)) do
        %{
          attribute
          | references: rewrite(attribute.references, :ignore, :ignore?)
        }
      else
        attribute
      end
    end)
  end)
  |> Map.put_new(:custom_indexes, [])
  |> Map.update!(:custom_indexes, &load_custom_indexes/1)
  |> Map.put_new(:custom_statements, [])
  |> Map.update!(:custom_statements, &load_custom_statements/1)
  |> Map.put_new(:check_constraints, [])
  |> Map.update!(:check_constraints, &load_check_constraints/1)
  |> Map.update!(:repo, &maybe_to_atom/1)
  |> Map.put_new(:multitenancy, %{
    attribute: nil,
    strategy: nil,
    global: nil
  })
  |> Map.update!(:multitenancy, &load_multitenancy/1)
  |> Map.put_new(:base_filter, nil)
end
|
|
|
|
|
2021-04-20 06:26:41 +12:00
|
|
|
# Normalizes decoded check constraints: each constraint's `:attribute` value
# becomes a list of atoms (single values are wrapped first).
defp load_check_constraints(constraints) do
  for constraint <- constraints do
    Map.update!(constraint, :attribute, fn attribute ->
      attribute
      |> List.wrap()
      |> Enum.map(&maybe_to_atom/1)
    end)
  end
end
|
|
|
|
|
2021-09-21 08:38:36 +12:00
|
|
|
# Normalizes decoded custom indexes: restores tagged field values ("atom" vs
# "string" wrappers written by `snapshot_to_binary/1`, with a passthrough for
# untagged fields from older snapshots) and fills in defaults for keys added
# by newer versions.
defp load_custom_indexes(custom_indexes) do
  Enum.map(custom_indexes || [], fn custom_index ->
    custom_index
    |> Map.update(:fields, [], fn fields ->
      Enum.map(fields, fn
        %{type: "atom", value: field} -> maybe_to_atom(field)
        %{type: "string", value: field} -> field
        # Older snapshots stored fields untagged; keep them as-is.
        field -> field
      end)
    end)
    |> Map.put_new(:include, [])
    |> Map.put_new(:nulls_distinct, true)
    |> Map.put_new(:message, nil)
    |> Map.put_new(:all_tenants?, false)
  end)
end
|
|
|
|
|
2022-07-22 05:34:38 +12:00
|
|
|
# Normalizes decoded custom statements by converting each statement's name
# back into an atom.
defp load_custom_statements(statements) do
  for statement <- statements || [] do
    Map.update!(statement, :name, &maybe_to_atom/1)
  end
end
|
|
|
|
|
2020-10-29 15:26:45 +13:00
|
|
|
# Restores the multitenancy section of a snapshot: strategy and attribute are
# converted back to atoms when present (nil passes through untouched).
defp load_multitenancy(multitenancy) do
  multitenancy
  |> Map.update!(:strategy, &(&1 && maybe_to_atom(&1)))
  |> Map.update!(:attribute, &(&1 && maybe_to_atom(&1)))
end
|
|
|
|
|
2022-05-24 16:38:13 +12:00
|
|
|
# Normalizes a decoded attribute map from a snapshot to the current shape.
#
# Restores the `{type, size}` split, migrates the legacy `:name` key to
# `:source`, and (when the attribute has a reference) renames legacy
# reference keys, fills in defaults added by newer versions, and restores
# atoms/tuples from their JSON encodings.
defp load_attribute(attribute, table) do
  type = load_type(attribute.type)

  # Sized types decode as `{type, size}` tuples; split the size back out,
  # mirroring the logic in `attributes/2`.
  {type, size} =
    case type do
      {:varchar, size} ->
        {:varchar, size}

      {:binary, size} ->
        {:binary, size}

      {other, size} when is_atom(other) and is_integer(size) ->
        {other, size}

      other ->
        {other, nil}
    end

  # Older snapshots stored the column under `:name`; current ones use `:source`.
  attribute =
    if Map.has_key?(attribute, :name) do
      Map.put(attribute, :source, maybe_to_atom(attribute.name))
    else
      Map.update!(attribute, :source, &maybe_to_atom/1)
    end

  attribute
  |> Map.put(:type, type)
  |> Map.put(:size, size)
  |> Map.put_new(:default, "nil")
  |> Map.update!(:default, &(&1 || "nil"))
  |> Map.update!(:references, fn
    nil ->
      nil

    references ->
      references
      # Rename legacy `*_field*` keys to the current `*_attribute*` names.
      |> rewrite(
        destination_field: :destination_attribute,
        destination_field_default: :destination_attribute_default,
        destination_field_generated: :destination_attribute_generated
      )
      |> Map.delete(:ignore)
      |> rewrite(:ignore?, :ignore)
      |> Map.update!(:destination_attribute, &maybe_to_atom/1)
      |> Map.put_new(:deferrable, false)
      |> Map.update!(:deferrable, fn
        "initially" -> :initially
        other -> other
      end)
      |> Map.put_new(:schema, nil)
      |> Map.put_new(:destination_attribute_default, "nil")
      |> Map.put_new(:destination_attribute_generated, false)
      |> Map.put_new(:on_delete, nil)
      |> Map.put_new(:on_update, nil)
      |> Map.update!(:on_delete, &(&1 && load_references_on_delete(&1)))
      |> Map.update!(:on_update, &(&1 && maybe_to_atom(&1)))
      |> Map.put_new(:match_with, nil)
      |> Map.put_new(:match_type, nil)
      |> Map.update!(
        :match_with,
        &(&1 && Enum.into(&1, %{}, fn {k, v} -> {maybe_to_atom(k), maybe_to_atom(v)} end))
      )
      |> Map.update!(:match_type, &(&1 && maybe_to_atom(&1)))
      |> Map.put(
        :name,
        Map.get(references, :name) || "#{table}_#{attribute.source}_fkey"
      )
      |> Map.put_new(:multitenancy, %{
        attribute: nil,
        strategy: nil,
        global: nil
      })
      |> Map.update!(:multitenancy, &load_multitenancy/1)
      |> sanitize_name(table)
  end)
end
|
|
|
|
|
2022-08-19 08:11:47 +12:00
|
|
|
# Applies several key renames at once; `renames` is a keyword list of
# `from: to` pairs, each delegated to `rewrite/3`.
defp rewrite(map, renames) do
  Enum.reduce(renames, map, fn {from, to}, acc -> rewrite(acc, from, to) end)
end
|
|
|
|
|
2022-05-24 18:11:54 +12:00
|
|
|
# Renames `key` to `to` in `map`, preserving the value; maps without `key`
# are returned unchanged.
defp rewrite(map, key, to) do
  if Map.has_key?(map, key) do
    {value, rest} = Map.pop!(map, key)
    Map.put(rest, to, value)
  else
    map
  end
end
|
|
|
|
|
2021-09-22 07:10:25 +12:00
|
|
|
# Historical constraint names that begin with "_" are relative to the table;
# prefix them with the table name. All other names are left alone.
defp sanitize_name(reference, table) do
  case reference.name do
    "_" <> _ = relative_name -> Map.put(reference, :name, "#{table}#{relative_name}")
    _ -> reference
  end
end
|
|
|
|
|
2021-01-27 09:07:26 +13:00
|
|
|
# Restores a migration type from its JSON encoding: list-encoded shapes
# (written by `sanitize_type/2`) become tuples again; anything else is
# converted back to an atom.
defp load_type(["array", type]) do
  {:array, load_type(type)}
end

defp load_type(["varchar", size]) do
  {:varchar, size}
end

defp load_type(["binary", size]) do
  {:binary, size}
end

defp load_type([string, size]) when is_binary(string) and is_integer(size) do
  {String.to_existing_atom(string), size}
end

defp load_type(type) do
  # NOTE(review): this fallback uses String.to_atom (via maybe_to_atom) while
  # the sized clause above uses String.to_existing_atom — presumably safe
  # because snapshots are generated by this tool, but worth confirming.
  maybe_to_atom(type)
end
|
|
|
|
|
2021-04-28 09:16:56 +12:00
|
|
|
# Normalizes a decoded identity: the name becomes an atom, keys are sorted
# for stable comparison, the index name is filled in when absent, and keys
# introduced by newer versions receive their defaults.
defp load_identity(identity, table) do
  newer_version_defaults = [base_filter: nil, all_tenants?: false, where: nil, nils_distinct?: true]

  identity
  |> Map.update!(:name, &maybe_to_atom/1)
  |> Map.update!(:keys, &Enum.sort/1)
  |> add_index_name(table)
  |> then(fn loaded ->
    Enum.reduce(newer_version_defaults, loaded, fn {key, value}, acc ->
      Map.put_new(acc, key, value)
    end)
  end)
end
|
2021-04-28 09:16:56 +12:00
|
|
|
|
|
|
|
# Fills in the conventional unique-index name for older snapshots that did
# not record one; an existing `:index_name` is never overwritten.
defp add_index_name(%{name: identity_name} = index, table) do
  Map.put_new(index, :index_name, "#{table}_#{identity_name}_unique_index")
end
|
2024-02-03 06:25:02 +13:00
|
|
|
|
2024-05-17 12:18:17 +12:00
|
|
|
# Restores the `on_delete` reference option from JSON: the list-encoded
# `["nilify", columns]` form becomes a `{:nilify, columns}` tuple with atom
# column names; plain values convert back to atoms.
defp load_references_on_delete(["nilify", columns]) when is_list(columns),
  do: {:nilify, Enum.map(columns, &maybe_to_atom/1)}

defp load_references_on_delete(value), do: maybe_to_atom(value)
|
|
|
|
|
2024-02-03 06:25:02 +13:00
|
|
|
# Converts a string to an atom, leaving atoms untouched. Snapshot files are
# generated by this tool, so the dynamic atom creation here is bounded.
defp maybe_to_atom(value) do
  if is_atom(value), do: value, else: String.to_atom(value)
end
|
2020-09-11 12:26:47 +12:00
|
|
|
end
|