2020-09-11 12:26:47 +12:00
defmodule AshPostgres.MigrationGenerator do
2020-09-11 15:53:43 +12:00
@moduledoc """
Generates migrations based on resource snapshots.

See `Mix.Tasks.AshPostgres.GenerateMigrations` for more information.
"""

@default_snapshot_path "priv/resource_snapshots"

import Mix.Generator

alias AshPostgres.MigrationGenerator.{Operation, Phase}

# Options for a single migration-generation run; built from the caller's
# keyword list via `struct/2` (see `opts/1`).
defstruct snapshot_path: @default_snapshot_path,
          migration_path: nil,
          tenant_migration_path: nil,
          quiet: false,
          current_snapshots: nil,
          answers: [],
          no_shell?: false,
          format: true,
          dry_run: false,
          check_generated: false,
          drop_columns: false
2020-09-11 12:26:47 +12:00
# Generates snapshots and migrations for every AshPostgres resource in the
# given api (or list of apis).
#
# Accepts the options defined on this module's struct (:snapshot_path,
# :migration_path, :dry_run, :check_generated, :drop_columns, ...).
def generate(apis, opts \\ []) do
  apis = List.wrap(apis)

  # FIX: reuse the shared `opts/1` helper instead of duplicating the
  # "check_generated implies dry_run" normalization inline.
  opts = opts(opts)

  all_resources = Enum.flat_map(apis, &Ash.Api.resources/1)

  {tenant_snapshots, snapshots} =
    all_resources
    |> Enum.filter(&(Ash.DataLayer.data_layer(&1) == AshPostgres.DataLayer))
    |> Enum.filter(&AshPostgres.migrate?/1)
    |> Enum.flat_map(&get_snapshots(&1, all_resources))
    |> Enum.split_with(&(&1.multitenancy.strategy == :context))

  # Globally-shared multitenant resources also get a copy in the main
  # (non-tenant) migrations, with multitenancy stripped.
  tenant_snapshots_to_include_in_global =
    tenant_snapshots
    |> Enum.filter(& &1.multitenancy.global)
    |> Enum.map(&Map.put(&1, :multitenancy, %{strategy: nil, attribute: nil, global: nil}))

  snapshots = snapshots ++ tenant_snapshots_to_include_in_global

  repos =
    snapshots
    |> Enum.map(& &1.repo)
    |> Enum.uniq()

  create_extension_migrations(repos, opts)
  create_migrations(tenant_snapshots, opts, true)
  create_migrations(snapshots, opts, false)
end
2021-04-05 08:09:11 +12:00
@doc """
A work in progress utility for getting snapshots.

Does not support everything supported by the migration generator.
"""
def take_snapshots(api, repo) do
  all_resources = Ash.Api.resources(api)

  all_resources
  |> Enum.filter(fn resource ->
    Ash.DataLayer.data_layer(resource) == AshPostgres.DataLayer and
      AshPostgres.repo(resource) == repo
  end)
  |> Enum.flat_map(&get_snapshots(&1, all_resources))
end
@doc """
A work in progress utility for getting operations between snapshots.

Does not support everything supported by the migration generator.
"""
def get_operations_from_snapshots(old_snapshots, new_snapshots, opts \\ []) do
  # Forced non-interactive: ambiguities raise instead of prompting.
  opts = %{opts(opts) | no_shell?: true}

  old_snapshots = Enum.map(old_snapshots, &sanitize_snapshot/1)

  new_snapshots
  |> deduplicate_snapshots(opts, old_snapshots)
  |> fetch_operations(opts)
  |> Enum.flat_map(&elem(&1, 1))
  |> Enum.uniq()
  |> organize_operations()
end
# Builds the option struct from a keyword list; `check_generated` implies
# `dry_run` (we never write files when only checking).
defp opts(opts) do
  built = struct(__MODULE__, opts)

  if built.check_generated do
    %{built | dry_run: true}
  else
    built
  end
end
2021-03-03 06:33:24 +13:00
# Creates a migration installing any repo-declared Postgres extensions that
# are not yet recorded in the `extensions.json` snapshot.
defp create_extension_migrations(repos, opts) do
  for repo <- repos do
    snapshot_file = Path.join(opts.snapshot_path, "extensions.json")

    installed_extensions =
      if File.exists?(snapshot_file) do
        snapshot_file
        |> File.read!()
        |> Jason.decode!()
      else
        []
      end

    to_install = List.wrap(repo.installed_extensions()) -- List.wrap(installed_extensions)

    if Enum.empty?(to_install) do
      :ok
    else
      # BUG FIX: the migration name must not already include ".exs" —
      # the extension is appended once below when building the file path.
      # Previously the generated file ended in ".exs.exs".
      {module, migration_name} =
        case to_install do
          [single] ->
            {"install_#{single}", "#{timestamp(true)}_install_#{single}"}

          multiple ->
            {"install_#{Enum.count(multiple)}_extensions",
             "#{timestamp(true)}_install_#{Enum.count(multiple)}_extensions"}
        end

      migration_file =
        opts
        |> migration_path(repo)
        |> Path.join(migration_name <> ".exs")

      module_name = Module.concat([repo, Migrations, Macro.camelize(module)])

      install =
        Enum.map_join(to_install, "\n", fn extension ->
          "execute(\"CREATE EXTENSION IF NOT EXISTS \\\"#{extension}\\\"\")"
        end)

      uninstall =
        Enum.map_join(to_install, "\n", fn extension ->
          "execute(\"DROP EXTENSION IF EXISTS \\\"#{extension}\\\"\")"
        end)

      contents = """
      defmodule #{inspect(module_name)} do
        @moduledoc \"\"\"
        Installs any extensions that are mentioned in the repo's `installed_extensions/0` callback

        This file was autogenerated with `mix ash_postgres.generate_migrations`
        \"\"\"

        use Ecto.Migration

        def up do
          #{install}
        end

        def down do
          # Uncomment this if you actually want to uninstall the extensions
          # when this migration is rolled back.
          #{uninstall}
        end
      end
      """

      snapshot_contents = Jason.encode!(repo.installed_extensions(), pretty: true)

      contents = format(contents, opts)
      create_file(snapshot_file, snapshot_contents, force: true)
      create_file(migration_file, contents)
    end
  end
end
2020-10-29 15:26:45 +13:00
# Generates one migration per repo from the given snapshots.
# `tenant?` selects the tenant migration path/module naming.
defp create_migrations(snapshots, opts, tenant?) do
  snapshots
  |> Enum.group_by(& &1.repo)
  |> Enum.each(fn {repo, repo_snapshots} ->
    deduped = deduplicate_snapshots(repo_snapshots, opts)

    snapshots_with_operations = fetch_operations(deduped, opts)
    snapshots = Enum.map(snapshots_with_operations, &elem(&1, 0))

    snapshots_with_operations
    |> Enum.flat_map(&elem(&1, 1))
    |> Enum.uniq()
    |> case do
      [] ->
        tenant_str = if tenant?, do: "tenant ", else: ""

        Mix.shell().info(
          "No #{tenant_str}changes detected, so no migrations or snapshots have been created."
        )

        :ok

      operations ->
        # In --check mode, any pending operation means generation is stale.
        if opts.check_generated, do: exit({:shutdown, 1})

        operations
        |> organize_operations()
        |> build_up_and_down()
        |> write_migration!(snapshots, repo, opts, tenant?)
    end
  end)
end
2021-04-05 08:09:11 +12:00
defp organize_operations([]), do: []

# Sorts, streamlines, and groups raw operations into migration phases.
defp organize_operations(operations) do
  operations
  |> sort_operations()
  |> streamline()
  |> group_into_phases()
  |> comment_out_phases()
end
2020-11-25 12:11:02 +13:00
# Marks a phase as commented out when every operation it contains is
# commented out; everything else passes through untouched.
defp comment_out_phases(phases) do
  Enum.map(phases, fn
    %{operations: operations} = phase ->
      if Enum.all?(operations, &match?(%{commented?: true}, &1)) do
        %{phase | commented?: true}
      else
        phase
      end

    other ->
      other
  end)
end
2021-04-05 08:09:11 +12:00
# Merges multiple snapshots that target the same table into a single
# snapshot, pairing each with its previously-stored snapshot (if any).
# Returns a list of `{merged_snapshot, existing_snapshot}` tuples.
defp deduplicate_snapshots(snapshots, opts, existing_snapshots \\ []) do
  snapshots
  |> Enum.group_by(& &1.table)
  |> Enum.map(fn {_table, [snapshot | _] = group} ->
    existing_snapshot =
      if opts.no_shell? do
        Enum.find(existing_snapshots, &(&1.table == snapshot.table))
      else
        get_existing_snapshot(snapshot, opts)
      end

    {primary_key, identities} = merge_primary_keys(existing_snapshot, group, opts)

    attributes = Enum.flat_map(group, & &1.attributes)

    count_with_create =
      group
      |> Enum.filter(& &1.has_create_action)
      |> Enum.count()

    snapshot_identities =
      group
      |> Enum.map(& &1.identities)
      |> Enum.concat()

    new_snapshot = %{
      snapshot
      | attributes: merge_attributes(attributes, snapshot.table, count_with_create),
        identities: snapshot_identities
    }

    all_identities =
      new_snapshot.identities
      |> Kernel.++(identities)
      |> Enum.sort_by(& &1.name)
      # We sort the identities by there being an identity with a matching name
      # in the existing snapshot, so that we prefer identities that currently
      # exist over new ones.
      |> Enum.sort_by(fn identity ->
        existing_snapshot
        |> Kernel.||(%{})
        |> Map.get(:identities, [])
        |> Enum.any?(fn existing_identity ->
          existing_identity.name == identity.name
        end)
        |> Kernel.!()
      end)
      |> Enum.uniq_by(fn identity ->
        {Enum.sort(identity.keys), identity.base_filter}
      end)

    new_snapshot = %{new_snapshot | identities: all_identities}

    {
      %{
        new_snapshot
        | attributes:
            Enum.map(new_snapshot.attributes, fn attribute ->
              # `in` yields a boolean, so this sets primary_key? true/false.
              %{attribute | primary_key?: attribute.name in primary_key}
            end)
      },
      existing_snapshot
    }
  end)
end
2021-03-03 05:38:12 +13:00
# Collapses same-named attributes (from multiple snapshots of one table)
# into a single attribute definition. `count` is the number of snapshots
# with a create action; an attribute missing from some of them must be
# nullable.
defp merge_attributes(attributes, table, count) do
  attributes
  |> Enum.group_by(& &1.name)
  |> Enum.map(fn {name, grouped} ->
    %{
      name: name,
      type: merge_types(Enum.map(grouped, & &1.type), name, table),
      default: merge_defaults(Enum.map(grouped, & &1.default)),
      allow_nil?: Enum.any?(grouped, & &1.allow_nil?) || Enum.count(grouped) < count,
      generated?: Enum.any?(grouped, & &1.generated?),
      references: merge_references(Enum.map(grouped, & &1.references), name, table),
      primary_key?: false
    }
  end)
end
# Merges the reference configurations of same-named attributes; each field
# must agree across all non-nil references or `merge_uniq!/4` raises.
defp merge_references(references, name, table) do
  references
  |> Enum.reject(&is_nil/1)
  |> Enum.uniq()
  |> case do
    [] ->
      nil

    refs ->
      %{
        destination_field: merge_uniq!(refs, table, :destination_field, name),
        multitenancy: merge_uniq!(refs, table, :multitenancy, name),
        on_delete: merge_uniq!(refs, table, :on_delete, name),
        on_update: merge_uniq!(refs, table, :on_update, name),
        name: merge_uniq!(refs, table, :name, name),
        table: merge_uniq!(refs, table, :table, name)
      }
  end
end
# Extracts a single agreed-upon value for `field` across all references;
# nil when unset everywhere, raises on conflicting values.
defp merge_uniq!(references, table, field, attribute) do
  distinct =
    references
    |> Enum.map(&Map.get(&1, field))
    |> Enum.filter(& &1)
    |> Enum.uniq()

  case distinct do
    [] ->
      nil

    [value] ->
      value

    values ->
      formatted = Enum.map_join(values, "\n", &"* #{inspect(&1)}")

      raise """
      Conflicting configurations for references for #{table}.#{attribute}:

      Values:
      #{formatted}
      """
  end
end
# All snapshots of one table must agree on an attribute's type.
defp merge_types(types, name, table) do
  case Enum.uniq(types) do
    [type] ->
      type

    conflicting ->
      raise "Conflicting types for table `#{table}.#{name}`: #{inspect(conflicting)}"
  end
end
# When snapshots disagree on a default, fall back to the literal "nil"
# (defaults are stored as code strings).
defp merge_defaults(defaults) do
  case Enum.uniq(defaults) do
    [default] -> default
    _ -> "nil"
  end
end
2021-04-05 08:09:11 +12:00
# No existing snapshot: if all snapshots agree on the primary key, use it;
# otherwise ask the user to choose (or raise when no shell is available)
# and turn the unchosen key sets into unique indexes.
defp merge_primary_keys(nil, [snapshot | _] = snapshots, opts) do
  snapshots
  |> Enum.map(&pkey_names(&1.attributes))
  |> Enum.uniq()
  |> case do
    [pkey_names] ->
      {pkey_names, []}

    unique_primary_keys ->
      unique_primary_key_names =
        unique_primary_keys
        |> Enum.with_index()
        |> Enum.map_join("\n", fn {pkey, index} ->
          "#{index}: #{inspect(pkey)}"
        end)

      choice =
        if opts.no_shell? do
          raise "Unimplemented: cannot resolve primary key ambiguity without shell input"
        else
          message = """
          Which primary key should be used for the table `#{snapshot.table}` (enter the number)?

          #{unique_primary_key_names}
          """

          message
          |> Mix.shell().prompt()
          |> String.to_integer()
        end

      identities =
        unique_primary_keys
        |> List.delete_at(choice)
        |> Enum.map(fn pkey_names ->
          %{
            keys: pkey_names,
            name: snapshot.table <> "_" <> Enum.join(pkey_names, "_")
          }
        end)

      primary_key = Enum.sort(Enum.at(unique_primary_keys, choice))

      identities =
        Enum.reject(identities, fn identity ->
          Enum.sort(identity.keys) == primary_key
        end)

      {primary_key, identities}
  end
end
2021-04-05 08:09:11 +12:00
# Existing snapshot present: keep its primary key when at least one new
# snapshot matches it; other key sets become unique indexes. Otherwise,
# fall back to the ambiguity-resolution clause above.
defp merge_primary_keys(existing_snapshot, snapshots, opts) do
  pkey_names = pkey_names(existing_snapshot.attributes)

  one_pkey_exists? =
    Enum.any?(snapshots, fn snapshot ->
      pkey_names(snapshot.attributes) == pkey_names
    end)

  if one_pkey_exists? do
    identities =
      snapshots
      |> Enum.map(&pkey_names(&1.attributes))
      |> Enum.uniq()
      |> Enum.reject(&(&1 == pkey_names))
      |> Enum.map(fn other_pkey ->
        %{
          keys: other_pkey,
          name: existing_snapshot.table <> "_" <> Enum.join(other_pkey, "_")
        }
      end)

    {pkey_names, identities}
  else
    merge_primary_keys(nil, snapshots, opts)
  end
end
# Sorted names of all primary-key attributes.
defp pkey_names(attributes) do
  attributes
  |> Enum.flat_map(fn attribute ->
    if attribute.primary_key?, do: [attribute.name], else: []
  end)
  |> Enum.sort()
end
2021-03-03 06:33:24 +13:00
# Resolves the (tenant) migrations directory for a repo, honoring the
# configured override paths and defaulting under "priv/".
defp migration_path(opts, repo, tenant? \\ false) do
  repo_name = repo_name(repo)

  {base, subdir} =
    if tenant? do
      {opts.tenant_migration_path || "priv/", "tenant_migrations"}
    else
      {opts.migration_path || "priv/", "migrations"}
    end

  base
  |> Path.join(repo_name)
  |> Path.join(subdir)
end
# Underscored last segment of the repo module, e.g. MyApp.Repo -> "repo".
defp repo_name(repo) do
  repo
  |> Module.split()
  |> List.last()
  |> Macro.underscore()
end
# Renders the migration module for the given up/down bodies, writes new
# snapshots, and creates (or, in dry-run mode, prints) the migration file.
defp write_migration!({up, down}, snapshots, repo, opts, tenant?) do
  repo_name = repo_name(repo)
  migration_path = migration_path(opts, repo, tenant?)

  # Number the migration after the ones already on disk.
  count =
    migration_path
    |> Path.join("*_migrate_resources*")
    |> Path.wildcard()
    |> Enum.count()
    |> Kernel.+(1)

  migration_name = "#{timestamp(true)}_migrate_resources#{count}"

  migration_file = Path.join(migration_path, migration_name <> ".exs")

  module_name =
    if tenant? do
      Module.concat([repo, TenantMigrations, Macro.camelize("migrate_resources#{count}")])
    else
      Module.concat([repo, Migrations, Macro.camelize("migrate_resources#{count}")])
    end

  contents = """
  defmodule #{inspect(module_name)} do
    @moduledoc \"\"\"
    Updates resources based on their most recent snapshots.

    This file was autogenerated with `mix ash_postgres.generate_migrations`
    \"\"\"

    use Ecto.Migration

    def up do
      #{up}
    end

    def down do
      #{down}
    end
  end
  """

  try do
    contents = format(contents, opts)
    create_new_snapshot(snapshots, repo_name, opts, tenant?)

    if opts.dry_run do
      Mix.shell().info(contents)
    else
      create_file(migration_file, contents)
    end
  rescue
    exception ->
      reraise(
        """
        Exception while formatting generated code:
        #{Exception.format(:error, exception, __STACKTRACE__)}

        Code:

        #{add_line_numbers(contents)}

        To generate it unformatted anyway, but manually fix it, use the `--no-format` option.
        """,
        __STACKTRACE__
      )
  end
end
# Prefixes each line with its zero-based index, padded to a fixed width,
# for readable error output.
defp add_line_numbers(contents) do
  lines = String.split(contents, "\n")
  width = lines |> length() |> to_string() |> String.length()

  lines
  |> Enum.with_index()
  |> Enum.map_join("\n", fn {line, index} ->
    "#{String.pad_trailing(to_string(index), width, " ")} | #{line}"
  end)
end
# Writes a timestamped JSON snapshot per table (skipped on dry runs) and
# migrates the legacy flat "<table>.json" file into "<table>/initial.json".
defp create_new_snapshot(snapshots, repo_name, opts, tenant?) do
  unless opts.dry_run do
    Enum.each(snapshots, fn snapshot ->
      snapshot_binary = snapshot_to_binary(snapshot)

      snapshot_folder =
        if tenant? do
          opts.snapshot_path
          |> Path.join(repo_name)
          |> Path.join("tenants")
        else
          Path.join(opts.snapshot_path, repo_name)
        end

      snapshot_file = Path.join(snapshot_folder, "#{snapshot.table}/#{timestamp()}.json")

      File.mkdir_p(Path.dirname(snapshot_file))
      File.write!(snapshot_file, snapshot_binary, [])

      old_snapshot_file = Path.join(snapshot_folder, "#{snapshot.table}.json")

      if File.exists?(old_snapshot_file) do
        new_snapshot_file = Path.join(snapshot_folder, "#{snapshot.table}/initial.json")
        File.rename(old_snapshot_file, new_snapshot_file)
      end
    end)
  end
end
2021-04-05 08:09:11 +12:00
@doc false
# Renders each phase into migration code; `down` runs the phases in
# reverse order. Fully-commented phases stay commented in the output.
def build_up_and_down(phases) do
  up =
    Enum.map_join(phases, "\n", fn phase ->
      phase
      |> phase.__struct__.up()
      |> Kernel.<>("\n")
      |> maybe_comment(phase)
    end)

  down =
    phases
    |> Enum.reverse()
    |> Enum.map_join("\n", fn phase ->
      phase
      |> phase.__struct__.down()
      |> Kernel.<>("\n")
      |> maybe_comment(phase)
    end)

  {up, down}
end
2020-11-25 12:11:02 +13:00
# Prefixes every not-already-commented line with "# " when the phase (or
# operation) is marked commented; otherwise returns the text unchanged.
defp maybe_comment(text, %{commented?: true}) do
  text
  |> String.split("\n")
  |> Enum.map_join("\n", fn line ->
    if String.starts_with?(line, "#") do
      line
    else
      "# #{line}"
    end
  end)
end

defp maybe_comment(text, _), do: text
2020-09-11 12:26:47 +12:00
# Formats generated code with the Elixir formatter (honoring ecto_sql's
# locals_without_parens) unless formatting was disabled via --no-format.
defp format(contents, opts) do
  if opts.format do
    Code.format_string!(contents, locals_without_parens: ecto_sql_locals_without_parens())
  else
    contents
  end
rescue
  exception ->
    IO.puts("""
    Exception while formatting:

    #{inspect(exception)}

    #{inspect(contents)}
    """)

    reraise exception, __STACKTRACE__
end
2021-01-11 04:59:33 +13:00
# Reads ecto_sql's formatter config (when vendored in deps/) so generated
# migrations format the same way `mix format` would.
defp ecto_sql_locals_without_parens do
  path = Path.join(File.cwd!(), "deps/ecto_sql/.formatter.exs")

  if File.exists?(path) do
    {formatter_opts, _bindings} = Code.eval_file(path)
    Keyword.get(formatter_opts, :locals_without_parens, [])
  else
    []
  end
end
2020-09-11 12:26:47 +12:00
defp streamline(ops, acc \\ [])

defp streamline([], acc), do: Enum.reverse(acc)

# Folds a later AlterAttribute that only adds references onto the original
# AddAttribute for the same column, so the column is created with its
# reference in one step.
defp streamline(
       [
         %Operation.AddAttribute{attribute: %{name: name}, table: table} = add
         | rest
       ],
       acc
     ) do
  rest
  |> Enum.take_while(&(&1.table == table))
  |> Enum.with_index()
  |> Enum.find(fn
    {%Operation.AlterAttribute{
       new_attribute: %{name: ^name, references: references},
       old_attribute: %{name: ^name}
     }, _index}
    when not is_nil(references) ->
      true

    _ ->
      false
  end)
  |> case do
    nil ->
      streamline(rest, [add | acc])

    {alter, index} ->
      new_attribute = Map.put(add.attribute, :references, alter.new_attribute.references)
      streamline(List.delete_at(rest, index), [%{add | attribute: new_attribute} | acc])
  end
end

defp streamline([first | rest], acc) do
  streamline(rest, [first | acc])
end
# Groups consecutive operations on the same table into Create/Alter phases;
# operations flagged `no_phase` are emitted standalone.
defp group_into_phases(ops, current \\ nil, acc \\ [])

defp group_into_phases([], nil, acc), do: Enum.reverse(acc)

defp group_into_phases([], phase, acc) do
  phase = %{phase | operations: Enum.reverse(phase.operations)}
  Enum.reverse([phase | acc])
end

defp group_into_phases(
       [%Operation.CreateTable{table: table, multitenancy: multitenancy} | rest],
       nil,
       acc
     ) do
  group_into_phases(rest, %Phase.Create{table: table, multitenancy: multitenancy}, acc)
end

defp group_into_phases(
       [%Operation.AddAttribute{table: table} = op | rest],
       %{table: table} = phase,
       acc
     ) do
  group_into_phases(rest, %{phase | operations: [op | phase.operations]}, acc)
end

defp group_into_phases(
       [%Operation.AlterAttribute{table: table} = op | rest],
       %Phase.Alter{table: table} = phase,
       acc
     ) do
  group_into_phases(rest, %{phase | operations: [op | phase.operations]}, acc)
end

defp group_into_phases(
       [%Operation.RenameAttribute{table: table} = op | rest],
       %Phase.Alter{table: table} = phase,
       acc
     ) do
  group_into_phases(rest, %{phase | operations: [op | phase.operations]}, acc)
end

defp group_into_phases(
       [%Operation.RemoveAttribute{table: table} = op | rest],
       %{table: table} = phase,
       acc
     ) do
  group_into_phases(rest, %{phase | operations: [op | phase.operations]}, acc)
end

defp group_into_phases([%{no_phase: true} = op | rest], nil, acc) do
  group_into_phases(rest, nil, [op | acc])
end

defp group_into_phases([operation | rest], nil, acc) do
  phase = %Phase.Alter{
    operations: [operation],
    multitenancy: operation.multitenancy,
    table: operation.table
  }

  group_into_phases(rest, phase, acc)
end

# Current phase can't absorb the next operation: close it and start over.
defp group_into_phases(operations, phase, acc) do
  phase = %{phase | operations: Enum.reverse(phase.operations)}
  group_into_phases(operations, nil, [phase | acc])
end
# Insertion-sorts operations using the `after?/2` dependency relation, so
# each operation lands after everything it depends on.
defp sort_operations(ops, acc \\ [])
defp sort_operations([], acc), do: acc
defp sort_operations([op | rest], []), do: sort_operations(rest, [op])

defp sort_operations([op | rest], acc) do
  forward = Enum.reverse(acc)
  after_index = Enum.find_index(forward, &after?(op, &1))

  new_acc =
    if after_index do
      forward
      |> List.insert_at(after_index, op)
      |> Enum.reverse()
    else
      [op | Enum.reverse(forward)]
    end

  sort_operations(rest, new_acc)
end
# Dependency relation used by `sort_operations/2`: returns true when the
# first operation must come after the second (e.g. indexes after the
# columns they cover, columns after their table, foreign-key drops around
# the alters that need them).
defp after?(
       %Operation.AddUniqueIndex{
         identity: %{keys: keys},
         table: table,
         multitenancy: multitenancy
       },
       %Operation.AddAttribute{table: table, attribute: %{name: name}}
     ) do
  name in keys || (multitenancy.attribute && name == multitenancy.attribute)
end

defp after?(%Operation.AddUniqueIndex{table: table}, %Operation.RemoveUniqueIndex{table: table}),
  do: true

defp after?(
       %Operation.AddUniqueIndex{identity: %{keys: keys}, table: table},
       %Operation.AlterAttribute{table: table, new_attribute: %{name: name}}
     ),
     do: name in keys

defp after?(
       %Operation.AddUniqueIndex{identity: %{keys: keys}, table: table},
       %Operation.RenameAttribute{table: table, new_attribute: %{name: name}}
     ),
     do: name in keys

defp after?(
       %Operation.RemoveUniqueIndex{identity: %{keys: keys}, table: table},
       %Operation.RemoveAttribute{table: table, attribute: %{name: name}}
     ),
     do: name in keys

defp after?(
       %Operation.RemoveUniqueIndex{identity: %{keys: keys}, table: table},
       %Operation.RenameAttribute{table: table, old_attribute: %{name: name}}
     ),
     do: name in keys

defp after?(%Operation.AlterAttribute{table: table}, %Operation.DropForeignKey{
       table: table,
       direction: :up
     }),
     do: true

defp after?(
       %Operation.DropForeignKey{table: table, direction: :down},
       %Operation.AlterAttribute{table: table}
     ),
     do: true

defp after?(%Operation.AddAttribute{table: table}, %Operation.CreateTable{table: table}),
  do: true

defp after?(
       %Operation.AddAttribute{
         attribute: %{references: %{table: table, destination_field: name}}
       },
       %Operation.AddAttribute{table: table, attribute: %{name: name}}
     ),
     do: true

defp after?(
       %Operation.AddAttribute{table: table, attribute: %{primary_key?: false}},
       %Operation.AddAttribute{table: table, attribute: %{primary_key?: true}}
     ),
     do: true

defp after?(
       %Operation.AddAttribute{table: table, attribute: %{primary_key?: true}},
       %Operation.RemoveAttribute{table: table, attribute: %{primary_key?: true}}
     ),
     do: true

defp after?(
       %Operation.AlterAttribute{
         table: table,
         new_attribute: %{primary_key?: false},
         old_attribute: %{primary_key?: true}
       },
       %Operation.AddAttribute{table: table, attribute: %{primary_key?: true}}
     ),
     do: true

defp after?(
       %Operation.RemoveAttribute{attribute: %{name: name}, table: table},
       %Operation.AlterAttribute{
         old_attribute: %{references: %{table: table, destination_field: name}}
       }
     ),
     do: true

defp after?(
       %Operation.AlterAttribute{
         new_attribute: %{references: %{table: table, destination_field: name}}
       },
       %Operation.AddAttribute{table: table, attribute: %{name: name}}
     ),
     do: true

defp after?(%Operation.AddUniqueIndex{table: table}, %Operation.CreateTable{table: table}),
  do: true

defp after?(%Operation.AlterAttribute{new_attribute: %{references: references}}, _)
     when not is_nil(references),
     do: true

defp after?(_, _), do: false
2020-11-20 16:09:26 +13:00
# Computes the operations for each `{snapshot, existing_snapshot}` pair,
# dropping pairs with nothing to do.
defp fetch_operations(snapshots, opts) do
  snapshots
  |> Enum.map(fn {snapshot, existing_snapshot} ->
    {snapshot, do_fetch_operations(snapshot, existing_snapshot, opts)}
  end)
  |> Enum.reject(fn {_snapshot, ops} -> ops == [] end)
end
2020-11-20 16:09:26 +13:00
defp do_fetch_operations(snapshot, existing_snapshot, opts, acc \\ [])

# No stored snapshot: diff against an empty table and prepend a CreateTable.
defp do_fetch_operations(snapshot, nil, opts, acc) do
  empty_snapshot = %{
    attributes: [],
    identities: [],
    table: snapshot.table,
    repo: snapshot.repo,
    multitenancy: %{
      attribute: nil,
      strategy: nil,
      global: nil
    }
  }

  do_fetch_operations(snapshot, empty_snapshot, opts, [
    %Operation.CreateTable{
      table: snapshot.table,
      multitenancy: snapshot.multitenancy,
      old_multitenancy: empty_snapshot.multitenancy
    }
    | acc
  ])
end
2020-11-20 16:09:26 +13:00
# Diffs the new snapshot against the stored one, producing attribute
# operations plus unique-index add/remove operations. A multitenancy
# change forces every identity's index to be rebuilt.
defp do_fetch_operations(snapshot, old_snapshot, opts, acc) do
  attribute_operations = attribute_operations(snapshot, old_snapshot, opts)

  rewrite_all_identities? = changing_multitenancy_affects_identities?(snapshot, old_snapshot)

  same_identity? = fn left, right ->
    Enum.sort(left.keys) == Enum.sort(right.keys) && left.base_filter == right.base_filter
  end

  unique_indexes_to_remove =
    if rewrite_all_identities? do
      old_snapshot.identities
    else
      Enum.reject(old_snapshot.identities, fn old_identity ->
        Enum.find(snapshot.identities, &same_identity?.(old_identity, &1))
      end)
    end
    |> Enum.map(fn identity ->
      %Operation.RemoveUniqueIndex{identity: identity, table: snapshot.table}
    end)

  unique_indexes_to_add =
    if rewrite_all_identities? do
      snapshot.identities
    else
      Enum.reject(snapshot.identities, fn identity ->
        Enum.find(old_snapshot.identities, &same_identity?.(&1, identity))
      end)
    end
    |> Enum.map(fn identity ->
      %Operation.AddUniqueIndex{identity: identity, table: snapshot.table}
    end)

  [unique_indexes_to_remove, attribute_operations, unique_indexes_to_add, acc]
  |> Enum.concat()
  |> Enum.map(&Map.put(&1, :multitenancy, snapshot.multitenancy))
  |> Enum.map(&Map.put(&1, :old_multitenancy, old_snapshot.multitenancy))
end
2020-11-20 16:09:26 +13:00
# Diffs the attributes of the two snapshots into Add/Alter/Remove/Rename
# operations. Added attributes with references are created first and then
# altered to add the reference, so foreign keys are applied separately.
defp attribute_operations(snapshot, old_snapshot, opts) do
  attributes_to_add =
    Enum.reject(snapshot.attributes, fn attribute ->
      Enum.find(old_snapshot.attributes, &(&1.name == attribute.name))
    end)

  attributes_to_remove =
    Enum.reject(old_snapshot.attributes, fn attribute ->
      Enum.find(snapshot.attributes, &(&1.name == attribute.name))
    end)

  # A matching add+remove pair may actually be a rename; possibly resolved
  # interactively.
  {attributes_to_add, attributes_to_remove, attributes_to_rename} =
    resolve_renames(snapshot.table, attributes_to_add, attributes_to_remove, opts)

  attributes_to_alter =
    snapshot.attributes
    |> Enum.map(fn attribute ->
      {attribute,
       Enum.find(old_snapshot.attributes, &(&1.name == attribute.name && &1 != attribute))}
    end)
    |> Enum.filter(&elem(&1, 1))

  rename_attribute_events =
    Enum.map(attributes_to_rename, fn {new, old} ->
      %Operation.RenameAttribute{new_attribute: new, old_attribute: old, table: snapshot.table}
    end)

  add_attribute_events =
    Enum.flat_map(attributes_to_add, fn attribute ->
      if attribute.references do
        [
          %Operation.AddAttribute{
            attribute: Map.delete(attribute, :references),
            table: snapshot.table
          },
          %Operation.AlterAttribute{
            old_attribute: Map.delete(attribute, :references),
            new_attribute: attribute,
            table: snapshot.table
          }
        ]
      else
        [
          %Operation.AddAttribute{
            attribute: attribute,
            table: snapshot.table
          }
        ]
      end
    end)

  alter_attribute_events =
    Enum.flat_map(attributes_to_alter, fn {new_attribute, old_attribute} ->
      # When the reference config changed, the old foreign key has to be
      # dropped before the alter (and the new one dropped on rollback).
      if has_reference?(old_snapshot.multitenancy, old_attribute) and
           Map.get(old_attribute, :references) != Map.get(new_attribute, :references) do
        [
          %Operation.DropForeignKey{
            attribute: old_attribute,
            table: snapshot.table,
            multitenancy: old_snapshot.multitenancy,
            direction: :up
          },
          %Operation.AlterAttribute{
            new_attribute: new_attribute,
            old_attribute: old_attribute,
            table: snapshot.table
          },
          %Operation.DropForeignKey{
            attribute: new_attribute,
            table: snapshot.table,
            multitenancy: snapshot.multitenancy,
            direction: :down
          }
        ]
      else
        [
          %Operation.AlterAttribute{
            new_attribute: Map.delete(new_attribute, :references),
            old_attribute: Map.delete(old_attribute, :references),
            table: snapshot.table
          }
        ]
      end
    end)

  remove_attribute_events =
    Enum.map(attributes_to_remove, fn attribute ->
      # Removals are commented out unless --drop-columns was passed.
      %Operation.RemoveAttribute{
        attribute: attribute,
        table: snapshot.table,
        commented?: !opts.drop_columns
      }
    end)

  add_attribute_events ++
    alter_attribute_events ++ remove_attribute_events ++ rename_attribute_events
end
2020-11-25 12:11:02 +13:00
# Identities (unique indexes) may need to be rebuilt whenever the
# multitenancy configuration differs between the old and new snapshots.
def changing_multitenancy_affects_identities?(
      %{multitenancy: new_multitenancy},
      %{multitenancy: old_multitenancy}
    ) do
  new_multitenancy != old_multitenancy
end
2020-11-18 12:35:57 +13:00
# Returns true when `attribute` carries reference (foreign key) information
# that should actually be expressed as a constraint. A reference into a
# context-multitenant (schema-based) table is suppressed when the referencing
# side is not itself schema-scoped (no multitenancy, or attribute strategy),
# since the constraint could not span schemas.
def has_reference?(multitenancy, attribute) do
  case Map.get(attribute, :references) do
    nil ->
      false

    references ->
      context_reference? =
        references.multitenancy && references.multitenancy.strategy == :context

      unscoped_source? = is_nil(multitenancy) || multitenancy.strategy == :attribute

      !(context_reference? && unscoped_source?)
  end
end
2020-09-11 12:26:47 +12:00
# Loads the most recent stored snapshot for `snapshot`'s table, or falls back
# to the legacy flat-file layout (see `get_old_snapshot/2`). Returns `nil`
# when no snapshot has ever been written.
def get_existing_snapshot(snapshot, opts) do
  repo_name =
    snapshot.repo
    |> Module.split()
    |> List.last()
    |> Macro.underscore()

  # Context-based (schema) multitenant resources keep their snapshots in a
  # dedicated "tenants" subfolder.
  folder =
    case snapshot.multitenancy.strategy do
      :context -> Path.join([opts.snapshot_path, repo_name, "tenants"])
      _ -> Path.join(opts.snapshot_path, repo_name)
    end

  snapshot_folder = Path.join(folder, snapshot.table)

  if File.exists?(snapshot_folder) do
    timestamps =
      snapshot_folder
      |> File.ls!()
      |> Enum.filter(&String.ends_with?(&1, ".json"))
      |> Enum.map(&String.trim_trailing(&1, ".json"))
      |> Enum.flat_map(fn name ->
        # Only keep file names that are purely an integer timestamp
        case Integer.parse(name) do
          {int, ""} -> [int]
          _ -> []
        end
      end)

    case timestamps do
      [] ->
        get_old_snapshot(folder, snapshot)

      timestamps ->
        snapshot_folder
        |> Path.join("#{Enum.max(timestamps)}.json")
        |> File.read!()
        |> load_snapshot()
    end
  else
    get_old_snapshot(folder, snapshot)
  end
end
2020-09-11 12:26:47 +12:00
2021-01-24 16:45:15 +13:00
T
2021-01-07 18:37:41 +13:00
# Adapter code for the old snapshot layout, where snapshots were stored as a
# single flat `<table>.json` file rather than one file per timestamp.
# Returns `nil` when no legacy snapshot exists.
defp get_old_snapshot(folder, snapshot) do
  legacy_file = Path.join(folder, "#{snapshot.table}.json")

  case File.exists?(legacy_file) do
    true ->
      legacy_file
      |> File.read!()
      |> load_snapshot()

    false ->
      nil
  end
end
2021-04-05 08:09:11 +12:00
# Given the attributes being added and removed for a table, interactively
# determines which (added, removed) pairs are actually renames.
#
# Returns `{adding, removing, renames}`, where `renames` is a list of
# `{new_attribute, old_attribute}` tuples and `adding`/`removing` are the
# attributes that remain genuine additions/removals.

# Nothing removed: everything in `adding` is genuinely new.
defp resolve_renames(_table, adding, [], _opts), do: {adding, [], []}

# Nothing added: everything in `removing` is genuinely gone.
defp resolve_renames(_table, [], removing, _opts), do: {[], removing, []}

# Exactly one of each: a single yes/no question suffices.
defp resolve_renames(table, [adding], [removing], opts) do
  if renaming_to?(table, removing.name, adding.name, opts) do
    {[], [], [{adding, removing}]}
  else
    {[adding], [removing], []}
  end
end

# General case: ask about each removed attribute in turn, recursing on the
# remaining removals with the not-yet-claimed additions.
defp resolve_renames(table, adding, [removing | rest], opts) do
  {new_adding, new_removing, new_renames} =
    if renaming?(table, removing, opts) do
      new_attribute =
        if opts.no_shell? do
          raise "Unimplemented: Cannot get new_attribute without the shell!"
        else
          get_new_attribute(adding)
        end

      {adding -- [new_attribute], [], [{new_attribute, removing}]}
    else
      {adding, [removing], []}
    end

  {rest_adding, rest_removing, rest_renames} = resolve_renames(table, new_adding, rest, opts)

  # BUG FIX: `rest_adding` already contains every surviving addition (all base
  # clauses return their `adding` argument unchanged), so the previous
  # `new_adding ++ rest_adding` duplicated each added attribute whenever this
  # clause was reached, producing duplicate AddAttribute operations.
  {rest_adding, new_removing ++ rest_removing, rest_renames ++ new_renames}
end
2021-04-05 08:09:11 +12:00
# Asks the user whether `removing` was renamed to `adding` on `table`.
# Raises when running without a shell, since the question cannot be answered.
defp renaming_to?(table, removing, adding, opts) do
  question = "Are you renaming #{table}.#{removing} to #{table}.#{adding}?"

  if opts.no_shell? do
    raise "Unimplemented: cannot determine: #{question} without shell input"
  else
    Mix.shell().yes?(question)
  end
end
# Asks the user whether `removing` is being renamed (to anything) on `table`.
# Raises when running without a shell, since the question cannot be answered.
defp renaming?(table, removing, opts) do
  question = "Are you renaming #{table}.#{removing.name}?"

  if opts.no_shell? do
    raise "Unimplemented: cannot determine: #{question} without shell input"
  else
    Mix.shell().yes?(question)
  end
end
2020-09-11 12:26:47 +12:00
# Prompts the user for which of the added attributes a removed attribute was
# renamed to, retrying up to `tries` times before giving up.
defp get_new_attribute(adding, tries \\ 3)

defp get_new_attribute(_adding, 0) do
  raise "Could not get matching name after 3 attempts."
end

defp get_new_attribute(adding, tries) do
  response =
    Mix.shell().prompt(
      "What are you renaming it to?: #{Enum.map_join(adding, ", ", & &1.name)}"
    )

  # `Mix.shell().prompt/1` may return nil; preserve that instead of trimming.
  name = response && String.trim(response)

  case Enum.find(adding, &(to_string(&1.name) == name)) do
    nil -> get_new_attribute(adding, tries - 1)
    new_attribute -> new_attribute
  end
end
2021-03-03 06:33:24 +13:00
# Builds a 14-character `yyyymmddhhmmss` migration timestamp in UTC.
#
# Migration ids must be unique, and "synthesizing" that behavior is
# significantly more annoying than just waiting past the one-second
# resolution of the timestamp, so that is what `require_unique?` does.
defp timestamp(require_unique? \\ false) do
  if require_unique?, do: :timer.sleep(1500)

  {{year, month, day}, {hour, minute, second}} = :calendar.universal_time()

  to_string(year) <> Enum.map_join([month, day, hour, minute, second], &pad/1)
end

# Zero-pads a single date/time component to two digits.
defp pad(component) when component < 10, do: "0#{component}"
defp pad(component), do: to_string(component)
2021-02-01 10:39:59 +13:00
# Returns the list of snapshots a resource contributes.
#
# A polymorphic resource has no table of its own; instead one snapshot is
# produced per table configured on the (non-belongs_to) relationships that
# point at it. Ordinary resources produce exactly one snapshot.
def get_snapshots(resource, all_resources) do
  if AshPostgres.polymorphic?(resource) do
    all_resources
    |> Enum.flat_map(&Ash.Resource.Info.relationships/1)
    |> Enum.filter(&(&1.destination == resource))
    |> Enum.reject(&(&1.type == :belongs_to))
    |> Enum.filter(& &1.context[:data_layer][:table])
    |> Enum.map(&polymorphic_snapshot(resource, &1))
  else
    [do_snapshot(resource)]
  end
end

# Builds one snapshot of a polymorphic `resource` for the table configured on
# `relationship`, attaching reference (foreign key) info to the attribute that
# the relationship targets.
defp polymorphic_snapshot(resource, relationship) do
  table = relationship.context[:data_layer][:table]

  resource
  |> do_snapshot(table)
  |> Map.update!(:attributes, fn attributes ->
    Enum.map(attributes, fn attribute ->
      if attribute.name == relationship.destination_field do
        Map.put(attribute, :references, %{
          destination_field: relationship.source_field,
          multitenancy: multitenancy(relationship.source),
          table: AshPostgres.table(relationship.source),
          on_delete: AshPostgres.polymorphic_on_delete(relationship.source),
          on_update: AshPostgres.polymorphic_on_update(relationship.source),
          name:
            AshPostgres.polymorphic_name(relationship.source) ||
              "#{table}_#{relationship.source_field}_fkey"
        })
      else
        attribute
      end
    end)
  end)
end
# Builds the snapshot map for a resource (optionally overriding the table,
# as used for polymorphic resources) and stamps it with a content hash so
# unchanged resources can be detected cheaply.
defp do_snapshot(resource, table \\ nil) do
  snapshot = %{
    attributes: attributes(resource),
    identities: identities(resource),
    table: table || AshPostgres.table(resource),
    repo: AshPostgres.repo(resource),
    multitenancy: multitenancy(resource),
    base_filter: AshPostgres.base_filter_sql(resource),
    has_create_action: has_create_action?(resource)
  }

  Map.put(snapshot, :hash, snapshot_hash(snapshot))
end

# SHA-256 of the snapshot's `inspect/1` representation, hex encoded.
defp snapshot_hash(snapshot) do
  Base.encode16(:crypto.hash(:sha256, inspect(snapshot)))
end
2021-03-30 02:26:58 +13:00
# True when the resource defines at least one action of type `:create`.
defp has_create_action?(resource) do
  Enum.any?(Ash.Resource.Info.actions(resource), fn action ->
    action.type == :create
  end)
end
2020-10-29 15:26:45 +13:00
# Captures a resource's multitenancy configuration in snapshot form.
defp multitenancy(resource) do
  %{
    strategy: Ash.Resource.Info.multitenancy_strategy(resource),
    attribute: Ash.Resource.Info.multitenancy_attribute(resource),
    global: Ash.Resource.Info.multitenancy_global?(resource)
  }
end
# Builds the attribute portion of a resource's snapshot: name-sorted maps with
# the migration-relevant keys, the default rendered as migration source text,
# the type translated to its migration column type, and any foreign key
# reference attached.
defp attributes(resource) do
  repo = AshPostgres.repo(resource)

  resource
  |> Ash.Resource.Info.attributes()
  |> Enum.sort_by(& &1.name)
  |> Enum.map(fn attribute ->
    attribute =
      Map.take(attribute, [:name, :type, :default, :allow_nil?, :generated?, :primary_key?])

    attribute
    |> Map.put(:default, default(attribute, repo))
    |> Map.update!(:type, &migration_type/1)
    |> Map.put(:references, find_reference(resource, attribute))
  end)
end
# Finds the foreign key reference (if any) that `attribute` should carry, by
# locating a `belongs_to` relationship whose source field is this attribute
# and whose source/destination share a postgres repo.
defp find_reference(resource, attribute) do
  resource
  |> Ash.Resource.Info.relationships()
  |> Enum.find_value(fn relationship ->
    source_of_belongs_to? =
      attribute.name == relationship.source_field && relationship.type == :belongs_to

    if source_of_belongs_to? && foreign_key?(relationship) do
      configured = configured_reference(resource, attribute.name, relationship.name)

      %{
        destination_field: relationship.destination_field,
        multitenancy: multitenancy(relationship.destination),
        on_delete: configured.on_delete,
        on_update: configured.on_update,
        name: configured.name,
        table:
          relationship.context[:data_layer][:table] ||
            AshPostgres.table(relationship.destination)
      }
    end
  end)
end
2021-04-01 19:19:30 +13:00
# Looks up any explicitly configured reference options for `relationship`,
# falling back to empty options, and fills in postgres' conventional
# `<table>_<column>_fkey` constraint name when none was configured.
defp configured_reference(resource, attribute, relationship) do
  configured =
    resource
    |> AshPostgres.references()
    |> Enum.find(&(&1.relationship == relationship))

  reference = configured || %{on_delete: nil, on_update: nil, name: nil}

  Map.put(
    reference,
    :name,
    reference.name || "#{AshPostgres.table(resource)}_#{attribute}_fkey"
  )
end
2021-01-27 09:07:26 +13:00
# Maps an Ash type to the column type used in generated migrations.
# Arrays recurse on their element type; a few Ash types get explicit
# mappings; everything else defers to the type's declared storage type.
defp migration_type({:array, type}), do: {:array, migration_type(type)}
defp migration_type(Ash.Type.CiString), do: :citext
defp migration_type(Ash.Type.UUID), do: :uuid
defp migration_type(Ash.Type.Integer), do: :bigint

defp migration_type(other) do
  other
  |> Ash.Type.storage_type()
  |> migration_type_from_storage_type()
end

# `:string` storage is widened to postgres `:text`; all other storage types
# pass through unchanged.
defp migration_type_from_storage_type(:string), do: :text
defp migration_type_from_storage_type(storage_type), do: storage_type
2020-09-11 12:26:47 +12:00
# A relationship can be expressed as a real foreign key only when its source
# uses the postgres data layer and both ends live in the same repo.
defp foreign_key?(%{source: source, destination: destination}) do
  Ash.DataLayer.data_layer(source) == AshPostgres.DataLayer &&
    AshPostgres.repo(source) == AshPostgres.repo(destination)
end
# Builds the identity (unique index) portion of a resource's snapshot:
# name-sorted `%{name, keys, base_filter}` maps with keys sorted and
# atomized, excluding skipped identities and identities whose keys no longer
# correspond to attributes.
#
# Raises when the resource has a base_filter that cannot be rendered to SQL,
# since the partial unique indexes backing identities would be wrong.
defp identities(resource) do
  resource
  |> Ash.Resource.Info.identities()
  |> case do
    [] ->
      []

    identities ->
      base_filter = Ash.Resource.Info.base_filter(resource)

      if base_filter && !AshPostgres.base_filter_sql(resource) do
        # BUG FIX: the message previously ended with a stray `"` character.
        raise """
        Currently, ash_postgres cannot translate your base_filter #{inspect(base_filter)} into sql. You must provide the `base_filter_sql` option, or skip unique indexes with `skip_unique_indexes`
        """
      end

      identities
  end
  # `skip_unique_indexes?/1` yields the identity names to skip (it is used
  # with `in` here despite the `?` suffix)
  |> Enum.reject(fn identity ->
    identity.name in AshPostgres.skip_unique_indexes?(resource)
  end)
  # Drop identities referring to attributes that no longer exist
  |> Enum.filter(fn identity ->
    Enum.all?(identity.keys, &Ash.Resource.Info.attribute(resource, &1))
  end)
  |> Enum.map(fn identity ->
    %{identity | keys: Enum.sort(identity.keys)}
  end)
  |> Enum.sort_by(& &1.name)
  |> Enum.map(&Map.take(&1, [:name, :keys]))
  |> Enum.map(&Map.put(&1, :base_filter, AshPostgres.base_filter_sql(resource)))
end
2021-02-25 07:59:49 +13:00
@uuid_functions [&Ash.UUID.generate/0, &Ecto.UUID.generate/0]

# Renders an attribute default as the literal source text written into the
# generated migration. UUID-generator and `DateTime.utc_now/0` function
# defaults become SQL fragments; anything else that cannot be dumped to a
# native value becomes `"nil"` (no database default).
defp default(%{default: default}, repo) when is_function(default) do
  cond do
    # Only emit uuid_generate_v4() when the uuid-ossp extension is installed
    default in @uuid_functions && "uuid-ossp" in (repo.config()[:installed_extensions] || []) ->
      ~S[fragment("uuid_generate_v4()")]

    default == (&DateTime.utc_now/0) ->
      ~S[fragment("now()")]

    true ->
      "nil"
  end
end

# MFA-style defaults cannot be rendered into a migration literal.
defp default(%{default: {_m, _f, _a}}, _repo), do: "nil"

defp default(%{default: nil}, _repo), do: "nil"

defp default(%{default: value, type: type}, _repo) do
  case Ash.Type.dump_to_native(type, value) do
    {:ok, dumped} -> inspect(dumped)
    _ -> "nil"
  end
end
# Serializes a snapshot to pretty-printed JSON, first normalizing attribute
# types into JSON-representable values (see `sanitize_type/1`).
defp snapshot_to_binary(snapshot) do
  snapshot
  |> Map.update!(:attributes, fn attributes ->
    for attribute <- attributes do
      %{attribute | type: sanitize_type(attribute.type)}
    end
  end)
  |> Jason.encode!(pretty: true)
end
# JSON has no tuples, so `{:array, type}` is stored as `["array", type]`;
# all other types pass through untouched.
defp sanitize_type({:array, type}), do: ["array", sanitize_type(type)]
defp sanitize_type(type), do: type
2021-04-05 08:09:11 +12:00
# Decodes snapshot JSON (keys restricted to already-existing atoms via
# `:atoms!`) and normalizes it into the shape the generator works with.
defp load_snapshot(json) do
  decoded = Jason.decode!(json, keys: :atoms!)
  sanitize_snapshot(decoded)
end
# Normalizes a decoded snapshot: fills in keys that older snapshot formats
# lack and converts stored strings back into atoms/structured values.
#
# NOTE(review): `String.to_atom/1` is used on snapshot contents; snapshots
# are generated by this library so this is presumably safe, but it would be
# unsafe on untrusted input - confirm snapshots are always locally generated.
defp sanitize_snapshot(snapshot) do
  snapshot
  # Older snapshots predate `:has_create_action`; default it to true.
  |> Map.put_new(:has_create_action, true)
  |> Map.update!(:identities, fn identities ->
    Enum.map(identities, &load_identity/1)
  end)
  |> Map.update!(:attributes, fn attributes ->
    Enum.map(attributes, &load_attribute(&1, snapshot.table))
  end)
  |> Map.update!(:repo, &String.to_atom/1)
  # Older snapshots predate multitenancy entirely.
  |> Map.put_new(:multitenancy, %{attribute: nil, strategy: nil, global: nil})
  |> Map.update!(:multitenancy, &load_multitenancy/1)
end
# Snapshots store multitenancy strategy/attribute as strings; convert them
# back to atoms, preserving nil.
defp load_multitenancy(multitenancy) do
  %{
    multitenancy
    | strategy: multitenancy.strategy && String.to_atom(multitenancy.strategy),
      attribute: multitenancy.attribute && String.to_atom(multitenancy.attribute)
  }
end
2021-04-01 19:19:30 +13:00
# Normalizes one decoded snapshot attribute: restores atom/tuple values from
# their JSON representations and fills in keys older snapshots lack.
defp load_attribute(attribute, table) do
  attribute
  |> Map.update!(:type, &load_type/1)
  |> Map.update!(:name, &String.to_atom/1)
  |> Map.put_new(:default, "nil")
  |> Map.update!(:default, &(&1 || "nil"))
  |> Map.update!(:references, fn
    nil -> nil
    references -> load_references(references, attribute, table)
  end)
end

# Normalizes the references map of a snapshot attribute. `attribute` is the
# raw (pre-normalization) attribute, so its `:name` is still a string here.
defp load_references(references, attribute, table) do
  references
  |> Map.update!(:destination_field, &String.to_atom/1)
  |> Map.put_new(:on_delete, nil)
  |> Map.put_new(:on_update, nil)
  |> Map.update!(:on_delete, &(&1 && String.to_atom(&1)))
  |> Map.update!(:on_update, &(&1 && String.to_atom(&1)))
  # Older snapshots lack a constraint name; fall back to the conventional one.
  |> Map.put(:name, Map.get(references, :name) || "#{table}_#{attribute.name}_fkey")
  |> Map.put_new(:multitenancy, %{attribute: nil, strategy: nil, global: nil})
  |> Map.update!(:multitenancy, &load_multitenancy/1)
end
2021-01-27 09:07:26 +13:00
# Inverse of `sanitize_type/1`: restores `["array", t]` to `{:array, t}` and
# plain type strings to atoms.
defp load_type(["array", type]), do: {:array, load_type(type)}
defp load_type(type), do: String.to_atom(type)
2020-09-11 12:26:47 +12:00
# Normalizes one decoded snapshot identity: atomizes the name, atomizes and
# sorts the keys, and defaults `:base_filter` for older snapshots.
defp load_identity(identity) do
  sorted_keys =
    identity.keys
    |> Enum.map(&String.to_atom/1)
    |> Enum.sort()

  identity
  |> Map.update!(:name, &String.to_atom/1)
  |> Map.put(:keys, sorted_keys)
  |> Map.put_new(:base_filter, nil)
end
end