This commit is contained in:
Zach Daniel 2024-09-03 16:23:39 -04:00
parent 7285b3382e
commit 7581d3740e
10 changed files with 1911 additions and 10 deletions

View file

@ -18,6 +18,30 @@ defmodule AshPostgres.Igniter do
"""
end
# Looks up the configured `postgres.table` for `resource` via Spark.
# Returns `{:ok, igniter, table}` (table may be `nil`) or `:error` when the
# option is missing or not a string.
def table(igniter, resource) do
  result = Spark.Igniter.get_option(igniter, resource, [:postgres, :table])

  case result do
    {igniter, {:ok, table}} when is_binary(table) or is_nil(table) -> {:ok, igniter, table}
    _other -> :error
  end
end
# Looks up the configured `postgres.repo` module for `resource` via Spark.
# Returns `{:ok, igniter, repo}` or `:error` when the option is not an atom.
def repo(igniter, resource) do
  result = Spark.Igniter.get_option(igniter, resource, [:postgres, :repo])

  case result do
    {igniter, {:ok, repo}} when is_atom(repo) -> {:ok, igniter, repo}
    _other -> :error
  end
end
def add_postgres_extension(igniter, repo_name, extension) do
Igniter.Code.Module.find_and_update_module!(igniter, repo_name, fn zipper ->
case Igniter.Code.Function.move_to_def(zipper, :installed_extensions, 0) do

View file

@ -18,6 +18,7 @@ defmodule AshPostgres.MigrationGenerator do
format: true,
dry_run: false,
check: false,
snapshots_only: false,
dont_drop_columns: false
def generate(domains, opts \\ []) do

View file

@ -0,0 +1,113 @@
defmodule Mix.Tasks.AshPostgres.Gen.Resources do
  use Igniter.Mix.Task

  @shortdoc "Generates or updates resources based on a database schema"

  @doc """
  #{@shortdoc}

  ## Options

  - `repo`, `r` - The repo or repos to generate resources for, comma separated. Can be specified multiple times. Defaults to all repos.
  - `tables`, `t` - The tables to generate resources for, comma separated. Can be specified multiple times. Defaults to all non-`_*` tables.
  - `skip-tables`, `s` - The tables to skip generating resources for, comma separated. Can be specified multiple times.
  - `snapshots-only`, `n` - Only generate snapshots for the generated resources, and not migrations.
  - `domain`, `d` - The domain to generate resources inside of.
  """

  @impl Igniter.Mix.Task
  def info(_argv, _parent) do
    %Igniter.Mix.Task.Info{
      positional: [],
      schema: [
        repo: :keep,
        tables: :keep,
        skip_tables: :keep,
        snapshots_only: :boolean,
        domain: :keep
      ],
      aliases: [
        t: :tables,
        r: :repo,
        d: :domain,
        s: :skip_tables,
        n: :snapshots_only
      ]
    }
  end

  @impl Igniter.Mix.Task
  def igniter(igniter, argv) do
    # Repos must be compiled before we can introspect them.
    Mix.Task.run("compile")

    options = options!(argv)

    # NOTE(review): with a `:keep` schema entry, `options[:repo]` only returns
    # the *first* `--repo`; passing `--repo` multiple times (as the docs
    # advertise) currently ignores all but the first — confirm against
    # Igniter's option handling.
    repos =
      options[:repo] ||
        Mix.Project.config()[:app]
        |> Application.get_env(:ecto_repos, [])

    case repos do
      [] ->
        igniter
        |> Igniter.add_warning("No ecto repos configured.")

      repos ->
        Mix.shell().info("Generating resources from #{inspect(repos)}")

        prompt =
          """
          Would you like to generate migrations for the current structure? (recommended)
          If #{IO.ANSI.green()}yes#{IO.ANSI.reset()}:
          We will generate migrations based on the generated resources.
          You should then change your database name in your config, and
          run `mix ash.setup`.
          If you already have ecto migrations you'd like to use, run
          this command with `--snapshots-only`, in which case only resource
          snapshots will be generated.
          #{IO.ANSI.green()}
          Going forward, your resources will be the source of truth.#{IO.ANSI.reset()}
          #{IO.ANSI.red()}
          *WARNING*
          If you run `mix ash.reset` after this command without updating
          your config, you will be *deleting the database you just used to
          generate these resources*!#{IO.ANSI.reset()}
          If #{IO.ANSI.red()}no#{IO.ANSI.reset()}:
          We will not generate any migrations. This means you have migrations already that
          can get you from zero to the current starting point.
          #{IO.ANSI.yellow()}
          You will have to hand-write migrations from this point on.#{IO.ANSI.reset()}
          """

        # Answering "yes" means we *do* generate migrations.
        options =
          if Mix.shell().yes?(prompt) do
            Keyword.put(options, :no_migrations, false)
          else
            Keyword.put(options, :no_migrations, true)
          end

        migration_opts =
          if options[:snapshots_only] do
            ["--snapshots-only"]
          else
            []
          end

        igniter
        |> AshPostgres.ResourceGenerator.generate(repos, options)
        |> then(fn igniter ->
          if options[:no_migrations] do
            igniter
          else
            Igniter.add_task(igniter, "ash_postgres.generate_migrations", migration_opts)
          end
        end)
    end
  end
end

View file

@ -21,6 +21,7 @@ defmodule Mix.Tasks.AshPostgres.GenerateMigrations do
* `no-format` - files that are created will not be formatted with the code formatter
* `dry-run` - no files are created, instead the new migration is printed
* `check` - no files are created, returns an exit(1) code if the current snapshots and resources don't fit
* `snapshots-only` - no migrations are generated, only snapshots are stored
#### Snapshots
@ -90,6 +91,7 @@ defmodule Mix.Tasks.AshPostgres.GenerateMigrations do
migration_path: :string,
tenant_migration_path: :string,
quiet: :boolean,
snapshots_only: :boolean,
name: :string,
no_format: :boolean,
dry_run: :boolean,
@ -100,7 +102,7 @@ defmodule Mix.Tasks.AshPostgres.GenerateMigrations do
domains = AshPostgres.Mix.Helpers.domains!(opts, args, false)
if Enum.empty?(domains) do
if Enum.empty?(domains) && !opts[:snapshots_only] do
IO.warn("""
No domains found, so no resource-related migrations will be generated.
Pass the `--domains` option or configure `config :your_app, ash_domains: [...]`
@ -113,7 +115,7 @@ defmodule Mix.Tasks.AshPostgres.GenerateMigrations do
|> Keyword.delete(:no_format)
|> Keyword.put_new(:name, name)
if !opts[:name] && !opts[:dry_run] && !opts[:check] do
if !opts[:name] && !opts[:dry_run] && !opts[:check] && !opts[:snapshots_only] do
IO.warn("""
Name must be provided when generating migrations, unless `--dry-run` or `--check` is also provided.
Using an autogenerated name will be deprecated in a future release.

View file

@ -0,0 +1,667 @@
defmodule AshPostgres.ResourceGenerator do
alias AshPostgres.ResourceGenerator.Spec
require Logger
@doc """
Generates a resource for every table in `repos`, reusing resources that
already point at a given table/repo pair.

Options: `:tables`, `:skip_tables` (comma-separated or lists), and
`:no_migrations`.
"""
def generate(igniter, repos, opts \\ []) do
  {igniter, resources} = Ash.Resource.Igniter.list_resources(igniter)

  # Look up each existing resource's repo/table concurrently; a failed lookup
  # leaves the corresponding element as nil.
  resources =
    resources
    |> Task.async_stream(fn resource ->
      {resource, AshPostgres.Igniter.repo(igniter, resource),
       AshPostgres.Igniter.table(igniter, resource)}
    end)
    |> Enum.map(fn {:ok, {resource, repo, table}} ->
      repo =
        case repo do
          {:ok, _igniter, repo} -> repo
          _ -> nil
        end

      table =
        case table do
          {:ok, _igniter, table} -> table
          _ -> nil
        end

      {resource, repo, table}
    end)

  igniter = Igniter.include_all_elixir_files(igniter)

  # `--tables` / `--skip-tables` may be given multiple times and/or comma
  # separated; normalize both to flat lists of table names.
  opts =
    opts
    |> normalize_csv_option(:tables)
    |> normalize_csv_option(:skip_tables)

  specs =
    repos
    |> Enum.flat_map(&Spec.tables(&1, skip_tables: opts[:skip_tables], tables: opts[:tables]))
    |> Enum.map(&determine_resource(&1, resources))
    |> Enum.group_by(& &1.resource)
    |> Enum.map(fn
      {_resource, [single]} ->
        single

      {resource, specs} ->
        # was "...following repo modules:application" — garbled message
        raise """
        Duplicate resource names detected across multiple repos: #{inspect(resource)}
        To address this, define `table_to_resource/1` in one of the following repo modules:
        #{inspect(Enum.map(specs, & &1.repo))}
        For example:
        # override for one table
        def table_to_resource("#{Enum.at(specs, 0).table_name}"), do: My.Resource.Name
        # delegate to default otherwise
        def table_to_resource(table), do: super(table)
        """
    end)
    |> Spec.add_relationships(resources)

  Enum.reduce(specs, igniter, fn table_spec, igniter ->
    table_to_resource(igniter, table_spec, resources, opts)
  end)
end

# Normalizes a `:keep`-style option that may be a single value, a list, or
# comma-separated strings into one flat list. Leaves `opts` untouched when
# the key is absent.
defp normalize_csv_option(opts, key) do
  case opts[key] do
    nil ->
      opts

    value ->
      Keyword.put(
        opts,
        key,
        value
        |> List.wrap()
        |> Enum.join(",")
        |> String.split(",")
      )
  end
end
# Renders the full source of the resource module for `table_spec` and creates
# it via Igniter. `_resources` is unused by this clause.
defp table_to_resource(
       igniter,
       %AshPostgres.ResourceGenerator.Spec{} = table_spec,
       _resources,
       opts
     ) do
  # With --no-migrations, the generated resource opts out of migration
  # tracking for this table.
  no_migrate_flag =
    if opts[:no_migrations] do
      "migrate? false"
    end

  # Section helpers each return "" (or [] ) when they have nothing to emit,
  # so interpolation simply drops them.
  resource =
    """
    use Ash.Resource,
    data_layer: AshPostgres.DataLayer
    postgres do
    table #{inspect(table_spec.table_name)}
    repo #{inspect(table_spec.repo)}
    #{no_migrate_flag}
    #{references(table_spec, opts[:no_migrations])}
    #{custom_indexes(table_spec, opts[:no_migrations])}
    #{check_constraints(table_spec, opts[:no_migrations])}
    #{skip_unique_indexes(table_spec)}
    #{identity_index_names(table_spec)}
    end
    attributes do
    #{attributes(table_spec)}
    end
    """
    |> add_identities(table_spec)
    |> add_relationships(table_spec)

  igniter
  |> Igniter.Code.Module.create_module(table_spec.resource, resource)
end
# With --no-migrations (second arg `true`), check constraints are skipped.
defp check_constraints(%{check_constraints: _check_constraints}, true) do
  ""
end

defp check_constraints(%{check_constraints: []}, _) do
  ""
end

# Renders a `check_constraints do ... end` DSL section, one entry per
# constraint found on the table.
defp check_constraints(%{check_constraints: check_constraints}, _) do
  # removed a leftover `IO.inspect(check_constraints)` debug call
  check_constraints =
    Enum.map_join(check_constraints, "\n", fn check_constraint ->
      """
      check_constraint :#{check_constraint.column}, "#{check_constraint.name}", check: "#{check_constraint.expression}", message: "is invalid"
      """
    end)

  """
  check_constraints do
  #{check_constraints}
  end
  """
end
# Emits `skip_unique_indexes [...]` for unique indexes with plain column
# names that are not represented as identities.
defp skip_unique_indexes(%{indexes: indexes}) do
  candidates =
    for index <- indexes,
        index.unique?,
        Enum.all?(index.columns, &Regex.match?(~r/^[0-9a-zA-Z_]+$/, &1)),
        not index_as_identity?(index),
        do: index

  case candidates do
    [] ->
      ""

    candidates ->
      """
      skip_unique_indexes [#{Enum.map_join(candidates, ",", &":#{&1.name}")}]
      """
  end
end

# Emits `identity_index_names [...]` mapping each identity back to its
# existing database index name. Returns [] (dropped by interpolation) when
# there are no qualifying unique indexes.
defp identity_index_names(%{indexes: indexes}) do
  named =
    for index <- indexes,
        index.unique?,
        Enum.all?(index.columns, &Regex.match?(~r/^[0-9a-zA-Z_]+$/, &1)),
        do: index

  case named do
    [] ->
      []

    named ->
      entries = Enum.map_join(named, ", ", fn index -> "#{index.name}: \"#{index.name}\"" end)
      "identity_index_names [#{entries}]"
  end
end
# Appends an `identities do ... end` section to the generated resource source
# for every unique index whose columns are all plain column names.
defp add_identities(str, %{indexes: indexes}) do
  indexes
  |> Enum.filter(& &1.unique?)
  |> Enum.filter(fn %{columns: columns} ->
    # Expression indexes (e.g. `lower(email)`) can't be identities.
    Enum.all?(columns, &Regex.match?(~r/^[0-9a-zA-Z_]+$/, &1))
  end)
  |> Enum.map(fn index ->
    name = index.name
    fields = "[" <> Enum.map_join(index.columns, ", ", &":#{&1}") <> "]"

    case identity_options(index) do
      "" ->
        "identity :#{name}, #{fields}"

      options ->
        """
        identity :#{name}, #{fields} do
        #{options}
        end
        """
    end
  end)
  |> case do
    [] ->
      str

    identities ->
      """
      #{str}
      identities do
      #{Enum.join(identities, "\n")}
      end
      """
  end
end

# Accumulates identity DSL option lines; "" means no options.
defp identity_options(index) do
  ""
  |> add_identity_where(index)
  |> add_nils_distinct?(index)
end

defp add_identity_where(str, %{where_clause: nil}), do: str

# SQL where clauses can't be translated automatically; emit a commented
# placeholder and warn so the user converts it by hand.
defp add_identity_where(str, %{name: name, where_clause: where_clause}) do
  Logger.warning("""
  Index #{name} has been left commented out in its resource
  Manual conversion of `#{where_clause}` to an Ash expression is required.
  """)

  """
  #{str}
  # Express `#{where_clause}` as an Ash expression
  # where expr(...)
  """
end

defp add_nils_distinct?(str, %{nils_distinct?: false}) do
  "#{str}\n nils_distinct? false"
end

defp add_nils_distinct?(str, _), do: str

defp add_relationships(str, %{relationships: []}) do
  str
end

# Appends a `relationships do ... end` section rendering every relationship
# discovered for this spec.
defp add_relationships(str, %{relationships: relationships} = spec) do
  relationships
  |> Enum.map_join("\n", fn relationship ->
    case relationship_options(spec, relationship) do
      "" ->
        "#{relationship.type} :#{relationship.name}, #{inspect(relationship.destination)}"

      options ->
        """
        #{relationship.type} :#{relationship.name}, #{inspect(relationship.destination)} do
        #{options}
        end
        """
    end
  end)
  |> then(fn rels ->
    """
    #{str}
    relationships do
    #{rels}
    end
    """
  end)
end
# Options for a `belongs_to`. When the backing column has a default, is
# generated, or has a custom source, the attribute stays in the attributes
# block and the relationship only references it (`define_attribute? false`);
# otherwise the relationship defines the attribute itself.
defp relationship_options(spec, %{type: :belongs_to} = rel) do
  case Enum.find(spec.attributes, fn attribute ->
         attribute.name == rel.source_attribute
       end) do
    %{
      default: default,
      generated?: generated?,
      source: source,
      name: name
    }
    when not is_nil(default) or generated? or source != name ->
      "define_attribute? false"
      |> add_destination_attribute(rel, "id")
      |> add_source_attribute(rel, "#{rel.name}_id")
      |> add_allow_nil(rel)
      |> add_filter(rel)

    attribute ->
      ""
      |> add_destination_attribute(rel, "id")
      |> add_source_attribute(rel, "#{rel.name}_id")
      |> add_allow_nil(rel)
      # was `add_primary_key(attribute.primary_key?)`: passing the boolean
      # never matched the `%{primary_key?: true}` clause, so `primary_key?
      # true` was never emitted for primary-key belongs_to attributes
      |> add_primary_key(attribute)
      |> add_attribute_type(attribute)
      |> add_filter(rel)
  end
end
# Options for has_one/has_many style relationships: the conventional
# destination attribute is "<underscored source module>_id".
defp relationship_options(_spec, rel) do
  source_module_name = rel.source |> Module.split() |> List.last()
  default_destination_attribute = Macro.underscore(source_module_name) <> "_id"

  ""
  |> add_destination_attribute(rel, default_destination_attribute)
  |> add_source_attribute(rel, "id")
  |> add_filter(rel)
end
defp add_filter(str, %{match_with: []}), do: str

# Composite-key pairs become a `filter expr(...)` of parent comparisons.
defp add_filter(str, %{match_with: match_with}) do
  clauses =
    Enum.map_join(match_with, " and ", fn {source, dest} ->
      "parent(#{source}) == #{dest}"
    end)

  "#{str}\n filter expr(#{clauses})"
end

# :uuid is the default, so it needs no explicit attribute_type.
defp add_attribute_type(str, %{attr_type: :uuid}), do: str
defp add_attribute_type(str, %{attr_type: attr_type}), do: "#{str}\n attribute_type :#{attr_type}"

defp add_destination_attribute(str, rel, default) do
  case rel.destination_attribute do
    ^default -> str
    other -> "#{str}\n destination_attribute :#{other}"
  end
end

defp add_source_attribute(str, rel, default) do
  case rel.source_attribute do
    ^default -> str
    other -> "#{str}\n source_attribute :#{other}"
  end
end
# With --no-migrations, reference tuning is a migration concern; emit nothing.
defp references(_table_spec, true) do
  ""
end

# Emits a single `references do ... end` section containing one `reference`
# entry per foreign key whose behavior differs from AshPostgres defaults.
defp references(table_spec, _) do
  table_spec.foreign_keys
  |> Enum.flat_map(fn %Spec.ForeignKey{} = foreign_key ->
    default_name = "#{table_spec.table_name}_#{foreign_key.column}_fkey"

    # A fully-default foreign key needs no explicit reference entry.
    if default_name == foreign_key.constraint_name and
         foreign_key.on_update == "NO ACTION" and
         foreign_key.on_delete == "NO ACTION" and
         foreign_key.match_type in ["SIMPLE", "NONE"] do
      []
    else
      relationship =
        Enum.find(table_spec.relationships, fn relationship ->
          relationship.type == :belongs_to and
            relationship.constraint_name == foreign_key.constraint_name
        end).name

      options =
        ""
        |> add_on(:update, foreign_key.on_update)
        |> add_on(:delete, foreign_key.on_delete)
        |> add_match_with(foreign_key.match_with)
        |> add_match_type(foreign_key.match_type)

      [
        """
        reference :#{relationship} do
        #{options}
        end
        """
      ]
    end
  end)
  # Previously the join/wrap below sat *inside* the flat_map, so every
  # foreign key — including fully-default ones that were meant to be skipped —
  # produced its own (possibly empty) `references do ... end` wrapper.
  |> case do
    [] ->
      ""

    reference_sections ->
      """
      references do
      #{reference_sections |> Enum.join("\n") |> String.trim()}
      end
      """
  end
end
defp add_match_with(str, empty) when empty in [[], nil], do: str

# was `Enum.map_join(keyval, fn ... end)` — no joiner, so multiple pairs were
# concatenated into an invalid keyword list ("a: :bc: :d")
defp add_match_with(str, keyval),
  do:
    str <>
      "\nmatch_with [#{Enum.map_join(keyval, ", ", fn {key, val} -> "#{key}: :#{val}" end)}]"

defp add_match_type(str, type) when type in ["SIMPLE", "NONE"], do: str
defp add_match_type(str, "FULL"), do: str <> "\nmatch_type :full"
defp add_match_type(str, "PARTIAL"), do: str <> "\nmatch_type :partial"

defp add_on(str, type, "RESTRICT"), do: str <> "\non_#{type} :restrict"
# CASCADE maps to `:update` / `:delete` in the references DSL
defp add_on(str, type, "CASCADE"), do: str <> "\non_#{type} :#{type}"
defp add_on(str, type, "SET NULL"), do: str <> "\non_#{type} :nilify"
defp add_on(str, _type, _), do: str
# With --no-migrations we can't emit `custom_indexes` (a migration concern),
# but unique indexes still need their existing names registered via
# `unique_index_names` so identity lookups hit the right index.
defp custom_indexes(table_spec, true) do
  table_spec.indexes
  |> Enum.filter(fn index ->
    # was `Enum.reject(fn index -> !index.unique? || (&index_as_identity?/1) end)`:
    # the bare capture is always truthy, so *every* index was rejected
    index.unique? and not index_as_identity?(index)
  end)
  |> Enum.reject(fn index ->
    # expression columns can't be written as atom lists
    Enum.any?(index.columns, &String.contains?(&1, "("))
  end)
  |> case do
    [] ->
      ""

    indexes ->
      indexes
      # was matching `%{index: name, ...}` — `%Spec.Index{}` has a `:name`
      # key, not `:index`, so the old pattern raised a clause error
      |> Enum.map_join(", ", fn %{name: name, columns: columns} ->
        columns = Enum.map_join(columns, ", ", &":#{&1}")
        "{[#{columns}], #{inspect(name)}}"
      end)
      |> then(fn index_names ->
        "unique_index_names [#{index_names}]"
      end)
  end
end
# Renders a `custom_indexes do ... end` section for every index that is not
# expressed as an identity instead.
defp custom_indexes(table_spec, _) do
  table_spec.indexes
  |> Enum.reject(&index_as_identity?/1)
  |> case do
    [] ->
      ""

    indexes ->
      indexes
      |> Enum.map_join("\n", fn index ->
        columns =
          index.columns
          |> Enum.map_join(", ", fn thing ->
            # Expression columns (contain "(") are emitted as strings,
            # plain column names as atoms.
            if String.contains?(thing, "(") do
              inspect(thing)
            else
              ":#{thing}"
            end
          end)

        case index_options(table_spec, index) do
          "" ->
            "index [#{columns}]"

          options ->
            """
            index [#{columns}] do
            #{options}
            end
            """
        end
      end)
      |> then(fn indexes ->
        """
        custom_indexes do
        #{indexes}
        end
        """
      end)
  end
end

# True when the index can be represented as an identity rather than a custom
# index: plain btree, no partial WHERE clause, no INCLUDE columns, and only
# plain column names.
defp index_as_identity?(index) do
  is_nil(index.where_clause) and index.using == "btree" and index.include in [nil, []] and
    Enum.all?(index.columns, &Regex.match?(~r/^[0-9a-zA-Z_]+$/, &1))
end
# Builds the option lines for a custom index; "" when every value matches
# the AshPostgres defaults.
defp index_options(spec, index) do
  # A default generated name only exists for plain column lists.
  default_name =
    if Enum.all?(index.columns, &Regex.match?(~r/^[0-9a-zA-Z_]+$/, &1)) do
      AshPostgres.CustomIndex.name(spec.table_name, %{fields: index.columns})
    end

  ""
  |> add_index_name(index.name, default_name)
  |> add_unique(index.unique?)
  |> add_using(index.using)
  |> add_where(index.where_clause)
  |> add_include(index.include)
  |> add_nulls_distinct(index.nulls_distinct)
end

# Each helper appends one DSL line only when the value differs from the
# default for that option.
defp add_index_name(str, default, default), do: str
defp add_index_name(str, name, _default), do: "#{str}\nname #{inspect(name)}"

defp add_unique(str, false), do: str
defp add_unique(str, true), do: "#{str}\nunique true"

defp add_nulls_distinct(str, true), do: str
defp add_nulls_distinct(str, false), do: "#{str}\nnulls_distinct false"

defp add_using(str, "btree"), do: str
defp add_using(str, using), do: "#{str}\nusing #{inspect(using)}"

defp add_where(str, empty) when empty in [nil, ""], do: str
defp add_where(str, where), do: "#{str}\nwhere #{inspect(where)}"

defp add_include(str, empty) when empty in [nil, []], do: str

defp add_include(str, include),
  do: "#{str}\ninclude [#{Enum.map_join(include, ", ", &inspect/1)}]"
# Orders and filters the spec's attributes for rendering:
# - attributes with defaults are moved last, then primary keys moved first
# - columns that merely back a belongs_to relationship are omitted (the
#   relationship defines them), unless they have a default, are generated,
#   or use a custom source
defp attributes(table_spec) do
  table_spec.attributes
  # defaults last
  |> Enum.split_with(& &1.default)
  |> then(fn {l, r} -> r ++ l end)
  # primary keys first
  |> Enum.split_with(& &1.primary_key?)
  |> then(fn {l, r} -> l ++ r end)
  |> Enum.filter(fn attribute ->
    if not is_nil(attribute.default) or !!attribute.generated? or
         attribute.source != attribute.name do
      true
    else
      not Enum.any?(table_spec.relationships, fn relationship ->
        relationship.type == :belongs_to and relationship.source_attribute == attribute.name
      end)
    end
  end)
  |> Enum.map_join("\n", &attribute(&1))
end
# Renders one attribute, preferring the dedicated DSL constructors
# (`update_timestamp`, `create_timestamp`, `uuid_primary_key`) when the
# column matches their conventions.
defp attribute(attribute) do
  # These captures compare equal (==) to the defaults assigned by the spec
  # builder, which lets us detect timestamp/uuid defaults.
  now_default = &DateTime.utc_now/0
  uuid_default = &Ash.UUID.generate/0

  # type?        - whether the constructor takes the type positionally
  # type_option? - whether a `type ...` option line must be emitted instead
  {constructor, attribute, type?, type_option?} =
    case attribute do
      %{name: "updated_at", attr_type: attr_type} ->
        {"update_timestamp", %{attribute | default: nil, generated?: false}, false,
         attr_type != :utc_datetime_usec}

      %{default: default, attr_type: attr_type}
      when default == now_default ->
        {"create_timestamp", %{attribute | default: nil, generated?: false}, false,
         attr_type != :utc_datetime_usec}

      %{default: default, attr_type: attr_type, primary_key?: true}
      when default == uuid_default ->
        {"uuid_primary_key",
         %{attribute | default: nil, primary_key?: false, generated?: false, allow_nil?: true},
         false, attr_type != :uuid}

      _ ->
        {"attribute", attribute, true, false}
    end

  case String.trim(options(attribute, type_option?)) do
    "" ->
      if type? do
        "#{constructor} :#{attribute.name}, #{inspect(attribute.attr_type)}"
      else
        "#{constructor} :#{attribute.name}"
      end

    options ->
      if type? do
        """
        #{constructor} :#{attribute.name}, #{inspect(attribute.attr_type)} do
        #{options}
        end
        """
      else
        """
        #{constructor} :#{attribute.name} do
        #{options}
        end
        """
      end
  end
end
# Builds the option lines for an attribute constructor; "" when none apply.
# The pipeline order fixes the order of emitted DSL lines.
defp options(attribute, type_option?) do
  ""
  |> add_primary_key(attribute)
  |> add_allow_nil(attribute)
  |> add_sensitive(attribute)
  |> add_default(attribute)
  |> add_type(attribute, type_option?)
  |> add_generated(attribute)
  |> add_source(attribute)
end

# Each helper appends one option line only when it differs from the default.
defp add_type(str, %{attr_type: attr_type}, true), do: "#{str}\n type #{inspect(attr_type)}"
defp add_type(str, _attribute, _type_option?), do: str

defp add_generated(str, %{generated?: true}), do: "#{str}\n generated? true"
defp add_generated(str, _attribute), do: str

defp add_source(str, %{name: name, source: source}) when name != source,
  do: "#{str}\n source :#{source}"

defp add_source(str, _attribute), do: str

defp add_primary_key(str, %{primary_key?: true}), do: "#{str}\n primary_key? true"
defp add_primary_key(str, _attribute), do: str

defp add_allow_nil(str, %{allow_nil?: false}), do: "#{str}\n allow_nil? false"
defp add_allow_nil(str, _attribute), do: str

defp add_sensitive(str, %{sensitive?: true}), do: "#{str}\n sensitive? true"
defp add_sensitive(str, _attribute), do: str

defp add_default(str, %{default: default}) when not is_nil(default),
  do: "#{str}\n default #{inspect(default)}"

defp add_default(str, _attribute), do: str
# Chooses the resource module for a table: an existing resource already
# configured for this table/repo pair wins; otherwise the repo decides via
# `table_to_resource/1`.
defp determine_resource(%Spec{} = table_spec, resources) do
  case Enum.find(resources, fn {_resource, repo, table} ->
         table == table_spec.table_name && repo == table_spec.repo
       end) do
    nil ->
      resource = table_spec.repo.table_to_resource(table_spec.table_name)
      %{table_spec | resource: resource}

    # was `resource -> %{table_spec | resource: resource}`, which stored the
    # whole `{resource, repo, table}` tuple in `spec.resource` instead of
    # the resource module itself
    {resource, _repo, _table} ->
      %{table_spec | resource: resource}
  end
end
end

View file

@ -0,0 +1,73 @@
defmodule AshPostgres.ResourceGenerator.SensitiveData do
  @moduledoc false

  # Best-effort heuristics for flagging columns as `sensitive? true`.
  # I got this from ChatGPT, but this is a best effort transformation
  # anyway.
  @sensitive_patterns [
    # Password-related
    ~r/password/i,
    ~r/passwd/i,
    ~r/pass/i,
    ~r/pwd/i,
    ~r/hash(ed)?(_password)?/i,
    # Authentication-related
    ~r/auth(_key)?/i,
    ~r/token/i,
    ~r/secret(_key)?/i,
    ~r/api_key/i,
    # Personal Information
    ~r/ssn/i,
    ~r/social(_security)?(_number)?/i,
    ~r/(credit_?card|cc)(_number)?/i,
    ~r/passport(_number)?/i,
    ~r/driver_?licen(s|c)e(_number)?/i,
    ~r/national_id/i,
    # Financial Information
    ~r/account(_number)?/i,
    ~r/routing(_number)?/i,
    ~r/iban/i,
    ~r/swift(_code)?/i,
    ~r/tax_id/i,
    # Contact Information
    ~r/phone(_number)?/i,
    ~r/email(_address)?/i,
    ~r/address/i,
    # Health Information
    ~r/medical(_record)?/i,
    ~r/health(_data)?/i,
    ~r/diagnosis/i,
    ~r/treatment/i,
    # Biometric Data
    ~r/fingerprint/i,
    ~r/retina_scan/i,
    ~r/face_id/i,
    ~r/dna/i,
    # Encrypted or Encoded Data
    ~r/encrypt(ed)?/i,
    ~r/encoded/i,
    ~r/cipher/i,
    # Other Potentially Sensitive Data
    ~r/private(_key)?/i,
    ~r/confidential/i,
    ~r/restricted/i,
    ~r/sensitive/i,
    # General patterns
    ~r/.*_salt/i,
    ~r/.*_secret/i,
    ~r/.*_key/i,
    ~r/.*_token/i
  ]

  # True when `column_name` matches any known sensitive-data pattern.
  def is_sensitive?(column_name) do
    Enum.any?(@sensitive_patterns, &Regex.match?(&1, column_name))
  end
end

View file

@ -0,0 +1,983 @@
defmodule AshPostgres.ResourceGenerator.Spec do
require Logger
# Intermediate representation of one database table, filled in by the
# pg_catalog/information_schema queries below and later rendered into an
# Ash resource.
defstruct [
  :attributes,
  :table_name,
  :repo,
  :resource,
  check_constraints: [],
  foreign_keys: [],
  indexes: [],
  identities: [],
  relationships: []
]

defmodule Attribute do
  # One column. `name` is the underscored Elixir-facing name; `source` is the
  # literal database column name; `migration_default` is the raw SQL default.
  defstruct [
    :name,
    :type,
    :attr_type,
    :default,
    :migration_default,
    :size,
    :source,
    generated?: false,
    primary_key?: false,
    sensitive?: false,
    allow_nil?: true
  ]
end

defmodule ForeignKey do
  # One foreign key constraint. `match_with` holds additional
  # source/destination column pairs beyond the first (composite keys).
  defstruct [
    :constraint_name,
    :match_type,
    :column,
    :references,
    :destination_field,
    :on_delete,
    :on_update,
    :match_with
  ]
end

defmodule Index do
  # One non-primary-key index, parsed from pg_indexes.indexdef.
  defstruct [:name, :columns, :unique?, :nulls_distinct, :where_clause, :using, :include]
end

defmodule CheckConstraint do
  # One CHECK constraint; `column` is a best-effort attachment point.
  defstruct [:name, :column, :expression]
end

defmodule Relationship do
  # A derived Ash relationship (belongs_to / has_one / has_many).
  defstruct [
    :name,
    :type,
    :destination,
    :match_with,
    :source,
    :source_attribute,
    :constraint_name,
    :destination_attribute,
    :allow_nil?,
    :foreign_key
  ]
end
# Builds a `%Spec{}` for every table in `repo` (honoring `:tables` /
# `:skip_tables`), with attributes, foreign keys, indexes, and check
# constraints loaded. The repo is started via `Ecto.Migrator.with_repo/2`.
def tables(repo, opts \\ []) do
  {:ok, result, _} =
    Ecto.Migrator.with_repo(repo, fn repo ->
      repo
      |> table_specs(opts)
      |> verify_found_tables(opts)
      # each row is [table, column, type, default, size, allow_nil?]
      |> Enum.group_by(&Enum.at(&1, 0), fn [_, field, type, default, size, allow_nil?] ->
        name = Macro.underscore(field)

        %Attribute{
          name: name,
          source: field,
          type: type,
          migration_default: default,
          size: size,
          allow_nil?: allow_nil?
        }
      end)
      |> Enum.map(fn {table_name, attributes} ->
        attributes = build_attributes(attributes, table_name, repo)

        %__MODULE__{table_name: table_name, repo: repo, attributes: attributes}
      end)
      |> Enum.map(fn spec ->
        spec
        |> add_foreign_keys()
        |> add_indexes()
        |> add_check_constraints()
      end)
    end)

  result
end
# Loads the table's foreign keys from information_schema into
# `spec.foreign_keys`. The first referencing/referenced column pair becomes
# `column`/`destination_field`; any remaining pairs (composite keys) are
# zipped into `match_with`.
# NOTE(review): `array_agg(DISTINCT ...)` does not guarantee positional
# correspondence between the two column arrays — confirm composite-key
# behavior against a real composite FK.
defp add_foreign_keys(spec) do
  %Postgrex.Result{rows: fkey_rows} =
    spec.repo.query!(
      """
      SELECT
      tc.constraint_name,
      rc.match_option AS match_type,
      rc.update_rule AS on_update,
      rc.delete_rule AS on_delete,
      array_agg(DISTINCT kcu.column_name) AS referencing_columns,
      array_agg(DISTINCT ccu.column_name) AS referenced_columns,
      ccu.table_name AS foreign_table_name
      FROM
      information_schema.table_constraints AS tc
      JOIN information_schema.key_column_usage AS kcu
      ON tc.constraint_name = kcu.constraint_name
      AND tc.table_schema = kcu.table_schema
      JOIN information_schema.constraint_column_usage AS ccu
      ON ccu.constraint_name = tc.constraint_name
      AND ccu.table_schema = tc.table_schema
      JOIN information_schema.referential_constraints AS rc
      ON tc.constraint_name = rc.constraint_name
      AND tc.table_schema = rc.constraint_schema
      WHERE
      tc.constraint_type = 'FOREIGN KEY'
      AND tc.table_name = $1
      GROUP BY
      tc.constraint_name,
      ccu.table_name,
      rc.match_option,
      rc.update_rule,
      rc.delete_rule;
      """,
      [spec.table_name],
      log: false
    )

  %{
    spec
    | foreign_keys:
        Enum.map(
          fkey_rows,
          fn [
               constraint_name,
               match_type,
               on_update,
               on_delete,
               referencing_columns,
               referenced_columns,
               destination
             ] ->
            {[column_name], match_with_source} =
              Enum.split(referencing_columns, 1)

            {[destination_field], match_with_destination} =
              Enum.split(referenced_columns, 1)

            %ForeignKey{
              constraint_name: constraint_name,
              column: column_name,
              references: destination,
              destination_field: destination_field,
              on_delete: on_delete,
              on_update: on_update,
              match_type: match_type,
              match_with: Enum.zip(match_with_source, match_with_destination)
            }
          end
        )
  }
end
# Loads CHECK constraints for the spec's table into `spec.check_constraints`.
defp add_check_constraints(spec) do
  %Postgrex.Result{rows: check_constraint_rows} =
    spec.repo.query!(
      """
      SELECT
      conname AS constraint_name,
      pg_get_constraintdef(oid) AS constraint_definition
      FROM
      pg_constraint
      WHERE
      contype = 'c'
      AND conrelid::regclass::text = $1
      """,
      [spec.table_name]
    )

  # pg_constraint doesn't tell us which column a CHECK belongs to, so attach
  # each one to the primary key (or first) attribute as a best-effort home.
  attribute = Enum.find(spec.attributes, & &1.primary_key?) || Enum.at(spec.attributes, 0)

  # removed a leftover `|> IO.inspect()` debug call
  spec
  |> Map.put(
    :check_constraints,
    Enum.flat_map(check_constraint_rows, fn
      [name, "CHECK " <> expr] ->
        [
          %CheckConstraint{
            name: name,
            column: attribute.source,
            expression: expr
          }
        ]

      _ ->
        []
    end)
  )
end
# Loads all non-primary-key indexes for the table into `spec.indexes`.
# Column lists are recovered by parsing `pg_indexes.indexdef`, because
# expression indexes aren't representable as simple column arrays.
defp add_indexes(spec) do
  %Postgrex.Result{rows: index_rows} =
    spec.repo.query!(
      """
      SELECT
      i.relname AS index_name,
      ix.indisunique AS is_unique,
      NOT(ix.indnullsnotdistinct) AS nulls_distinct,
      pg_get_expr(ix.indpred, ix.indrelid) AS where_clause,
      am.amname AS using_method,
      idx.indexdef
      FROM
      pg_index ix
      JOIN
      pg_class i ON ix.indexrelid = i.oid
      JOIN
      pg_class t ON ix.indrelid = t.oid
      JOIN
      pg_am am ON i.relam = am.oid
      LEFT JOIN
      pg_constraint c ON c.conindid = ix.indexrelid AND c.contype = 'p'
      JOIN
      pg_indexes idx ON idx.indexname = i.relname AND idx.schemaname = 'public' -- Adjust schema name if necessary
      WHERE
      t.relname = $1
      AND c.conindid IS NULL
      GROUP BY
      i.relname, ix.indisunique, ix.indnullsnotdistinct, pg_get_expr(ix.indpred, ix.indrelid), am.amname, idx.indexdef;
      """,
      [spec.table_name],
      log: false
    )

  %{
    spec
    | indexes:
        index_rows
        |> Enum.flat_map(fn [
                              index_name,
                              is_unique,
                              nulls_distinct,
                              where_clause,
                              using,
                              index_def
                            ] ->
          # Postgres truncates identifiers to 63 bytes.
          index_name = String.slice(index_name, 0..63)

          case parse_columns_from_index_def(index_def, using) do
            {:ok, columns} ->
              include =
                case String.split(index_def, "INCLUDE ") do
                  [_, included_cols] ->
                    # parse_columns/1 signals failure by raising, not by
                    # `throw(:error)` — the old `catch :error` alone never
                    # fired, crashing the generator on unparsable INCLUDEs.
                    # Also fixed the "includs" typo in the warning.
                    try do
                      parse_columns(included_cols)
                    rescue
                      _ ->
                        Logger.warning(
                          "Failed to parse includes from index definition: #{index_def}"
                        )

                        nil
                    catch
                      :error ->
                        Logger.warning(
                          "Failed to parse includes from index definition: #{index_def}"
                        )

                        nil
                    end

                  _ ->
                    nil
                end

              [
                %Index{
                  name: index_name,
                  columns: Enum.uniq(columns),
                  unique?: is_unique,
                  include: include,
                  using: using,
                  nulls_distinct: nulls_distinct,
                  where_clause: where_clause
                }
              ]

            :error ->
              Logger.warning("Failed to parse index definition: #{index_def}")
              []
          end
        end)
  }
end
# CREATE INDEX users_lower_email_idx ON public.users USING btree (lower((email)::text))
# CREATE INDEX unique_email_com3 ON public.users USING btree (email, id) WHERE (email ~~ '%.com'::citext)
#
# Strips the `CREATE [UNIQUE] INDEX <name> ON <table> USING <method>` prefix
# and parses the parenthesized column list. Returns {:ok, columns} | :error.
defp parse_columns_from_index_def(string, using) do
  string
  |> String.trim_leading("CREATE ")
  |> String.trim_leading("UNIQUE ")
  |> String.trim_leading("INDEX ")
  # index name
  |> String.replace(~r/^[a-zA-Z0-9_\.]+\s/, "")
  |> String.trim_leading("ON ")
  # (schema-qualified) table name
  |> String.replace(~r/^[\S]+/, "")
  |> String.trim_leading()
  |> String.trim_leading("USING #{using} ")
  |> do_parse_columns()
  |> then(&{:ok, &1})
rescue
  # do_parse_columns/4 raises on input it can't parse; previously only
  # `throw(:error)` was caught, so those raises escaped and crashed the
  # generator instead of letting callers log and skip the index.
  _ -> :error
catch
  :error -> :error
end
# Public entry point, used for parsing `INCLUDE (...)` column lists.
def parse_columns(char) do
  do_parse_columns(char)
end

# Tiny recursive-descent parser over an index definition's column list.
# `state` is a stack of :outer / :in_field / :in_paren / :in_quote markers,
# `field` accumulates the current column (or expression), and `acc` collects
# finished columns in reverse. NOTE(review): clause order is load-bearing.
defp do_parse_columns(char, state \\ [], field \\ "", acc \\ [])

# Opening paren of the whole column list.
defp do_parse_columns("(" <> rest, [], field, acc) do
  do_parse_columns(rest, [:outer], field, acc)
end

# Closing paren of the whole list: emit what we have.
defp do_parse_columns(")" <> _rest, [:outer], field, acc) do
  if field == "" do
    Enum.reverse(acc)
  else
    Enum.reverse([field | acc])
  end
end

# An expression column opening with "(".
defp do_parse_columns("(" <> rest, [:outer], field, acc) do
  do_parse_columns(rest, [:in_paren, :in_field, :outer], field, acc)
end

# Separator between top-level columns.
defp do_parse_columns(", " <> rest, [:in_field, :outer], field, acc) do
  do_parse_columns(rest, [:in_field, :outer], "", [field | acc])
end

# Any other byte at top level starts/extends a plain column name.
defp do_parse_columns(<<str::binary-size(1)>> <> rest, [:outer], field, acc) do
  do_parse_columns(rest, [:in_field, :outer], field <> str, acc)
end

# '' is an escaped quote inside a SQL string literal.
defp do_parse_columns("''" <> rest, [:in_quote | stack], field, acc) do
  do_parse_columns(rest, [:in_quote | stack], field <> "'", acc)
end

defp do_parse_columns("'" <> rest, [:in_quote | stack], field, acc) do
  do_parse_columns(rest, stack, field <> "'", acc)
end

defp do_parse_columns(<<str::binary-size(1)>> <> rest, [:in_quote | stack], field, acc) do
  do_parse_columns(rest, [:in_quote | stack], field <> str, acc)
end

defp do_parse_columns("'" <> rest, stack, field, acc) do
  do_parse_columns(rest, [:in_quote | stack], field <> "'", acc)
end

# Nested parens inside an expression column are kept verbatim.
defp do_parse_columns("(" <> rest, stack, field, acc) do
  do_parse_columns(rest, [:in_paren | stack], field <> "(", acc)
end

defp do_parse_columns(")" <> rest, [:in_paren | stack], field, acc) do
  do_parse_columns(rest, stack, field <> ")", acc)
end

defp do_parse_columns("), " <> rest, [:in_field | stack], field, acc) do
  do_parse_columns(rest, [:in_field | stack], "", [field | acc])
end

defp do_parse_columns(")" <> _rest, [:in_field | _stack], field, acc) do
  Enum.reverse([field | acc])
end

defp do_parse_columns(<<str::binary-size(1)>> <> rest, [:in_paren | stack], field, acc) do
  do_parse_columns(rest, [:in_paren | stack], field <> str, acc)
end

# NOTE(review): unreachable — shadowed by the identical [:outer] single-byte
# clause earlier in the module.
defp do_parse_columns(<<str::binary-size(1)>> <> rest, [:outer], field, acc) do
  do_parse_columns(rest, [:in_field, :outer], field <> str, acc)
end

defp do_parse_columns(<<str::binary-size(1)>> <> rest, [:in_field | stack], field, acc) do
  do_parse_columns(rest, [:in_field | stack], field <> str, acc)
end

# NOTE(review): unreachable — the generic single-byte [:in_field | stack]
# clause above consumes the "," first.
defp do_parse_columns(", " <> rest, [:in_field | stack], field, acc) do
  do_parse_columns(rest, stack, "", [field | acc])
end

# NOTE(review): unreachable — shadowed by the ")" [:outer] clause above.
defp do_parse_columns(")" <> _rest, [:outer], field, acc) do
  Enum.reverse([field | acc])
end

# End of input while mid-field: flush the last column.
defp do_parse_columns("", [:in_field | _stack], field, acc) do
  Enum.reverse([field | acc])
end

defp do_parse_columns("", [:outer], field, acc) do
  if field == "" do
    Enum.reverse(acc)
  else
    Enum.reverse([field | acc])
  end
end

# Anything else is a parse failure; callers catch/rescue and log.
defp do_parse_columns(other, stack, field, acc) do
  raise "Unexpected character: #{inspect(other)} at #{inspect(stack)} with #{inspect(field)} - #{inspect(acc)}"
end
# Raises when an explicitly requested table (via `:tables`) is absent from
# the database; otherwise passes `specs` through untouched.
defp verify_found_tables(specs, opts) do
  case opts[:tables] do
    nil ->
      specs

    wanted ->
      missing =
        Enum.reject(wanted, fn table ->
          Enum.any?(specs, &(Enum.at(&1, 0) == table))
        end)

      if missing == [] do
        specs
      else
        raise "The following tables did not exist: #{inspect(missing)}"
      end
  end
end
# Enriches raw attributes: primary-key flags, sensitivity heuristics, Ash
# types, then defaults/generated flags. (Helpers defined elsewhere in this
# module.)
defp build_attributes(attributes, table_name, repo) do
  attributes
  |> set_primary_key(table_name, repo)
  |> set_sensitive()
  |> set_types()
  |> set_defaults_and_generated()
end
# Translates each attribute's raw `migration_default` SQL text into an Elixir
# default (value or zero-arity capture) where recognized. Unrecognized
# defaults are left database-side, with the attribute marked `generated?`.
defp set_defaults_and_generated(attributes) do
  Enum.map(attributes, fn attribute ->
    # Any database default means the column's value can be produced without
    # the application supplying it.
    attribute =
      if attribute.migration_default do
        %{attribute | generated?: true}
      else
        attribute
      end

    case attribute do
      %{migration_default: nil} ->
        attribute

      %{migration_default: "CURRENT_TIMESTAMP"} ->
        %{attribute | default: &DateTime.utc_now/0}

      %{migration_default: "now()"} ->
        %{attribute | default: &DateTime.utc_now/0}

      %{migration_default: "(now() AT TIME ZONE 'utc'::text)"} ->
        %{attribute | default: &DateTime.utc_now/0}

      %{migration_default: "gen_random_uuid()"} ->
        %{attribute | default: &Ash.UUID.generate/0}

      %{migration_default: "uuid_generate_v4()"} ->
        %{attribute | default: &Ash.UUID.generate/0}

      %{attr_type: :integer, migration_default: value} ->
        case Integer.parse(value) do
          {value, ""} ->
            %{attribute | default: value}

          _ ->
            attribute
        end

      %{attr_type: :decimal, migration_default: value} ->
        case Decimal.parse(value) do
          {value, ""} ->
            %{attribute | default: Decimal.new(value)}

          _ ->
            attribute
        end

      %{attr_type: :map, migration_default: value} ->
        case Jason.decode(String.trim_trailing(value, "::json")) do
          {:ok, value} ->
            %{attribute | default: value}

          _ ->
            attribute
        end

      # e.g. `'draft'::text` — strip the quote/cast and unescape ''.
      %{attr_type: type, migration_default: "'" <> value}
      when type in [:string, :ci_string, :atom] ->
        case String.trim_trailing(value, "'::text") do
          ^value ->
            attribute

          trimmed ->
            # This is very likely too naive
            attribute = %{attribute | default: String.replace(trimmed, "''", "'")}

            if type == :atom do
              %{attribute | default: String.to_atom(attribute.default)}
            else
              attribute
            end
        end

      _ ->
        attribute
    end
  end)
end
# Derives relationships repo-by-repo: specs are grouped by their repo, and
# each group only considers the pre-existing `{resource, repo, table}`
# triples that belong to the same repo.
def add_relationships(specs, resources) do
specs
|> Enum.group_by(& &1.repo)
|> Enum.flat_map(fn {repo, repo_specs} ->
existing_in_repo =
for {resource, resource_repo, table} <- resources,
resource_repo == repo,
do: {resource, table}

do_add_relationships(repo_specs, existing_in_repo)
end)
end
# Two-pass relationship derivation for a single repo's specs:
#
# Pass 1 builds `:belongs_to` relationships from each spec's own foreign
# keys. Pass 2 builds the reverse `:has_one`/`:has_many` relationships by
# scanning every other spec's belongs_to side. Name collisions within a
# pass are resolved interactively via `name_all_relationships/5`.
defp do_add_relationships(specs, resources) do
# Pass 1: belongs_to relationships, derived from foreign keys.
specs =
Enum.map(specs, fn spec ->
belongs_to_relationships =
Enum.flat_map(
spec.foreign_keys,
fn %ForeignKey{
constraint_name: constraint_name,
column: column_name,
references: references,
destination_field: destination_field,
match_with: match_with
} ->
# Skip foreign keys whose target table maps to neither a generated
# spec nor a pre-existing resource.
case find_destination_and_field(
specs,
spec,
references,
destination_field,
resources,
match_with
) do
nil ->
[]
{destination, destination_attribute, match_with} ->
source_attr =
Enum.find(spec.attributes, fn attribute ->
attribute.source == column_name
end)
[
%Relationship{
type: :belongs_to,
name: Inflex.singularize(references),
source: spec.resource,
constraint_name: constraint_name,
match_with: match_with,
destination: destination,
source_attribute: source_attr.name,
allow_nil?: source_attr.allow_nil?,
destination_attribute: destination_attribute
}
]
end
end
)
# Multiple FKs to the same table produce the same guessed name; ask
# the user to disambiguate those.
|> Enum.group_by(& &1.name)
|> Enum.flat_map(fn
{_name, [relationship]} ->
[relationship]
{name, relationships} ->
name_all_relationships(:belongs_to, spec, name, relationships)
end)
%{spec | relationships: belongs_to_relationships}
end)
# Pass 2: for each spec, find every belongs_to on the other specs that
# points at it, and add the mirroring has_one/has_many.
Enum.map(specs, fn spec ->
relationships_to_me =
Enum.flat_map(specs, fn other_spec ->
Enum.flat_map(other_spec.relationships, fn relationship ->
if relationship.destination == spec.resource do
[{other_spec.table_name, other_spec.resource, relationship}]
else
[]
end
end)
end)
|> Enum.map(fn {table, resource, relationship} ->
destination_field =
Enum.find(spec.attributes, fn attribute ->
attribute.name == relationship.destination_attribute
end).source
# A plain (non-partial) unique index on the referencing column means
# at most one row can point back — i.e. has_one, not has_many.
has_unique_index? =
Enum.any?(spec.indexes, fn index ->
index.unique? and is_nil(index.where_clause) and
index.columns == [destination_field]
end)
# Singularize for has_one, pluralize for has_many, reusing the table
# name as-is when it already has the right form.
{name, type} =
if has_unique_index? do
if Inflex.pluralize(table) == table do
{Inflex.singularize(table), :has_one}
else
{table, :has_one}
end
else
if Inflex.pluralize(table) == table do
{table, :has_many}
else
{Inflex.pluralize(table), :has_many}
end
end
# Mirror of the belongs_to: source/destination attributes and
# match_with pairs are flipped.
%Relationship{
type: type,
name: name,
destination: resource,
source: spec.resource,
match_with:
Enum.map(relationship.match_with, fn {source, dest} ->
{dest, source}
end),
constraint_name: relationship.constraint_name,
source_attribute: relationship.destination_attribute,
destination_attribute: relationship.source_attribute
}
end)
|> Enum.group_by(& &1.name)
|> Enum.flat_map(fn
{_name, [relationship]} ->
[relationship]
{name, relationships} ->
name_all_relationships(:has, spec, name, relationships)
end)
%{spec | relationships: spec.relationships ++ relationships_to_me}
end)
end
# Interactively resolves relationships that collided on the same guessed
# name: prompts the user for a replacement name for each one, dropping any
# whose prompt is left empty. Returns the renamed relationships (order is
# not preserved; results accumulate in reverse).
defp name_all_relationships(type, spec, name, relationships, acc \\ [])
defp name_all_relationships(_type, _spec, _name, [], acc), do: acc
defp name_all_relationships(type, spec, name, [relationship | rest], acc) do
# The prompt text differs by direction: for :belongs_to the spec is the
# source; for has_one/has_many the spec is the destination.
label =
case type do
:belongs_to ->
"""
Multiple foreign keys found from `#{inspect(spec.resource)}` to `#{inspect(relationship.destination)}` with the guessed name `#{name}`.
Provide a relationship name for the one with the following info:
Resource: `#{inspect(spec.resource)}`
Relationship Type: :belongs_to
Guessed Name: `:#{name}`
Relationship Destination: `#{inspect(relationship.destination)}`
Constraint Name: `#{inspect(relationship.constraint_name)}`.
Leave empty to skip adding this relationship.
Name:
"""
|> String.trim_trailing()
_ ->
"""
Multiple foreign keys found from `#{inspect(relationship.source)}` to `#{inspect(spec.resource)}` with the guessed name `#{name}`.
Provide a relationship name for the one with the following info:
Resource: `#{inspect(relationship.source)}`
Relationship Type: :#{relationship.type}
Guessed Name: `:#{name}`
Relationship Destination: `#{inspect(spec.resource)}`
Constraint Name: `#{inspect(relationship.constraint_name)}`.
Leave empty to skip adding this relationship.
Name:
"""
|> String.trim_trailing()
end
Owl.IO.input(label: label)
|> String.trim()
# common typo
|> String.trim_leading(":")
|> case do
"" ->
# Blank answer: drop this relationship entirely.
name_all_relationships(type, spec, name, rest, acc)
new_name ->
name_all_relationships(type, spec, name, rest, [%{relationship | name: new_name} | acc])
end
end
# Resolves a foreign key's target table to `{destination_resource,
# destination_attribute_name, match_with_pairs}`, or nil when the target
# is unknown or any match_with column cannot be resolved on both sides.
#
# The target is looked up first among the specs being generated, then among
# pre-existing `{resource, table}` pairs. For pre-existing resources the
# destination attribute name is returned as a string (it comes from
# compiled resource info); for generated specs it is the spec's own name.
defp find_destination_and_field(
specs,
spec,
destination,
destination_field,
resources,
match_with
) do
case Enum.find(specs, fn other_spec ->
other_spec.table_name == destination
end) do
nil ->
# Not among the generated specs; fall back to pre-existing resources.
case Enum.find(resources, fn {_resource, table} ->
table == destination
end) do
nil ->
nil
{resource, _table} ->
# this is cheating. We should be making sure the app is compiled
# before our task is composed or pulling from source code
attributes =
resource
|> Ash.Resource.Info.attributes()
# Translate each match_with column pair (source column -> dest
# column) into attribute names; abort on the first unresolvable pair.
case Enum.reduce_while(match_with, {:ok, []}, fn {source, dest}, {:ok, acc} ->
with source_attr when not is_nil(source_attr) <-
Enum.find(spec.attributes, &(&1.source == source)),
dest_attr when not is_nil(dest_attr) <-
Enum.find(attributes, &(to_string(&1.source) == dest)) do
{:cont, {:ok, acc ++ [{source_attr.name, to_string(dest_attr.name)}]}}
else
_ ->
{:halt, :error}
end
end) do
{:ok, match_with} ->
Enum.find_value(attributes, fn attribute ->
if to_string(attribute.source) == destination_field do
{resource, to_string(attribute.name), match_with}
end
end)
_ ->
nil
end
end
%__MODULE__{} = other_spec ->
# Target is another generated spec: same match_with translation, but
# against the spec's attribute list (sources compare directly).
case Enum.reduce_while(match_with, {:ok, []}, fn {source, dest}, {:ok, acc} ->
with source_attr when not is_nil(source_attr) <-
Enum.find(spec.attributes, &(&1.source == source)),
dest_attr when not is_nil(dest_attr) <-
Enum.find(other_spec.attributes, &(&1.source == dest)) do
{:cont, {:ok, acc ++ [{source_attr.name, dest_attr.name}]}}
else
_ ->
{:halt, :error}
end
end) do
{:ok, match_with} ->
other_spec.attributes
|> Enum.find_value(fn %Attribute{} = attr ->
if attr.source == destination_field do
{other_spec.resource, attr.name, match_with}
end
end)
_ ->
nil
end
end
end
# Resolves each attribute's raw database type string to an Ash type.
#
# Known Postgres types are mapped via `type/1`. Unknown types prompt the
# user for literal source code; answers are cached per type string in the
# process dictionary (`{:type_cache, type_string}`) so each unknown type
# is only asked about once.
def set_types(attributes) do
  attributes
  |> Enum.map(fn attribute ->
    case Process.get({:type_cache, attribute.type}) do
      nil ->
        case type(attribute.type) do
          {:ok, type} ->
            %{attribute | attr_type: type}

          :error ->
            case Mix.shell().prompt("""
            Unknown type: #{attribute.type}. What should we use as the type?
            Provide the value as literal source code that should be placed into the
            generated file, i.e
            - :string
            - MyApp.Types.CustomType
            - {:array, :string}
            Use `skip` to skip this attribute.
            """) do
              skip when skip in ["skip", "skip\n"] ->
                # BUG FIX: previously returned the whole `attributes` list as
                # this element, corrupting the mapped result.
                # NOTE(review): a skipped attribute keeps attr_type: nil —
                # confirm downstream filters these out.
                attribute

              new_type ->
                new_type =
                  case String.trim(new_type) do
                    ":" <> type ->
                      # NOTE(review): creates an atom from user input —
                      # acceptable for an interactive generator.
                      String.to_atom(type)

                    type ->
                      # BUG FIX: Code.eval_string/1 returns {result, bindings};
                      # previously the whole tuple was stored as the type.
                      {value, _bindings} = Code.eval_string(type)
                      value
                  end

                Process.put({:type_cache, attribute.type}, new_type)
                %{attribute | attr_type: new_type}
            end
        end

      type ->
        # Cache hit: reuse the user's earlier answer for this type string.
        %{attribute | attr_type: type}
    end
  end)
end
# Maps a Postgres type name (as produced by `table_specs/2`) to an Ash
# attribute type. Array types are rendered as "ARRAY of <inner>" and
# resolve recursively. Returns `{:ok, type}` or `:error` for unknown
# types so the caller can prompt the user.
defp type("ARRAY of " <> rest) do
  case type(rest) do
    {:ok, type} -> {:ok, {:array, type}}
    :error -> :error
  end
end

defp type("bigint"), do: {:ok, :integer}
defp type("bigserial"), do: {:ok, :integer}
defp type("boolean"), do: {:ok, :boolean}
defp type("bytea"), do: {:ok, :binary}
defp type("varchar"), do: {:ok, :string}
defp type("character varying"), do: {:ok, :string}
defp type("date"), do: {:ok, :date}
defp type("double precision"), do: {:ok, :decimal}
defp type("int"), do: {:ok, :integer}
defp type("integer"), do: {:ok, :integer}
defp type("json"), do: {:ok, :map}
defp type("jsonb"), do: {:ok, :map}
defp type("numeric"), do: {:ok, :decimal}
defp type("decimal"), do: {:ok, :decimal}
defp type("smallint"), do: {:ok, :integer}
# BUG FIX: was `{:ok, :ineger}` (typo), which generated an invalid
# attribute type for smallserial columns.
defp type("smallserial"), do: {:ok, :integer}
defp type("serial"), do: {:ok, :integer}
defp type("text"), do: {:ok, :string}
defp type("time"), do: {:ok, :time}
defp type("time without time zone"), do: {:ok, :time}
defp type("time with time zone"), do: {:ok, :time}
defp type("timestamp"), do: {:ok, :utc_datetime_usec}
defp type("timestamp without time zone"), do: {:ok, :utc_datetime_usec}
defp type("timestamp with time zone"), do: {:ok, :utc_datetime_usec}
defp type("tsquery"), do: {:ok, AshPostgres.Tsquery}
defp type("tsvector"), do: {:ok, AshPostgres.Tsvector}
defp type("uuid"), do: {:ok, :uuid}
defp type("citext"), do: {:ok, :ci_string}
defp type(_), do: :error
# Flags each attribute as sensitive when its name matches the heuristics
# in AshPostgres.ResourceGenerator.SensitiveData.
defp set_sensitive(attributes) do
for attribute <- attributes do
sensitive? = AshPostgres.ResourceGenerator.SensitiveData.is_sensitive?(attribute.name)
%{attribute | sensitive?: sensitive?}
end
end
# Marks each attribute's primary_key? flag by asking information_schema
# which columns participate in the table's PRIMARY KEY constraint.
defp set_primary_key(attributes, table_name, repo) do
pkey_query = """
SELECT c.column_name
FROM information_schema.table_constraints tc
JOIN information_schema.constraint_column_usage AS ccu USING (constraint_schema, constraint_name)
JOIN information_schema.columns AS c ON c.table_schema = tc.constraint_schema
AND tc.table_name = c.table_name AND ccu.column_name = c.column_name
WHERE constraint_type = 'PRIMARY KEY' and tc.table_name = $1;
"""

%Postgrex.Result{rows: pkey_rows} = repo.query!(pkey_query, [table_name], log: false)

for %Attribute{name: name} = attribute <- attributes do
%{attribute | primary_key?: [name] in pkey_rows}
end
end
# Returns raw column rows for the selected tables in the `public` schema.
#
# Each row is `[table_name, column_name, data_type, column_default,
# character_maximum_length, nullable?]`. Array columns are rendered as
# "ARRAY of " repeated per dimension plus the element type, and
# user-defined types report their `udt_name`.
#
# Previously this duplicated the entire query for the with/without
# `opts[:tables]` cases; now only the table filter differs. The old
# tables-branch also carried a redundant clause
# `((NOT LIKE 'pg_%' AND != 'schema_migrations') OR = ANY($1))` that was
# always implied by the `AND t.table_name = ANY($1)` that followed
# (`(P OR Q) AND Q ≡ Q`), so it is dropped without changing results.
defp table_specs(repo, opts) do
  {table_filter, params} =
    case opts[:tables] do
      nil ->
        # No explicit selection: all non-system tables, minus skips.
        {"AND t.table_name NOT LIKE 'pg_%' AND t.table_name != 'schema_migrations'\n  AND NOT(t.table_name = ANY($1))",
         [opts[:skip_tables] || []]}

      tables ->
        # Explicit selection: only the requested tables, minus skips.
        {"AND t.table_name = ANY($1)\n  AND NOT(t.table_name = ANY($2))",
         [tables, opts[:skip_tables] || []]}
    end

  %Postgrex.Result{rows: rows} =
    repo.query!(
      """
      SELECT
        t.table_name,
        c.column_name,
        CASE WHEN c.data_type = 'ARRAY' THEN
          repeat('ARRAY of ', a.attndims) || REPLACE(c.udt_name, '_', '')
        WHEN c.data_type = 'USER-DEFINED' THEN
          c.udt_name
        ELSE
          c.data_type
        END as data_type,
        c.column_default,
        c.character_maximum_length,
        c.is_nullable = 'YES'
      FROM
        information_schema.tables t
      JOIN
        information_schema.columns c
      ON t.table_name = c.table_name
      JOIN pg_attribute a
      ON a.attrelid = t.table_name::regclass
      AND a.attname = c.column_name
      AND a.attnum > 0
      WHERE
        t.table_schema = 'public'
        #{table_filter}
      ORDER BY
        t.table_name,
        c.ordinal_position;
      """,
      params,
      log: false
    )

  rows
end
end

View file

@ -1,6 +1,6 @@
%{
"ash": {:hex, :ash, "3.4.1", "14bfccd4c1e7c17db5aed1ecb5062875f55b56b67f6fba911f3a8ef6739f3cfd", [:mix], [{:decimal, "~> 2.0", [hex: :decimal, repo: "hexpm", optional: false]}, {:ecto, "~> 3.7", [hex: :ecto, repo: "hexpm", optional: false]}, {:ets, "~> 0.8", [hex: :ets, repo: "hexpm", optional: false]}, {:igniter, ">= 0.3.11 and < 1.0.0-0", [hex: :igniter, repo: "hexpm", optional: false]}, {:jason, ">= 1.0.0", [hex: :jason, repo: "hexpm", optional: false]}, {:picosat_elixir, "~> 0.2", [hex: :picosat_elixir, repo: "hexpm", optional: true]}, {:plug, ">= 0.0.0", [hex: :plug, repo: "hexpm", optional: true]}, {:reactor, "~> 0.9", [hex: :reactor, repo: "hexpm", optional: false]}, {:simple_sat, ">= 0.1.1 and < 1.0.0-0", [hex: :simple_sat, repo: "hexpm", optional: true]}, {:spark, ">= 2.2.8 and < 3.0.0-0", [hex: :spark, repo: "hexpm", optional: false]}, {:splode, "~> 0.2", [hex: :splode, repo: "hexpm", optional: false]}, {:stream_data, "~> 1.0", [hex: :stream_data, repo: "hexpm", optional: false]}, {:telemetry, "~> 1.1", [hex: :telemetry, repo: "hexpm", optional: false]}], "hexpm", "1e3127e0af0698e652a6bbfb4d4f1a3bb8a48fb42833f4e8f00eda8f1a93082b"},
"ash_sql": {:hex, :ash_sql, "0.2.31", "721521e073d706169ebb0e68535422c1920580b29829fe949fb679c8674a9691", [:mix], [{:ash, ">= 3.1.7 and < 4.0.0-0", [hex: :ash, repo: "hexpm", optional: false]}, {:ecto, "~> 3.9", [hex: :ecto, repo: "hexpm", optional: false]}, {:ecto_sql, "~> 3.9", [hex: :ecto_sql, repo: "hexpm", optional: false]}], "hexpm", "e5f578be31f5fa5af8dd1cb27b01b7b1864ef1414472293ce3a4851290cb69b1"},
"ash_sql": {:hex, :ash_sql, "0.2.32", "de99255becfb9daa7991c18c870e9f276bb372acda7eda3e05c3e2ff2ca8922e", [:mix], [{:ash, ">= 3.1.7 and < 4.0.0-0", [hex: :ash, repo: "hexpm", optional: false]}, {:ecto, "~> 3.9", [hex: :ecto, repo: "hexpm", optional: false]}, {:ecto_sql, "~> 3.9", [hex: :ecto_sql, repo: "hexpm", optional: false]}], "hexpm", "43773bcd33d21319c11804d76fe11f1a1b7c8faba7aaedeab6f55fde3d2405db"},
"benchee": {:hex, :benchee, "1.3.1", "c786e6a76321121a44229dde3988fc772bca73ea75170a73fd5f4ddf1af95ccf", [:mix], [{:deep_merge, "~> 1.0", [hex: :deep_merge, repo: "hexpm", optional: false]}, {:statistex, "~> 1.0", [hex: :statistex, repo: "hexpm", optional: false]}, {:table, "~> 0.1.0", [hex: :table, repo: "hexpm", optional: true]}], "hexpm", "76224c58ea1d0391c8309a8ecbfe27d71062878f59bd41a390266bf4ac1cc56d"},
"bunt": {:hex, :bunt, "1.0.0", "081c2c665f086849e6d57900292b3a161727ab40431219529f13c4ddcf3e7a44", [:mix], [], "hexpm", "dc5f86aa08a5f6fa6b8096f0735c4e76d54ae5c9fa2c143e5a1fc7c1cd9bb6b5"},
"credo": {:hex, :credo, "1.7.7", "771445037228f763f9b2afd612b6aa2fd8e28432a95dbbc60d8e03ce71ba4446", [:mix], [{:bunt, "~> 0.2.1 or ~> 1.0", [hex: :bunt, repo: "hexpm", optional: false]}, {:file_system, "~> 0.2 or ~> 1.0", [hex: :file_system, repo: "hexpm", optional: false]}, {:jason, "~> 1.0", [hex: :jason, repo: "hexpm", optional: false]}], "hexpm", "8bc87496c9aaacdc3f90f01b7b0582467b69b4bd2441fe8aae3109d843cc2f2e"},
@ -8,15 +8,15 @@
"decimal": {:hex, :decimal, "2.1.1", "5611dca5d4b2c3dd497dec8f68751f1f1a54755e8ed2a966c2633cf885973ad6", [:mix], [], "hexpm", "53cfe5f497ed0e7771ae1a475575603d77425099ba5faef9394932b35020ffcc"},
"deep_merge": {:hex, :deep_merge, "1.0.0", "b4aa1a0d1acac393bdf38b2291af38cb1d4a52806cf7a4906f718e1feb5ee961", [:mix], [], "hexpm", "ce708e5f094b9cd4e8f2be4f00d2f4250c4095be93f8cd6d018c753894885430"},
"dialyxir": {:hex, :dialyxir, "1.4.3", "edd0124f358f0b9e95bfe53a9fcf806d615d8f838e2202a9f430d59566b6b53b", [:mix], [{:erlex, ">= 0.2.6", [hex: :erlex, repo: "hexpm", optional: false]}], "hexpm", "bf2cfb75cd5c5006bec30141b131663299c661a864ec7fbbc72dfa557487a986"},
"earmark_parser": {:hex, :earmark_parser, "1.4.39", "424642f8335b05bb9eb611aa1564c148a8ee35c9c8a8bba6e129d51a3e3c6769", [:mix], [], "hexpm", "06553a88d1f1846da9ef066b87b57c6f605552cfbe40d20bd8d59cc6bde41944"},
"earmark_parser": {:hex, :earmark_parser, "1.4.41", "ab34711c9dc6212dda44fcd20ecb87ac3f3fce6f0ca2f28d4a00e4154f8cd599", [:mix], [], "hexpm", "a81a04c7e34b6617c2792e291b5a2e57ab316365c2644ddc553bb9ed863ebefa"},
"ecto": {:hex, :ecto, "3.12.2", "bae2094f038e9664ce5f089e5f3b6132a535d8b018bd280a485c2f33df5c0ce1", [:mix], [{:decimal, "~> 2.0", [hex: :decimal, repo: "hexpm", optional: false]}, {:jason, "~> 1.0", [hex: :jason, repo: "hexpm", optional: true]}, {:telemetry, "~> 0.4 or ~> 1.0", [hex: :telemetry, repo: "hexpm", optional: false]}], "hexpm", "492e67c70f3a71c6afe80d946d3ced52ecc57c53c9829791bfff1830ff5a1f0c"},
"ecto_sql": {:hex, :ecto_sql, "3.12.0", "73cea17edfa54bde76ee8561b30d29ea08f630959685006d9c6e7d1e59113b7d", [:mix], [{:db_connection, "~> 2.4.1 or ~> 2.5", [hex: :db_connection, repo: "hexpm", optional: false]}, {:ecto, "~> 3.12", [hex: :ecto, repo: "hexpm", optional: false]}, {:myxql, "~> 0.7", [hex: :myxql, repo: "hexpm", optional: true]}, {:postgrex, "~> 0.19 or ~> 1.0", [hex: :postgrex, repo: "hexpm", optional: true]}, {:tds, "~> 2.1.1 or ~> 2.2", [hex: :tds, repo: "hexpm", optional: true]}, {:telemetry, "~> 0.4.0 or ~> 1.0", [hex: :telemetry, repo: "hexpm", optional: false]}], "hexpm", "dc9e4d206f274f3947e96142a8fdc5f69a2a6a9abb4649ef5c882323b6d512f0"},
"eflame": {:hex, :eflame, "1.0.1", "0664d287e39eef3c413749254b3af5f4f8b00be71c1af67d325331c4890be0fc", [:mix], [], "hexpm", "e0b08854a66f9013129de0b008488f3411ae9b69b902187837f994d7a99cf04e"},
"erlex": {:hex, :erlex, "0.2.7", "810e8725f96ab74d17aac676e748627a07bc87eb950d2b83acd29dc047a30595", [:mix], [], "hexpm", "3ed95f79d1a844c3f6bf0cea61e0d5612a42ce56da9c03f01df538685365efb0"},
"ets": {:hex, :ets, "0.9.0", "79c6a6c205436780486f72d84230c6cba2f8a9920456750ddd1e47389107d5fd", [:mix], [], "hexpm", "2861fdfb04bcaeff370f1a5904eec864f0a56dcfebe5921ea9aadf2a481c822b"},
"ex_check": {:hex, :ex_check, "0.16.0", "07615bef493c5b8d12d5119de3914274277299c6483989e52b0f6b8358a26b5f", [:mix], [], "hexpm", "4d809b72a18d405514dda4809257d8e665ae7cf37a7aee3be6b74a34dec310f5"},
"ex_doc": {:git, "https://github.com/elixir-lang/ex_doc.git", "a663c13478a49d29ae0267b6e45badb803267cf0", []},
"file_system": {:hex, :file_system, "1.0.0", "b689cc7dcee665f774de94b5a832e578bd7963c8e637ef940cd44327db7de2cd", [:mix], [], "hexpm", "6752092d66aec5a10e662aefeed8ddb9531d79db0bc145bb8c40325ca1d8536d"},
"ex_doc": {:git, "https://github.com/elixir-lang/ex_doc.git", "d571628fd829a510d219bcb7162400baff50977f", []},
"file_system": {:hex, :file_system, "1.0.1", "79e8ceaddb0416f8b8cd02a0127bdbababe7bf4a23d2a395b983c1f8b3f73edd", [:mix], [], "hexpm", "4414d1f38863ddf9120720cd976fce5bdde8e91d8283353f0e31850fa89feb9e"},
"git_cli": {:hex, :git_cli, "0.3.0", "a5422f9b95c99483385b976f5d43f7e8233283a47cda13533d7c16131cb14df5", [:mix], [], "hexpm", "78cb952f4c86a41f4d3511f1d3ecb28edb268e3a7df278de2faa1bd4672eaf9b"},
"git_ops": {:hex, :git_ops, "2.6.1", "cc7799a68c26cf814d6d1a5121415b4f5bf813de200908f930b27a2f1fe9dad5", [:mix], [{:git_cli, "~> 0.2", [hex: :git_cli, repo: "hexpm", optional: false]}, {:nimble_parsec, "~> 1.0", [hex: :nimble_parsec, repo: "hexpm", optional: false]}], "hexpm", "ce62d07e41fe993ec22c35d5edb11cf333a21ddaead6f5d9868fcb607d42039e"},
"glob_ex": {:hex, :glob_ex, "0.1.8", "f7ef872877ca2ae7a792ab1f9ff73d9c16bf46ecb028603a8a3c5283016adc07", [:mix], [], "hexpm", "9e39d01729419a60a937c9260a43981440c43aa4cadd1fa6672fecd58241c464"},
@ -25,9 +25,9 @@
"iterex": {:hex, :iterex, "0.1.2", "58f9b9b9a22a55cbfc7b5234a9c9c63eaac26d276b3db80936c0e1c60355a5a6", [:mix], [], "hexpm", "2e103b8bcc81757a9af121f6dc0df312c9a17220f302b1193ef720460d03029d"},
"jason": {:hex, :jason, "1.4.4", "b9226785a9aa77b6857ca22832cffa5d5011a667207eb2a0ad56adb5db443b8a", [:mix], [{:decimal, "~> 1.0 or ~> 2.0", [hex: :decimal, repo: "hexpm", optional: true]}], "hexpm", "c5eb0cab91f094599f94d55bc63409236a8ec69a21a67814529e8d5f6cc90b3b"},
"libgraph": {:hex, :libgraph, "0.16.0", "3936f3eca6ef826e08880230f806bfea13193e49bf153f93edcf0239d4fd1d07", [:mix], [], "hexpm", "41ca92240e8a4138c30a7e06466acc709b0cbb795c643e9e17174a178982d6bf"},
"makeup": {:hex, :makeup, "1.1.0", "6b67c8bc2882a6b6a445859952a602afc1a41c2e08379ca057c0f525366fc3ca", [:mix], [{:nimble_parsec, "~> 1.2.2 or ~> 1.3", [hex: :nimble_parsec, repo: "hexpm", optional: false]}], "hexpm", "0a45ed501f4a8897f580eabf99a2e5234ea3e75a4373c8a52824f6e873be57a6"},
"makeup_elixir": {:hex, :makeup_elixir, "0.16.1", "cc9e3ca312f1cfeccc572b37a09980287e243648108384b97ff2b76e505c3555", [:mix], [{:makeup, "~> 1.0", [hex: :makeup, repo: "hexpm", optional: false]}, {:nimble_parsec, "~> 1.2.3 or ~> 1.3", [hex: :nimble_parsec, repo: "hexpm", optional: false]}], "hexpm", "e127a341ad1b209bd80f7bd1620a15693a9908ed780c3b763bccf7d200c767c6"},
"makeup_erlang": {:hex, :makeup_erlang, "0.1.2", "ad87296a092a46e03b7e9b0be7631ddcf64c790fa68a9ef5323b6cbb36affc72", [:mix], [{:makeup, "~> 1.0", [hex: :makeup, repo: "hexpm", optional: false]}], "hexpm", "f3f5a1ca93ce6e092d92b6d9c049bcda58a3b617a8d888f8e7231c85630e8108"},
"makeup": {:hex, :makeup, "1.1.2", "9ba8837913bdf757787e71c1581c21f9d2455f4dd04cfca785c70bbfff1a76a3", [:mix], [{:nimble_parsec, "~> 1.2.2 or ~> 1.3", [hex: :nimble_parsec, repo: "hexpm", optional: false]}], "hexpm", "cce1566b81fbcbd21eca8ffe808f33b221f9eee2cbc7a1706fc3da9ff18e6cac"},
"makeup_elixir": {:hex, :makeup_elixir, "0.16.2", "627e84b8e8bf22e60a2579dad15067c755531fea049ae26ef1020cad58fe9578", [:mix], [{:makeup, "~> 1.0", [hex: :makeup, repo: "hexpm", optional: false]}, {:nimble_parsec, "~> 1.2.3 or ~> 1.3", [hex: :nimble_parsec, repo: "hexpm", optional: false]}], "hexpm", "41193978704763f6bbe6cc2758b84909e62984c7752b3784bd3c218bb341706b"},
"makeup_erlang": {:hex, :makeup_erlang, "1.0.1", "c7f58c120b2b5aa5fd80d540a89fdf866ed42f1f3994e4fe189abebeab610839", [:mix], [{:makeup, "~> 1.0", [hex: :makeup, repo: "hexpm", optional: false]}], "hexpm", "8a89a1eeccc2d798d6ea15496a6e4870b75e014d1af514b1b71fa33134f57814"},
"mix_audit": {:hex, :mix_audit, "2.1.4", "0a23d5b07350cdd69001c13882a4f5fb9f90fbd4cbf2ebc190a2ee0d187ea3e9", [:make, :mix], [{:jason, "~> 1.4", [hex: :jason, repo: "hexpm", optional: false]}, {:yaml_elixir, "~> 2.11", [hex: :yaml_elixir, repo: "hexpm", optional: false]}], "hexpm", "fd807653cc8c1cada2911129c7eb9e985e3cc76ebf26f4dd628bb25bbcaa7099"},
"nimble_options": {:hex, :nimble_options, "1.1.1", "e3a492d54d85fc3fd7c5baf411d9d2852922f66e69476317787a7b2bb000a61b", [:mix], [], "hexpm", "821b2470ca9442c4b6984882fe9bb0389371b8ddec4d45a9504f00a66f650b44"},
"nimble_parsec": {:hex, :nimble_parsec, "1.4.0", "51f9b613ea62cfa97b25ccc2c1b4216e81df970acd8e16e8d1bdc58fef21370d", [:mix], [], "hexpm", "9c565862810fb383e9838c1dd2d7d2c437b3d13b267414ba6af33e50d2d1cf28"},
@ -38,7 +38,7 @@
"simple_sat": {:hex, :simple_sat, "0.1.3", "f650fc3c184a5fe741868b5ac56dc77fdbb428468f6dbf1978e14d0334497578", [:mix], [], "hexpm", "a54305066a356b7194dc81db2a89232bacdc0b3edaef68ed9aba28dcbc34887b"},
"sobelow": {:hex, :sobelow, "0.13.0", "218afe9075904793f5c64b8837cc356e493d88fddde126a463839351870b8d1e", [:mix], [{:jason, "~> 1.0", [hex: :jason, repo: "hexpm", optional: false]}], "hexpm", "cd6e9026b85fc35d7529da14f95e85a078d9dd1907a9097b3ba6ac7ebbe34a0d"},
"sourceror": {:hex, :sourceror, "1.6.0", "9907884e1449a4bd7dbaabe95088ed4d9a09c3c791fb0103964e6316bc9448a7", [:mix], [], "hexpm", "e90aef8c82dacf32c89c8ef83d1416fc343cd3e5556773eeffd2c1e3f991f699"},
"spark": {:hex, :spark, "2.2.22", "abb5ba74ed8b8a69f8d3112fe0d74a1dea261664d9a3bcaf2d0f94f9ee7102f6", [:mix], [{:igniter, ">= 0.2.6 and < 1.0.0-0", [hex: :igniter, repo: "hexpm", optional: false]}, {:jason, "~> 1.4", [hex: :jason, repo: "hexpm", optional: false]}, {:sourceror, "~> 1.2", [hex: :sourceror, repo: "hexpm", optional: false]}], "hexpm", "98b6ea8c19fe97b2b7b20be034ae6cf34e98b03ecba8b7d5a4cc2449f60f3f5e"},
"spark": {:hex, :spark, "2.2.23", "78f0a1b0b713a91ad556fe9dc19ec92d977aaa0803cce2e255d90e58b9045c2a", [:mix], [{:igniter, ">= 0.2.6 and < 1.0.0-0", [hex: :igniter, repo: "hexpm", optional: false]}, {:jason, "~> 1.4", [hex: :jason, repo: "hexpm", optional: false]}, {:sourceror, "~> 1.2", [hex: :sourceror, repo: "hexpm", optional: false]}], "hexpm", "a354b5cd7c3f021e3cd1da5a033b7643fe7b3c71c96b96d9f500a742f40c94db"},
"spitfire": {:hex, :spitfire, "0.1.3", "7ea0f544005dfbe48e615ed90250c9a271bfe126914012023fd5e4b6b82b7ec7", [:mix], [], "hexpm", "d53b5107bcff526a05c5bb54c95e77b36834550affd5830c9f58760e8c543657"},
"splode": {:hex, :splode, "0.2.4", "71046334c39605095ca4bed5d008372e56454060997da14f9868534c17b84b53", [:mix], [], "hexpm", "ca3b95f0d8d4b482b5357954fec857abd0fa3ea509d623334c1328e7382044c2"},
"statistex": {:hex, :statistex, "1.0.0", "f3dc93f3c0c6c92e5f291704cf62b99b553253d7969e9a5fa713e5481cd858a5", [:mix], [], "hexpm", "ff9d8bee7035028ab4742ff52fc80a2aa35cece833cf5319009b52f1b5a86c27"},

View file

@ -0,0 +1,29 @@
{
"attributes": [
{
"allow_nil?": false,
"default": "fragment(\"gen_random_uuid()\")",
"generated?": false,
"primary_key?": true,
"references": null,
"size": null,
"source": "id",
"type": "uuid"
}
],
"base_filter": null,
"check_constraints": [],
"custom_indexes": [],
"custom_statements": [],
"has_create_action": true,
"hash": "F24673A4219DEC6873571CCF68B8F0CC34B5843DAA2D7B71A16EFE576C385C1C",
"identities": [],
"multitenancy": {
"attribute": null,
"global": null,
"strategy": null
},
"repo": "Elixir.AshPostgres.TestRepo",
"schema": null,
"table": "schematic_groups"
}

View file

@ -21,6 +21,15 @@ defmodule AshPostgres.Test.Domain do
resource(AshPostgres.Test.Record)
resource(AshPostgres.Test.PostFollower)
resource(AshPostgres.Test.StatefulPostFollower)
resource(CalcDependency.Dependency)
resource(CalcDependency.Element)
resource(CalcDependency.ElementContext)
resource(CalcDependency.Location)
resource(CalcDependency.Operation)
resource(CalcDependency.OperationVersion)
resource(CalcDependency.SchematicGroup)
resource(CalcDependency.Segment)
resource(CalcDependency.Verb)
end
authorization do