improvement: don't start processes for single items in list

chore: add some benchmarks/flame files
Zach Daniel 2024-08-15 09:14:43 -04:00
parent e200b5b9d1
commit ce5c080492
11 changed files with 311 additions and 89 deletions
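The idea behind the improvement, as a minimal standalone sketch (the InlineLast module below is hypothetical and only illustrates the pattern; it is not the Ash implementation): while walking a batch, every item except the last is handed to a new process, and the last, or only, item runs in the calling process, so a one-element list never starts a process at all.

# Hypothetical illustration only; not the Ash implementation.
defmodule InlineLast do
  # Dispatch every item but the last to a Task; run the last one inline.
  def run_all(funs), do: dispatch(funs, [])

  defp dispatch([], acc),
    do: acc |> Enum.reverse() |> Enum.map(&collect/1)

  defp dispatch([fun | rest], acc) do
    result =
      if rest == [] do
        # single (or final) item: no extra process is started
        {:value, fun.()}
      else
        {:task, Task.async(fun)}
      end

    dispatch(rest, [result | acc])
  end

  defp collect({:value, value}), do: value
  defp collect({:task, task}), do: Task.await(task)
end

# A one-element list stays entirely in the calling process:
InlineLast.run_all([fn -> 1 + 1 end])
#=> [2]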

View file

@@ -230,7 +230,10 @@ spark_locals_without_parens = [
[
import_deps: [:spark, :reactor],
inputs: ["{mix,.formatter}.exs", "{config,lib,test}/**/*.{ex,exs}"],
inputs: [
"{mix,.formatter}.exs",
"{config,lib,test,benchmarks,flames}/**/*.{ex,exs}"
],
plugins: [Spark.Formatter],
locals_without_parens: spark_locals_without_parens,
export: [

View file

@@ -20,11 +20,8 @@ end
changeset = Ash.Changeset.for_create(Resource, :create, %{})
Benchee.run(
%{
create: fn ->
Ash.create!(changeset)
end
}
)
Benchee.run(%{
create: fn ->
Ash.create!(changeset)
end
})

View file

@@ -30,19 +30,20 @@ defmodule Resource do
attributes do
uuid_primary_key :id
attribute :embeds, {:array, Embed}, public?: true
attribute :structs, {:array, :struct} do
public? true
constraints [
items: [
instance_of: Embed,
fields: [
name: [
type: :string
]
]
]
]
constraints items: [
instance_of: Embed,
fields: [
name: [
type: :string
]
]
]
end
attribute :maps, {:array, :map}, public?: true
end
@@ -82,5 +83,5 @@ Benchee.run(
Ash.bulk_create!(resource_structs_input, Resource, :create)
end
},
memory_time: 2
memory_time: 2
)

benchmarks/read.exs (Normal file, 80 lines added)
View file

@@ -0,0 +1,80 @@
defmodule Domain do
use Ash.Domain, validate_config_inclusion?: false
resources do
allow_unregistered? true
end
end
defmodule Destination do
use Ash.Resource,
data_layer: Ash.DataLayer.Ets,
domain: Domain
actions do
defaults [:read, :destroy, create: :*, update: :*]
end
attributes do
uuid_primary_key :id
attribute :name, :string, allow_nil?: false, public?: true
end
relationships do
belongs_to :source, Source, public?: true
end
end
defmodule Source do
use Ash.Resource,
data_layer: Ash.DataLayer.Ets,
domain: Domain
actions do
defaults [:read, :destroy, create: :*, update: :*]
end
attributes do
uuid_primary_key :id
attribute :first_name, :string, allow_nil?: false, public?: true
attribute :last_name, :string, allow_nil?: false, public?: true
end
calculations do
calculate :full_name, :string, expr(first_name <> " " <> last_name)
end
aggregates do
first :first_destination_name, :destination, :name
end
relationships do
has_many :destination, Destination
end
end
source =
Source
|> Ash.Changeset.for_create(:create, %{first_name: "John", last_name: "Doe"})
|> Ash.create!()
for _ <- 1..2 do
Destination
|> Ash.Changeset.for_create(:create, %{source_id: source.id, name: "Destination"})
|> Ash.create!()
end
query =
Source
|> Ash.Query.for_read(:read, %{})
|> Ash.Query.load([:first_destination_name, :full_name, :destination])
Ash.read!(query)
Logger.configure(level: :error)
Benchee.run(%{
"read" => fn ->
Ash.read!(query)
end
})

View file

@@ -2,21 +2,22 @@ list = Enum.to_list(1..10_000)
map_fun = fn i -> [i, i * i] end
mixed = fn count ->
Enum.reduce(1..count, 0, fn var, expr ->
cond do
rem(var, 4) == 0 ->
{:or, var, expr}
Enum.reduce(1..count, 0, fn var, expr ->
cond do
rem(var, 4) == 0 ->
{:or, var, expr}
rem(var, 3) == 0 ->
{:and, expr, var}
rem(var, 3) == 0 ->
{:and, expr, var}
rem(var, 2) == 0 ->
{:and, -var, expr}
rem(var, 2) == 0 ->
{:and, -var, expr}
true ->
{:or, -var, expr}
end
end) |> Ash.Policy.SatSolver.solve()
true ->
{:or, -var, expr}
end
end)
|> Ash.Policy.SatSolver.solve()
end
Benchee.run(
@@ -26,14 +27,32 @@ Benchee.run(
end
},
inputs: %{
"3 conjunctive" => Enum.to_list(1..3) |> Enum.reduce(0, fn var, expr -> {:and, var, expr} end) |> Ash.Policy.SatSolver.solve(),
"3 disjunctive" => Enum.to_list(1..3) |> Enum.reduce(0, fn var, expr -> {:or, var, expr} end) |> Ash.Policy.SatSolver.solve(),
"3 mixed" => mixed.(3),
"5 conjunctive" => Enum.to_list(1..5) |> Enum.reduce(0, fn var, expr -> {:and, var, expr} end) |> Ash.Policy.SatSolver.solve(),
"5 disjunctive" => Enum.to_list(1..5) |> Enum.reduce(0, fn var, expr -> {:or, var, expr} end) |> Ash.Policy.SatSolver.solve(),
"5 mixed" => mixed.(5),
"7 conjunctive" => Enum.to_list(1..7) |> Enum.reduce(0, fn var, expr -> {:and, var, expr} end) |> Ash.Policy.SatSolver.solve(),
"7 disjunctive" => Enum.to_list(1..7) |> Enum.reduce(0, fn var, expr -> {:or, var, expr} end) |> Ash.Policy.SatSolver.solve(),
"7 mixed" => mixed.(7),
"3 conjunctive" =>
Enum.to_list(1..3)
|> Enum.reduce(0, fn var, expr -> {:and, var, expr} end)
|> Ash.Policy.SatSolver.solve(),
"3 disjunctive" =>
Enum.to_list(1..3)
|> Enum.reduce(0, fn var, expr -> {:or, var, expr} end)
|> Ash.Policy.SatSolver.solve(),
"3 mixed" => mixed.(3),
"5 conjunctive" =>
Enum.to_list(1..5)
|> Enum.reduce(0, fn var, expr -> {:and, var, expr} end)
|> Ash.Policy.SatSolver.solve(),
"5 disjunctive" =>
Enum.to_list(1..5)
|> Enum.reduce(0, fn var, expr -> {:or, var, expr} end)
|> Ash.Policy.SatSolver.solve(),
"5 mixed" => mixed.(5),
"7 conjunctive" =>
Enum.to_list(1..7)
|> Enum.reduce(0, fn var, expr -> {:and, var, expr} end)
|> Ash.Policy.SatSolver.solve(),
"7 disjunctive" =>
Enum.to_list(1..7)
|> Enum.reduce(0, fn var, expr -> {:or, var, expr} end)
|> Ash.Policy.SatSolver.solve(),
"7 mixed" => mixed.(7)
}
)

View file

@@ -1,12 +1,11 @@
Benchee.run(
%{
"uuid_v7 raw" => fn ->
Ash.UUIDv7.bingenerate()
end,
"uuid_v7 string" => fn ->
Ash.UUIDv7.generate()
end,
"uuid_v4 string" => fn ->
Ash.UUID.generate()
end
Benchee.run(%{
"uuid_v7 raw" => fn ->
Ash.UUIDv7.bingenerate()
end,
"uuid_v7 string" => fn ->
Ash.UUIDv7.generate()
end,
"uuid_v4 string" => fn ->
Ash.UUID.generate()
end
})

flames/read.exs (Normal file, 81 lines added)
View file

@@ -0,0 +1,81 @@
defmodule Domain do
use Ash.Domain, validate_config_inclusion?: false
resources do
allow_unregistered? true
end
end
defmodule Destination do
use Ash.Resource,
data_layer: Ash.DataLayer.Ets,
domain: Domain
actions do
defaults [:read, :destroy, create: :*, update: :*]
end
attributes do
uuid_primary_key :id
attribute :name, :string, allow_nil?: false, public?: true
end
relationships do
belongs_to :source, Source, public?: true
end
end
defmodule Source do
use Ash.Resource,
data_layer: Ash.DataLayer.Ets,
domain: Domain
actions do
defaults [:read, :destroy, create: :*, update: :*]
end
attributes do
uuid_primary_key :id
attribute :first_name, :string, allow_nil?: false, public?: true
attribute :last_name, :string, allow_nil?: false, public?: true
end
calculations do
calculate :full_name, :string, expr(first_name <> " " <> last_name)
end
aggregates do
first :first_destination_name, :destination, :name
end
relationships do
has_many :destination, Destination
end
end
source =
Source
|> Ash.Changeset.for_create(:create, %{first_name: "John", last_name: "Doe"})
|> Ash.create!()
for _ <- 1..2 do
Destination
|> Ash.Changeset.for_create(:create, %{source_id: source.id, name: "Destination"})
|> Ash.create!()
end
query =
Source
|> Ash.Query.for_read(:read, %{})
|> Ash.Query.load([:first_destination_name, :full_name, :destination])
Ash.read!(query)
Logger.configure(level: :error)
:eflame.apply(
fn ->
Ash.read!(query)
end,
[]
)

View file

@@ -18,9 +18,10 @@ defmodule Ash.Actions.Read.AsyncLimiter do
def async_or_inline(
%{resource: resource, context: %{private: %{async_limiter: async_limiter}}} = query,
opts,
last?,
func
)
when not is_nil(async_limiter) do
when not is_nil(async_limiter) and last? != true do
if Application.get_env(:ash, :disable_async?) do
func.()
else
@@ -54,7 +55,7 @@ defmodule Ash.Actions.Read.AsyncLimiter do
end
end
def async_or_inline(_, _opts, func) do
def async_or_inline(_, _opts, _, func) do
func.()
end
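Reading the new clause heads above: async_or_inline/4 now takes a last? flag, and when last? is true the `last? != true` guard fails, so the catch-all clause runs the function inline in the caller instead of going through the limiter. A self-contained sketch of that guard pattern (the module and names below are hypothetical, not Ash's):

defmodule AsyncOrInlineSketch do
  # First clause: only taken when a limiter exists and this is not the
  # last item of the batch; the work is pushed to a separate process.
  def async_or_inline(limiter, last?, func) when not is_nil(limiter) and last? != true do
    {:task, Task.async(func)}
  end

  # Catch-all: no limiter, or the last/only item; run inline, no new process.
  def async_or_inline(_limiter, _last?, func) do
    {:inline, func.()}
  end
end

# The final (or only) item of a batch never pays process start-up cost:
AsyncOrInlineSketch.async_or_inline(make_ref(), true, fn -> :ran_inline end)
#=> {:inline, :ran_inline}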

View file

@@ -264,16 +264,8 @@ defmodule Ash.Actions.Read.Calculations do
{newly_done, remaining} =
do_now
|> Enum.map(fn calculation ->
Ash.Actions.Read.AsyncLimiter.async_or_inline(
ash_query,
Ash.Context.to_opts(calculation.context),
fn ->
{calculation.name, calculation, run_calculation(calculation, ash_query, records)}
end
)
end)
|> Enum.concat(tasks)
|> do_run_calcs(ash_query, records)
|> Stream.concat(tasks)
|> Ash.Actions.Read.AsyncLimiter.await_at_least_one()
records =
@@ -309,6 +301,26 @@ defmodule Ash.Actions.Read.Calculations do
end
end
defp do_run_calcs(calcs, ash_query, records, acc \\ [])
defp do_run_calcs([], _ash_query, _records, acc) do
acc
end
defp do_run_calcs([calculation | rest], ash_query, records, acc) do
result =
Ash.Actions.Read.AsyncLimiter.async_or_inline(
ash_query,
Ash.Context.to_opts(calculation.context),
Enum.empty?(rest),
fn ->
{calculation.name, calculation, run_calculation(calculation, ash_query, records)}
end
)
do_run_calcs(rest, ash_query, records, [result | acc])
end
defp attach_calculation_results(calculation, records, nil) do
if calculation.load do
Enum.map(records, fn record ->

View file

@@ -1212,7 +1212,7 @@ defmodule Ash.Actions.Read do
end
defp authorize_query(query, opts) do
if opts[:authorize?] do
if opts[:authorize?] && !Enum.empty?(Ash.Resource.Info.authorizers(query.resource)) do
case Ash.can(query, opts[:actor],
return_forbidden_error?: true,
maybe_is: false,

View file

@@ -39,21 +39,30 @@ defmodule Ash.Actions.Read.Relationships do
end)
end
defp fetch_related_records(relationships_and_queries, records) do
Enum.map(relationships_and_queries, fn
{relationship, {:lazy, query}} ->
{relationship, {:lazy, query}, lazy_related_records(records, relationship, query)}
defp fetch_related_records(batch, records, acc \\ [])
{relationship, %{valid?: true} = related_query} ->
do_fetch_related_records(records, relationship, related_query)
{relationship, %{errors: errors} = related_query} ->
{relationship, related_query, {:error, errors}}
end)
|> Ash.Actions.Read.AsyncLimiter.await_all()
defp fetch_related_records([], _records, acc) do
Ash.Actions.Read.AsyncLimiter.await_all(acc)
end
defp lazy_related_records(records, relationship, related_query) do
defp fetch_related_records([first | rest], records, acc) do
result =
case first do
{relationship, {:lazy, query}} ->
{relationship, {:lazy, query},
lazy_related_records(records, relationship, query, Enum.empty?(rest))}
{relationship, %{valid?: true} = related_query} ->
do_fetch_related_records(records, relationship, related_query, Enum.empty?(rest))
{relationship, %{errors: errors} = related_query} ->
{relationship, related_query, {:error, errors}}
end
fetch_related_records(rest, records, [result | acc])
end
defp lazy_related_records(records, relationship, related_query, last?) do
primary_key = Ash.Resource.Info.primary_key(relationship.source)
related_records_with_lazy_join_source =
@@ -76,17 +85,24 @@ defmodule Ash.Actions.Read.Relationships do
|> Enum.map(&Ash.Resource.set_metadata(&1, %{lazy_join_source: record_pkey}))
end)
Ash.load(related_records_with_lazy_join_source, related_query,
lazy?: true,
domain: related_query.domain,
actor: related_query.context.private[:actor],
tenant: related_query.tenant,
authorize?: related_query.context.private[:authorize?]
Ash.Actions.Read.AsyncLimiter.async_or_inline(
related_query,
Ash.Context.to_opts(related_query.context),
last?,
fn ->
Ash.load(related_records_with_lazy_join_source, related_query,
lazy?: true,
domain: related_query.domain,
actor: related_query.context.private[:actor],
tenant: related_query.tenant,
authorize?: related_query.context.private[:authorize?]
)
end
)
end
defp with_related_queries(load, query, records, lazy?) do
Stream.map(load, fn {relationship_name, related_query} ->
Enum.map(load, fn {relationship_name, related_query} ->
lazy? = lazy? || related_query.context[:private][:lazy?]
if lazy? && Ash.Resource.loaded?(records, relationship_name, lists: :any) do
@@ -309,11 +325,13 @@ defmodule Ash.Actions.Read.Relationships do
defp do_fetch_related_records(
records,
%{manual: {module, opts}} = relationship,
related_query
related_query,
last?
) do
Ash.Actions.Read.AsyncLimiter.async_or_inline(
related_query,
Ash.Context.to_opts(related_query.context),
last?,
fn ->
result =
module.load(records, opts, %Ash.Resource.ManualRelationship.Context{
@@ -365,11 +383,13 @@ defmodule Ash.Actions.Read.Relationships do
defp do_fetch_related_records(
_records,
%{no_attributes?: true} = relationship,
related_query
related_query,
last?
) do
Ash.Actions.Read.AsyncLimiter.async_or_inline(
related_query,
Ash.Context.to_opts(related_query.context),
last?,
fn ->
result =
related_query
@@ -387,11 +407,13 @@ defmodule Ash.Actions.Read.Relationships do
defp do_fetch_related_records(
_records,
relationship,
%{context: %{data_layer: %{lateral_join_source: {_, _}}}} = related_query
%{context: %{data_layer: %{lateral_join_source: {_, _}}}} = related_query,
last?
) do
Ash.Actions.Read.AsyncLimiter.async_or_inline(
related_query,
Ash.Context.to_opts(related_query.context),
last?,
fn ->
result =
related_query
@@ -406,7 +428,12 @@ defmodule Ash.Actions.Read.Relationships do
)
end
defp do_fetch_related_records(records, %{type: :many_to_many} = relationship, related_query) do
defp do_fetch_related_records(
records,
%{type: :many_to_many} = relationship,
related_query,
last?
) do
record_ids =
Enum.map(records, fn record ->
Map.get(record, relationship.source_attribute)
@@ -447,6 +474,7 @@ defmodule Ash.Actions.Read.Relationships do
Ash.Actions.Read.AsyncLimiter.async_or_inline(
related_query,
Ash.Context.to_opts(related_query.context),
last?,
fn ->
case Ash.Actions.Read.unpaginated_read(join_query, nil) do
{:ok, join_records} ->
@@ -526,12 +554,13 @@ defmodule Ash.Actions.Read.Relationships do
)
end
defp do_fetch_related_records(records, relationship, related_query) do
defp do_fetch_related_records(records, relationship, related_query, last?) do
destination_attributes = Enum.map(records, &Map.get(&1, relationship.source_attribute))
Ash.Actions.Read.AsyncLimiter.async_or_inline(
related_query,
Ash.Context.to_opts(related_query.context),
last?,
fn ->
result =
related_query