Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
2 changes: 1 addition & 1 deletion documentation/dsls/DSL-AshSqlite.DataLayer.md
Original file line number Diff line number Diff line change
Expand Up @@ -35,7 +35,7 @@ end

| Name | Type | Default | Docs |
|------|------|---------|------|
| [`repo`](#sqlite-repo){: #sqlite-repo .spark-required} | `atom` | | The repo that will be used to fetch your data. See the `AshSqlite.Repo` documentation for more |
| [`repo`](#sqlite-repo){: #sqlite-repo .spark-required} | `module \| (any, any -> any)` | | The repo that will be used to fetch your data. See the `AshSqlite.Repo` documentation for more. Can also be a function that takes a resource and a type `:read \| :mutate` and returns the repo. |
| [`migrate?`](#sqlite-migrate?){: #sqlite-migrate? } | `boolean` | `true` | Whether or not to include this resource in the generated migrations with `mix ash.generate_migrations` |
| [`migration_types`](#sqlite-migration_types){: #sqlite-migration_types } | `keyword` | `[]` | A keyword list of attribute names to the ecto migration type that should be used for that attribute. Only necessary if you need to override the defaults. |
| [`migration_defaults`](#sqlite-migration_defaults){: #sqlite-migration_defaults } | `keyword` | `[]` | A keyword list of attribute names to the ecto migration default that should be used for that attribute. The string you use will be placed verbatim in the migration. Use fragments like `fragment(\\"now()\\")`, or for `nil`, use `\\"nil\\"`. |
Expand Down
23 changes: 21 additions & 2 deletions lib/data_layer.ex
Original file line number Diff line number Diff line change
Expand Up @@ -804,8 +804,27 @@ defmodule AshSqlite.DataLayer do

fields_to_upsert =
case fields_to_upsert do
[] -> keys
fields_to_upsert -> fields_to_upsert
[] ->
keys

fields_to_upsert ->
# Include fields with update_defaults (e.g. update_timestamp)
# even if they aren't in the changeset attributes or upsert_fields.
# These fields should always be refreshed when an upsert modifies fields.
# Can be disabled via context: %{data_layer: %{touch_update_defaults?: false}}
touch_update_defaults? =
Enum.at(changesets, 0).context[:data_layer][:touch_update_defaults?] != false

if touch_update_defaults? do
update_default_fields =
update_defaults
|> Keyword.keys()
|> Enum.reject(&(&1 in fields_to_upsert or &1 in keys))

fields_to_upsert ++ update_default_fields
else
fields_to_upsert
end
end

fields_to_upsert
Expand Down
32 changes: 19 additions & 13 deletions lib/migration_generator/migration_generator.ex
Original file line number Diff line number Diff line change
Expand Up @@ -467,11 +467,13 @@ defmodule AshSqlite.MigrationGenerator do
# Compiles the given migration file and returns `{version, module}` for the
# first compiled module that implements `Ecto.Migration`.
# Raises `Ecto.MigrationError` when the file defines no migration module.
defp load_migration!({version, _, file}) when is_binary(file) do
  migration_module =
    file
    |> compile_file()
    |> Enum.map(&elem(&1, 0))
    |> Enum.find(&migration?/1)

  if is_nil(migration_module) do
    raise Ecto.MigrationError,
          "file #{Path.relative_to_cwd(file)} does not define an Ecto.Migration"
  end

  {version, migration_module}
end

Expand Down Expand Up @@ -834,13 +836,15 @@ defmodule AshSqlite.MigrationGenerator do
config = repo.config()
app = Keyword.fetch!(config, :otp_app)

if path = opts.migration_path || config[:tenant_migrations_path] do
path
else
priv =
config[:priv] || "priv/#{repo |> Module.split() |> List.last() |> Macro.underscore()}"
case opts.migration_path || config[:tenant_migrations_path] do
nil ->
priv =
config[:priv] || "priv/#{repo |> Module.split() |> List.last() |> Macro.underscore()}"

Application.app_dir(app, Path.join(priv, "migrations"))

Application.app_dir(app, Path.join(priv, "migrations"))
path ->
path
end
end

Expand Down Expand Up @@ -1634,7 +1638,8 @@ defmodule AshSqlite.MigrationGenerator do
identity.name == old_identity.name &&
Enum.sort(old_identity.keys) == Enum.sort(identity.keys) &&
old_identity.base_filter == identity.base_filter &&
Map.get(old_identity, :nils_distinct?, true) == Map.get(identity, :nils_distinct?, true)
Map.get(old_identity, :nils_distinct?, true) ==
Map.get(identity, :nils_distinct?, true)
end)
end)
end
Expand Down Expand Up @@ -1677,7 +1682,8 @@ defmodule AshSqlite.MigrationGenerator do
old_identity.name == identity.name &&
Enum.sort(old_identity.keys) == Enum.sort(identity.keys) &&
old_identity.base_filter == identity.base_filter &&
Map.get(old_identity, :nils_distinct?, true) == Map.get(identity, :nils_distinct?, true)
Map.get(old_identity, :nils_distinct?, true) ==
Map.get(identity, :nils_distinct?, true)
end)
end)
end
Expand Down
3 changes: 2 additions & 1 deletion lib/migration_generator/operation.ex
Original file line number Diff line number Diff line change
Expand Up @@ -484,7 +484,8 @@ defmodule AshSqlite.MigrationGenerator.Operation do
import Helper

def up(%{
identity: %{name: name, keys: keys, base_filter: base_filter, index_name: index_name} = identity,
identity:
%{name: name, keys: keys, base_filter: base_filter, index_name: index_name} = identity,
table: table,
multitenancy: multitenancy
}) do
Expand Down
116 changes: 116 additions & 0 deletions test/bulk_create_test.exs
Original file line number Diff line number Diff line change
Expand Up @@ -68,6 +68,122 @@ defmodule AshSqlite.BulkCreateTest do
end)
end

test "bulk creates with upsert updates update_timestamp" do
  # An ISO-8601 timestamp 60 seconds in the past, used to backdate the row.
  sixty_seconds_ago =
    DateTime.utc_now()
    |> DateTime.add(-60, :second)
    |> DateTime.to_iso8601()

  assert [{:ok, %{title: "fred", uniq_one: "one", uniq_two: "two"} = created}] =
           Ash.bulk_create!(
             [%{title: "fred", uniq_one: "one", uniq_two: "two", price: 10}],
             Post,
             :create,
             return_stream?: true,
             return_records?: true
           )
           |> Enum.to_list()

  # Backdate updated_at via raw SQL so the upsert has something to compare against
  AshSqlite.TestRepo.query!("UPDATE posts SET updated_at = ? WHERE id = ?", [
    sixty_seconds_ago,
    created.id
  ])

  {:ok, backdated_at, _offset} = DateTime.from_iso8601(sixty_seconds_ago)

  # Sanity check: the stored row really carries the backdated timestamp.
  assert [%{updated_at: stored_at}] = Ash.read!(Post)
  assert DateTime.compare(stored_at, backdated_at) == :eq

  assert [{:ok, %{title: "fred", price: 1000} = upserted}] =
           Ash.bulk_create!(
             [%{title: "something", uniq_one: "one", uniq_two: "two", price: 1000}],
             Post,
             :create,
             upsert?: true,
             upsert_identity: :uniq_one_and_two,
             upsert_fields: [:price],
             return_stream?: true,
             return_errors?: true,
             return_records?: true
           )
           |> Enum.to_list()

  # The upsert modified :price, so the update_timestamp must have advanced.
  assert DateTime.after?(upserted.updated_at, backdated_at)
end

test "bulk creates with empty upsert does not update update_timestamp" do
  # An ISO-8601 timestamp 60 seconds in the past, used to backdate the row.
  sixty_seconds_ago =
    DateTime.utc_now()
    |> DateTime.add(-60, :second)
    |> DateTime.to_iso8601()

  assert [{:ok, %{title: "fred", uniq_one: "one", uniq_two: "two"} = created}] =
           Ash.bulk_create!(
             [%{title: "fred", uniq_one: "one", uniq_two: "two", price: 10}],
             Post,
             :create,
             return_stream?: true,
             return_records?: true
           )
           |> Enum.to_list()

  # Backdate updated_at via raw SQL so a spurious touch would be visible.
  AshSqlite.TestRepo.query!("UPDATE posts SET updated_at = ? WHERE id = ?", [
    sixty_seconds_ago,
    created.id
  ])

  {:ok, backdated_at, _offset} = DateTime.from_iso8601(sixty_seconds_ago)

  # Sanity check: the stored row really carries the backdated timestamp.
  assert [%{updated_at: stored_at}] = Ash.read!(Post)
  assert DateTime.compare(stored_at, backdated_at) == :eq

  assert [{:ok, %{title: "fred"} = upserted}] =
           Ash.bulk_create!(
             [%{title: "something", uniq_one: "one", uniq_two: "two", price: 1000}],
             Post,
             :create,
             upsert?: true,
             upsert_identity: :uniq_one_and_two,
             upsert_fields: [],
             return_stream?: true,
             return_errors?: true,
             return_records?: true
           )
           |> Enum.to_list()

  # No fields were upserted, so updated_at must remain the backdated value.
  assert DateTime.compare(upserted.updated_at, backdated_at) == :eq
end

test "bulk creates with upsert does not update update_timestamp when touch_update_defaults? is false" do
  # An ISO-8601 timestamp 60 seconds in the past, used to backdate the row.
  sixty_seconds_ago =
    DateTime.utc_now()
    |> DateTime.add(-60, :second)
    |> DateTime.to_iso8601()

  assert [{:ok, %{title: "fred", uniq_one: "one", uniq_two: "two"} = created}] =
           Ash.bulk_create!(
             [%{title: "fred", uniq_one: "one", uniq_two: "two", price: 10}],
             Post,
             :create,
             return_stream?: true,
             return_records?: true
           )
           |> Enum.to_list()

  # Backdate updated_at via raw SQL so a spurious touch would be visible.
  AshSqlite.TestRepo.query!("UPDATE posts SET updated_at = ? WHERE id = ?", [
    sixty_seconds_ago,
    created.id
  ])

  {:ok, backdated_at, _offset} = DateTime.from_iso8601(sixty_seconds_ago)

  # Sanity check: the stored row really carries the backdated timestamp.
  assert [%{updated_at: stored_at}] = Ash.read!(Post)
  assert DateTime.compare(stored_at, backdated_at) == :eq

  assert [{:ok, %{title: "fred", price: 1000} = upserted}] =
           Ash.bulk_create!(
             [%{title: "something", uniq_one: "one", uniq_two: "two", price: 1000}],
             Post,
             :create,
             upsert?: true,
             upsert_identity: :uniq_one_and_two,
             upsert_fields: [:price],
             context: %{data_layer: %{touch_update_defaults?: false}},
             return_stream?: true,
             return_errors?: true,
             return_records?: true
           )
           |> Enum.to_list()

  # touch_update_defaults? is disabled, so updated_at must remain backdated
  # even though the upsert changed :price.
  assert DateTime.compare(upserted.updated_at, backdated_at) == :eq
end

test "bulk creates can create relationships" do
Ash.bulk_create!(
[%{title: "fred", rating: %{score: 5}}, %{title: "george", rating: %{score: 0}}],
Expand Down