Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
11 changes: 10 additions & 1 deletion lib/hammer/ets/leaky_bucket.ex
Original file line number Diff line number Diff line change
Expand Up @@ -117,7 +117,16 @@ defmodule Hammer.ETS.LeakyBucket do
:ets.insert_new(table, {key, 0, now})

# Get current bucket state
[{^key, current_fill, last_update}] = :ets.lookup(table, key)
{current_fill, last_update} =
case :ets.lookup(table, key) do
[{^key, current_fill, last_update}] ->
{current_fill, last_update}

[] ->
# Entry was deleted between insert_new and lookup (cleanup race or table restart)
:ets.insert(table, {key, 0, now})
{0, now}
end

leaked = trunc((now - last_update) * leak_rate)

Expand Down
12 changes: 11 additions & 1 deletion lib/hammer/ets/token_bucket.ex
Original file line number Diff line number Diff line change
Expand Up @@ -119,7 +119,17 @@ defmodule Hammer.ETS.TokenBucket do
# Try to insert new empty bucket if doesn't exist
:ets.insert_new(table, {key, capacity, now})

[{^key, current_level, last_update}] = :ets.lookup(table, key)
{current_level, last_update} =
case :ets.lookup(table, key) do
[{^key, current_level, last_update}] ->
{current_level, last_update}

[] ->
# Entry was deleted between insert_new and lookup (cleanup race or table restart)
:ets.insert(table, {key, capacity, now})
{capacity, now}
end

new_tokens = trunc((now - last_update) * refill_rate)

current_tokens = min(capacity, current_level + new_tokens)
Expand Down
26 changes: 26 additions & 0 deletions test/hammer/ets/leaky_bucket_test.exs
Original file line number Diff line number Diff line change
Expand Up @@ -66,6 +66,32 @@ defmodule Hammer.ETS.LeakyBucketTest do
end
end

describe "race condition handling" do
  test "hit recovers when entry is deleted between insert_new and lookup", %{table: table} do
    key = "race_key"

    # Simulate a cleanup process removing the row right after hit/5's
    # insert_new but before its lookup: seed an entry, then delete it.
    :ets.insert(table, {key, 5, System.system_time(:second)})
    :ets.delete(table, key)

    # The bucket must be transparently re-created instead of crashing;
    # a fresh bucket at fill 0 plus a cost-1 hit yields fill 1.
    assert {:allow, 1} = LeakyBucket.hit(table, key, 10, 10, 1)
  end

  test "hit works on a fresh empty table", %{table: table} do
    key = "fresh_key"

    # Sanity check: no row exists for this key yet.
    assert :ets.lookup(table, key) == []

    # First hit on an absent key behaves like a brand-new bucket.
    assert {:allow, 1} = LeakyBucket.hit(table, key, 10, 10, 1)
  end
end

describe "get" do
test "get returns current bucket level", %{table: table} do
key = "key"
Expand Down
26 changes: 26 additions & 0 deletions test/hammer/ets/token_bucket_test.exs
Original file line number Diff line number Diff line change
Expand Up @@ -82,6 +82,32 @@ defmodule Hammer.ETS.TokenBucketTest do
end
end

describe "race condition handling" do
  test "hit recovers when entry is deleted between insert_new and lookup", %{table: table} do
    key = "race_key"

    # Simulate a cleanup process removing the row right after hit/5's
    # insert_new but before its lookup: seed an entry, then delete it.
    :ets.insert(table, {key, 5, System.system_time(:second)})
    :ets.delete(table, key)

    # The bucket must be transparently re-created instead of crashing;
    # a fresh bucket at full capacity 10 minus a cost-1 hit leaves 9 tokens.
    assert {:allow, 9} = TokenBucket.hit(table, key, 10, 10, 1)
  end

  test "hit works on a fresh empty table", %{table: table} do
    key = "fresh_key"

    # Sanity check: no row exists for this key yet.
    assert :ets.lookup(table, key) == []

    # First hit on an absent key behaves like a brand-new full bucket.
    assert {:allow, 9} = TokenBucket.hit(table, key, 10, 10, 1)
  end
end

describe "get" do
test "get returns current bucket level", %{table: table} do
key = "key"
Expand Down
Loading