diff --git a/Directory.Packages.props b/Directory.Packages.props
index ac15e67..8d5df10 100644
--- a/Directory.Packages.props
+++ b/Directory.Packages.props
@@ -9,6 +9,7 @@
+
diff --git a/fe/package-lock.json b/fe/package-lock.json
index 2c4bbfa..209b0d1 100644
--- a/fe/package-lock.json
+++ b/fe/package-lock.json
@@ -1,12 +1,12 @@
{
"name": "listenarr-fe",
- "version": "0.2.55",
+ "version": "0.2.56",
"lockfileVersion": 3,
"requires": true,
"packages": {
"": {
"name": "listenarr-fe",
- "version": "0.2.55",
+ "version": "0.2.56",
"hasInstallScript": true,
"dependencies": {
"@material/material-color-utilities": "^0.4.0",
diff --git a/fe/package.json b/fe/package.json
index e1171f0..74d478c 100644
--- a/fe/package.json
+++ b/fe/package.json
@@ -1,6 +1,6 @@
{
"name": "listenarr-fe",
- "version": "0.2.55",
+ "version": "0.2.56",
"private": true,
"type": "module",
"engines": {
diff --git a/listenarr.api/Controllers/SearchController.cs b/listenarr.api/Controllers/SearchController.cs
index f0f71e0..c247e4c 100644
--- a/listenarr.api/Controllers/SearchController.cs
+++ b/listenarr.api/Controllers/SearchController.cs
@@ -1023,8 +1023,8 @@ public async Task>> IntelligentSearch(
}
_logger.LogInformation("IntelligentSearch called for query: {Query}", LogRedaction.SanitizeText(query));
- var region = Request.Query.ContainsKey("region") ? Request.Query["region"].ToString() ?? "us" : "us";
- var language = Request.Query.ContainsKey("language") ? Request.Query["language"].ToString() : null;
+ var region = Request.Query.TryGetValue("region", out var regionValue) ? regionValue.ToString() ?? "us" : "us";
+ var language = Request.Query.TryGetValue("language", out var languageValue) ? languageValue.ToString() : null;
var results = await _searchService.IntelligentSearchAsync(query, candidateLimit, returnLimit, containmentMode, requireAuthorAndPublisher, fuzzyThreshold, region, language, HttpContext.RequestAborted);
// Normalize images for metadata results so the SPA receives local /api/v{version}/images/{asin} when possible
if (_imageCacheService != null && results != null)
@@ -1135,13 +1135,13 @@ public async Task>> IndexersSearch(
// Support MyAnonamouse query string toggles (mamFilter, mamSearchInDescription, mamSearchInSeries, mamSearchInFilenames, mamLanguage, mamFreeleechWedge)
var mamOptions = new Listenarr.Api.Models.MyAnonamouseOptions();
- if (Request.Query.ContainsKey("mamFilter") && Enum.TryParse(Request.Query["mamFilter"].ToString() ?? string.Empty, true, out var mamFilter))
+ if (Request.Query.TryGetValue("mamFilter", out var queryMamFilter) && Enum.TryParse(queryMamFilter.ToString() ?? string.Empty, true, out var mamFilter))
mamOptions.Filter = mamFilter;
- if (Request.Query.ContainsKey("mamSearchInDescription") && bool.TryParse(Request.Query["mamSearchInDescription"], out var sd)) mamOptions.SearchInDescription = sd;
- if (Request.Query.ContainsKey("mamSearchInSeries") && bool.TryParse(Request.Query["mamSearchInSeries"], out var ss)) mamOptions.SearchInSeries = ss;
- if (Request.Query.ContainsKey("mamSearchInFilenames") && bool.TryParse(Request.Query["mamSearchInFilenames"], out var sf)) mamOptions.SearchInFilenames = sf;
- if (Request.Query.ContainsKey("mamLanguage")) mamOptions.SearchLanguage = Request.Query["mamLanguage"].ToString();
- if (Request.Query.ContainsKey("mamFreeleechWedge") && Enum.TryParse(Request.Query["mamFreeleechWedge"].ToString() ?? string.Empty, true, out var mw)) mamOptions.FreeleechWedge = mw;
+ if (Request.Query.TryGetValue("mamSearchInDescription", out var queryMamSearchInDescription) && bool.TryParse(queryMamSearchInDescription, out var sd)) mamOptions.SearchInDescription = sd;
+ if (Request.Query.TryGetValue("mamSearchInSeries", out var queryMamSearchInSeries) && bool.TryParse(queryMamSearchInSeries, out var ss)) mamOptions.SearchInSeries = ss;
+ if (Request.Query.TryGetValue("mamSearchInFilenames", out var queryMamSearchInFilenames) && bool.TryParse(queryMamSearchInFilenames, out var sf)) mamOptions.SearchInFilenames = sf;
+ if (Request.Query.TryGetValue("mamLanguage", out var queryMamLanguage)) mamOptions.SearchLanguage = queryMamLanguage.ToString();
+ if (Request.Query.TryGetValue("mamFreeleechWedge", out var queryMamFreeleechWedge) && Enum.TryParse(queryMamFreeleechWedge.ToString() ?? string.Empty, true, out var mw)) mamOptions.FreeleechWedge = mw;
var req = new Listenarr.Api.Models.SearchRequest { MyAnonamouse = mamOptions };
var results = await _searchService.SearchIndexersAsync(query, category, sortBy, sortDirection, isAutomaticSearch, req);
diff --git a/listenarr.api/Listenarr.Api.csproj b/listenarr.api/Listenarr.Api.csproj
index 9c5172f..beb35e2 100644
--- a/listenarr.api/Listenarr.Api.csproj
+++ b/listenarr.api/Listenarr.Api.csproj
@@ -15,6 +15,7 @@
+
diff --git a/listenarr.api/Properties/launchSettings.json b/listenarr.api/Properties/launchSettings.json
index c1c2976..1faf0e0 100644
--- a/listenarr.api/Properties/launchSettings.json
+++ b/listenarr.api/Properties/launchSettings.json
@@ -1,27 +1,26 @@
-{
- "$schema": "https://json.schemastore.org/launchsettings.json",
+{
"profiles": {
"http": {
"commandName": "Project",
- "dotnetRunMessages": true,
"launchBrowser": true,
"launchUrl": "swagger",
- "applicationUrl": "http://localhost:4545",
"environmentVariables": {
"ASPNETCORE_ENVIRONMENT": "Development",
"LISTENARR_CONTENT_ROOT": "../../listenarr.api"
- }
+ },
+ "dotnetRunMessages": true,
+ "applicationUrl": "http://localhost:4545"
},
"https": {
"commandName": "Project",
- "dotnetRunMessages": true,
"launchBrowser": true,
"launchUrl": "swagger",
- "applicationUrl": "https://localhost:7172;http://localhost:4545",
"environmentVariables": {
"ASPNETCORE_ENVIRONMENT": "Development",
"LISTENARR_CONTENT_ROOT": "../../listenarr.api"
- }
+ },
+ "dotnetRunMessages": true,
+ "applicationUrl": "https://localhost:7172;http://localhost:4545"
},
"IIS Express": {
"commandName": "IISExpress",
@@ -31,5 +30,14 @@
"ASPNETCORE_ENVIRONMENT": "Development"
}
}
+ },
+ "$schema": "https://json.schemastore.org/launchsettings.json",
+ "iisSettings": {
+ "windowsAuthentication": false,
+ "anonymousAuthentication": true,
+ "iisExpress": {
+ "applicationUrl": "http://localhost:36264/",
+ "sslPort": 44320
+ }
}
-}
+}
\ No newline at end of file
diff --git a/listenarr.api/Services/AudioFileService.cs b/listenarr.api/Services/AudioFileService.cs
index 427c69d..526631b 100644
--- a/listenarr.api/Services/AudioFileService.cs
+++ b/listenarr.api/Services/AudioFileService.cs
@@ -58,7 +58,7 @@ public async Task EnsureAudiobookFileAsync(int audiobookId, string filePat
// Ensure candidate is the same directory or a subdirectory of the existing dir
var isInExistingDir = candidateDir.Equals(existingDir, StringComparison.OrdinalIgnoreCase) ||
candidateDir.StartsWith(existingDir + Path.DirectorySeparatorChar, StringComparison.OrdinalIgnoreCase);
-
+
// Also allow if file is within the audiobook's BasePath (multi-file migration)
var isInBasePath = !string.IsNullOrWhiteSpace(audiobook.BasePath) &&
candidateFull.StartsWith(Path.GetFullPath(audiobook.BasePath) + Path.DirectorySeparatorChar, StringComparison.OrdinalIgnoreCase);
@@ -93,11 +93,13 @@ public async Task EnsureAudiobookFileAsync(int audiobookId, string filePat
await toastSvc.PublishToastAsync("warning", "File not associated", $"Refused to associate {Path.GetFileName(filePath)} to {audiobookTitle}");
}
}
- catch (Exception thx) when (thx is not OperationCanceledException && thx is not OutOfMemoryException && thx is not StackOverflowException) {
+ catch (Exception thx) when (thx is not OperationCanceledException && thx is not OutOfMemoryException && thx is not StackOverflowException)
+ {
_logger.LogDebug(thx, "Failed to publish toast for refused file association");
}
}
- catch (Exception hx) when (hx is not OperationCanceledException && hx is not OutOfMemoryException && hx is not StackOverflowException) {
+ catch (Exception hx) when (hx is not OperationCanceledException && hx is not OutOfMemoryException && hx is not StackOverflowException)
+ {
_logger.LogDebug(hx, "Failed to persist history for refused file association (AudiobookId={AudiobookId}, File={File})", audiobookId, filePath);
}
@@ -106,7 +108,8 @@ public async Task EnsureAudiobookFileAsync(int audiobookId, string filePat
}
}
}
- catch (Exception exDir) when (exDir is not OperationCanceledException && exDir is not OutOfMemoryException && exDir is not StackOverflowException) {
+ catch (Exception exDir) when (exDir is not OperationCanceledException && exDir is not OutOfMemoryException && exDir is not StackOverflowException)
+ {
_logger.LogDebug(exDir, "Failed to verify audiobook folder containment for AudiobookId={AudiobookId} File={File}", audiobookId, filePath);
}
@@ -119,24 +122,18 @@ public async Task EnsureAudiobookFileAsync(int audiobookId, string filePat
var cacheKey = $"meta::{filePath}::{ticks}";
if (!_memoryCache.TryGetValue(cacheKey, out var cachedObj) || !(cachedObj is AudioMetadata cachedMeta))
{
- await _limiter.Sem.WaitAsync();
- try
- {
- meta = await metadataService.ExtractFileMetadataAsync(filePath);
- // Cache for 5 minutes
- _memoryCache.Set(cacheKey, meta, TimeSpan.FromMinutes(5));
- }
- finally
- {
- _limiter.Sem.Release();
- }
+ using var _ = await _limiter.Sem.LockAsync();
+ meta = await metadataService.ExtractFileMetadataAsync(filePath);
+ // Cache for 5 minutes
+ _memoryCache.Set(cacheKey, meta, TimeSpan.FromMinutes(5));
}
else
{
meta = cachedMeta;
}
}
- catch (Exception mEx) when (mEx is not OperationCanceledException && mEx is not OutOfMemoryException && mEx is not StackOverflowException) {
+ catch (Exception mEx) when (mEx is not OperationCanceledException && mEx is not OutOfMemoryException && mEx is not StackOverflowException)
+ {
_logger.LogInformation(mEx, "Metadata extraction failed for {Path}", filePath);
}
// If metadata extraction produced minimal results, attempt to ensure ffprobe is installed
@@ -162,27 +159,25 @@ public async Task EnsureAudiobookFileAsync(int audiobookId, string filePat
if (!string.IsNullOrEmpty(ffpath))
{
// Retry metadata extraction once under limiter
- await _limiter.Sem.WaitAsync();
- try
- {
- meta = await metadataService.ExtractFileMetadataAsync(filePath);
- // Update cache
- var fileInfoForCache2 = new FileInfo(filePath);
- var ticks2 = fileInfoForCache2.Exists ? fileInfoForCache2.LastWriteTimeUtc.Ticks : 0L;
- var cacheKey2 = $"meta::{filePath}::{ticks2}";
- _memoryCache.Set(cacheKey2, meta, TimeSpan.FromMinutes(5));
- }
- finally { _limiter.Sem.Release(); }
+ using var _ = await _limiter.Sem.LockAsync();
+ meta = await metadataService.ExtractFileMetadataAsync(filePath);
+ // Update cache
+ var fileInfoForCache2 = new FileInfo(filePath);
+ var ticks2 = fileInfoForCache2.Exists ? fileInfoForCache2.LastWriteTimeUtc.Ticks : 0L;
+ var cacheKey2 = $"meta::{filePath}::{ticks2}";
+ _memoryCache.Set(cacheKey2, meta, TimeSpan.FromMinutes(5));
}
}
- catch (Exception rex) when (rex is not OperationCanceledException && rex is not OutOfMemoryException && rex is not StackOverflowException) {
+ catch (Exception rex) when (rex is not OperationCanceledException && rex is not OutOfMemoryException && rex is not StackOverflowException)
+ {
_logger.LogInformation(rex, "Retry metadata extraction failed for {Path}", filePath);
}
}
}
}
}
- catch (Exception exRetry) when (exRetry is not OperationCanceledException && exRetry is not OutOfMemoryException && exRetry is not StackOverflowException) {
+ catch (Exception exRetry) when (exRetry is not OperationCanceledException && exRetry is not OutOfMemoryException && exRetry is not StackOverflowException)
+ {
_logger.LogDebug(exRetry, "Non-fatal error while attempting ffprobe install/retry for {Path}", filePath);
}
var fi = new FileInfo(filePath);
@@ -215,7 +210,8 @@ public async Task EnsureAudiobookFileAsync(int audiobookId, string filePat
var conn = db.Database.GetDbConnection();
_logger.LogInformation("Created AudiobookFile for audiobook {AudiobookId}: {Path} (Db: {Db}) Id={Id}", audiobookId, filePath, conn?.ConnectionString, fileRecord.Id);
}
- catch (Exception logEx) when (logEx is not OperationCanceledException && logEx is not OutOfMemoryException && logEx is not StackOverflowException) {
+ catch (Exception logEx) when (logEx is not OperationCanceledException && logEx is not OutOfMemoryException && logEx is not StackOverflowException)
+ {
_logger.LogInformation("Created AudiobookFile for audiobook {AudiobookId}: {Path} (Db: unknown) Id={Id}", audiobookId, filePath, fileRecord.Id);
_logger.LogDebug(logEx, "Failed to log DB connection string for AudiobookFile creation");
}
@@ -268,11 +264,13 @@ public async Task EnsureAudiobookFileAsync(int audiobookId, string filePat
await db.SaveChangesAsync();
}
}
- catch (Exception aubEx) when (aubEx is not OperationCanceledException && aubEx is not OutOfMemoryException && aubEx is not StackOverflowException) {
+ catch (Exception aubEx) when (aubEx is not OperationCanceledException && aubEx is not OutOfMemoryException && aubEx is not StackOverflowException)
+ {
_logger.LogDebug(aubEx, "Failed to update Audiobook file summary fields for AudiobookId {AudiobookId}", audiobookId);
}
}
- catch (Exception hx) when (hx is not OperationCanceledException && hx is not OutOfMemoryException && hx is not StackOverflowException) {
+ catch (Exception hx) when (hx is not OperationCanceledException && hx is not OutOfMemoryException && hx is not StackOverflowException)
+ {
_logger.LogDebug(hx, "Failed to create history entry for added audiobook file {Path}", filePath);
}
@@ -298,7 +296,8 @@ public async Task EnsureAudiobookFileAsync(int audiobookId, string filePat
}
}
}
- catch (Exception ex) when (ex is not OperationCanceledException && ex is not OutOfMemoryException && ex is not StackOverflowException) {
+ catch (Exception ex) when (ex is not OperationCanceledException && ex is not OutOfMemoryException && ex is not StackOverflowException)
+ {
_logger.LogWarning(ex, "Failed to create AudiobookFile record for audiobook {AudiobookId} at {Path}", audiobookId, filePath);
return false;
}
diff --git a/listenarr.api/Services/CompletedDownloadHandlingService.cs b/listenarr.api/Services/CompletedDownloadHandlingService.cs
index 6170be5..50d11d7 100644
--- a/listenarr.api/Services/CompletedDownloadHandlingService.cs
+++ b/listenarr.api/Services/CompletedDownloadHandlingService.cs
@@ -16,12 +16,8 @@
 * along with this program. If not, see <https://www.gnu.org/licenses/>.
*/
-using Microsoft.EntityFrameworkCore;
-using Microsoft.Extensions.Hosting;
-using Microsoft.Extensions.Logging;
-using Microsoft.Extensions.DependencyInjection;
-using Listenarr.Domain.Models;
using Listenarr.Infrastructure.Models;
+using Microsoft.EntityFrameworkCore;
namespace Listenarr.Api.Services
{
@@ -44,7 +40,7 @@ public class CompletedDownloadHandlingService : BackgroundService
// Track downloads that are in the completion pipeline to avoid duplicate processing
private readonly Dictionary _processingDownloads = new();
- private readonly object _processingLock = new();
+ private readonly Lock _processingLock = new();
public CompletedDownloadHandlingService(
IServiceScopeFactory serviceScopeFactory,
@@ -200,9 +196,8 @@ private async Task ProcessCompletedDownloadsAsync(CancellationToken cancellation
// Skip if already being processed
lock (_processingLock)
{
- if (_processingDownloads.ContainsKey(download.Id))
+ if (_processingDownloads.TryGetValue(download.Id, out var firstSeen))
{
- var firstSeen = _processingDownloads[download.Id];
if (DateTime.UtcNow - firstSeen > TimeSpan.FromMinutes(5))
{
// Been processing for too long, reset and retry
diff --git a/listenarr.api/Services/DiscordBotService.cs b/listenarr.api/Services/DiscordBotService.cs
index fbe2837..9abb835 100644
--- a/listenarr.api/Services/DiscordBotService.cs
+++ b/listenarr.api/Services/DiscordBotService.cs
@@ -1,6 +1,5 @@
using System.Diagnostics;
using System.Runtime.InteropServices;
-using Microsoft.Extensions.Logging;
namespace Listenarr.Api.Services
{
@@ -21,7 +20,7 @@ public class DiscordBotService : IDiscordBotService
private readonly IProcessRunner? _processRunner;
private string? _botApiKey;
private Process? _botProcess;
- private readonly object _processLock = new object();
+ private readonly Lock _processLock = new();
public DiscordBotService(
ILogger logger,
diff --git a/listenarr.api/Services/DownloadMonitorService.cs b/listenarr.api/Services/DownloadMonitorService.cs
index 90de7d8..bb5ee43 100644
--- a/listenarr.api/Services/DownloadMonitorService.cs
+++ b/listenarr.api/Services/DownloadMonitorService.cs
@@ -1212,9 +1212,8 @@ await HandleFailedDownloadAsync(
else
{
// Not complete anymore - remove candidate if present
- if (_completionCandidates.ContainsKey(dl.Id))
+ if (_completionCandidates.Remove(dl.Id))
{
- _completionCandidates.Remove(dl.Id);
_logger.LogDebug("Download {DownloadId} no longer appears complete in qBittorrent, removed from candidates", dl.Id);
_ = BroadcastCandidateUpdateAsync(dl, false, cancellationToken);
}
@@ -1558,9 +1557,8 @@ await HandleFailedDownloadAsync(
}
else
{
- if (_completionCandidates.ContainsKey(dl.Id))
+ if (_completionCandidates.Remove(dl.Id))
{
- _completionCandidates.Remove(dl.Id);
_ = BroadcastCandidateUpdateAsync(dl, false, cancellationToken);
}
}
@@ -2255,9 +2253,8 @@ await HandleFailedDownloadAsync(
failedMatch.Error,
cancellationToken);
- if (_completionCandidates.ContainsKey(dl.Id))
+ if (_completionCandidates.Remove(dl.Id))
{
- _completionCandidates.Remove(dl.Id);
_ = BroadcastCandidateUpdateAsync(dl, false, cancellationToken);
}
continue;
@@ -2352,9 +2349,8 @@ await HandleFailedDownloadAsync(
// Not found in completed items - check if it's still in queue for progress updates
// SABnzbd doesn't provide queue data in history API, so we can't update progress here
// Progress updates for SABnzbd would need to be done via the queue API
- if (_completionCandidates.ContainsKey(dl.Id))
+ if (_completionCandidates.Remove(dl.Id))
{
- _completionCandidates.Remove(dl.Id);
_logger.LogDebug("Download {DownloadId} no longer appears complete in SABnzbd, removed from candidates", dl.Id);
_ = BroadcastCandidateUpdateAsync(dl, false, cancellationToken);
}
@@ -2624,9 +2620,8 @@ await HandleFailedDownloadAsync(
failedMatch.Error,
cancellationToken);
- if (_completionCandidates.ContainsKey(dl.Id))
+ if (_completionCandidates.Remove(dl.Id))
{
- _completionCandidates.Remove(dl.Id);
_ = BroadcastCandidateUpdateAsync(dl, false, cancellationToken);
}
continue;
@@ -2681,9 +2676,8 @@ await HandleFailedDownloadAsync(
else
{
// Not found in completed items - remove from candidates if present
- if (_completionCandidates.ContainsKey(dl.Id))
+ if (_completionCandidates.Remove(dl.Id))
{
- _completionCandidates.Remove(dl.Id);
_logger.LogDebug("Download {DownloadId} no longer appears complete in NZBGet, removed from candidates", dl.Id);
_ = BroadcastCandidateUpdateAsync(dl, false, cancellationToken);
}
diff --git a/listenarr.api/Services/DownloadService.cs b/listenarr.api/Services/DownloadService.cs
index a29b15a..484c8a0 100644
--- a/listenarr.api/Services/DownloadService.cs
+++ b/listenarr.api/Services/DownloadService.cs
@@ -1768,12 +1768,12 @@ public async Task RemoveFromQueueAsync(string downloadId, string? download
if (downloadRecord == null)
{
var allDownloads = await dbContext.Downloads.ToListAsync();
- downloadRecord = allDownloads.FirstOrDefault(d =>
+ downloadRecord = allDownloads.FirstOrDefault(d =>
d.Metadata != null &&
- ((d.Metadata.ContainsKey("ClientDownloadId") &&
- string.Equals(d.Metadata["ClientDownloadId"]?.ToString(), downloadId, StringComparison.OrdinalIgnoreCase)) ||
- (d.Metadata.ContainsKey("TorrentHash") &&
- string.Equals(d.Metadata["TorrentHash"]?.ToString(), downloadId, StringComparison.OrdinalIgnoreCase))));
+ ((d.Metadata.TryGetValue("ClientDownloadId", out var clientDownloadIdObj) &&
+ string.Equals(clientDownloadIdObj?.ToString(), downloadId, StringComparison.OrdinalIgnoreCase)) ||
+ (d.Metadata.TryGetValue("TorrentHash", out var torrentHashObj) &&
+ string.Equals(torrentHashObj?.ToString(), downloadId, StringComparison.OrdinalIgnoreCase))));
}
// If still not found, try enhanced title/name matching for legacy downloads
diff --git a/listenarr.api/Services/DownloadStateMachine.cs b/listenarr.api/Services/DownloadStateMachine.cs
index 39a885d..b687e07 100644
--- a/listenarr.api/Services/DownloadStateMachine.cs
+++ b/listenarr.api/Services/DownloadStateMachine.cs
@@ -229,8 +229,12 @@ public HashSet GetValidNextStates(DownloadItemStatus current
public bool IsTerminalState(DownloadItemStatus state)
{
// Terminal states: no valid transitions out (except to themselves)
- return !ValidTransitions.ContainsKey(state) ||
- (ValidTransitions[state].Count == 1 && ValidTransitions[state].Contains(state));
+ if (!ValidTransitions.TryGetValue(state, out var transitions))
+ {
+ return true;
+ }
+
+ return (transitions.Count == 1 && transitions.Contains(state));
}
///
diff --git a/listenarr.api/Services/ImageCacheService.cs b/listenarr.api/Services/ImageCacheService.cs
index 9d3e950..c8bc6b9 100644
--- a/listenarr.api/Services/ImageCacheService.cs
+++ b/listenarr.api/Services/ImageCacheService.cs
@@ -16,15 +16,10 @@
 * along with this program. If not, see <https://www.gnu.org/licenses/>.
*/
-using System;
-using System.IO;
-using System.Linq;
+using AsyncKeyedLock;
+using SixLabors.ImageSharp;
using System.Net;
-using System.Net.Http;
using System.Net.Sockets;
-using System.Threading.Tasks;
-using Microsoft.Extensions.Logging;
-using SixLabors.ImageSharp;
namespace Listenarr.Api.Services
{
@@ -46,32 +41,33 @@ public class ImageCacheService : IImageCacheService, IDisposable
private readonly string _tempCachePath;
private readonly string _libraryImagePath;
private readonly string _authorImagePath;
- private readonly string _contentRootPath;
- private readonly System.Collections.Concurrent.ConcurrentDictionary _downloadLocks = new();
- public ImageCacheService(ILogger logger, IHttpClientFactory httpClientFactory, string contentRootPath)
- {
- _logger = logger;
- _httpClient = httpClientFactory.CreateClient();
- _httpClientNoRedirect = new HttpClient(new HttpClientHandler
- {
- AllowAutoRedirect = false
- })
+ private readonly string _contentRootPath;
+ private readonly AsyncKeyedLocker _downloadLocks = new();
+
+ public ImageCacheService(ILogger logger, IHttpClientFactory httpClientFactory, string contentRootPath)
{
- Timeout = _httpClient.Timeout
- };
- _contentRootPath = contentRootPath;
-
- // Set up cache directories relative to content root
- var baseDir = Path.Combine(contentRootPath, "config");
- _tempCachePath = Path.Combine(baseDir, "cache", "images", "temp");
- _libraryImagePath = Path.Combine(baseDir, "cache", "images", "library");
- _authorImagePath = Path.Combine(baseDir, "cache", "images", "authors");
-
- // Ensure directories exist
- Directory.CreateDirectory(_tempCachePath);
- Directory.CreateDirectory(_libraryImagePath);
- Directory.CreateDirectory(_authorImagePath);
- }
+ _logger = logger;
+ _httpClient = httpClientFactory.CreateClient();
+ _httpClientNoRedirect = new HttpClient(new HttpClientHandler
+ {
+ AllowAutoRedirect = false
+ })
+ {
+ Timeout = _httpClient.Timeout
+ };
+ _contentRootPath = contentRootPath;
+
+ // Set up cache directories relative to content root
+ var baseDir = Path.Combine(contentRootPath, "config");
+ _tempCachePath = Path.Combine(baseDir, "cache", "images", "temp");
+ _libraryImagePath = Path.Combine(baseDir, "cache", "images", "library");
+ _authorImagePath = Path.Combine(baseDir, "cache", "images", "authors");
+
+ // Ensure directories exist
+ Directory.CreateDirectory(_tempCachePath);
+ Directory.CreateDirectory(_libraryImagePath);
+ Directory.CreateDirectory(_authorImagePath);
+ }
///
/// Downloads an image from a URL and caches it temporarily
@@ -131,71 +127,65 @@ public ImageCacheService(ILogger logger, IHttpClientFactory h
}
// Use per-identifier lock to prevent concurrent downloads for same identifier
- var sem = _downloadLocks.GetOrAdd(identifier, _ => new System.Threading.SemaphoreSlim(1, 1));
- await sem.WaitAsync();
- try
+ using var _ = await _downloadLocks.LockAsync(identifier);
+
+ // Re-check after acquiring lock
+ libraryPath = GetImagePath(identifier, _libraryImagePath);
+ if (File.Exists(libraryPath))
{
- // Re-check after acquiring lock
- libraryPath = GetImagePath(identifier, _libraryImagePath);
- if (File.Exists(libraryPath))
+ if (IsValidCachedCoverFile(libraryPath, identifier, "library"))
{
- if (IsValidCachedCoverFile(libraryPath, identifier, "library"))
- {
- _logger.LogInformation("Image already in library storage (after wait): {Identifier}", identifier);
- return GetRelativePath(libraryPath);
- }
+ _logger.LogInformation("Image already in library storage (after wait): {Identifier}", identifier);
+ return GetRelativePath(libraryPath);
}
+ }
- // Also check author storage after lock
- authorPath = GetImagePath(identifier, _authorImagePath);
- if (File.Exists(authorPath))
+ // Also check author storage after lock
+ authorPath = GetImagePath(identifier, _authorImagePath);
+ if (File.Exists(authorPath))
+ {
+ if (IsValidCachedCoverFile(authorPath, identifier, "author"))
{
- if (IsValidCachedCoverFile(authorPath, identifier, "author"))
- {
- _logger.LogInformation("Image already in author storage (after wait): {Identifier}", identifier);
- return GetRelativePath(authorPath);
- }
+ _logger.LogInformation("Image already in author storage (after wait): {Identifier}", identifier);
+ return GetRelativePath(authorPath);
}
+ }
- tempExisting = GetBestTempImagePathIfValid(identifier);
- if (!string.IsNullOrEmpty(tempExisting))
- {
- _logger.LogInformation("Image already cached (after wait): {Identifier}", identifier);
- return GetRelativePath(tempExisting);
- }
+ tempExisting = GetBestTempImagePathIfValid(identifier);
+ if (!string.IsNullOrEmpty(tempExisting))
+ {
+ _logger.LogInformation("Image already cached (after wait): {Identifier}", identifier);
+ return GetRelativePath(tempExisting);
+ }
- // Download image with manual redirect handling so every redirect target is revalidated.
- var download = await DownloadWithValidatedRedirectsAsync(imageUrl);
- using var response = download.Response;
- var finalUri = download.FinalUri;
- response.EnsureSuccessStatusCode();
+ // Download image with manual redirect handling so every redirect target is revalidated.
+ var download = await DownloadWithValidatedRedirectsAsync(imageUrl);
+ using var response = download.Response;
+ var finalUri = download.FinalUri;
+ response.EnsureSuccessStatusCode();
- // Read bytes first so we can reject tiny placeholder images (for example 1x1)
- var imageBytes = await response.Content.ReadAsByteArrayAsync();
- var mediaType = response.Content.Headers.ContentType?.MediaType;
- if (IsPlaceholderImage(imageBytes, mediaType))
- {
- _logger.LogInformation("Skipping placeholder/tiny image for {Identifier} from {Url}", identifier, imageUrl);
- return null;
- }
+ // Read bytes first so we can reject tiny placeholder images (for example 1x1)
+ var imageBytes = await response.Content.ReadAsByteArrayAsync();
+ var mediaType = response.Content.Headers.ContentType?.MediaType;
+ if (IsPlaceholderImage(imageBytes, mediaType))
+ {
+ _logger.LogInformation("Skipping placeholder/tiny image for {Identifier} from {Url}", identifier, imageUrl);
+ return null;
+ }
- // Determine file extension from content type or URL
- var extension = GetImageExtension(finalUri.ToString(), response.Content.Headers.ContentType?.MediaType);
- var fileName = $"{SanitizeFileName(identifier)}{extension}";
- var filePath = Path.Combine(_tempCachePath, fileName);
+ // Determine file extension from content type or URL
+ var extension = GetImageExtension(finalUri.ToString(), response.Content.Headers.ContentType?.MediaType);
+ var fileName = $"{SanitizeFileName(identifier)}{extension}";
+ var filePath = Path.Combine(_tempCachePath, fileName);
- // Save to temp cache
- await File.WriteAllBytesAsync(filePath, imageBytes);
+ // Save to temp cache
+ await File.WriteAllBytesAsync(filePath, imageBytes);
- _logger.LogInformation("Image cached successfully: {FilePath}", filePath);
- return GetRelativePath(filePath);
- }
- finally
- {
- sem.Release();
- }
+ _logger.LogInformation("Image cached successfully: {FilePath}", filePath);
+ return GetRelativePath(filePath);
}
- catch (Exception ex) when (ex is not OperationCanceledException && ex is not OutOfMemoryException && ex is not StackOverflowException) {
+ catch (Exception ex) when (ex is not OperationCanceledException && ex is not OutOfMemoryException && ex is not StackOverflowException)
+ {
_logger.LogError(ex, "Failed to download and cache image from {Url}", imageUrl);
return null;
}
@@ -259,7 +249,8 @@ public ImageCacheService(ILogger logger, IHttpClientFactory h
_logger.LogInformation("Image moved to library storage: {Identifier}", identifier);
return GetRelativePath(libraryPath);
}
- catch (Exception ex) when (ex is not OperationCanceledException && ex is not OutOfMemoryException && ex is not StackOverflowException) {
+ catch (Exception ex) when (ex is not OperationCanceledException && ex is not OutOfMemoryException && ex is not StackOverflowException)
+ {
_logger.LogError(ex, "Failed to move image to library storage for {Identifier}", identifier);
return null;
}
@@ -323,7 +314,8 @@ public ImageCacheService(ILogger logger, IHttpClientFactory h
_logger.LogInformation("Author image moved to author storage: {Identifier}", identifier);
return GetRelativePath(authorPath);
}
- catch (Exception ex) when (ex is not OperationCanceledException && ex is not OutOfMemoryException && ex is not StackOverflowException) {
+ catch (Exception ex) when (ex is not OperationCanceledException && ex is not OutOfMemoryException && ex is not StackOverflowException)
+ {
_logger.LogError(ex, "Failed to move author image to author storage for {Identifier}", identifier);
return null;
}
@@ -348,19 +340,19 @@ public ImageCacheService(ILogger logger, IHttpClientFactory h
// Check library storage first
var libraryPath = GetImagePath(identifier, _libraryImagePath);
- if (File.Exists(libraryPath))
- {
- if (IsValidCachedCoverFile(libraryPath, identifier, "library"))
- return Task.FromResult(GetRelativePath(libraryPath));
- }
+ if (File.Exists(libraryPath))
+ {
+ if (IsValidCachedCoverFile(libraryPath, identifier, "library"))
+ return Task.FromResult(GetRelativePath(libraryPath));
+ }
// Check authors storage next
var authorPath = GetImagePath(identifier, _authorImagePath);
- if (File.Exists(authorPath))
- {
- if (IsValidCachedCoverFile(authorPath, identifier, "author"))
- return Task.FromResult(GetRelativePath(authorPath));
- }
+ if (File.Exists(authorPath))
+ {
+ if (IsValidCachedCoverFile(authorPath, identifier, "author"))
+ return Task.FromResult(GetRelativePath(authorPath));
+ }
// Check temp cache and prefer non-placeholder images
var tempBest = GetBestTempImagePathIfValid(identifier);
@@ -410,14 +402,16 @@ public Task ClearTempCacheAsync()
{
File.Delete(file);
}
- catch (Exception ex) when (ex is not OperationCanceledException && ex is not OutOfMemoryException && ex is not StackOverflowException) {
+ catch (Exception ex) when (ex is not OperationCanceledException && ex is not OutOfMemoryException && ex is not StackOverflowException)
+ {
_logger.LogWarning(ex, "Failed to delete cached file: {File}", file);
}
}
_logger.LogInformation("Temp cache cleared: {Count} files deleted", files.Length);
}
}
- catch (Exception ex) when (ex is not OperationCanceledException && ex is not OutOfMemoryException && ex is not StackOverflowException) {
+ catch (Exception ex) when (ex is not OperationCanceledException && ex is not OutOfMemoryException && ex is not StackOverflowException)
+ {
_logger.LogError(ex, "Failed to clear temp cache");
}
@@ -627,7 +621,8 @@ private async Task TryValidateResolvedExternalImageUriAsync(Uri uri)
_logger.LogWarning(ex, "Blocked image URL because DNS resolution failed for host {Host}", uri.Host);
return false;
}
- catch (Exception ex) when (ex is not OperationCanceledException && ex is not OutOfMemoryException && ex is not StackOverflowException) {
+ catch (Exception ex) when (ex is not OperationCanceledException && ex is not OutOfMemoryException && ex is not StackOverflowException)
+ {
_logger.LogWarning(ex, "Blocked image URL due to unexpected DNS validation error for host {Host}", uri.Host);
return false;
}
@@ -687,14 +682,16 @@ private bool IsValidCachedCoverFile(string filePath, string identifier, string b
{
File.Delete(filePath);
}
- catch (Exception ex) when (ex is not OperationCanceledException && ex is not OutOfMemoryException && ex is not StackOverflowException) {
+ catch (Exception ex) when (ex is not OperationCanceledException && ex is not OutOfMemoryException && ex is not StackOverflowException)
+ {
_logger.LogDebug(ex, "Failed deleting invalid cached image for {Identifier} in {Bucket}: {Path}", identifier, bucket, filePath);
}
return false;
}
return true;
}
- catch (Exception ex) when (ex is not OperationCanceledException && ex is not OutOfMemoryException && ex is not StackOverflowException) {
+ catch (Exception ex) when (ex is not OperationCanceledException && ex is not OutOfMemoryException && ex is not StackOverflowException)
+ {
_logger.LogWarning(ex, "Failed validating cached image file for {Identifier}: {Path}", identifier, filePath);
return false;
}
@@ -742,7 +739,8 @@ public void Dispose()
{
_httpClientNoRedirect.Dispose();
}
- catch (Exception ex) when (ex is not OperationCanceledException && ex is not OutOfMemoryException && ex is not StackOverflowException) {
+ catch (Exception ex) when (ex is not OperationCanceledException && ex is not OutOfMemoryException && ex is not StackOverflowException)
+ {
_logger.LogWarning(ex, "Failed disposing no-redirect HttpClient in ImageCacheService");
}
@@ -750,7 +748,8 @@ public void Dispose()
{
_httpClient.Dispose();
}
- catch (Exception ex) when (ex is not OperationCanceledException && ex is not OutOfMemoryException && ex is not StackOverflowException) {
+ catch (Exception ex) when (ex is not OperationCanceledException && ex is not OutOfMemoryException && ex is not StackOverflowException)
+ {
_logger.LogWarning(ex, "Failed disposing HttpClient in ImageCacheService");
}
}
diff --git a/listenarr.api/Services/MetadataExtractionLimiter.cs b/listenarr.api/Services/MetadataExtractionLimiter.cs
index 125557a..c165e2a 100644
--- a/listenarr.api/Services/MetadataExtractionLimiter.cs
+++ b/listenarr.api/Services/MetadataExtractionLimiter.cs
@@ -1,10 +1,10 @@
-using System.Threading;
+using AsyncKeyedLock;
namespace Listenarr.Api.Services
{
public class MetadataExtractionLimiter
{
// Default concurrent ffprobe extractions
- public SemaphoreSlim Sem { get; } = new SemaphoreSlim(4);
+ public AsyncNonKeyedLocker Sem { get; } = new(4);
}
}
diff --git a/listenarr.api/Services/MetadataRescanService.cs b/listenarr.api/Services/MetadataRescanService.cs
index aa0f7fb..2d7788c 100644
--- a/listenarr.api/Services/MetadataRescanService.cs
+++ b/listenarr.api/Services/MetadataRescanService.cs
@@ -1,14 +1,6 @@
-using System;
-using System.Collections.Generic;
-using System.Linq;
-using System.Threading;
-using System.Threading.Tasks;
-using Microsoft.Extensions.Hosting;
-using Microsoft.Extensions.Logging;
-using Microsoft.EntityFrameworkCore;
-using Microsoft.Extensions.DependencyInjection;
-using Listenarr.Domain.Models;
+using AsyncKeyedLock;
using Listenarr.Infrastructure.Models;
+using Microsoft.EntityFrameworkCore;
namespace Listenarr.Api.Services
{
@@ -18,7 +10,7 @@ public class MetadataRescanService : BackgroundService
private readonly IServiceScopeFactory _scopeFactory;
private readonly ILogger _logger;
private readonly TimeSpan _interval = TimeSpan.FromMinutes(5);
- private readonly SemaphoreSlim _sem = new SemaphoreSlim(2); // bound concurrent extractions
+ private readonly AsyncNonKeyedLocker _sem = new(2); // bound concurrent extractions
public MetadataRescanService(IServiceScopeFactory scopeFactory, ILogger logger)
{
@@ -52,7 +44,7 @@ protected override async Task ExecuteAsync(CancellationToken stoppingToken)
var tasks = new List();
foreach (var f in candidates)
{
- await _sem.WaitAsync(stoppingToken);
+ var releaser = await _sem.LockAsync(stoppingToken);
// Capture loop variable
var file = f;
@@ -98,7 +90,7 @@ protected override async Task ExecuteAsync(CancellationToken stoppingToken)
}
finally
{
- _sem.Release();
+ releaser.Dispose();
}
}));
}
diff --git a/listenarr.api/Services/NotificationPayloadBuilder.cs b/listenarr.api/Services/NotificationPayloadBuilder.cs
index 53bbac9..1bfa6e9 100644
--- a/listenarr.api/Services/NotificationPayloadBuilder.cs
+++ b/listenarr.api/Services/NotificationPayloadBuilder.cs
@@ -195,13 +195,13 @@ static string Truncate(string? value, int max)
{
var e = embeds[0]!.AsObject();
- string titleText = e.ContainsKey("title") ? e["title"]?.ToString() ?? string.Empty : string.Empty;
- string descriptionText = e.ContainsKey("description") ? e["description"]?.ToString() ?? string.Empty : string.Empty;
+ string titleText = e.TryGetPropertyValue("title", out var tt) && tt != null ? tt.ToString() ?? string.Empty : string.Empty;
+ string descriptionText = e.TryGetPropertyValue("description", out var dt) && dt != null ? dt.ToString() ?? string.Empty : string.Empty;
int total = titleText.Length + descriptionText.Length;
- if (e.ContainsKey("fields") && e["fields"] != null)
+ if (e.TryGetPropertyValue("fields", out var fieldsObj) && fieldsObj != null)
{
- foreach (var f in e["fields"]!.AsArray())
+ foreach (var f in fieldsObj!.AsArray())
{
var fo = f!.AsObject();
var n = fo["name"]?.ToString() ?? string.Empty;
@@ -221,9 +221,9 @@ static string Truncate(string? value, int max)
excess = excess - reduce;
}
- if (excess > 0 && e.ContainsKey("fields") && e["fields"] != null)
+ if (excess > 0 && fieldsObj != null)
{
- var arr = e["fields"]!.AsArray();
+ var arr = fieldsObj!.AsArray();
for (int i = 0; i < arr.Count && excess > 0; i++)
{
var fo = arr[i]!.AsObject();
diff --git a/listenarr.api/Services/Search/AsinEnricher.cs b/listenarr.api/Services/Search/AsinEnricher.cs
index 1ea927a..1b0dbe3 100644
--- a/listenarr.api/Services/Search/AsinEnricher.cs
+++ b/listenarr.api/Services/Search/AsinEnricher.cs
@@ -1,9 +1,7 @@
-using System.Collections.Concurrent;
-using System.Threading;
+using AsyncKeyedLock;
using Listenarr.Api.Services.Search.Filters;
using Listenarr.Api.Services.Search.Strategies;
-using Listenarr.Infrastructure.Models;
-using Microsoft.Extensions.Logging;
+using System.Collections.Concurrent;
namespace Listenarr.Api.Services.Search;
@@ -44,7 +42,7 @@ public async Task EnrichAsinsAsync(
string? query,
CancellationToken ct = default)
{
- var semaphore = new SemaphoreSlim(5); // Increased from 3 to 5 for better throughput
+ var semaphore = new AsyncNonKeyedLocker(5); // Increased from 3 to 5 for better throughput
var enrichmentTasks = new List();
var enriched = new ConcurrentBag();
var asinsNeedingFallback = new ConcurrentBag();
@@ -54,7 +52,7 @@ public async Task EnrichAsinsAsync(
{
enrichmentTasks.Add(Task.Run(async () =>
{
- await semaphore.WaitAsync(ct);
+ using var _ = await semaphore.LockAsync(ct);
try
{
ct.ThrowIfCancellationRequested();
@@ -203,10 +201,6 @@ public async Task EnrichAsinsAsync(
catch (Exception ex) when (ex is not OperationCanceledException && ex is not OutOfMemoryException && ex is not StackOverflowException) {
_logger.LogWarning(ex, "Metadata enrichment failed for ASIN {Asin}", asin);
}
- finally
- {
- semaphore.Release();
- }
}));
}
diff --git a/listenarr.api/Services/Search/Providers/MyAnonamouseSearchProvider.cs b/listenarr.api/Services/Search/Providers/MyAnonamouseSearchProvider.cs
index 754c17c..2bc9b81 100644
--- a/listenarr.api/Services/Search/Providers/MyAnonamouseSearchProvider.cs
+++ b/listenarr.api/Services/Search/Providers/MyAnonamouseSearchProvider.cs
@@ -5,6 +5,7 @@
using System.Text.Json;
using System.Text.RegularExpressions;
using System.Threading.Tasks;
+using AsyncKeyedLock;
using Listenarr.Api.Models;
using Listenarr.Domain.Models;
using Listenarr.Infrastructure.Models;
@@ -798,10 +799,10 @@ private async Task EnrichMyAnonamouseResultsAsync(Indexer indexer, List
{
- await sem.WaitAsync();
+ using var _ = await sem.LockAsync();
try
{
if (string.IsNullOrEmpty(r.ResultUrl)) return;
@@ -888,10 +889,6 @@ private async Task EnrichMyAnonamouseResultsAsync(Indexer indexer, List> IntelligentSearchAsync(string quer
if (parsed.TryGetValue("AUTHOR:", out var authorVal)) authorVal = authorVal?.Trim();
if (parsed.TryGetValue("TITLE:", out var titleVal)) titleVal = titleVal?.Trim();
- string? parsedAsin = parsed.ContainsKey("ASIN:") ? parsed["ASIN:"] : null;
- string? parsedIsbn = parsed.ContainsKey("ISBN:") ? parsed["ISBN:"] : null;
- string? parsedAuthor = parsed.ContainsKey("AUTHOR:") ? parsed["AUTHOR:"] : null;
- string? parsedTitle = parsed.ContainsKey("TITLE:") ? parsed["TITLE:"] : null;
-
- try { _logger.LogInformation("Parsed prefixes: ASIN={Asin}, ISBN={Isbn}, AUTHOR={Author}, TITLE={Title}", parsedAsin, parsedIsbn, parsedAuthor, parsedTitle); } catch (Exception caughtEx_1) when (caughtEx_1 is not OperationCanceledException && caughtEx_1 is not OutOfMemoryException && caughtEx_1 is not StackOverflowException) {
+ try { _logger.LogInformation("Parsed prefixes: ASIN={Asin}, ISBN={Isbn}, AUTHOR={Author}, TITLE={Title}", asinVal, isbnVal, authorVal, titleVal); } catch (Exception caughtEx_1) when (caughtEx_1 is not OperationCanceledException && caughtEx_1 is not OutOfMemoryException && caughtEx_1 is not StackOverflowException) {
System.Diagnostics.Debug.WriteLine("Suppressed non-fatal exception in catch block.");
}
// Determine search type (priority: ASIN > ISBN > AUTHOR+TITLE > AUTHOR > TITLE)
- if (!string.IsNullOrEmpty(parsedAsin)) searchType = "ASIN";
- else if (!string.IsNullOrEmpty(parsedIsbn)) searchType = "ISBN";
- else if (!string.IsNullOrEmpty(parsedAuthor) && !string.IsNullOrEmpty(parsedTitle)) searchType = "AUTHOR_TITLE";
- else if (!string.IsNullOrEmpty(parsedAuthor)) searchType = "AUTHOR";
- else if (!string.IsNullOrEmpty(parsedTitle)) searchType = "TITLE";
+ if (!string.IsNullOrEmpty(asinVal)) searchType = "ASIN";
+ else if (!string.IsNullOrEmpty(isbnVal)) searchType = "ISBN";
+ else if (!string.IsNullOrEmpty(authorVal) && !string.IsNullOrEmpty(titleVal)) searchType = "AUTHOR_TITLE";
+ else if (!string.IsNullOrEmpty(authorVal)) searchType = "AUTHOR";
+ else if (!string.IsNullOrEmpty(titleVal)) searchType = "TITLE";
else searchType = null;
try { _logger.LogInformation("[DBG] Determined searchType='{SearchType}'", searchType); } catch (Exception caughtEx_2) when (caughtEx_2 is not OperationCanceledException && caughtEx_2 is not OutOfMemoryException && caughtEx_2 is not StackOverflowException) {
@@ -540,9 +536,9 @@ public async Task> IntelligentSearchAsync(string quer
// ASIN case is handled separately above via ASIN handler
// ISBN
- if (searchType == "ISBN" && !string.IsNullOrWhiteSpace(parsedIsbn))
+ if (searchType == "ISBN" && !string.IsNullOrEmpty(isbnVal))
{
- var amRes = await _audimetaService.SearchByIsbnAsync(parsedIsbn, 1, 50, region, language);
+ var amRes = await _audimetaService.SearchByIsbnAsync(isbnVal, 1, 50, region, language);
if (amRes?.Results != null && amRes.Results.Any())
{
var converted = new List();
@@ -578,7 +574,7 @@ public async Task> IntelligentSearchAsync(string quer
}
// AUTHOR-only
- if (searchType == "AUTHOR" && !string.IsNullOrWhiteSpace(parsedAuthor))
+ if (searchType == "AUTHOR" && !string.IsNullOrEmpty(authorVal))
{
// Aggregate multiple pages from Audimeta until we reach candidateLimit
var aggregated = new List();
@@ -592,13 +588,13 @@ public async Task> IntelligentSearchAsync(string quer
{
try
{
- var pageRes = await _audimetaService.SearchByAuthorAsync(parsedAuthor, page, pageSize, region, language);
+ var pageRes = await _audimetaService.SearchByAuthorAsync(authorVal, page, pageSize, region, language);
var pageCount = pageRes?.Results?.Count ?? 0;
aggregated.AddRange(pageRes?.Results ?? Enumerable.Empty());
- _logger.LogInformation("Audimeta author page {Page} returned {PageCount} results (aggregated {AggregatedCount}) for author '{Author}'", page, pageCount, aggregated.Count, parsedAuthor);
+ _logger.LogInformation("Audimeta author page {Page} returned {PageCount} results (aggregated {AggregatedCount}) for author '{Author}'", page, pageCount, aggregated.Count, authorVal);
if (pageRes?.Results == null || pageCount == 0)
{
- _logger.LogInformation("Stopping aggregation: page {Page} returned no results for author '{Author}'", page, parsedAuthor);
+ _logger.LogInformation("Stopping aggregation: page {Page} returned no results for author '{Author}'", page, authorVal);
break;
}
if (pageCount < pageSize)
@@ -609,12 +605,12 @@ public async Task> IntelligentSearchAsync(string quer
// Do not stop aggregating based on candidateLimit for audimeta
}
catch (Exception exPage) when (exPage is not OperationCanceledException && exPage is not OutOfMemoryException && exPage is not StackOverflowException) {
- _logger.LogDebug(exPage, "Failed fetching audimeta author page {Page} for author {Author}", page, parsedAuthor);
+ _logger.LogDebug(exPage, "Failed fetching audimeta author page {Page} for author {Author}", page, authorVal);
break;
}
}
- _logger.LogInformation("Finished aggregating author pages for '{Author}': total aggregated={AggregatedCount}, candidateLimit={CandidateLimit}, pageSize={PageSize}, maxPages={MaxPages}", parsedAuthor, aggregated.Count, candidateLimit, pageSize, maxPages);
+ _logger.LogInformation("Finished aggregating author pages for '{Author}': total aggregated={AggregatedCount}, candidateLimit={CandidateLimit}, pageSize={PageSize}, maxPages={MaxPages}", authorVal, aggregated.Count, candidateLimit, pageSize, maxPages);
if (aggregated.Any())
{
// Deduplicate results based on ASIN to prevent repeated books across pages
@@ -624,7 +620,7 @@ public async Task> IntelligentSearchAsync(string quer
.Select(g => g.First())
.ToList();
- _logger.LogInformation("Deduplicated author results for '{Author}': {OriginalCount} -> {DeduplicatedCount}", parsedAuthor, aggregated.Count, deduplicated.Count);
+ _logger.LogInformation("Deduplicated author results for '{Author}': {OriginalCount} -> {DeduplicatedCount}", authorVal, aggregated.Count, deduplicated.Count);
var converted = new List();
var authorFiltered = deduplicated.AsEnumerable();
@@ -658,9 +654,9 @@ public async Task> IntelligentSearchAsync(string quer
}
// AUTHOR + TITLE: prefer author endpoint then filter by title/isbn to ensure consistent Audimeta enrichment
- if (searchType == "AUTHOR_TITLE" && !string.IsNullOrWhiteSpace(parsedAuthor))
+ if (searchType == "AUTHOR_TITLE" && !string.IsNullOrEmpty(authorVal))
{
- try { _logger.LogInformation("Entering AUTHOR_TITLE branch: author='{Author}', title='{Title}', isbn='{Isbn}'", parsedAuthor, parsedTitle, parsedIsbn); } catch (Exception caughtEx_3) when (caughtEx_3 is not OperationCanceledException && caughtEx_3 is not OutOfMemoryException && caughtEx_3 is not StackOverflowException) {
+ try { _logger.LogInformation("Entering AUTHOR_TITLE branch: author='{Author}', title='{Title}', isbn='{Isbn}'", authorVal, titleVal, isbnVal); } catch (Exception caughtEx_3) when (caughtEx_3 is not OperationCanceledException && caughtEx_3 is not OutOfMemoryException && caughtEx_3 is not StackOverflowException) {
System.Diagnostics.Debug.WriteLine("Suppressed non-fatal exception in catch block.");
}
// Aggregate author pages up to candidateLimit to enrich matching
@@ -674,10 +670,10 @@ public async Task> IntelligentSearchAsync(string quer
{
try
{
- var pageRes = await _audimetaService.SearchByAuthorAsync(parsedAuthor, page, pageSize, region, language);
+ var pageRes = await _audimetaService.SearchByAuthorAsync(authorVal, page, pageSize, region, language);
var pageCount = pageRes?.Results?.Count ?? 0;
aggregated.AddRange(pageRes?.Results ?? Enumerable.Empty());
- _logger.LogInformation("Audimeta AUTHOR_TITLE: page {Page} returned {PageCount} results (aggregated {AggregatedCount}) for author '{Author}'", page, pageCount, aggregated.Count, parsedAuthor);
+ _logger.LogInformation("Audimeta AUTHOR_TITLE: page {Page} returned {PageCount} results (aggregated {AggregatedCount}) for author '{Author}'", page, pageCount, aggregated.Count, authorVal);
if (pageRes?.Results == null || pageCount == 0)
{
_logger.LogInformation("Audimeta AUTHOR_TITLE: stopping aggregation — page {Page} returned no results", page);
@@ -690,11 +686,11 @@ public async Task> IntelligentSearchAsync(string quer
}
}
catch (Exception exPage) when (exPage is not OperationCanceledException && exPage is not OutOfMemoryException && exPage is not StackOverflowException) {
- _logger.LogDebug(exPage, "Failed fetching audimeta author page {Page} for author {Author}", page, parsedAuthor);
+ _logger.LogDebug(exPage, "Failed fetching audimeta author page {Page} for author {Author}", page, authorVal);
break;
}
}
- _logger.LogInformation("Audimeta AUTHOR_TITLE: finished aggregating pages for '{Author}': aggregated={AggregatedCount}, pageSize={PageSize}, maxPages={MaxPages}", parsedAuthor, aggregated.Count, pageSize, maxPages);
+ _logger.LogInformation("Audimeta AUTHOR_TITLE: finished aggregating pages for '{Author}': aggregated={AggregatedCount}, pageSize={PageSize}, maxPages={MaxPages}", authorVal, aggregated.Count, pageSize, maxPages);
if (aggregated?.Any() == true)
{
// Deduplicate results based on ASIN to prevent repeated books across pages
@@ -704,10 +700,10 @@ public async Task> IntelligentSearchAsync(string quer
.Select(g => g.First())
.ToList();
- _logger.LogInformation("Deduplicated AUTHOR_TITLE results for '{Author}': {OriginalCount} -> {DeduplicatedCount}", parsedAuthor, aggregated.Count, deduplicated.Count);
+ _logger.LogInformation("Deduplicated AUTHOR_TITLE results for '{Author}': {OriginalCount} -> {DeduplicatedCount}", authorVal, aggregated.Count, deduplicated.Count);
var converted = new List();
- try { _logger.LogInformation("Audimeta author lookup returned {Count} aggregated results for author '{Author}'", deduplicated.Count, parsedAuthor); } catch (Exception caughtEx_4) when (caughtEx_4 is not OperationCanceledException && caughtEx_4 is not OutOfMemoryException && caughtEx_4 is not StackOverflowException) {
+ try { _logger.LogInformation("Audimeta author lookup returned {Count} aggregated results for author '{Author}'", deduplicated.Count, authorVal); } catch (Exception caughtEx_4) when (caughtEx_4 is not OperationCanceledException && caughtEx_4 is not OutOfMemoryException && caughtEx_4 is not StackOverflowException) {
System.Diagnostics.Debug.WriteLine("Suppressed non-fatal exception in catch block.");
}
@@ -719,12 +715,11 @@ public async Task> IntelligentSearchAsync(string quer
if (!string.IsNullOrWhiteSpace(language)) authorFiltered = authorFiltered.Where(b => !string.IsNullOrWhiteSpace(b.Language) && string.Equals(b.Language, language, StringComparison.OrdinalIgnoreCase));
// Title-based filtering can be done directly against the author results
- if (!string.IsNullOrWhiteSpace(parsedTitle))
+ if (!string.IsNullOrEmpty(titleVal))
{
- var t = parsedTitle.Trim();
authorFiltered = authorFiltered.Where(b =>
- (!string.IsNullOrWhiteSpace(b.Title) && b.Title.IndexOf(t, StringComparison.OrdinalIgnoreCase) >= 0) ||
- (!string.IsNullOrWhiteSpace(b.Subtitle) && b.Subtitle.IndexOf(t, StringComparison.OrdinalIgnoreCase) >= 0)
+ (!string.IsNullOrWhiteSpace(b.Title) && b.Title.IndexOf(titleVal, StringComparison.OrdinalIgnoreCase) >= 0) ||
+ (!string.IsNullOrWhiteSpace(b.Subtitle) && b.Subtitle.IndexOf(titleVal, StringComparison.OrdinalIgnoreCase) >= 0)
);
}
@@ -732,13 +727,12 @@ public async Task> IntelligentSearchAsync(string quer
// instead of fetching metadata for every ASIN, scan a limited set
// of candidates and only fetch metadata until we find ISBN matches.
var detailedMetaByAsin = new Dictionary(StringComparer.OrdinalIgnoreCase);
- if (!string.IsNullOrWhiteSpace(parsedIsbn))
+ if (!string.IsNullOrEmpty(isbnVal))
{
- var isbn = parsedIsbn.Trim();
// Limit how many author results to scan for ISBNs to avoid huge loads
var isbnScanLimit = Math.Min(200, Math.Max(50, candidateLimit));
var scanCandidates = aggregated.Where(r => !string.IsNullOrWhiteSpace(r.Asin)).Take(isbnScanLimit).ToList();
- try { _logger.LogInformation("Scanning up to {Limit} author candidates for ISBN {Isbn}", scanCandidates.Count, isbn); } catch (Exception caughtEx_5) when (caughtEx_5 is not OperationCanceledException && caughtEx_5 is not OutOfMemoryException && caughtEx_5 is not StackOverflowException) {
+ try { _logger.LogInformation("Scanning up to {Limit} author candidates for ISBN {Isbn}", scanCandidates.Count, isbnVal); } catch (Exception caughtEx_5) when (caughtEx_5 is not OperationCanceledException && caughtEx_5 is not OutOfMemoryException && caughtEx_5 is not StackOverflowException) {
System.Diagnostics.Debug.WriteLine("Suppressed non-fatal exception in catch block.");
}
foreach (var c in scanCandidates)
@@ -749,7 +743,7 @@ public async Task> IntelligentSearchAsync(string quer
var meta = await _audimetaService.GetBookMetadataAsync(c.Asin, region, true, language);
if (meta == null) continue;
detailedMetaByAsin[c.Asin] = meta;
- if (!string.IsNullOrWhiteSpace(meta.Isbn) && string.Equals(meta.Isbn.Trim(), isbn, StringComparison.OrdinalIgnoreCase))
+ if (!string.IsNullOrWhiteSpace(meta.Isbn) && string.Equals(meta.Isbn.Trim(), isbnVal, StringComparison.OrdinalIgnoreCase))
{
// Narrow authorFiltered to only matching ASINs
authorFiltered = authorFiltered.Where(r => !string.IsNullOrWhiteSpace(r.Asin) && string.Equals(r.Asin, c.Asin, StringComparison.OrdinalIgnoreCase));
@@ -810,9 +804,9 @@ public async Task> IntelligentSearchAsync(string quer
}
// TITLE-only
- if (searchType == "TITLE" && !string.IsNullOrWhiteSpace(parsedTitle))
+ if (searchType == "TITLE" && !string.IsNullOrEmpty(titleVal))
{
- var titleRes = await _audimetaService.SearchByTitleAsync(parsedTitle, 1, 50, region, language);
+ var titleRes = await _audimetaService.SearchByTitleAsync(titleVal, 1, 50, region, language);
if (titleRes?.Results != null && titleRes.Results.Any())
{
var converted = new List();
@@ -891,11 +885,10 @@ public async Task> IntelligentSearchAsync(string quer
var skipOpenLibrary = false;
// Handle ASIN queries immediately with metadata-first approach
- if (searchType == "ASIN" && !string.IsNullOrEmpty(parsedAsin))
+ if (searchType == "ASIN" && !string.IsNullOrEmpty(asinVal))
{
- var asin = parsedAsin.Trim();
var asinMetadataSources = await GetEnabledMetadataSourcesAsync();
- var asinSearchResults = await _asinSearchHandler.SearchByAsinAsync(asin, asinMetadataSources);
+ var asinSearchResults = await _asinSearchHandler.SearchByAsinAsync(asinVal, asinMetadataSources);
return asinSearchResults.Select(r => SearchResultConverters.ToMetadata(r)).ToList();
}
@@ -2978,10 +2971,10 @@ private async Task EnrichMyAnonamouseResultsAsync(Indexer indexer, List
{
- await sem.WaitAsync();
+ using var _ = await sem.LockAsync();
try
{
var cacheKey = $"mam:enrich:{r.ResultUrl}";
@@ -3081,10 +3074,6 @@ private async Task EnrichMyAnonamouseResultsAsync(Indexer indexer, List
{
private static readonly ConcurrentDictionary DbFilesToCleanup = new(StringComparer.OrdinalIgnoreCase);
- private static readonly object CleanupSync = new();
+ private static readonly Lock CleanupSync = new();
private static bool _isProcessExitCleanupHooked;
private string? _sqliteDbPath;