fix: use ConcurrentBag for hash cache

Oliver Booth 2024-04-17 14:35:04 +01:00
parent 3396c2bc74
commit 7da6faff83
Signed by: oliverbooth
GPG Key ID: E60B570D1B7557B5
1 changed file with 4 additions and 7 deletions


@@ -8,7 +8,7 @@ namespace FindDuplicates;
 
 internal sealed class ListCommand : AsyncCommand<ListSettings>
 {
-    private readonly ConcurrentDictionary<string, List<FileInfo>> _fileHashMap = new();
+    private readonly ConcurrentDictionary<string, ConcurrentBag<FileInfo>> _fileHashMap = new();
 
     public override async Task<int> ExecuteAsync(CommandContext context, ListSettings settings)
     {
@@ -29,7 +29,7 @@ internal sealed class ListCommand : AsyncCommand<ListSettings>
         AnsiConsole.WriteLine();
 
         int duplicates = 0;
-        foreach ((string hash, List<FileInfo> files) in _fileHashMap)
+        foreach ((string hash, ConcurrentBag<FileInfo> files) in _fileHashMap)
         {
             int fileCount = files.Count;
 
@@ -119,11 +119,8 @@ internal sealed class ListCommand : AsyncCommand<ListSettings>
                 if (settings.Verbose)
                     AnsiConsole.WriteLine($"{file.FullName} ->\n {hash}");
 
-                if (!_fileHashMap.TryGetValue(hash, out List<FileInfo>? cache))
-                    _fileHashMap[hash] = cache = new List<FileInfo>();
-
-                lock (cache)
-                    cache.Add(file);
+                ConcurrentBag<FileInfo> cache = _fileHashMap.GetOrAdd(hash, _ => []);
+                cache.Add(file);
             }
             catch (Exception ex)
             {
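
Why this works: ConcurrentDictionary<TKey, TValue>.GetOrAdd replaces the racy TryGetValue-then-assign sequence with a single lookup-or-insert call, and ConcurrentBag<T>.Add is safe to call from multiple threads, so the explicit lock around List<T>.Add is no longer needed. Below is a minimal standalone sketch of the pattern, not the project's actual code: the names (fileHashMap, hash-N keys, file-N.bin values) are hypothetical stand-ins for the FileInfo/hashing work done in ListCommand.

using System;
using System.Collections.Concurrent;
using System.Threading.Tasks;

// Hypothetical stand-in for _fileHashMap: hash -> files that share that hash.
var fileHashMap = new ConcurrentDictionary<string, ConcurrentBag<string>>();

// Simulate many workers recording files under a handful of hashes concurrently.
Parallel.For(0, 1_000, i =>
{
    string hash = $"hash-{i % 4}";

    // GetOrAdd returns the bag already stored for this hash, or atomically publishes
    // a new one, removing the check-then-act race of TryGetValue plus assignment.
    ConcurrentBag<string> cache = fileHashMap.GetOrAdd(hash, _ => new ConcurrentBag<string>());

    // ConcurrentBag<T>.Add is thread-safe, so no lock is required around the insert.
    cache.Add($"file-{i}.bin");
});

foreach ((string hash, ConcurrentBag<string> files) in fileHashMap)
    Console.WriteLine($"{hash}: {files.Count} files");

One caveat worth knowing: the value factory passed to GetOrAdd can run more than once under contention, but only one bag is ever published in the dictionary, so no additions are lost. ConcurrentBag<T> also does not preserve insertion order, which does not matter here since the bag is only counted and enumerated.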