2025-08-22 11:55:35 +01:00
commit b21d2a685e
312 changed files with 31174 additions and 0 deletions

View File

@@ -0,0 +1,243 @@
using MareSynchronosShared.Metrics;
using MareSynchronosShared.Services;
using MareSynchronosStaticFilesServer.Utils;
using System.Collections.Concurrent;
using System.Net.Http.Headers;
using MareSynchronosShared.Utils;
using MareSynchronos.API.Routes;
using MareSynchronosShared.Utils.Configuration;
namespace MareSynchronosStaticFilesServer.Services;
public sealed class CachedFileProvider : IDisposable
{
private readonly IConfigurationService<StaticFilesServerConfiguration> _configuration;
private readonly ILogger<CachedFileProvider> _logger;
private readonly FileStatisticsService _fileStatisticsService;
private readonly MareMetrics _metrics;
private readonly ServerTokenGenerator _generator;
private readonly ITouchHashService _touchService;
private readonly Uri _remoteCacheSourceUri;
private readonly bool _useColdStorage;
private readonly string _hotStoragePath;
private readonly string _coldStoragePath;
private readonly ConcurrentDictionary<string, Task> _currentTransfers = new(StringComparer.Ordinal);
private readonly HttpClient _httpClient;
private readonly SemaphoreSlim _downloadSemaphore = new(1, 1);
private bool _disposed;
private bool IsMainServer => _remoteCacheSourceUri == null && _isDistributionServer;
private bool _isDistributionServer;
public CachedFileProvider(IConfigurationService<StaticFilesServerConfiguration> configuration, ILogger<CachedFileProvider> logger,
FileStatisticsService fileStatisticsService, MareMetrics metrics, ServerTokenGenerator generator, ITouchHashService touchService)
{
AppContext.SetSwitch("System.Net.Http.SocketsHttpHandler.Http2UnencryptedSupport", true);
_configuration = configuration;
_logger = logger;
_fileStatisticsService = fileStatisticsService;
_metrics = metrics;
_generator = generator;
_touchService = touchService;
_remoteCacheSourceUri = configuration.GetValueOrDefault<Uri>(nameof(StaticFilesServerConfiguration.DistributionFileServerAddress), null);
_isDistributionServer = configuration.GetValueOrDefault(nameof(StaticFilesServerConfiguration.IsDistributionNode), false);
_useColdStorage = configuration.GetValueOrDefault(nameof(StaticFilesServerConfiguration.UseColdStorage), false);
_hotStoragePath = configuration.GetValue<string>(nameof(StaticFilesServerConfiguration.CacheDirectory));
_coldStoragePath = configuration.GetValue<string>(nameof(StaticFilesServerConfiguration.ColdStorageDirectory));
_httpClient = new();
_httpClient.DefaultRequestHeaders.UserAgent.Add(new ProductInfoHeaderValue("MareSynchronosServer", "1.0.0.0"));
}
public void Dispose()
{
if (_disposed)
{
return;
}
_disposed = true;
_httpClient?.Dispose();
}
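// Downloads a single file from the configured distribution server into cold storage (or hot storage, if cold storage is disabled), writing to a temporary .dl file first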
private async Task DownloadTask(string hash)
{
var destinationFilePath = FilePathUtil.GetFilePath(_useColdStorage ? _coldStoragePath : _hotStoragePath, hash);
// If cold storage is not configured, the file was not found, or an error occurred, try to download the file from the remote server
var downloadUrl = MareFiles.DistributionGetFullPath(_remoteCacheSourceUri, hash);
_logger.LogInformation("Did not find {hash}, downloading from {server}", hash, downloadUrl);
using var requestMessage = new HttpRequestMessage(HttpMethod.Get, downloadUrl);
requestMessage.Headers.Authorization = new AuthenticationHeaderValue("Bearer", _generator.Token);
if (_configuration.GetValueOrDefault(nameof(StaticFilesServerConfiguration.DistributionFileServerForceHTTP2), false))
{
requestMessage.Version = new Version(2, 0);
requestMessage.VersionPolicy = HttpVersionPolicy.RequestVersionExact;
}
HttpResponseMessage? response = null;
try
{
response = await _httpClient.SendAsync(requestMessage).ConfigureAwait(false);
response.EnsureSuccessStatusCode();
}
catch (Exception ex)
{
_logger.LogWarning(ex, "Failed to download {url}", downloadUrl);
response?.Dispose();
return;
}
// Stream the response body to a temporary file, then move it into place atomically
var tempFileName = destinationFilePath + ".dl";
await using (var fileStream = new FileStream(tempFileName, FileMode.Create, FileAccess.ReadWrite))
{
var buffer = new byte[4096];
int bytesRead;
using var content = await response.Content.ReadAsStreamAsync().ConfigureAwait(false);
while ((bytesRead = await content.ReadAsync(buffer).ConfigureAwait(false)) > 0)
{
await fileStream.WriteAsync(buffer.AsMemory(0, bytesRead)).ConfigureAwait(false);
}
await fileStream.FlushAsync().ConfigureAwait(false);
}
File.Move(tempFileName, destinationFilePath, true);
_metrics.IncGauge(_useColdStorage ? MetricsAPI.GaugeFilesTotalColdStorage : MetricsAPI.GaugeFilesTotal);
_metrics.IncGauge(_useColdStorage ? MetricsAPI.GaugeFilesTotalSizeColdStorage : MetricsAPI.GaugeFilesTotalSize, new FileInfo(destinationFilePath).Length);
response.Dispose();
}
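// Copies a file from cold storage into the given destination (the hot cache); returns false if cold storage is disabled or the file is not present there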
private bool TryCopyFromColdStorage(string hash, string destinationFilePath)
{
if (!_useColdStorage) return false;
if (string.IsNullOrEmpty(_coldStoragePath)) return false;
var coldStorageFilePath = FilePathUtil.GetFilePath(_coldStoragePath, hash);
if (!File.Exists(coldStorageFilePath)) return false;
try
{
_logger.LogDebug("Copying {hash} from cold storage: {path}", hash, coldStorageFilePath);
var tempFileName = destinationFilePath + ".dl";
File.Copy(coldStorageFilePath, tempFileName, true);
File.Move(tempFileName, destinationFilePath, true);
var destinationFile = new FileInfo(destinationFilePath);
_metrics.IncGauge(MetricsAPI.GaugeFilesTotal);
_metrics.IncGauge(MetricsAPI.GaugeFilesTotalSize, destinationFile.Length);
return true;
}
catch (Exception ex)
{
// Recover from a fairly common race condition where another task created the hot copy first -- max wait time is 75ms
// Guarding TryCopyFromColdStorage with the download task mutex doesn't reliably prevent this, for reasons not yet understood
for (int retry = 0; retry < 5; ++retry)
{
Thread.Sleep(5 + retry * 5);
if (File.Exists(destinationFilePath))
return true;
}
_logger.LogWarning(ex, "Could not copy {coldStoragePath} from cold storage", coldStorageFilePath);
}
return false;
}
// Returns FileInfo ONLY if the hot file was immediately available without downloading
// Since the intended use is for pre-fetching files from hot storage, this is exactly what we need anyway
public async Task<FileInfo?> DownloadFileWhenRequired(string hash)
{
var fi = FilePathUtil.GetFileInfoForHash(_hotStoragePath, hash);
if (fi != null && fi.Length != 0)
return fi;
// first check cold storage
if (TryCopyFromColdStorage(hash, FilePathUtil.GetFilePath(_hotStoragePath, hash)))
return null;
// no distribution server configured to download from
if (_remoteCacheSourceUri == null)
return null;
await _downloadSemaphore.WaitAsync().ConfigureAwait(false);
if (!_currentTransfers.TryGetValue(hash, out var downloadTask) || (downloadTask?.IsCompleted ?? true))
{
_currentTransfers[hash] = Task.Run(async () =>
{
try
{
_metrics.IncGauge(MetricsAPI.GaugeFilesDownloadingFromCache);
await DownloadTask(hash).ConfigureAwait(false);
TryCopyFromColdStorage(hash, FilePathUtil.GetFilePath(_hotStoragePath, hash));
}
catch (Exception ex)
{
_logger.LogWarning(ex, "Error during Download Task for {hash}", hash);
}
finally
{
_metrics.DecGauge(MetricsAPI.GaugeFilesDownloadingFromCache);
_currentTransfers.Remove(hash, out _);
}
});
}
_downloadSemaphore.Release();
return null;
}
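// Ensures the file is available in hot storage, waiting up to 120s for any in-flight download, and returns its FileInfo (null if it could not be obtained)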
public async Task<FileInfo?> GetAndDownloadFile(string hash)
{
var fi = await DownloadFileWhenRequired(hash).ConfigureAwait(false);
if (fi == null && _currentTransfers.TryGetValue(hash, out var downloadTask))
{
try
{
using CancellationTokenSource cts = new();
cts.CancelAfter(TimeSpan.FromSeconds(120));
_metrics.IncGauge(MetricsAPI.GaugeFilesTasksWaitingForDownloadFromCache);
await downloadTask.WaitAsync(cts.Token).ConfigureAwait(false);
}
catch (Exception e)
{
_logger.LogWarning(e, "Failed while waiting for download task for {hash}", hash);
return null;
}
finally
{
_metrics.DecGauge(MetricsAPI.GaugeFilesTasksWaitingForDownloadFromCache);
}
}
fi ??= FilePathUtil.GetFileInfoForHash(_hotStoragePath, hash);
if (fi == null)
return null;
fi.LastAccessTimeUtc = DateTime.UtcNow;
_touchService.TouchColdHash(hash);
_fileStatisticsService.LogFile(hash, fi.Length);
return fi;
}
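// Opens a shared read-only stream over the hot copy, downloading it first if necessary; returns null if the file is unavailable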
public async Task<FileStream?> GetAndDownloadFileStream(string hash)
{
var fi = await GetAndDownloadFile(hash).ConfigureAwait(false);
if (fi == null)
return null;
return new FileStream(fi.FullName, FileMode.Open, FileAccess.Read, FileShare.Inheritable | FileShare.Read);
}
public void TouchColdHash(string hash)
{
_touchService.TouchColdHash(hash);
}
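// Returns true if any of the given hashes are currently being transferred from the distribution server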
public bool AnyFilesDownloading(List<string> hashes)
{
return hashes.Exists(_currentTransfers.ContainsKey);
}
}

View File

@@ -0,0 +1,82 @@
using MareSynchronosShared.Services;
using MareSynchronosShared.Utils.Configuration;
using MareSynchronosStaticFilesServer.Utils;
using System.Collections.Concurrent;
namespace MareSynchronosStaticFilesServer.Services;
// Perform access time updates for cold cache files accessed via hot cache or shard servers
public class ColdTouchHashService : ITouchHashService
{
private readonly ILogger<ColdTouchHashService> _logger;
private readonly IConfigurationService<StaticFilesServerConfiguration> _configuration;
private readonly bool _useColdStorage;
private readonly string _coldStoragePath;
// Debounce multiple updates towards the same file
private readonly ConcurrentDictionary<string, DateTime> _lastUpdateTimesUtc = new(StringComparer.Ordinal);
private int _cleanupCounter = 0;
private object _cleanupLockObj = new();
private const double _debounceTimeSecs = 900.0;
public ColdTouchHashService(ILogger<ColdTouchHashService> logger, IConfigurationService<StaticFilesServerConfiguration> configuration)
{
_logger = logger;
_configuration = configuration;
_useColdStorage = configuration.GetValueOrDefault(nameof(StaticFilesServerConfiguration.UseColdStorage), false);
_coldStoragePath = configuration.GetValue<string>(nameof(StaticFilesServerConfiguration.ColdStorageDirectory));
}
public Task StartAsync(CancellationToken cancellationToken)
{
return Task.CompletedTask;
}
public Task StopAsync(CancellationToken cancellationToken)
{
return Task.CompletedTask;
}
public void TouchColdHash(string hash)
{
if (!_useColdStorage)
return;
var nowUtc = DateTime.UtcNow;
// Clean up debounce dictionary regularly
if (_cleanupCounter++ >= 1000)
{
_cleanupCounter = 0;
if (Monitor.TryEnter(_cleanupLockObj))
{
try
{
foreach (var entry in _lastUpdateTimesUtc.Where(entry => (nowUtc - entry.Value).TotalSeconds >= _debounceTimeSecs).ToList())
_lastUpdateTimesUtc.TryRemove(entry.Key, out _);
}
finally
{
Monitor.Exit(_cleanupLockObj);
}
}
}
// Ignore multiple updates within a time window of the first
if (_lastUpdateTimesUtc.TryGetValue(hash, out var lastUpdateTimeUtc) && (nowUtc - lastUpdateTimeUtc).TotalSeconds < _debounceTimeSecs)
return;
var fileInfo = FilePathUtil.GetFileInfoForHash(_coldStoragePath, hash);
if (fileInfo != null)
{
_logger.LogTrace("Touching {fileName}", fileInfo.Name);
try
{
fileInfo.LastAccessTimeUtc = nowUtc;
}
catch (IOException) { return; }
_lastUpdateTimesUtc.TryAdd(hash, nowUtc);
}
}
}

View File

@@ -0,0 +1,359 @@
using ByteSizeLib;
using MareSynchronos.API.Dto.Files;
using MareSynchronosShared.Data;
using MareSynchronosShared.Metrics;
using MareSynchronosShared.Models;
using MareSynchronosShared.Services;
using MareSynchronosShared.Utils.Configuration;
using MareSynchronosStaticFilesServer.Utils;
using MessagePack.Formatters;
using Microsoft.EntityFrameworkCore;
using Microsoft.Extensions.Hosting.Systemd;
namespace MareSynchronosStaticFilesServer.Services;
public class FileCleanupService : IHostedService
{
private readonly IConfigurationService<StaticFilesServerConfiguration> _configuration;
private readonly ILogger<FileCleanupService> _logger;
private readonly MareMetrics _metrics;
private readonly IServiceProvider _services;
private readonly string _hotStoragePath;
private readonly string _coldStoragePath;
private readonly bool _isMain = false;
private readonly bool _isDistributionNode = false;
private readonly bool _useColdStorage = false;
private HashSet<string> _orphanedFiles = new(StringComparer.Ordinal);
private CancellationTokenSource _cleanupCts;
private int HotStorageMinimumRetention => _configuration.GetValueOrDefault(nameof(StaticFilesServerConfiguration.MinimumFileRetentionPeriodInDays), 7);
private int HotStorageRetention => _configuration.GetValueOrDefault(nameof(StaticFilesServerConfiguration.UnusedFileRetentionPeriodInDays), 14);
private double HotStorageSize => _configuration.GetValueOrDefault(nameof(StaticFilesServerConfiguration.CacheSizeHardLimitInGiB), -1.0);
private int ColdStorageMinimumRetention => _configuration.GetValueOrDefault(nameof(StaticFilesServerConfiguration.ColdStorageMinimumFileRetentionPeriodInDays), 60);
private int ColdStorageRetention => _configuration.GetValueOrDefault(nameof(StaticFilesServerConfiguration.ColdStorageUnusedFileRetentionPeriodInDays), 60);
private double ColdStorageSize => _configuration.GetValueOrDefault(nameof(StaticFilesServerConfiguration.ColdStorageSizeHardLimitInGiB), -1.0);
private double SmallSizeKiB => _configuration.GetValueOrDefault(nameof(StaticFilesServerConfiguration.CacheSmallSizeThresholdKiB), 64.0);
private double LargeSizeKiB => _configuration.GetValueOrDefault(nameof(StaticFilesServerConfiguration.CacheLargeSizeThresholdKiB), 1024.0);
private int ForcedDeletionAfterHours => _configuration.GetValueOrDefault(nameof(StaticFilesServerConfiguration.ForcedDeletionOfFilesAfterHours), -1);
private int CleanupCheckMinutes => _configuration.GetValueOrDefault(nameof(StaticFilesServerConfiguration.CleanupCheckInMinutes), 15);
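// Cache files are named by their 40-character hash; files with any other name (e.g. temporary .dl/.tmp files) are ignored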
private List<FileInfo> GetAllHotFiles() => new DirectoryInfo(_hotStoragePath).GetFiles("*", SearchOption.AllDirectories)
.Where(f => f != null && f.Name.Length == 40)
.OrderBy(f => f.LastAccessTimeUtc).ToList();
private List<FileInfo> GetAllColdFiles() => new DirectoryInfo(_coldStoragePath).GetFiles("*", SearchOption.AllDirectories)
.Where(f => f != null && f.Name.Length == 40)
.OrderBy(f => f.LastAccessTimeUtc).ToList();
private List<FileInfo> GetTempFiles() => new DirectoryInfo(_useColdStorage ? _coldStoragePath : _hotStoragePath).GetFiles("*", SearchOption.AllDirectories)
.Where(f => f != null && (f.Name.EndsWith(".dl", StringComparison.InvariantCultureIgnoreCase) || f.Name.EndsWith(".tmp", StringComparison.InvariantCultureIgnoreCase))).ToList();
public FileCleanupService(MareMetrics metrics, ILogger<FileCleanupService> logger,
IServiceProvider services, IConfigurationService<StaticFilesServerConfiguration> configuration)
{
_metrics = metrics;
_logger = logger;
_services = services;
_configuration = configuration;
_useColdStorage = _configuration.GetValueOrDefault(nameof(StaticFilesServerConfiguration.UseColdStorage), false);
_hotStoragePath = configuration.GetValue<string>(nameof(StaticFilesServerConfiguration.CacheDirectory));
_coldStoragePath = configuration.GetValue<string>(nameof(StaticFilesServerConfiguration.ColdStorageDirectory));
_isDistributionNode = configuration.GetValueOrDefault(nameof(StaticFilesServerConfiguration.IsDistributionNode), false);
_isMain = configuration.GetValue<Uri>(nameof(StaticFilesServerConfiguration.MainFileServerAddress)) == null && _isDistributionNode;
}
public Task StartAsync(CancellationToken cancellationToken)
{
_logger.LogInformation("Cleanup Service started");
InitializeGauges();
_cleanupCts = new();
_ = CleanUpTask(_cleanupCts.Token);
return Task.CompletedTask;
}
public Task StopAsync(CancellationToken cancellationToken)
{
_cleanupCts.Cancel();
return Task.CompletedTask;
}
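// Deletes files, prioritized by size-weighted TTL progression, until the total size fits within the configured hard limit (in GiB)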
private List<string> CleanUpFilesBeyondSizeLimit(List<FileInfo> files, double sizeLimit, double minTTL, double maxTTL, CancellationToken ct)
{
var removedFiles = new List<string>();
if (sizeLimit <= 0)
{
return removedFiles;
}
var smallSize = SmallSizeKiB * 1024.0;
var largeSize = LargeSizeKiB * 1024.0;
var now = DateTime.Now;
// Avoid nonsense in future calculations
if (smallSize < 0.0)
smallSize = 0.0;
if (largeSize < smallSize)
largeSize = smallSize;
if (minTTL < 0.0)
minTTL = 0.0;
if (maxTTL < minTTL)
maxTTL = minTTL;
// Calculates a deletion priority to prioritize deletion of larger files over a configured TTL range based on a file's size.
// This is intended to be applied to the hot cache, as the cost of recovering many small files is greater than a single large file.
// Example (minTTL=7, maxTTL=30):
// - A 10MB file was last accessed 5 days ago. Its calculated optimum TTL is 7 days. result = 0.7143
// - A 50kB file was last accessed 10 days ago. Its calculated optimum TTL is 30 days. result = 0.3333
// The larger file will be deleted with a higher priority than the smaller file.
double CalculateTTLProgression(FileInfo file)
{
var fileLength = (double)file.Length;
var fileAgeDays = (now - file.LastAccessTime).TotalDays;
var sizeNorm = Math.Clamp((fileLength - smallSize) / (largeSize - smallSize), 0.0, 1.0);
// Using Math.Sqrt(sizeNorm) would create a more logical scaling curve, but it barely matters
var ttlDayRange = (maxTTL - minTTL) * (1.0 - sizeNorm);
var daysPastMinTTL = Math.Max(fileAgeDays - minTTL, 0.0);
// There is some creativity in choosing an upper bound here:
// - With no upper bound, any file larger than `largeSize` is always the highest priority for deletion once it passes its calculated TTL
// - With 1.0 as an upper bound, all files older than `maxTTL` will have the same priority regardless of size
// - Using maxTTL/minTTL chooses a logical cut-off point where any files old enough to be affected would have been cleaned up already
var ttlProg = Math.Clamp(daysPastMinTTL / ttlDayRange, 0.0, maxTTL / minTTL);
return ttlProg;
}
try
{
// The file list is already sorted by access time, so the list index is incorporated into
// the priority tuple to preserve it as a secondary ordering
var sortedFiles = new PriorityQueue<FileInfo, (double, int)>();
foreach (var (file, i) in files.Select((file, i) => ( file, i )))
{
double ttlProg = CalculateTTLProgression(file);
sortedFiles.Enqueue(file, (-ttlProg, i));
}
_logger.LogInformation("Cleaning up files beyond the cache size limit of {cacheSizeLimit} GiB", sizeLimit);
var totalCacheSizeInBytes = files.Sum(s => s.Length);
long cacheSizeLimitInBytes = (long)ByteSize.FromGibiBytes(sizeLimit).Bytes;
while (totalCacheSizeInBytes > cacheSizeLimitInBytes && sortedFiles.Count != 0 && !ct.IsCancellationRequested)
{
var file = sortedFiles.Dequeue();
totalCacheSizeInBytes -= file.Length;
_logger.LogInformation("Deleting {file} with size {size:N2}MiB", file.FullName, ByteSize.FromBytes(file.Length).MebiBytes);
file.Delete();
removedFiles.Add(file.Name);
}
files.RemoveAll(f => removedFiles.Contains(f.Name, StringComparer.InvariantCultureIgnoreCase));
}
catch (Exception ex)
{
_logger.LogWarning(ex, "Error during cache size limit cleanup");
}
return removedFiles;
}
private void CleanUpOrphanedFiles(HashSet<string> allDbFileHashes, List<FileInfo> allPhysicalFiles, CancellationToken ct)
{
// To avoid race conditions with file uploads, only delete files on a second pass
var newOrphanedFiles = new HashSet<string>(StringComparer.Ordinal);
foreach (var file in allPhysicalFiles.ToList())
{
if (!allDbFileHashes.Contains(file.Name.ToUpperInvariant()))
{
_logger.LogInformation("File not in DB, marking: {fileName}", file.Name);
newOrphanedFiles.Add(file.FullName);
}
ct.ThrowIfCancellationRequested();
}
foreach (var fullName in _orphanedFiles.Where(f => newOrphanedFiles.Contains(f)))
{
var name = Path.GetFileName(fullName);
File.Delete(fullName);
_logger.LogInformation("File still not in DB, deleting: {fileName}", name);
allPhysicalFiles.RemoveAll(f => f.FullName.Equals(fullName, StringComparison.InvariantCultureIgnoreCase));
}
_orphanedFiles = newOrphanedFiles;
}
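// Deletes files not accessed within the retention period and, if forced deletion is enabled, files not written to within the given number of hours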
private List<string> CleanUpOutdatedFiles(List<FileInfo> files, int unusedRetention, int forcedDeletionAfterHours, CancellationToken ct)
{
var removedFiles = new List<string>();
try
{
_logger.LogInformation("Cleaning up files older than {filesOlderThanDays} days", unusedRetention);
if (forcedDeletionAfterHours > 0)
{
_logger.LogInformation("Cleaning up files written to longer than {hours}h ago", forcedDeletionAfterHours);
}
var lastAccessCutoffTime = DateTime.Now.Subtract(TimeSpan.FromDays(unusedRetention));
var forcedDeletionCutoffTime = DateTime.Now.Subtract(TimeSpan.FromHours(forcedDeletionAfterHours));
foreach (var file in files)
{
if (file.LastAccessTime < lastAccessCutoffTime)
{
_logger.LogInformation("File outdated: {fileName}, {fileSize:N2}MiB", file.Name, ByteSize.FromBytes(file.Length).MebiBytes);
file.Delete();
removedFiles.Add(file.Name);
}
else if (forcedDeletionAfterHours > 0 && file.LastWriteTime < forcedDeletionCutoffTime)
{
_logger.LogInformation("File forcefully deleted: {fileName}, {fileSize:N2}MiB", file.Name, ByteSize.FromBytes(file.Length).MebiBytes);
file.Delete();
removedFiles.Add(file.Name);
}
ct.ThrowIfCancellationRequested();
}
files.RemoveAll(f => removedFiles.Contains(f.Name, StringComparer.InvariantCultureIgnoreCase));
}
catch (Exception ex)
{
_logger.LogWarning(ex, "Error during file cleanup of old files");
}
return removedFiles;
}
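// Deletes leftover .dl/.tmp files that have not been written to for at least 20 minutes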
private void CleanUpTempFiles()
{
var pastTime = DateTime.UtcNow.Subtract(TimeSpan.FromMinutes(20));
var tempFiles = GetTempFiles();
foreach (var tempFile in tempFiles.Where(f => f.LastWriteTimeUtc < pastTime))
tempFile.Delete();
}
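// Main cleanup loop: prunes cold and hot storage, reconciles the database (main server only) and reschedules itself on the configured interval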
private async Task CleanUpTask(CancellationToken ct)
{
while (!ct.IsCancellationRequested)
{
try
{
using var scope = _services.CreateScope();
using var dbContext = _isMain ? scope.ServiceProvider.GetService<MareDbContext>()! : null;
HashSet<string> allDbFileHashes = null;
// Database operations only performed on main server
if (_isMain)
{
var allDbFiles = await dbContext.Files.ToListAsync(ct).ConfigureAwait(false);
allDbFileHashes = new HashSet<string>(allDbFiles.Select(a => a.Hash.ToUpperInvariant()), StringComparer.Ordinal);
}
if (_useColdStorage)
{
var coldFiles = GetAllColdFiles();
var removedColdFiles = new List<string>();
removedColdFiles.AddRange(
CleanUpOutdatedFiles(coldFiles, ColdStorageRetention, ForcedDeletionAfterHours, ct)
);
removedColdFiles.AddRange(
CleanUpFilesBeyondSizeLimit(coldFiles, ColdStorageSize, ColdStorageMinimumRetention, ColdStorageRetention, ct)
);
// Removed cold storage files are also deleted from the database, if we are the main file server
if (_isMain)
{
dbContext.Files.RemoveRange(
dbContext.Files.Where(f => removedColdFiles.Contains(f.Hash))
);
allDbFileHashes.ExceptWith(removedColdFiles);
CleanUpOrphanedFiles(allDbFileHashes, coldFiles, ct);
}
// Remove hot copies of files now that the authoritative copy is gone
foreach (var removedFile in removedColdFiles)
{
var hotFile = FilePathUtil.GetFileInfoForHash(_hotStoragePath, removedFile);
hotFile?.Delete();
}
_metrics.SetGaugeTo(MetricsAPI.GaugeFilesTotalSizeColdStorage, coldFiles.Sum(f => { try { return f.Length; } catch { return 0; } }));
_metrics.SetGaugeTo(MetricsAPI.GaugeFilesTotalColdStorage, coldFiles.Count);
}
var hotFiles = GetAllHotFiles();
var removedHotFiles = new List<string>();
removedHotFiles.AddRange(
CleanUpOutdatedFiles(hotFiles, HotStorageRetention, forcedDeletionAfterHours: _useColdStorage ? ForcedDeletionAfterHours : -1, ct)
);
removedHotFiles.AddRange(
CleanUpFilesBeyondSizeLimit(hotFiles, HotStorageSize, HotStorageMinimumRetention, HotStorageRetention, ct)
);
if (_isMain)
{
// If cold storage is not active, then "hot" files are deleted from the database instead
if (!_useColdStorage)
{
dbContext.Files.RemoveRange(
dbContext.Files.Where(f => removedHotFiles.Contains(f.Hash))
);
allDbFileHashes.ExceptWith(removedHotFiles);
}
CleanUpOrphanedFiles(allDbFileHashes, hotFiles, ct);
await dbContext.SaveChangesAsync(ct).ConfigureAwait(false);
}
_metrics.SetGaugeTo(MetricsAPI.GaugeFilesTotalSize, hotFiles.Sum(f => { try { return f.Length; } catch { return 0; } }));
_metrics.SetGaugeTo(MetricsAPI.GaugeFilesTotal, hotFiles.Count);
CleanUpTempFiles();
}
catch (Exception e)
{
_logger.LogError(e, "Error during cleanup task");
}
var cleanupCheckMinutes = CleanupCheckMinutes;
var now = DateTime.Now;
TimeOnly currentTime = new(now.Hour, now.Minute, now.Second);
TimeOnly futureTime = new(now.Hour, now.Minute - now.Minute % cleanupCheckMinutes, 0);
var span = futureTime.AddMinutes(cleanupCheckMinutes) - currentTime;
_logger.LogInformation("File Cleanup Complete, next run at {date}", now.Add(span));
await Task.Delay(span, ct).ConfigureAwait(false);
}
}
private void InitializeGauges()
{
if (_useColdStorage)
{
var allFilesInColdStorageDir = GetAllColdFiles();
_metrics.SetGaugeTo(MetricsAPI.GaugeFilesTotalSizeColdStorage, allFilesInColdStorageDir.Sum(f => f.Length));
_metrics.SetGaugeTo(MetricsAPI.GaugeFilesTotalColdStorage, allFilesInColdStorageDir.Count);
}
var allFilesInHotStorage = GetAllHotFiles();
_metrics.SetGaugeTo(MetricsAPI.GaugeFilesTotalSize, allFilesInHotStorage.Sum(f => { try { return f.Length; } catch { return 0; } }));
_metrics.SetGaugeTo(MetricsAPI.GaugeFilesTotal, allFilesInHotStorage.Count);
}
}

View File

@@ -0,0 +1,125 @@
using Microsoft.Win32.SafeHandles;
using System.Runtime.InteropServices;
using System.Threading.Channels;
namespace MareSynchronosStaticFilesServer.Services;
// Pre-fetch files from cache storage into memory
public class FilePreFetchService : IHostedService
{
private struct PreFetchRequest
{
public FileInfo FileInfo;
public DateTime ExpiryUtc;
}
private readonly ILogger<FilePreFetchService> _logger;
private CancellationTokenSource _prefetchCts;
private readonly Channel<PreFetchRequest> _prefetchChannel;
private const int _readAheadBytes = 8 * 1024 * 1024; // Maximum number of bytes to prefetch per file (8MB)
private const int _preFetchTasks = 4; // Maximum number of tasks to process prefetches concurrently
// Use readahead() on Linux if it's available
[DllImport("libc", EntryPoint = "readahead")]
static extern int LinuxReadAheadExternal(SafeFileHandle fd, Int64 offset, int count);
private bool _hasLinuxReadAhead = true;
public FilePreFetchService(ILogger<FilePreFetchService> logger)
{
_logger = logger;
_prefetchChannel = Channel.CreateUnbounded<PreFetchRequest>();
}
public Task StartAsync(CancellationToken cancellationToken)
{
_logger.LogInformation("File PreFetch Service started");
_prefetchCts = new();
for (int i = 0; i < _preFetchTasks; ++i)
_ = PrefetchTask(_prefetchCts.Token);
return Task.CompletedTask;
}
public Task StopAsync(CancellationToken cancellationToken)
{
_prefetchCts.Cancel();
return Task.CompletedTask;
}
// Queue a list of hashes to be prefetched in a background task
public void PrefetchFiles(ICollection<FileInfo> fileList)
{
if (!_hasLinuxReadAhead)
{
if (!_prefetchCts.IsCancellationRequested)
{
_logger.LogError("readahead() is not available - aborting File PreFetch Service");
_prefetchCts.Cancel();
}
return;
}
var nowUtc = DateTime.UtcNow;
// Expire prefetch requests that aren't picked up within 500ms
// By this point the request is probably already being served, or things are moving too slow to matter anyway
var expiry = nowUtc + TimeSpan.FromMilliseconds(500);
foreach (var fileInfo in fileList)
{
_ = _prefetchChannel.Writer.TryWrite(new PreFetchRequest(){
FileInfo = fileInfo,
ExpiryUtc = expiry,
});
}
}
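// Worker loop: picks up prefetch requests from the channel, skips expired ones and issues readahead() for the file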
private async Task PrefetchTask(CancellationToken ct)
{
var reader = _prefetchChannel.Reader;
while (!ct.IsCancellationRequested)
{
try
{
var req = await reader.ReadAsync(ct).ConfigureAwait(false);
var nowUtc = DateTime.UtcNow;
if (nowUtc >= req.ExpiryUtc)
{
_logger.LogDebug("Skipped expired prefetch for {hash}", req.FileInfo.Name);
continue;
}
try
{
var fs = new FileStream(req.FileInfo.FullName, FileMode.Open, FileAccess.Read, FileShare.Inheritable | FileShare.Read);
await using (fs.ConfigureAwait(false))
{
try
{
_ = LinuxReadAheadExternal(fs.SafeFileHandle, 0, _readAheadBytes);
_logger.LogTrace("Prefetched {hash}", req.FileInfo.Name);
}
catch (EntryPointNotFoundException)
{
_hasLinuxReadAhead = false;
}
}
}
catch (IOException) { }
}
catch (OperationCanceledException)
{
continue;
}
catch (Exception e)
{
_logger.LogError(e, "Error during prefetch task");
}
}
}
}

View File

@@ -0,0 +1,94 @@
using MareSynchronosShared.Metrics;
using System.Collections.Concurrent;
namespace MareSynchronosStaticFilesServer.Services;
public class FileStatisticsService : IHostedService
{
private readonly MareMetrics _metrics;
private readonly ILogger<FileStatisticsService> _logger;
private CancellationTokenSource _resetCancellationTokenSource;
private ConcurrentDictionary<string, long> _pastHourFiles = new(StringComparer.Ordinal);
private ConcurrentDictionary<string, long> _pastDayFiles = new(StringComparer.Ordinal);
public FileStatisticsService(MareMetrics metrics, ILogger<FileStatisticsService> logger)
{
_metrics = metrics;
_logger = logger;
}
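// Records a served file towards the unique-files-per-hour/day gauges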
public void LogFile(string fileHash, long length)
{
if (!_pastHourFiles.ContainsKey(fileHash))
{
_pastHourFiles[fileHash] = length;
_metrics.IncGauge(MetricsAPI.GaugeFilesUniquePastHour);
_metrics.IncGauge(MetricsAPI.GaugeFilesUniquePastHourSize, length);
}
if (!_pastDayFiles.ContainsKey(fileHash))
{
_pastDayFiles[fileHash] = length;
_metrics.IncGauge(MetricsAPI.GaugeFilesUniquePastDay);
_metrics.IncGauge(MetricsAPI.GaugeFilesUniquePastDaySize, length);
}
}
public void LogRequest(long requestSize)
{
_metrics.IncCounter(MetricsAPI.CounterFileRequests, 1);
_metrics.IncCounter(MetricsAPI.CounterFileRequestSize, requestSize);
}
public Task StartAsync(CancellationToken cancellationToken)
{
_logger.LogInformation("Starting FileStatisticsService");
_resetCancellationTokenSource = new();
_ = ResetHourlyFileData();
_ = ResetDailyFileData();
return Task.CompletedTask;
}
public async Task ResetHourlyFileData()
{
while (!_resetCancellationTokenSource.Token.IsCancellationRequested)
{
_logger.LogInformation("Resetting 1h Data");
_pastHourFiles = new(StringComparer.Ordinal);
_metrics.SetGaugeTo(MetricsAPI.GaugeFilesUniquePastHour, 0);
_metrics.SetGaugeTo(MetricsAPI.GaugeFilesUniquePastHourSize, 0);
var now = DateTime.UtcNow;
TimeOnly currentTime = new(now.Hour, now.Minute, now.Second);
TimeOnly futureTime = new(now.Hour, 0, 0);
var span = futureTime.AddHours(1) - currentTime;
await Task.Delay(span, _resetCancellationTokenSource.Token).ConfigureAwait(false);
}
}
public async Task ResetDailyFileData()
{
while (!_resetCancellationTokenSource.Token.IsCancellationRequested)
{
_logger.LogInformation("Resetting 24h Data");
_pastDayFiles = new(StringComparer.Ordinal);
_metrics.SetGaugeTo(MetricsAPI.GaugeFilesUniquePastDay, 0);
_metrics.SetGaugeTo(MetricsAPI.GaugeFilesUniquePastDaySize, 0);
var now = DateTime.UtcNow;
DateTime midnight = new(new DateOnly(now.Date.Year, now.Date.Month, now.Date.Day), new(0, 0, 0));
var span = midnight.AddDays(1) - now;
await Task.Delay(span, _resetCancellationTokenSource.Token).ConfigureAwait(false);
}
}
public Task StopAsync(CancellationToken cancellationToken)
{
_resetCancellationTokenSource.Cancel();
_logger.LogInformation("Stopping FileStatisticsService");
return Task.CompletedTask;
}
}

View File

@@ -0,0 +1,6 @@
namespace MareSynchronosStaticFilesServer.Services;
public interface IClientReadyMessageService
{
void SendDownloadReady(string uid, Guid requestId);
}

View File

@@ -0,0 +1,6 @@
namespace MareSynchronosStaticFilesServer.Services;
public interface ITouchHashService : IHostedService
{
void TouchColdHash(string hash);
}

View File

@@ -0,0 +1,26 @@
using Microsoft.AspNetCore.SignalR;
using MareSynchronos.API.SignalR;
using MareSynchronosServer.Hubs;
namespace MareSynchronosStaticFilesServer.Services;
public class MainClientReadyMessageService : IClientReadyMessageService
{
private readonly ILogger<MainClientReadyMessageService> _logger;
private readonly IHubContext<MareHub> _mareHub;
public MainClientReadyMessageService(ILogger<MainClientReadyMessageService> logger, IHubContext<MareHub> mareHub)
{
_logger = logger;
_mareHub = mareHub;
}
public void SendDownloadReady(string uid, Guid requestId)
{
_ = Task.Run(async () =>
{
_logger.LogDebug("Sending Client Ready for {uid}:{requestId} to SignalR", uid, requestId);
await _mareHub.Clients.User(uid).SendAsync(nameof(IMareHub.Client_DownloadReady), requestId).ConfigureAwait(false);
});
}
}

View File

@@ -0,0 +1,222 @@
using MareSynchronosShared.Metrics;
using MareSynchronosShared.Services;
using MareSynchronosShared.Utils.Configuration;
using MareSynchronosStaticFilesServer.Utils;
using System.Collections.Concurrent;
using System.Timers;
namespace MareSynchronosStaticFilesServer.Services;
public class RequestQueueService : IHostedService
{
private readonly IClientReadyMessageService _clientReadyMessageService;
private readonly CachedFileProvider _cachedFileProvider;
private readonly ILogger<RequestQueueService> _logger;
private readonly MareMetrics _metrics;
private readonly ConcurrentQueue<UserRequest> _queue = new();
private readonly ConcurrentQueue<UserRequest> _priorityQueue = new();
private readonly int _queueExpirationSeconds;
private readonly SemaphoreSlim _queueProcessingSemaphore = new(1);
private readonly UserQueueEntry[] _userQueueRequests;
private int _queueLimitForReset;
private readonly int _queueReleaseSeconds;
private System.Timers.Timer _queueTimer;
public RequestQueueService(MareMetrics metrics, IConfigurationService<StaticFilesServerConfiguration> configurationService,
ILogger<RequestQueueService> logger, IClientReadyMessageService hubContext, CachedFileProvider cachedFileProvider)
{
_userQueueRequests = new UserQueueEntry[configurationService.GetValueOrDefault(nameof(StaticFilesServerConfiguration.DownloadQueueSize), 50)];
_queueExpirationSeconds = configurationService.GetValueOrDefault(nameof(StaticFilesServerConfiguration.DownloadTimeoutSeconds), 5);
_queueLimitForReset = configurationService.GetValueOrDefault(nameof(StaticFilesServerConfiguration.DownloadQueueClearLimit), 15000);
_queueReleaseSeconds = configurationService.GetValueOrDefault(nameof(StaticFilesServerConfiguration.DownloadQueueReleaseSeconds), 15);
_metrics = metrics;
_logger = logger;
_clientReadyMessageService = hubContext;
_cachedFileProvider = cachedFileProvider;
}
public void ActivateRequest(Guid request)
{
_logger.LogDebug("Activating request {guid}", request);
var req = _userQueueRequests.First(f => f != null && f.UserRequest.RequestId == request);
req.MarkActive();
}
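// Waits for any in-progress queue processing to finish, then appends the request to the normal or priority queue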
public async Task EnqueueUser(UserRequest request, bool isPriority, CancellationToken token)
{
while (_queueProcessingSemaphore.CurrentCount == 0)
{
await Task.Delay(50, token).ConfigureAwait(false);
}
_logger.LogDebug("Enqueueing req {guid} from {user} for {file}", request.RequestId, request.User, string.Join(", ", request.FileIds));
GetQueue(isPriority).Enqueue(request);
}
public void FinishRequest(Guid request)
{
var req = _userQueueRequests.FirstOrDefault(f => f != null && f.UserRequest.RequestId == request);
if (req != null)
{
var idx = Array.IndexOf(_userQueueRequests, req);
_logger.LogDebug("Finishing Request {guid}, clearing slot {idx}", request, idx);
_userQueueRequests[idx] = null;
}
else
{
_logger.LogDebug("Request {guid} already cleared", request);
}
}
public bool IsActiveProcessing(Guid request, string user, out UserRequest userRequest)
{
var userQueueRequest = _userQueueRequests.FirstOrDefault(u => u != null && u.UserRequest.RequestId == request && string.Equals(u.UserRequest.User, user, StringComparison.Ordinal));
userRequest = userQueueRequest?.UserRequest;
return userQueueRequest != null && userRequest != null && userQueueRequest.ExpirationDate > DateTime.UtcNow;
}
public void RemoveFromQueue(Guid requestId, string user, bool isPriority)
{
var existingRequest = GetQueue(isPriority).FirstOrDefault(f => f.RequestId == requestId && string.Equals(f.User, user, StringComparison.Ordinal));
if (existingRequest == null)
{
var activeSlot = _userQueueRequests.FirstOrDefault(r => r != null && string.Equals(r.UserRequest.User, user, StringComparison.Ordinal) && r.UserRequest.RequestId == requestId);
if (activeSlot != null)
{
var idx = Array.IndexOf(_userQueueRequests, activeSlot);
if (idx >= 0)
{
_userQueueRequests[idx] = null;
}
}
}
else
{
existingRequest.IsCancelled = true;
}
}
public Task StartAsync(CancellationToken cancellationToken)
{
_queueTimer = new System.Timers.Timer(500);
_queueTimer.Elapsed += ProcessQueue;
_queueTimer.AutoReset = true;
_queueTimer.Start();
return Task.CompletedTask;
}
private ConcurrentQueue<UserRequest> GetQueue(bool isPriority) => isPriority ? _priorityQueue : _queue;
public bool StillEnqueued(Guid request, string user, bool isPriority)
{
return GetQueue(isPriority).Any(c => c.RequestId == request && string.Equals(c.User, user, StringComparison.Ordinal));
}
public Task StopAsync(CancellationToken cancellationToken)
{
_queueTimer.Stop();
return Task.CompletedTask;
}
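// Places a dequeued request into the given slot and notifies the client that its download is ready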
private void DequeueIntoSlot(UserRequest userRequest, int slot)
{
_logger.LogDebug("Dequeueing {req} into {i}: {user} with {file}", userRequest.RequestId, slot, userRequest.User, string.Join(", ", userRequest.FileIds));
_userQueueRequests[slot] = new(userRequest, DateTime.UtcNow.AddSeconds(_queueExpirationSeconds));
_clientReadyMessageService.SendDownloadReady(userRequest.User, userRequest.RequestId);
}
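// Timer callback: expires stale slots, then fills free slots from the priority queue first and the normal queue second, skipping cancelled requests and re-queueing requests whose files are still downloading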
private void ProcessQueue(object src, ElapsedEventArgs e)
{
if (_queueProcessingSemaphore.CurrentCount == 0) return;
_queueProcessingSemaphore.Wait();
try
{
if (_queue.Count(c => !c.IsCancelled) > _queueLimitForReset)
{
_queue.Clear();
return;
}
for (int i = 0; i < _userQueueRequests.Length; i++)
{
try
{
if (_userQueueRequests[i] != null
&& (((!_userQueueRequests[i].IsActive && _userQueueRequests[i].ExpirationDate < DateTime.UtcNow))
|| (_userQueueRequests[i].IsActive && _userQueueRequests[i].ActivationDate < DateTime.UtcNow.Subtract(TimeSpan.FromSeconds(_queueReleaseSeconds))))
)
{
_logger.LogDebug("Expiring request {guid} slot {slot}", _userQueueRequests[i].UserRequest.RequestId, i);
_userQueueRequests[i] = null;
}
if (_userQueueRequests[i] != null) continue;
while (true)
{
if (!_priorityQueue.All(u => _cachedFileProvider.AnyFilesDownloading(u.FileIds))
&& _priorityQueue.TryDequeue(out var prioRequest))
{
if (prioRequest.IsCancelled)
{
continue;
}
if (_cachedFileProvider.AnyFilesDownloading(prioRequest.FileIds))
{
_priorityQueue.Enqueue(prioRequest);
continue;
}
DequeueIntoSlot(prioRequest, i);
break;
}
if (!_queue.All(u => _cachedFileProvider.AnyFilesDownloading(u.FileIds))
&& _queue.TryDequeue(out var request))
{
if (request.IsCancelled)
{
continue;
}
if (_cachedFileProvider.AnyFilesDownloading(request.FileIds))
{
_queue.Enqueue(request);
continue;
}
DequeueIntoSlot(request, i);
break;
}
break;
}
}
catch (Exception ex)
{
_logger.LogWarning(ex, "Error during inside queue processing");
}
}
}
catch (Exception ex)
{
_logger.LogError(ex, "Error during Queue processing");
}
finally
{
_queueProcessingSemaphore.Release();
}
_metrics.SetGaugeTo(MetricsAPI.GaugeQueueFree, _userQueueRequests.Count(c => c == null));
_metrics.SetGaugeTo(MetricsAPI.GaugeQueueActive, _userQueueRequests.Count(c => c != null && c.IsActive));
_metrics.SetGaugeTo(MetricsAPI.GaugeQueueInactive, _userQueueRequests.Count(c => c != null && !c.IsActive));
_metrics.SetGaugeTo(MetricsAPI.GaugeDownloadQueue, _queue.Count(q => !q.IsCancelled));
_metrics.SetGaugeTo(MetricsAPI.GaugeDownloadQueueCancelled, _queue.Count(q => q.IsCancelled));
_metrics.SetGaugeTo(MetricsAPI.GaugeDownloadPriorityQueue, _priorityQueue.Count(q => !q.IsCancelled));
_metrics.SetGaugeTo(MetricsAPI.GaugeDownloadPriorityQueueCancelled, _priorityQueue.Count(q => q.IsCancelled));
}
}

View File

@@ -0,0 +1,51 @@
using MareSynchronos.API.Routes;
using MareSynchronosShared.Services;
using MareSynchronosShared.Utils;
using MareSynchronosShared.Utils.Configuration;
using System.Net.Http.Headers;
namespace MareSynchronosStaticFilesServer.Services;
public class ShardClientReadyMessageService : IClientReadyMessageService
{
private readonly ILogger<ShardClientReadyMessageService> _logger;
private readonly ServerTokenGenerator _tokenGenerator;
private readonly IConfigurationService<StaticFilesServerConfiguration> _configurationService;
private readonly HttpClient _httpClient;
public ShardClientReadyMessageService(ILogger<ShardClientReadyMessageService> logger, ServerTokenGenerator tokenGenerator, IConfigurationService<StaticFilesServerConfiguration> configurationService)
{
_logger = logger;
_tokenGenerator = tokenGenerator;
_configurationService = configurationService;
_httpClient = new();
_httpClient.DefaultRequestHeaders.UserAgent.Add(new ProductInfoHeaderValue("MareSynchronosServer", "1.0.0.0"));
}
public void SendDownloadReady(string uid, Guid requestId)
{
_ = Task.Run(async () =>
{
var mainUrlConfigKey = _configurationService.GetValue<bool>(nameof(StaticFilesServerConfiguration.NotifyMainServerDirectly))
? nameof(StaticFilesServerConfiguration.MainServerAddress)
: nameof(StaticFilesServerConfiguration.MainFileServerAddress);
var mainUrl = _configurationService.GetValue<Uri>(mainUrlConfigKey);
var path = MareFiles.MainSendReadyFullPath(mainUrl, uid, requestId);
using HttpRequestMessage msg = new()
{
RequestUri = path
};
msg.Headers.Authorization = new AuthenticationHeaderValue("Bearer", _tokenGenerator.Token);
_logger.LogDebug("Sending Client Ready for {uid}:{requestId} to {path}", uid, requestId, path);
try
{
using var result = await _httpClient.SendAsync(msg).ConfigureAwait(false);
}
catch (Exception ex)
{
_logger.LogError(ex, "Failure to send for {uid}:{requestId}", uid, requestId);
}
});
}
}

View File

@@ -0,0 +1,132 @@
using MareSynchronos.API.Routes;
using MareSynchronosShared.Services;
using MareSynchronosShared.Utils;
using MareSynchronosShared.Utils.Configuration;
using System.Net.Http.Headers;
namespace MareSynchronosStaticFilesServer.Services;
// Notify distribution server of file hashes downloaded via shards, so they are not prematurely purged from its cold cache
public class ShardTouchMessageService : ITouchHashService
{
private readonly ILogger<ShardTouchMessageService> _logger;
private readonly ServerTokenGenerator _tokenGenerator;
private readonly IConfigurationService<StaticFilesServerConfiguration> _configuration;
private readonly HttpClient _httpClient;
private readonly Uri _remoteCacheSourceUri;
private readonly HashSet<string> _touchHashSet = new();
private readonly ColdTouchHashService _nestedService = null;
private CancellationTokenSource _touchmsgCts;
public ShardTouchMessageService(ILogger<ShardTouchMessageService> logger, ILogger<ColdTouchHashService> nestedLogger,
ServerTokenGenerator tokenGenerator, IConfigurationService<StaticFilesServerConfiguration> configuration)
{
_logger = logger;
_tokenGenerator = tokenGenerator;
_configuration = configuration;
_remoteCacheSourceUri = _configuration.GetValueOrDefault<Uri>(nameof(StaticFilesServerConfiguration.DistributionFileServerAddress), null);
_httpClient = new();
_httpClient.DefaultRequestHeaders.UserAgent.Add(new ProductInfoHeaderValue("MareSynchronosServer", "1.0.0.0"));
if (configuration.GetValueOrDefault(nameof(StaticFilesServerConfiguration.UseColdStorage), false))
{
_nestedService = new ColdTouchHashService(nestedLogger, configuration);
}
}
public Task StartAsync(CancellationToken cancellationToken)
{
if (_remoteCacheSourceUri == null)
return Task.CompletedTask;
_logger.LogInformation("Touch Message Service started");
_touchmsgCts = new();
_ = TouchMessageTask(_touchmsgCts.Token);
return Task.CompletedTask;
}
public Task StopAsync(CancellationToken cancellationToken)
{
if (_remoteCacheSourceUri == null)
return Task.CompletedTask;
_touchmsgCts.Cancel();
return Task.CompletedTask;
}
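// POSTs the collected hashes to the distribution server's touch endpoint, authenticated with the server token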
private async Task SendTouches(IEnumerable<string> hashes)
{
var mainUrl = _remoteCacheSourceUri;
var path = new Uri(mainUrl, MareFiles.Distribution + "/touch");
using HttpRequestMessage msg = new()
{
RequestUri = path
};
msg.Headers.Authorization = new AuthenticationHeaderValue("Bearer", _tokenGenerator.Token);
msg.Method = HttpMethod.Post;
msg.Content = JsonContent.Create(hashes);
if (_configuration.GetValueOrDefault(nameof(StaticFilesServerConfiguration.DistributionFileServerForceHTTP2), false))
{
msg.Version = new Version(2, 0);
msg.VersionPolicy = HttpVersionPolicy.RequestVersionExact;
}
_logger.LogDebug("Sending remote touch to {path}", path);
try
{
using var result = await _httpClient.SendAsync(msg).ConfigureAwait(false);
}
catch (Exception ex)
{
_logger.LogError(ex, "Failure to send touches for {hashChunk}", hashes);
}
}
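// Batches touched hashes and flushes them to the distribution server every 60 seconds, with a final flush on shutdown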
private async Task TouchMessageTask(CancellationToken ct)
{
List<string> hashes;
while (!ct.IsCancellationRequested)
{
try
{
lock (_touchHashSet)
{
hashes = _touchHashSet.ToList();
_touchHashSet.Clear();
}
if (hashes.Count > 0)
await SendTouches(hashes).ConfigureAwait(false);
await Task.Delay(TimeSpan.FromSeconds(60), ct).ConfigureAwait(false);
}
catch (Exception e)
{
_logger.LogError(e, "Error during touch message task");
}
}
lock (_touchHashSet)
{
hashes = _touchHashSet.ToList();
_touchHashSet.Clear();
}
if (hashes.Count > 0)
await SendTouches(hashes).ConfigureAwait(false);
}
public void TouchColdHash(string hash)
{
if (_nestedService != null)
_nestedService.TouchColdHash(hash);
lock (_touchHashSet)
{
_touchHashSet.Add(hash);
}
}
}