fix: split up monolithic class

This commit is contained in:
Damien 2025-03-01 13:08:01 -05:00
parent c83b0068e3
commit f21b92bc1f
10 changed files with 821 additions and 617 deletions

View File

@ -4,7 +4,6 @@ using System.Threading.Tasks;
using meilisearch.NET;
using meilisearch.NET.Configurations;
using meilisearch.NET.example;
using meilisearch.NET.Extensions;
using Microsoft.Extensions.Configuration;
using Microsoft.Extensions.DependencyInjection;
using Microsoft.Extensions.Hosting;
@ -56,13 +55,13 @@ public class test
_logger = logger ?? throw new ArgumentNullException(nameof(logger));
// Wait until Meilisearch is running
while (!service.IsMeilisearchRunning())
while (!service.IsRunning())
{
_logger.LogInformation("Waiting for Meilisearch to start...");
Task.Delay(1000).Wait(); // Wait for 1 second before checking again
}
var usage = service.GetProcessResourceUsage();
var usage = service.GetResourceUsage();
_logger.LogInformation($"Memory usage: {usage.MemoryUsageBytes} MB");
_logger.LogInformation($"CPU usage: {usage.CpuPercentage} %");
_logger.LogInformation($"Disk read: {usage.DiskReadBytes} MB");
@ -70,44 +69,63 @@ public class test
_logger.LogInformation($"Thread count: {usage.ThreadCount}");
_logger.LogInformation($"Process ID: {usage.ProcessId}");
//service.UpdateIndexStatus("test",false).Wait();
service.CreateIndex<document>("test");
// service.AddDocument("test", new document()
// {
// Id = Guid.NewGuid(),
// message = "Hello, Meilisearch!"
// });
// service.AddDocument("test", new document()
// {
// Id = Guid.NewGuid(),
// message = "Hello, Meilisearch!"
// });
// service.AddDocument("test", new document()
// {
// Id = Guid.NewGuid(),
// message = "Hello, Meilisearch!"
// });
// service.AddDocument("test", new document()
// {
// Id = Guid.NewGuid(),
// message = "Hello, Meilisearch!"
// });
// service.AddDocument("test", new document()
// {
// Id = Guid.NewGuid(),
// message = "Hello, Meilisearch!"
// });
// service.AddDocument("test", new document()
// {
// Id = Guid.NewGuid(),
// message = "Hello, Meilisearch!"
// });
// service.AddDocument("test", new document()
// {
// Id = Guid.NewGuid(),
// message = "Hello, Meilisearch!"
// });
service.AddDocument("test", new document()
{
Id = Guid.NewGuid(),
message = "Hello, Meilisearch!"
});
service.AddDocument("test", new document()
{
Id = Guid.NewGuid(),
message = "Hello, Meilisearch!"
});
service.AddDocument("test", new document()
{
Id = Guid.NewGuid(),
message = "Hello, Meilisearch!"
});
service.AddDocument("test", new document()
{
Id = Guid.NewGuid(),
message = "Hello, Meilisearch!"
});
service.AddDocument("test", new document()
{
Id = Guid.NewGuid(),
message = "Hello, Meilisearch!"
});
service.AddDocument("test", new document()
{
Id = Guid.NewGuid(),
message = "Hello, Meilisearch!"
});
service.AddDocument("test", new document()
{
Id = Guid.NewGuid(),
message = "Hello, Meilisearch!"
});
Task.Delay(10000).Wait();
service.SetIndexEnabled("test", false).Wait();
Task.Delay(10000).Wait();
usage = service.GetResourceUsage();
_logger.LogInformation($"Memory usage: {usage.MemoryUsageBytes} MB");
_logger.LogInformation($"CPU usage: {usage.CpuPercentage} %");
_logger.LogInformation($"Disk read: {usage.DiskReadBytes} MB");
_logger.LogInformation($"Disk write: {usage.DiskWriteBytes} MB");
_logger.LogInformation($"Thread count: {usage.ThreadCount}");
_logger.LogInformation($"Process ID: {usage.ProcessId}");
var storage = service.GetIndexStorageUsage("test");
var totalStorage = service.GetTotalStorageUsage();
_logger.LogInformation($"Index storage usage: {storage} MB");
_logger.LogInformation($"Total storage usage: {totalStorage} MB");
Task.Delay(10000).Wait();
service.SetIndexEnabled("test", false).Wait();
_logger.LogInformation("Test service initialized.");
}
}

View File

@ -1,15 +1,24 @@
using Meilisearch;
using meilisearch.NET.Configurations;
using meilisearch.NET.Services.DocumentManagement;
using meilisearch.NET.Services.IndexManagement;
using meilisearch.NET.Services.ProcessManagement;
using Microsoft.Extensions.DependencyInjection;
using meilisearch.NET;
namespace meilisearch.NET.Extensions;
/// <summary>
/// Dependency-injection wiring for the Meilisearch SDK services.
/// </summary>
public static class ServiceCollectionExtension
{
    /// <summary>
    /// Registers the Meilisearch client, configuration, process manager,
    /// index/document managers, and the facade service as singletons.
    /// </summary>
    public static IServiceCollection AddMeiliSearchService(this IServiceCollection services)
    {
        // NOTE(review): the port is hard-coded here while MeiliSearchConfiguration
        // carries a MeiliPort elsewhere — confirm the two cannot diverge.
        services.AddSingleton<MeilisearchClient>(sp =>
        {
            return new MeilisearchClient("http://localhost:7700");
        });
        services.AddHttpClient<MeiliSearchService>();
        services.AddSingleton<MeiliSearchConfiguration>();
        services.AddSingleton<MeiliSearchProcessManager>(); // fixed stray space inside the type argument
        services.AddSingleton<IIndexManager, IndexManager>();
        services.AddSingleton<IDocumentManager, DocumentManager>();
        services.AddSingleton<MeiliSearchService>();
        return services;
    }
}
}

View File

@ -1,594 +1,52 @@
using System.Collections.ObjectModel;
using System.Collections.Specialized;
using System.Diagnostics;
using System.IO.Compression;
using System.Net.Sockets;
using System.Reflection;
using System.Runtime.InteropServices;
using System.Security;
using System.Security.Cryptography;
using System.Text;
using Meilisearch;
using meilisearch.NET.Configurations;
using meilisearch.NET.Enums;
using meilisearch.NET.Extensions;
using meilisearch.NET.Interfaces;
using meilisearch.NET.Models;
using Meilisearch.QueryParameters;
using Microsoft.Extensions.Logging;
using Index = Meilisearch.Index;
using meilisearch.NET.Services.DocumentManagement;
using meilisearch.NET.Services.IndexManagement;
using meilisearch.NET.Services.ProcessManagement;
namespace meilisearch.NET;
public class MeiliSearchService:IDisposable
public class MeiliSearchService : IDisposable
{
private readonly HttpClient _httpClient;
private readonly ILogger<MeiliSearchService> _logger;
private readonly MeilisearchClient _client;
private readonly MeiliSearchConfiguration _meiliConfiguration;
private readonly string _indexBasePath = Path.Combine(AppContext.BaseDirectory, "db", "indexes" );
private static string _apiKey = GenerateApiKey();
private const int THRESHOLD = 10000;
private const string DEFAULT_DATA_FILE_PATH = "data.mdb";
private const string DEFAULT_LOCK_FILE_PATH = "lock.mdb";
private Process? process;
private ObservableCollection<KeyValuePair<string,IDocument>> _documentCollection;
private List<Index> indexes { get; set; } = new();
private readonly MeiliSearchProcessManager _processManager;
private readonly IIndexManager _indexManager;
private readonly IDocumentManager _documentManager;
protected virtual ObservableCollection<KeyValuePair<string, IDocument>> DocumentCollection
public MeiliSearchService(
MeiliSearchProcessManager processManager,
IIndexManager indexManager,
IDocumentManager documentManager)
{
get
{
return _documentCollection;
}
_processManager = processManager;
_indexManager = indexManager;
_documentManager = documentManager;
_processManager.StartProcess();
}
public MeiliSearchService(HttpClient httpClient, ILogger<MeiliSearchService> logger, MeiliSearchConfiguration meiliConfiguration)
{
_httpClient = httpClient;
_meiliConfiguration = meiliConfiguration;
_logger = logger;
_client = new MeilisearchClient("http://localhost:"+meiliConfiguration.MeiliPort, _apiKey );
_documentCollection = new ObservableCollection<KeyValuePair<string,IDocument>>();
_documentCollection.CollectionChanged += CheckIfNeedDocumentSync;
StartMeilisearch().Wait();
EnsureRepositoryIndexExists().Wait();
_logger.LogTrace("API Key: " + _apiKey);
process = null;
}
public async Task Start() => await _processManager.StartProcess();
public long GetTotalStorageUsage(bool useCompressedSize = true)
=> _indexManager.GetTotalStorageUsage(useCompressedSize);
public long GetIndexStorageUsage(string indexName, bool useCompressedSize = true)
=> _indexManager.GetIndexStorageUsage(indexName, useCompressedSize);
public void Stop() => _processManager.StopProcess();
public bool IsRunning() => _processManager.IsProcessRunning();
#region Private
/// <summary>
/// Compresses the on-disk folder of <paramref name="indexName"/> into a .zip,
/// updates its binding document in "index_bindings", and recreates the folder
/// with placeholder data/lock files so Meilisearch can still open it.
/// </summary>
private async Task CompressIndex(string indexName)
{
    var indexPath = await GetIndexFilePath(indexName);
    if (!Directory.Exists(indexPath))
    {
        _logger.LogWarning($"Index directory not found at: {indexPath}");
        return;
    }
    var compressedPath = indexPath + ".zip";
    _logger.LogTrace($"Compressing index '{indexName}' to {compressedPath}...");
    try
    {
        // Size of the live folder, recorded below as SizeBeforeCompression.
        var size = new DirectoryInfo(indexPath).GetFiles().Sum(f => f.Length);
        // Create temp directory to ensure we don't lose data if compression fails
        var tempPath = compressedPath + ".temp";
        ZipFile.CreateFromDirectory(indexPath, tempPath, CompressionLevel.SmallestSize, false);
        // If compression succeeded, safely replace old zip (if exists) and remove original directory
        if (File.Exists(compressedPath))
        {
            File.Delete(compressedPath);
        }
        File.Move(tempPath, compressedPath);
        Directory.Delete(indexPath, true);
        // Update index metadata
        var indexBindings = await _client.GetIndexAsync("index_bindings");
        // NOTE(review): blocking .Result mixed with await inside an async method —
        // deadlock-prone; the awaited indexBindings above could be reused instead.
        var doc = await _client.GetIndexAsync("index_bindings").Result.GetDocumentAsync<Models.Index>(indexName);
        var document = new Models.Index
        {
            Name = indexName,
            IsCompressed = true,
            FolderId = doc.FolderId,
            CreatedAt = doc.CreatedAt,
            SizeBeforeCompression = size,
            LastCompressedAt = DateTime.UtcNow
        };
        await indexBindings.UpdateDocumentsAsync(new List<Models.Index> { document });
        _logger.LogInformation($"Successfully compressed index '{indexName}'");
        // Recreate the folder with placeholder data/lock files copied from the
        // application directory so the index folder still exists on disk.
        Directory.CreateDirectory(indexPath);
        File.Copy(Path.Combine(AppContext.BaseDirectory,DEFAULT_DATA_FILE_PATH), Path.Combine(indexPath, DEFAULT_DATA_FILE_PATH));
        File.Copy(Path.Combine(AppContext.BaseDirectory,DEFAULT_LOCK_FILE_PATH), Path.Combine(indexPath, DEFAULT_LOCK_FILE_PATH));
        _logger.LogInformation($"Created placeholder data file for compressed index '{indexName}'");
    }
    catch (Exception ex)
    {
        _logger.LogError($"Failed to compress index '{indexName}': {ex.Message}");
        throw;
    }
    // NOTE(review): stopping the whole Meilisearch process after compressing a
    // single index looks surprising — confirm this is intentional.
    Stop();
}
/// <summary>
/// Restores the on-disk folder of <paramref name="indexName"/> from its .zip
/// archive and marks the binding document as no longer compressed.
/// </summary>
private async Task DecompressIndex(string indexName)
{
    var compressedPath = await GetIndexFilePath(indexName) + ".zip";
    var extractPath = await GetIndexFilePath(indexName);
    if (!File.Exists(compressedPath))
    {
        _logger.LogWarning($"Compressed index not found at: {compressedPath}");
        return;
    }
    _logger.LogTrace($"Decompressing index '{indexName}' to {extractPath}...");
    try
    {
        // Remove any existing folder (e.g. the placeholder files written when
        // the index was compressed) before extracting.
        if (Directory.Exists(extractPath))
        {
            Directory.Delete(extractPath, true);
        }
        // Create temp directory to ensure we don't lose data if decompression fails
        var tempPath = extractPath + ".temp";
        ZipFile.ExtractToDirectory(compressedPath, tempPath);
        // If decompression succeeded, safely move to final location
        if (Directory.Exists(extractPath))
        {
            Directory.Delete(extractPath, true);
        }
        Directory.Move(tempPath, extractPath);
        File.Delete(compressedPath);
        // Update index metadata
        var indexBindings = await _client.GetIndexAsync("index_bindings");
        // NOTE(review): blocking .Result mixed with await inside an async method —
        // the awaited indexBindings above could be reused instead.
        var doc = await _client.GetIndexAsync("index_bindings").Result.GetDocumentAsync<Models.Index>(indexName);
        var document = new Models.Index
        {
            Name = indexName,
            FolderId = doc.FolderId,
            CreatedAt = doc.CreatedAt,
            SizeBeforeCompression = null,
            IsCompressed = false,
            LastCompressedAt = doc.LastCompressedAt
        };
        await indexBindings.UpdateDocumentsAsync(new List<Models.Index> { document });
        _logger.LogInformation($"Successfully decompressed index '{indexName}'");
    }
    catch (Exception ex)
    {
        _logger.LogError($"Failed to decompress index '{indexName}': {ex.Message}");
        throw;
    }
}
/// <summary>Renders a byte count as a human-readable string (B, KB, MB, GB, TB).</summary>
private static string FormatBytes(long bytes)
{
    var units = new[] { "B", "KB", "MB", "GB", "TB" };
    double value = bytes;
    var unitIndex = 0;
    // Divide down by 1024 until the value fits the current unit or units run out.
    for (; value >= 1024 && unitIndex < units.Length - 1; unitIndex++)
    {
        value /= 1024;
    }
    return $"{value:0.##} {units[unitIndex]}";
}
/// <summary>
/// Resolves the on-disk folder of an index by looking up its binding document
/// in the "index_bindings" metadata index.
/// </summary>
/// <param name="folderId">The binding document key (the index name is used as the key).</param>
/// <returns>Absolute path of the index folder under the index base path.</returns>
private async Task<string> GetIndexFilePath(string folderId)
{
    // Await both steps instead of blocking on .Result inside an async method,
    // which risks deadlocks and wastes a thread-pool thread.
    var bindings = await _client.GetIndexAsync("index_bindings");
    var doc = await bindings.GetDocumentAsync<Models.Index>(folderId);
    return Path.Combine(_indexBasePath, doc.FolderId);
}
/// <summary>
/// Creates a random API key of <paramref name="length"/> characters drawn from
/// a 64-character URL-safe alphabet using a cryptographically secure RNG.
/// </summary>
/// <exception cref="ArgumentException">Thrown when <paramref name="length"/> is not positive.</exception>
private static string GenerateApiKey(int length = 64)
{
    if (length <= 0)
    {
        throw new ArgumentException("Length must be greater than zero.", nameof(length));
    }
    // Exactly 64 symbols, so mapping a byte with modulo is bias-free (256 % 64 == 0).
    const string allowedChars = "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789-_";
    var randomBytes = new byte[length];
    using (var rng = RandomNumberGenerator.Create())
    {
        rng.GetBytes(randomBytes);
    }
    var key = new char[length];
    for (var i = 0; i < length; i++)
    {
        key[i] = allowedChars[randomBytes[i] % allowedChars.Length];
    }
    return new string(key);
}
/// <summary>
/// Makes sure the "index_bindings" metadata index (used by the SDK to track
/// indexes) exists on the server, creating it when absent.
/// </summary>
private async Task EnsureRepositoryIndexExists()
{
    // Give the freshly spawned Meilisearch process time to accept connections.
    // Await instead of the original Task.Delay(...).Wait() / .Result blocking,
    // which defeats the purpose of an async method and risks deadlocks.
    // NOTE(review): polling a /health endpoint would be more reliable than a fixed delay.
    await Task.Delay(5000);
    var indexes = await _client.GetAllIndexesAsync();
    if (indexes.Results.Any(x => x.Uid == "index_bindings"))
    {
        _logger.LogInformation("index bindings already exists, skipping creation of index.");
        return;
    }
    _logger.LogInformation("Creating index bindings for SDK to track indexs...");
    await _client.CreateIndexAsync("index_bindings");
}
public Task SetIndexEnabled(string indexName, bool enabled)
=> _indexManager.SetIndexEnabledAsync(indexName, enabled);
/// <summary>
/// Picks the bundled Meilisearch binary name for the current OS and CPU architecture.
/// </summary>
/// <exception cref="PlatformNotSupportedException">Unsupported OS/architecture combination.</exception>
private string GetMeilisearchBinaryName()
{
    var isArm64 = RuntimeInformation.ProcessArchitecture == Architecture.Arm64;
    if (RuntimeInformation.IsOSPlatform(OSPlatform.Windows))
    {
        return "meilisearch-windows.exe";
    }
    if (RuntimeInformation.IsOSPlatform(OSPlatform.OSX))
    {
        return isArm64 ? "meilisearch-macos-arm" : "meilisearch-macos-x64";
    }
    if (RuntimeInformation.IsOSPlatform(OSPlatform.Linux))
    {
        return isArm64 ? "meilisearch-linux-arm" : "meilisearch-linux-x64";
    }
    throw new PlatformNotSupportedException("Current platform and architecture combination is not supported");
}
/// <summary>
/// Launches the bundled Meilisearch binary for the current platform and wires
/// up automatic restart when the process exits.
/// </summary>
/// <exception cref="FileNotFoundException">The platform binary is missing from the app directory.</exception>
private async Task StartMeilisearch()
{
    var binaryName = GetMeilisearchBinaryName();
    var binaryPath = Path.Combine(AppContext.BaseDirectory, binaryName);
    if (!File.Exists(binaryPath))
    {
        _logger.LogError($"Meilisearch binary not found at: {binaryPath}");
        throw new FileNotFoundException($"Could not find Meilisearch binary: {binaryName}");
    }
    // Set execute permissions on Unix-like systems
    if (!RuntimeInformation.IsOSPlatform(OSPlatform.Windows))
    {
        try
        {
            var chmod = Process.Start("chmod", $"+x {binaryPath}");
            chmod?.WaitForExit();
        }
        catch (Exception ex)
        {
            _logger.LogWarning($"Failed to set execute permissions on binary: {ex.Message}");
        }
    }
    var host = RuntimeInformation.IsOSPlatform(OSPlatform.Windows)
        ? "localhost"
        : "127.0.0.1";
    var args = "--http-addr " + host + ":" + _meiliConfiguration.MeiliPort
        + " --env development --db-path "
        + Path.Combine(AppContext.BaseDirectory, "db");
    //+ " --master-key " + _apiKey; // NOTE: bring back — the master key was not working when compressing indexes
    var processStartInfo = new ProcessStartInfo
    {
        FileName = binaryPath,
        Arguments = args,
        UseShellExecute = false,
        RedirectStandardOutput = false,
        RedirectStandardError = false,
        CreateNoWindow = false,
    };
    process = new Process { StartInfo = processStartInfo, EnableRaisingEvents = true};
    // Auto-restart if the process dies.
    process.Exited += (sender, e) =>
    {
        _logger.LogWarning("Meilisearch process has exited. Restarting...");
        _ = StartMeilisearch(); // Restart the process
    };
    // NOTE(review): restarting on Disposed as well looks suspicious — Dispose()
    // also stops the process deliberately; confirm this does not fight shutdown.
    process.Disposed += (sender, eventArgs) =>
    {
        _logger.LogWarning("Meilisearch process has exited. Restarting...");
        _ = StartMeilisearch(); // Restart the process
    };
    try
    {
        process.Start();
        await Task.Delay(5000); // Wait for the process to start
        _logger.LogInformation($"Started Meilisearch process using binary: {binaryName}");
    }
    catch (Exception ex)
    {
        _logger.LogError($"Failed to start Meilisearch: {ex.Message}");
        throw;
    }
}
/// <summary>Current resource usage of the Meilisearch process, delegated to the process manager.</summary>
public ProcessResourceStats GetResourceUsage() => _processManager.GetResourceUsage();
// CollectionChanged adapter: re-evaluates the sync threshold on every buffer change.
private void CheckIfNeedDocumentSync(object? sender, NotifyCollectionChangedEventArgs e) =>
    CheckIfNeedDocumentSync(THRESHOLD);
// Flushes the buffered documents once the buffer reaches the threshold.
// A null threshold is treated as 0, i.e. flush whenever anything is buffered.
private void CheckIfNeedDocumentSync(int? threshold = null)
{
    var effectiveThreshold = threshold ?? 0;
    if (_documentCollection.Count < effectiveThreshold)
    {
        return;
    }
    _logger.LogInformation("Threshold reached, syncing metadata to server.");
    SyncDocumentsToServer();
}
/// <summary>
/// Flushes the buffered documents to the server, grouped per target index,
/// uploading with "id" as the primary key and retrying on failure.
/// </summary>
private void SyncDocumentsToServer()
{
    // Snapshot the buffer grouped by index, then clear it once up front.
    // The original cleared inside the loop — only effective on the first
    // iteration — and built an unused ToList() copy on every iteration.
    var grouped = _documentCollection.GroupBy(pair => pair.Key)
        .ToDictionary(group => group.Key, group => group.Select(pair => pair.Value).ToList());
    _documentCollection.Clear();
    foreach (var repository in grouped)
    {
        var repositoryIndex = _client.GetIndexAsync(repository.Key).Result;
        _ = RetryAsync(() => repositoryIndex.AddDocumentsAsync(repository.Value, "id")).Result;
    }
}
/// <summary>
/// Runs <paramref name="action"/> until it succeeds, retrying up to
/// <paramref name="maxRetries"/> times with a fixed delay between attempts,
/// and rethrows the last exception once the retry budget is exhausted.
/// </summary>
private async Task<T> RetryAsync<T>(Func<Task<T>> action, int maxRetries = 3, int delayMilliseconds = 1000)
{
    for (var attempt = 1; ; attempt++)
    {
        try
        {
            return await action();
        }
        catch (Exception ex)
        {
            if (attempt >= maxRetries)
            {
                _logger.LogError($"Operation failed after {maxRetries} retries: {ex.Message}");
                throw;
            }
            _logger.LogWarning($"Operation failed, retrying {attempt}/{maxRetries}...");
            await Task.Delay(delayMilliseconds);
        }
    }
}
/// <summary>Public instance property names of <typeparamref name="T"/>, converted to camelCase.</summary>
private static string[] GetPropertiesInCamelCase<T>()
{
    return typeof(T)
        .GetProperties(BindingFlags.Public | BindingFlags.Instance)
        .Select(property => ToCamelCase(property.Name))
        .ToArray();
}
/// <summary>Lower-cases the first character (invariant culture); leaves the rest untouched.</summary>
private static string ToCamelCase(string input)
{
    // Nothing to do for empty input or input that already starts lower-case.
    if (string.IsNullOrEmpty(input) || char.IsLower(input[0]))
    {
        return input;
    }
    var head = char.ToLowerInvariant(input[0]);
    return head + input.Substring(1);
}
#endregion
public Task<List<string>> GetAllIndexes() => _indexManager.GetAllIndexes();
public Task CreateIndex<T>(string indexName) where T : IDocument
=> _indexManager.CreateIndexAsync<T>(indexName);
public Task DeleteIndex(string indexName) => _indexManager.DeleteIndexAsync(indexName);
#region Public
public virtual MeilisearchUsageStats GetProcessResourceUsage()
{
if (process == null || process.HasExited)
{
return new MeilisearchUsageStats();
}
public void AddDocument(string repositoryId, IDocument document, bool autoCommit = false)
=> _documentManager.AddDocument(repositoryId, document, autoCommit);
try
{
process.Refresh();
// CPU Usage
TimeSpan cpuUsage = process.TotalProcessorTime;
double cpuPercentage = cpuUsage.TotalMilliseconds / (Environment.ProcessorCount * process.TotalProcessorTime.TotalMilliseconds) * 100;
// Memory Usage (in bytes)
long memoryUsage = process.WorkingSet64;
// Disk Usage (in bytes) - reads and writes
long diskRead = process.StartInfo.RedirectStandardOutput ? process.StandardOutput.BaseStream.Length : 0;
long diskWrite = process.StartInfo.RedirectStandardError ? process.StandardError.BaseStream.Length : 0;
return new MeilisearchUsageStats
{
CpuPercentage = Math.Round(cpuPercentage, 2),
MemoryUsageBytes = memoryUsage,
DiskReadBytes = diskRead,
DiskWriteBytes = diskWrite,
ProcessId = process.Id,
ThreadCount = process.Threads.Count
};
}
catch (InvalidOperationException)
{
// Process has exited
return new MeilisearchUsageStats();
}
catch (Exception ex)
{
_logger.LogError($"Error getting process resource usage: {ex.Message}");
return new MeilisearchUsageStats();
}
}
/// <summary>
/// Reports whether any Meilisearch process is currently running on this
/// machine, matched by the process name used on the current OS.
/// </summary>
public virtual bool IsMeilisearchRunning()
{
    var processName = RuntimeInformation.IsOSPlatform(OSPlatform.Windows)
        ? "meilisearch-windows"
        : "meilisearch";
    return Process.GetProcessesByName(processName).Length > 0;
}
/// <summary>
/// Enables (decompresses) or disables (compresses) the given index on disk.
/// </summary>
public virtual async Task SetIndexEnabled(string indexName, bool enabled)
{
    _logger.LogTrace($"Updating index '{indexName}' status to {enabled}...");
    var toggle = enabled ? DecompressIndex(indexName) : CompressIndex(indexName);
    await toggle;
    _logger.LogInformation($"Updated index '{indexName}' status to {enabled}.");
}
/// <summary>
/// Creates <paramref name="indexName"/>, marks every public property of
/// <typeparamref name="T"/> (camelCased) as filterable, and records a binding
/// document mapping the index name to its on-disk folder. No-op when the
/// index already exists or the 1000-index cap is reached.
/// </summary>
public virtual void CreateIndex<T>(string indexName) where T : IDocument
{
    if(GetAllIndexes().Count>=1000)
    {
        _logger.LogWarning("Maximum number of indexes reached, cannot create new index.");
        return;
    }
    var indexes = _client.GetAllIndexesAsync().Result;
    if (indexes.Results.Any(x => x.Uid == indexName))
    {
        _logger.LogWarning($"Index {indexName} already exists, skipping creation of index.");
        return;
    }
    // Snapshot the folder list so the new index's opaque folder name can be
    // identified afterwards by diffing.
    var foldersBefore = Directory.GetDirectories(_indexBasePath);
    _logger.LogTrace($"Creating index '{indexName}'...");
    _client.CreateIndexAsync(indexName).Wait();
    // NOTE(review): fixed 5s delay assumes index creation settles in time;
    // polling the task status would be more reliable.
    Task.Delay(5000).Wait();
    var index = _client.GetIndexAsync(indexName).Result;
    var test = index.GetFilterableAttributesAsync().Result; // NOTE(review): result unused — debug leftover?
    index.UpdateFilterableAttributesAsync(GetPropertiesInCamelCase<T>()).Wait();
    _logger.LogInformation($"{indexName} index created!");
    var foldersAfter = Directory.GetDirectories(_indexBasePath);
    var folder = Path.GetFileName(foldersAfter.Except(foldersBefore).FirstOrDefault());
    if (folder != null)
    {
        // Record the name → folder binding so storage/compression code can
        // find the index directory later.
        _client.GetIndexAsync("index_bindings").Result.AddDocumentsAsync(new List<Models.Index>
        {
            new()
            {
                Name = indexName,
                CreatedAt = DateTime.UtcNow,
                SizeBeforeCompression = null,
                FolderId = folder,
                IsCompressed = false,
                LastCompressedAt = null
            }
        }, "name").Wait();
    }
}
/// <summary>
/// Deletes <paramref name="indexName"/> from the server and removes its
/// binding document from "index_bindings". No-op when the index does not exist.
/// </summary>
public virtual void DeleteIndex(string indexName)
{
    var existing = _client.GetAllIndexesAsync().Result;
    var found = existing.Results.Any(x => x.Uid == indexName);
    if (!found)
    {
        _logger.LogWarning($"Index '{indexName}' does not exist, skipping deletion of index.");
        return;
    }
    _logger.LogTrace($"Deleting index '{indexName}'...");
    _client.DeleteIndexAsync(indexName).Wait();
    _client.GetIndexAsync("index_bindings").Result.DeleteOneDocumentAsync(indexName).Wait();
    _logger.LogInformation($"Deleted index '{indexName}'!");
}
/// <summary>
/// Buffers a document destined for <paramref name="repositoryId"/>; when
/// <paramref name="autoCommit"/> is true the buffer is flushed immediately.
/// </summary>
public void AddDocument(string repositoryId, IDocument document, bool autoCommit = false)
{
    _logger.LogTrace($"Adding document '{document.Id}' to repository '{repositoryId}'...");
    _documentCollection.Add(new KeyValuePair<string, IDocument>(repositoryId, document));
    _logger.LogInformation($"Document {document.Id} added to collection.");
    if (!autoCommit)
    {
        return;
    }
    SyncDocumentsToServer();
}
/// <summary>
/// Lists all index uids on the server, excluding the SDK-internal "index_bindings".
/// </summary>
public virtual List<string> GetAllIndexes()
{
    _logger.LogTrace("Fetching all indexes from Meilisearch server created with the SDK...");
    var result = new List<string>();
    foreach (var index in _client.GetAllIndexesAsync().Result.Results)
    {
        if (index.Uid != "index_bindings")
        {
            result.Add(index.Uid);
        }
    }
    _logger.LogInformation($"Fetched {result.Count} indexes from Meilisearch server.");
    return result;
}
/// <summary>
/// Bytes used on disk by one index. For a compressed index returns the .zip
/// size, or the recorded pre-compression size when
/// <paramref name="useCompressedSize"/> is false; otherwise sums the files in
/// the index folder. Returns 0 when the backing file or folder is missing.
/// </summary>
public virtual long GetIndexStorageUsage(string indexName, bool useCompressedSize = true)
{
    var doc = _client.GetIndexAsync("index_bindings").Result.GetDocumentAsync<Models.Index>(indexName).Result;
    if (doc.IsCompressed)
    {
        if(!useCompressedSize)
            return doc.SizeBeforeCompression ?? 0;
        var indexPath = GetIndexFilePath(indexName).Result+".zip";
        if (!File.Exists(indexPath))
        {
            _logger.LogWarning($"Compressed index not found at: {indexPath}");
            return 0;
        }
        return new FileInfo(indexPath).Length;
    }
    // Uncompressed: measure the live index folder.
    var path = Path.Combine(_indexBasePath, doc.FolderId);
    if (!Directory.Exists(path))
    {
        _logger.LogWarning($"Index directory not found at: {path}");
        return 0;
    }
    return new DirectoryInfo(path).GetFiles().Sum(f => f.Length);
}
/// <summary>
/// Sums the storage used by all indexes recorded in "index_bindings".
/// Compressed indexes contribute their .zip size (or their recorded
/// pre-compression size when <paramref name="useCompressedSize"/> is false);
/// uncompressed indexes are measured from their live folder.
/// </summary>
public virtual long GetTotalStorageUsage(bool useCompressedSize = true)
{
    var result = _client.GetIndexAsync("index_bindings").Result.GetDocumentsAsync<Models.Index>(new DocumentsQuery(){Limit = 1000}).Result;
    var total = 0L;
    foreach (var index in result.Results)
    {
        if (index.IsCompressed)
        {
            if (useCompressedSize)
            {
                // Only resolve the zip path when it is actually needed
                // (the original computed it for every index).
                var indexPath = GetIndexFilePath(index.Name).Result + ".zip";
                total += new FileInfo(indexPath).Length;
            }
            else
            {
                total += index.SizeBeforeCompression ?? 0;
            }
        }
        else
        {
            // The original had a redundant IsCompressed ternary here; in this
            // branch the index is never compressed, so measure the live folder.
            total += new DirectoryInfo(Path.Combine(_indexBasePath, index.FolderId)).GetFiles().Sum(f => f.Length);
        }
    }
    return total;
}
/// <summary>
/// Starts the embedded Meilisearch process. Returns the startup task so
/// callers can await it — the previous 'async void' signature made startup
/// exceptions unobservable and uncatchable.
/// </summary>
public async Task Start()
{
    await StartMeilisearch();
}
/// <summary>Kills the embedded Meilisearch process if one was started.</summary>
public virtual void Stop()
{
    if (process != null)
    {
        process.Kill();
    }
}
public void Dispose()
{
    // Flush any remaining buffered documents (with no threshold argument the
    // effective threshold is 0, so anything buffered is synced) before shutdown.
    CheckIfNeedDocumentSync();
    Stop();
    _httpClient.Dispose();
    // NOTE(review): this method touches both the legacy composition (_httpClient,
    // the owned process via Stop()) and the new _processManager — looks like a
    // mid-refactor merge artifact; confirm which composition is current.
    _processManager.Dispose();
}
#endregion
}

View File

@ -0,0 +1,110 @@
using System.Collections.ObjectModel;
using System.Collections.Specialized;
using Meilisearch;
using meilisearch.NET.Interfaces;
using Microsoft.Extensions.Logging;
namespace meilisearch.NET.Services.DocumentManagement;
/// <summary>
/// Buffers documents in memory and flushes them to the Meilisearch server,
/// either automatically once a threshold is reached or on demand.
/// </summary>
public class DocumentManager : IDocumentManager
{
    private readonly ILogger<DocumentManager> _logger;
    private readonly MeilisearchClient _client;
    // Number of buffered documents that triggers an automatic sync.
    private const int THRESHOLD = 100;
    private readonly ObservableCollection<KeyValuePair<string, IDocument>> _documentCollection;

    public DocumentManager(MeilisearchClient client, ILogger<DocumentManager> logger)
    {
        _logger = logger;
        _client = client;
        _documentCollection = new ObservableCollection<KeyValuePair<string, IDocument>>();
        // Re-check the threshold on every buffer change.
        _documentCollection.CollectionChanged += CheckIfNeedDocumentSync;
    }

    /// <summary>
    /// Buffers a document for <paramref name="repositoryId"/>; flushes the
    /// buffer asynchronously when <paramref name="autoCommit"/> is true.
    /// </summary>
    public async Task AddDocumentAsync(string repositoryId, IDocument document, bool autoCommit = false)
    {
        _logger.LogTrace($"Adding document '{document.Id}' to repository '{repositoryId}'...");
        _documentCollection.Add(new KeyValuePair<string, IDocument>(repositoryId, document));
        _logger.LogInformation($"Document {document.Id} added to collection.");
        if (autoCommit)
        {
            // Await the async flush instead of calling the blocking variant
            // from an async method (the original never awaited anything here).
            await SyncDocumentsToServerAsync();
        }
    }

    /// <summary>Synchronous counterpart of <see cref="AddDocumentAsync"/>.</summary>
    public void AddDocument(string repositoryId, IDocument document, bool autoCommit = false)
    {
        _logger.LogTrace($"Adding document '{document.Id}' to repository '{repositoryId}'...");
        _documentCollection.Add(new KeyValuePair<string, IDocument>(repositoryId, document));
        _logger.LogInformation($"Document {document.Id} added to collection.");
        if (autoCommit)
        {
            SyncDocumentsToServer();
        }
    }

    /// <summary>Flushes the buffer to the server, grouped per target index (blocking).</summary>
    public void SyncDocumentsToServer()
    {
        // Single implementation: block on the async variant rather than
        // duplicating the grouping/upload logic.
        SyncDocumentsToServerAsync().GetAwaiter().GetResult();
    }

    /// <summary>
    /// Flushes the buffer to the server, grouped per target index, uploading
    /// with "id" as the primary key and retrying on failure.
    /// </summary>
    public async Task SyncDocumentsToServerAsync()
    {
        // Snapshot the buffer grouped by index, then clear it once up front.
        // The original cleared inside the loop (only effective on the first
        // iteration), left an unused ToList() copy, and blocked on
        // RetryAsync(...).Result inside an async method.
        var grouped = _documentCollection.GroupBy(pair => pair.Key)
            .ToDictionary(group => group.Key, group => group.Select(pair => pair.Value).ToList());
        _documentCollection.Clear();
        foreach (var repository in grouped)
        {
            var repositoryIndex = await _client.GetIndexAsync(repository.Key);
            await RetryAsync(() => repositoryIndex.AddDocumentsAsync(repository.Value, "id"));
        }
    }

    #region Private Methods
    // CollectionChanged adapter: evaluates the sync threshold on every change.
    private void CheckIfNeedDocumentSync(object? sender, NotifyCollectionChangedEventArgs e)
    {
        CheckIfNeedDocumentSync(THRESHOLD);
    }

    // Flushes when the buffer reaches the threshold. A null threshold is
    // treated as 0, i.e. flush whenever anything is buffered.
    private void CheckIfNeedDocumentSync(int? threshold = null)
    {
        threshold = threshold ?? 0;
        if (_documentCollection.Count >= threshold)
        {
            _logger.LogInformation("Threshold reached, syncing metadata to server.");
            SyncDocumentsToServer();
        }
    }

    // Runs the action until success, up to maxRetries attempts with a fixed
    // delay between attempts, rethrowing the final failure.
    private async Task<T> RetryAsync<T>(Func<Task<T>> action, int maxRetries = 3, int delayMilliseconds = 1000)
    {
        int retryCount = 0;
        while (true)
        {
            try
            {
                return await action();
            }
            catch (Exception ex)
            {
                retryCount++;
                if (retryCount >= maxRetries)
                {
                    _logger.LogError($"Operation failed after {maxRetries} retries: {ex.Message}");
                    throw;
                }
                _logger.LogWarning($"Operation failed, retrying {retryCount}/{maxRetries}...");
                await Task.Delay(delayMilliseconds);
            }
        }
    }
    #endregion
}

View File

@ -0,0 +1,11 @@
using meilisearch.NET.Interfaces;
namespace meilisearch.NET.Services.DocumentManagement;
/// <summary>
/// Buffered document ingestion: documents are queued per repository (index)
/// and flushed to the Meilisearch server on demand or on auto-commit.
/// </summary>
public interface IDocumentManager
{
    /// <summary>Buffers a document; flushes immediately when <paramref name="autoCommit"/> is true.</summary>
    void AddDocument(string repositoryId, IDocument document, bool autoCommit = false);
    /// <summary>Flushes all buffered documents to the server (blocking).</summary>
    void SyncDocumentsToServer();
    /// <summary>Async counterpart of <see cref="AddDocument"/>.</summary>
    Task AddDocumentAsync(string repositoryId, IDocument document, bool autoCommit = false);
    /// <summary>Async counterpart of <see cref="SyncDocumentsToServer"/>.</summary>
    Task SyncDocumentsToServerAsync();
}

View File

@ -0,0 +1,18 @@
using meilisearch.NET.Interfaces;
namespace meilisearch.NET.Services.IndexManagement;
/// <summary>
/// Index lifecycle management: creation, deletion, enable/disable
/// (decompress/compress on disk) and storage accounting, in both
/// synchronous and asynchronous forms.
/// </summary>
public interface IIndexManager
{
    /// <summary>Lists all index uids managed through the SDK.</summary>
    Task<List<string>> GetAllIndexes();
    /// <summary>Creates an index whose filterable attributes come from <typeparamref name="T"/>'s public properties.</summary>
    Task CreateIndexAsync<T>(string indexName) where T : IDocument;
    /// <summary>Synchronous counterpart of <see cref="CreateIndexAsync"/>.</summary>
    void CreateIndex<T>(string indexName) where T : IDocument;
    /// <summary>Deletes an index and its binding metadata.</summary>
    Task DeleteIndexAsync(string indexName);
    /// <summary>Synchronous counterpart of <see cref="DeleteIndexAsync"/>.</summary>
    void DeleteIndex(string indexName);
    /// <summary>Enables (decompresses) or disables (compresses) an index on disk.</summary>
    Task SetIndexEnabledAsync(string indexName, bool enabled);
    /// <summary>Synchronous counterpart of <see cref="SetIndexEnabledAsync"/>.</summary>
    void SetIndexEnabled(string indexName, bool enabled);
    /// <summary>Bytes used by one index; compressed indexes report zip size or pre-compression size.</summary>
    Task<long> GetIndexStorageUsageAsync(string indexName, bool useCompressedSize = true);
    /// <summary>Total bytes used by all tracked indexes.</summary>
    Task<long> GetTotalStorageUsageAsync(bool useCompressedSize = true);
    /// <summary>Synchronous counterpart of <see cref="GetIndexStorageUsageAsync"/>.</summary>
    long GetIndexStorageUsage(string indexName, bool useCompressedSize = true);
    /// <summary>Synchronous counterpart of <see cref="GetTotalStorageUsageAsync"/>.</summary>
    long GetTotalStorageUsage(bool useCompressedSize = true);
}

View File

@ -0,0 +1,387 @@
using System.IO.Compression;
using System.Reflection;
using Meilisearch;
using meilisearch.NET.Interfaces;
using meilisearch.NET.Services.ProcessManagement;
using Meilisearch.QueryParameters;
using Microsoft.Extensions.Logging;
namespace meilisearch.NET.Services.IndexManagement;
public class IndexManager:IIndexManager
{
private const string DefaultDataFilePath = "data.mdb";
private const string DefaultLockFilePath = "lock.mdb";
private readonly string _indexBasePath = Path.Combine(AppContext.BaseDirectory, "db", "indexes" );
private readonly ILogger<IndexManager> _logger;
private readonly MeilisearchClient _client;
private readonly MeiliSearchProcessManager _processManager;
/// <summary>Creates an index manager bound to the given client and process manager.</summary>
public IndexManager(ILogger<IndexManager> logger, MeilisearchClient client, MeiliSearchProcessManager processManager)
{
    _logger = logger;
    _client = client;
    _processManager = processManager;
}
/// <summary>
/// Lists all index uids on the server, excluding the SDK-internal
/// "index_bindings" metadata index. Replaces the NotImplementedException stub,
/// which broke CreateIndex/CreateIndexAsync — both call this method first.
/// </summary>
public async Task<List<string>> GetAllIndexes()
{
    var indexes = await _client.GetAllIndexesAsync();
    return indexes.Results
        .Select(x => x.Uid)
        .Where(x => x != "index_bindings")
        .ToList();
}
/// <summary>
/// Synchronously creates <paramref name="indexName"/>, marks every public
/// property of <typeparamref name="T"/> (camelCased) as filterable, and
/// records a binding document mapping the index name to its on-disk folder.
/// No-op when the index already exists or the 1000-index cap is reached.
/// </summary>
public void CreateIndex<T>(string indexName) where T : IDocument
{
    var indexes = GetAllIndexes().Result;
    if (indexes.Count >= 1000)
    {
        _logger.LogWarning("Maximum number of indexes reached, cannot create new index.");
        return;
    }
    if (indexes.Any(x => x == indexName))
    {
        _logger.LogWarning($"Index {indexName} already exists, skipping creation of index.");
        return;
    }
    // Snapshot the folder list so the new index's opaque folder name can be
    // identified afterwards by diffing.
    var foldersBefore = Directory.GetDirectories(_indexBasePath);
    _logger.LogTrace($"Creating index '{indexName}'...");
    _client.CreateIndexAsync(indexName).Wait();
    // NOTE(review): fixed 5s delay assumes creation settles in time; polling
    // the task status would be more reliable.
    Task.Delay(5000).Wait();
    var index = _client.GetIndexAsync(indexName).Result;
    // (dropped the original's unused GetFilterableAttributesAsync().Result local)
    index.UpdateFilterableAttributesAsync(GetPropertiesInCamelCase<T>()).Wait();
    _logger.LogInformation($"{indexName} index created!");
    var foldersAfter = Directory.GetDirectories(_indexBasePath);
    var folder = Path.GetFileName(foldersAfter.Except(foldersBefore).FirstOrDefault());
    if (folder != null)
    {
        // Record the name → folder binding for later storage/compression lookups.
        _client.GetIndexAsync("index_bindings").Result.AddDocumentsAsync(new List<Models.Index>
        {
            new()
            {
                Name = indexName,
                CreatedAt = DateTime.UtcNow,
                SizeBeforeCompression = null,
                FolderId = folder,
                IsCompressed = false,
                LastCompressedAt = null
            }
        }, "name").Wait();
    }
}
/// <summary>
/// Creates <paramref name="indexName"/>, marks every public property of
/// <typeparamref name="T"/> (camelCased) as filterable, and records a binding
/// document mapping the index name to its on-disk folder. No-op when the
/// index already exists or the 1000-index cap is reached.
/// </summary>
public async Task CreateIndexAsync<T>(string indexName) where T : IDocument
{
    var indexes = await GetAllIndexes();
    if (indexes.Count >= 1000)
    {
        _logger.LogWarning("Maximum number of indexes reached, cannot create new index.");
        return;
    }
    if (indexes.Any(x => x == indexName))
    {
        _logger.LogWarning($"Index {indexName} already exists, skipping creation of index.");
        return;
    }
    // Snapshot the folder list so the new index's opaque folder name can be
    // identified afterwards by diffing.
    var foldersBefore = Directory.GetDirectories(_indexBasePath);
    _logger.LogTrace($"Creating index '{indexName}'...");
    // Await throughout — the original used .Wait()/.Result inside this async
    // method, which blocks thread-pool threads and risks deadlocks.
    await _client.CreateIndexAsync(indexName);
    // NOTE(review): fixed 5s delay assumes creation settles in time.
    await Task.Delay(5000);
    var index = await _client.GetIndexAsync(indexName);
    await index.UpdateFilterableAttributesAsync(GetPropertiesInCamelCase<T>());
    _logger.LogInformation($"{indexName} index created!");
    var foldersAfter = Directory.GetDirectories(_indexBasePath);
    var folder = Path.GetFileName(foldersAfter.Except(foldersBefore).FirstOrDefault());
    if (folder != null)
    {
        // Record the name → folder binding for later storage/compression lookups.
        var bindings = await _client.GetIndexAsync("index_bindings");
        await bindings.AddDocumentsAsync(new List<Models.Index>
        {
            new()
            {
                Name = indexName,
                CreatedAt = DateTime.UtcNow,
                SizeBeforeCompression = null,
                FolderId = folder,
                IsCompressed = false,
                LastCompressedAt = null
            }
        }, "name");
    }
}
/// <summary>
/// Deletes <paramref name="indexName"/> from the server and removes its
/// binding document from "index_bindings". No-op when the index does not exist.
/// </summary>
public void DeleteIndex(string indexName)
{
    var existing = _client.GetAllIndexesAsync().Result;
    var found = existing.Results.Any(x => x.Uid == indexName);
    if (!found)
    {
        _logger.LogWarning($"Index '{indexName}' does not exist, skipping deletion of index.");
        return;
    }
    _logger.LogTrace($"Deleting index '{indexName}'...");
    _client.DeleteIndexAsync(indexName).Wait();
    _client.GetIndexAsync("index_bindings").Result.DeleteOneDocumentAsync(indexName).Wait();
    _logger.LogInformation($"Deleted index '{indexName}'!");
}
/// <summary>
/// Deletes <paramref name="indexName"/> and its binding document.
/// No-op when the index does not exist.
/// </summary>
public async Task DeleteIndexAsync(string indexName)
{
    // Await instead of the original's blocking .Result calls inside an async method.
    var indexes = await _client.GetAllIndexesAsync();
    if (!indexes.Results.Any(x => x.Uid == indexName))
    {
        _logger.LogWarning($"Index '{indexName}' does not exist, skipping deletion of index.");
        return;
    }
    _logger.LogTrace($"Deleting index '{indexName}'...");
    await _client.DeleteIndexAsync(indexName);
    var bindings = await _client.GetIndexAsync("index_bindings");
    await bindings.DeleteOneDocumentAsync(indexName);
    _logger.LogInformation($"Deleted index '{indexName}'!");
}
/// <summary>
/// Synchronously toggles an index: enabling decompresses it back to disk,
/// disabling compresses it into a zip archive.
/// </summary>
public void SetIndexEnabled(string indexName, bool enabled)
{
    _logger.LogTrace($"Updating index '{indexName}' status to {enabled}...");
    var transition = enabled ? DecompressIndex(indexName) : CompressIndex(indexName);
    transition.Wait();
    _logger.LogInformation($"Updated index '{indexName}' status to {enabled}.");
}
/// <summary>
/// Asynchronously toggles an index: enabling decompresses it back to disk,
/// disabling compresses it into a zip archive.
/// </summary>
public async Task SetIndexEnabledAsync(string indexName, bool enabled)
{
    _logger.LogTrace($"Updating index '{indexName}' status to {enabled}...");
    var transition = enabled ? DecompressIndex(indexName) : CompressIndex(indexName);
    await transition;
    _logger.LogInformation($"Updated index '{indexName}' status to {enabled}.");
}
/// <summary>
/// Returns the on-disk size of a single index in bytes.
/// For compressed indexes, reports the zip size by default, or the recorded
/// pre-compression size when <paramref name="useCompressedSize"/> is false.
/// Returns 0 (with a warning) when the backing file/directory is missing.
/// </summary>
public long GetIndexStorageUsage(string indexName, bool useCompressedSize = true)
{
    // Binding document records compression state and the index's folder id.
    var binding = _client.GetIndexAsync("index_bindings").Result.GetDocumentAsync<Models.Index>(indexName).Result;
    if (!binding.IsCompressed)
    {
        var folder = Path.Combine(_indexBasePath, binding.FolderId);
        if (!Directory.Exists(folder))
        {
            _logger.LogWarning($"Index directory not found at: {folder}");
            return 0;
        }
        return new DirectoryInfo(folder).GetFiles().Sum(f => f.Length);
    }
    if (!useCompressedSize)
    {
        return binding.SizeBeforeCompression ?? 0;
    }
    var zipPath = GetIndexFilePath(indexName).Result + ".zip";
    if (!File.Exists(zipPath))
    {
        _logger.LogWarning($"Compressed index not found at: {zipPath}");
        return 0;
    }
    return new FileInfo(zipPath).Length;
}
/// <summary>
/// Returns the combined on-disk size of all bound indexes in bytes.
/// For compressed indexes, reports zip sizes by default, or the recorded
/// pre-compression sizes when <paramref name="useCompressedSize"/> is false.
/// Missing files/directories contribute 0 instead of throwing.
/// </summary>
public long GetTotalStorageUsage(bool useCompressedSize = true)
{
    var result = _client.GetIndexAsync("index_bindings").Result.GetDocumentsAsync<Models.Index>(new DocumentsQuery(){Limit = 1000}).Result;
    var total = 0L;
    foreach (var index in result.Results)
    {
        if (index.IsCompressed)
        {
            if (useCompressedSize)
            {
                // Only resolve the path when it is actually needed (the original
                // did this round trip for every index unconditionally).
                var indexPath = GetIndexFilePath(index.Name).Result + ".zip";
                // Guard against a missing zip instead of throwing
                // (mirrors GetIndexStorageUsage's behavior).
                total += File.Exists(indexPath) ? new FileInfo(indexPath).Length : 0;
            }
            else
            {
                total += index.SizeBeforeCompression ?? 0;
            }
        }
        else
        {
            // The original had a ternary on IsCompressed here, but IsCompressed
            // is always false in this branch — it always summed the directory.
            var folder = Path.Combine(_indexBasePath, index.FolderId);
            total += Directory.Exists(folder)
                ? new DirectoryInfo(folder).GetFiles().Sum(f => f.Length)
                : 0;
        }
    }
    return total;
}
/// <summary>
/// Async variant of <see cref="GetIndexStorageUsage"/>: returns the on-disk
/// size of a single index in bytes, 0 (with a warning) when the backing
/// file/directory is missing.
/// </summary>
public async Task<long> GetIndexStorageUsageAsync(string indexName, bool useCompressedSize = true)
{
    // BUGFIX: the original fetched the binding document with .Result inside
    // an async method; await both hops instead.
    var bindings = await _client.GetIndexAsync("index_bindings");
    var doc = await bindings.GetDocumentAsync<Models.Index>(indexName);
    if (doc.IsCompressed)
    {
        if (!useCompressedSize)
        {
            return doc.SizeBeforeCompression ?? 0;
        }
        var indexPath = await GetIndexFilePath(indexName) + ".zip";
        if (!File.Exists(indexPath))
        {
            _logger.LogWarning($"Compressed index not found at: {indexPath}");
            return 0;
        }
        return new FileInfo(indexPath).Length;
    }
    var path = Path.Combine(_indexBasePath, doc.FolderId);
    if (!Directory.Exists(path))
    {
        _logger.LogWarning($"Index directory not found at: {path}");
        return 0;
    }
    return new DirectoryInfo(path).GetFiles().Sum(f => f.Length);
}
/// <summary>
/// Async variant of <see cref="GetTotalStorageUsage"/>: combined on-disk size
/// of all bound indexes in bytes. Missing files/directories contribute 0.
/// </summary>
public async Task<long> GetTotalStorageUsageAsync(bool useCompressedSize = true)
{
    // BUGFIX: the original blocked on .Result for the bindings lookup
    // inside an async method; await instead.
    var bindings = await _client.GetIndexAsync("index_bindings");
    var result = await bindings.GetDocumentsAsync<Models.Index>(new DocumentsQuery(){Limit = 1000});
    var total = 0L;
    foreach (var index in result.Results)
    {
        if (index.IsCompressed)
        {
            if (useCompressedSize)
            {
                var indexPath = await GetIndexFilePath(index.Name) + ".zip";
                // Guard against a missing zip instead of throwing.
                total += File.Exists(indexPath) ? new FileInfo(indexPath).Length : 0;
            }
            else
            {
                total += index.SizeBeforeCompression ?? 0;
            }
        }
        else
        {
            // The original's ternary on IsCompressed was dead code here
            // (IsCompressed is always false in this branch).
            var folder = Path.Combine(_indexBasePath, index.FolderId);
            total += Directory.Exists(folder)
                ? new DirectoryInfo(folder).GetFiles().Sum(f => f.Length)
                : 0;
        }
    }
    return total;
}
#region Private Methods
/// <summary>
/// Reflects over T's public instance properties and returns each property
/// name converted to camelCase (used as Meilisearch filterable attributes).
/// </summary>
private static string[] GetPropertiesInCamelCase<T>()
{
    return typeof(T)
        .GetProperties(BindingFlags.Public | BindingFlags.Instance)
        .Select(property => ToCamelCase(property.Name))
        .ToArray();
}
/// <summary>
/// Converts a PascalCase identifier to camelCase by lower-casing only its
/// first character. Null, empty, and already-camelCase inputs are returned
/// unchanged.
/// </summary>
private static string ToCamelCase(string input)
{
    // Nothing to do when there is no text or the first char is already lower.
    if (string.IsNullOrEmpty(input) || char.IsLower(input[0]))
        return input;
    var first = char.ToLowerInvariant(input[0]);
    return first + input.Substring(1);
}
/// <summary>
/// Resolves the on-disk directory path of an index via its binding document.
/// NOTE(review): despite the parameter name, callers pass the index *name*;
/// the binding document keyed by that name supplies the actual folder id.
/// </summary>
private async Task<string> GetIndexFilePath(string folderId)
{
    // BUGFIX: the original mixed .Result with await
    // (`await _client.GetIndexAsync(...).Result.GetDocumentAsync(...)`);
    // await both hops instead.
    var bindings = await _client.GetIndexAsync("index_bindings");
    var doc = await bindings.GetDocumentAsync<Models.Index>(folderId);
    return Path.Combine(_indexBasePath, doc.FolderId);
}
/// <summary>
/// Compresses an index directory into a zip archive, updates its binding
/// document (size, timestamps, IsCompressed), leaves placeholder data/lock
/// files so Meilisearch still sees a directory, then stops the process.
/// No-op with a warning when the directory is missing.
/// </summary>
private async Task CompressIndex(string indexName)
{
    var indexPath = await GetIndexFilePath(indexName);
    if (!Directory.Exists(indexPath))
    {
        _logger.LogWarning($"Index directory not found at: {indexPath}");
        return;
    }
    var compressedPath = indexPath + ".zip";
    _logger.LogTrace($"Compressing index '{indexName}' to {compressedPath}...");
    try
    {
        // Record the uncompressed size before the directory is removed.
        var size = new DirectoryInfo(indexPath).GetFiles().Sum(f => f.Length);
        // Create temp archive first to ensure we don't lose data if compression fails.
        var tempPath = compressedPath + ".temp";
        if (File.Exists(tempPath))
        {
            // BUGFIX: a stale temp file from a previously failed run would make
            // CreateFromDirectory throw; remove it first.
            File.Delete(tempPath);
        }
        ZipFile.CreateFromDirectory(indexPath, tempPath, CompressionLevel.SmallestSize, false);
        // If compression succeeded, safely replace old zip (if exists) and remove original directory.
        if (File.Exists(compressedPath))
        {
            File.Delete(compressedPath);
        }
        File.Move(tempPath, compressedPath);
        Directory.Delete(indexPath, true);
        // Update index metadata. BUGFIX: the original fetched "index_bindings"
        // twice, the second time via .Result inside this async method; reuse
        // the awaited handle instead.
        var indexBindings = await _client.GetIndexAsync("index_bindings");
        var doc = await indexBindings.GetDocumentAsync<Models.Index>(indexName);
        var document = new Models.Index
        {
            Name = indexName,
            IsCompressed = true,
            FolderId = doc.FolderId,
            CreatedAt = doc.CreatedAt,
            SizeBeforeCompression = size,
            LastCompressedAt = DateTime.UtcNow
        };
        await indexBindings.UpdateDocumentsAsync(new List<Models.Index> { document });
        _logger.LogInformation($"Successfully compressed index '{indexName}'");
        // Recreate the directory with placeholder files so Meilisearch does not
        // fail on a missing index folder while the real data sits in the zip.
        Directory.CreateDirectory(indexPath);
        File.Copy(Path.Combine(AppContext.BaseDirectory,DefaultDataFilePath), Path.Combine(indexPath, DefaultDataFilePath));
        File.Copy(Path.Combine(AppContext.BaseDirectory,DefaultLockFilePath), Path.Combine(indexPath, DefaultLockFilePath));
        _logger.LogInformation($"Created placeholder data file for compressed index '{indexName}'");
    }
    catch (Exception ex)
    {
        _logger.LogError($"Failed to compress index '{indexName}': {ex.Message}");
        throw;
    }
    _processManager.StopProcess();
}
/// <summary>
/// Restores a compressed index: extracts the zip next to its final location,
/// swaps it into place, deletes the archive, and marks the binding document
/// as uncompressed. No-op with a warning when the archive is missing.
/// </summary>
private async Task DecompressIndex(string indexName)
{
    // Resolve the path once (the original did two identical round trips).
    var extractPath = await GetIndexFilePath(indexName);
    var compressedPath = extractPath + ".zip";
    if (!File.Exists(compressedPath))
    {
        _logger.LogWarning($"Compressed index not found at: {compressedPath}");
        return;
    }
    _logger.LogTrace($"Decompressing index '{indexName}' to {extractPath}...");
    try
    {
        // Extract to a temp directory first to ensure we don't lose data if
        // decompression fails.
        var tempPath = extractPath + ".temp";
        if (Directory.Exists(tempPath))
        {
            // BUGFIX: a stale temp directory from a previously failed run would
            // make ExtractToDirectory throw; remove it first.
            Directory.Delete(tempPath, true);
        }
        ZipFile.ExtractToDirectory(compressedPath, tempPath);
        // If decompression succeeded, safely move to final location
        // (the placeholder directory created by CompressIndex is replaced here).
        if (Directory.Exists(extractPath))
        {
            Directory.Delete(extractPath, true);
        }
        Directory.Move(tempPath, extractPath);
        File.Delete(compressedPath);
        // Update index metadata. BUGFIX: the original fetched the document via
        // .Result inside this async method; await through the bindings handle.
        var indexBindings = await _client.GetIndexAsync("index_bindings");
        var doc = await indexBindings.GetDocumentAsync<Models.Index>(indexName);
        var document = new Models.Index
        {
            Name = indexName,
            FolderId = doc.FolderId,
            CreatedAt = doc.CreatedAt,
            SizeBeforeCompression = null,
            IsCompressed = false,
            LastCompressedAt = doc.LastCompressedAt
        };
        await indexBindings.UpdateDocumentsAsync(new List<Models.Index> { document });
        _logger.LogInformation($"Successfully decompressed index '{indexName}'");
    }
    catch (Exception ex)
    {
        _logger.LogError($"Failed to decompress index '{indexName}': {ex.Message}");
        throw;
    }
}
#endregion
}

View File

@ -0,0 +1,89 @@
using System.Diagnostics;
using Microsoft.Extensions.Logging;
namespace meilisearch.NET.Services.ProcessManagement;
/// <summary>
/// Base lifecycle manager for an external child process: starts it, restarts
/// it when it exits unexpectedly, and exposes a resource-usage snapshot.
/// Subclasses supply the platform-specific <see cref="ProcessStartInfo"/>.
/// </summary>
public abstract class BaseProcessManager : IProcessManager
{
    protected Process? Process;
    protected readonly ILogger Logger;
    // Set before an intentional kill so the Exited handler does not restart.
    private volatile bool _stopRequested;

    protected BaseProcessManager(ILogger logger)
    {
        Logger = logger;
    }

    /// <summary>Builds the start info for the managed process.</summary>
    protected abstract ProcessStartInfo CreateProcessStartInfo();

    /// <summary>Human-readable process name used in log messages.</summary>
    protected abstract string GetProcessName();

    /// <summary>
    /// Starts the managed process and installs an auto-restart handler for
    /// unexpected exits. Waits 5 seconds to give the process time to come up.
    /// </summary>
    public virtual async Task StartProcess()
    {
        _stopRequested = false;
        var processStartInfo = CreateProcessStartInfo();
        Process = new Process { StartInfo = processStartInfo, EnableRaisingEvents = true };
        Process.Exited += (sender, e) =>
        {
            // BUGFIX: without this guard, StopProcess()/Dispose() triggered an
            // immediate restart, because Kill() raises Exited as well.
            if (_stopRequested)
            {
                return;
            }
            Logger.LogWarning($"{GetProcessName()} process has exited. Restarting...");
            _ = StartProcess();
        };
        try
        {
            Process.Start();
            await Task.Delay(5000); // Wait for process to start
            Logger.LogInformation($"Started {GetProcessName()} process");
        }
        catch (Exception ex)
        {
            Logger.LogError($"Failed to start {GetProcessName()}: {ex.Message}");
            throw;
        }
    }

    /// <summary>Stops the process without triggering the auto-restart.</summary>
    public virtual void StopProcess()
    {
        _stopRequested = true;
        try
        {
            if (Process is { HasExited: false })
            {
                Process.Kill();
            }
        }
        catch (InvalidOperationException)
        {
            // Process exited between the check and the kill; nothing to do.
        }
    }

    /// <summary>True while the process has been started and has not exited.</summary>
    public virtual bool IsProcessRunning()
    {
        return Process is { HasExited: false };
    }

    /// <summary>
    /// Snapshot of the process's resource usage. CPU is the lifetime average
    /// (processor time over wall-clock uptime), not an instantaneous reading.
    /// Returns zeroed stats when the process is not running or on error.
    /// </summary>
    public virtual ProcessResourceStats GetResourceUsage()
    {
        if (Process == null || Process.HasExited)
        {
            return new ProcessResourceStats();
        }
        try
        {
            Process.Refresh();
            // BUGFIX: the original divided TotalProcessorTime by itself,
            // always yielding 100 / ProcessorCount regardless of actual load.
            // Average CPU% = processor time / (uptime * core count).
            var uptime = DateTime.Now - Process.StartTime;
            double cpuPercentage = uptime.TotalMilliseconds > 0
                ? Process.TotalProcessorTime.TotalMilliseconds /
                  (Environment.ProcessorCount * uptime.TotalMilliseconds) * 100
                : 0;
            return new ProcessResourceStats
            {
                CpuPercentage = Math.Round(cpuPercentage, 2),
                MemoryUsageBytes = Process.WorkingSet64,
                // BUGFIX: the original read StandardOutput.BaseStream.Length,
                // which throws on non-seekable process streams. Per-process
                // disk I/O is not portably exposed by System.Diagnostics, so
                // report 0 here.
                DiskReadBytes = 0,
                DiskWriteBytes = 0,
                ProcessId = Process.Id,
                ThreadCount = Process.Threads.Count
            };
        }
        catch (Exception ex)
        {
            Logger.LogError($"Error getting process resource usage: {ex.Message}");
            return new ProcessResourceStats();
        }
    }

    public void Dispose()
    {
        StopProcess();
        Process?.Dispose();
    }
}

View File

@ -0,0 +1,21 @@
using System.Diagnostics;
namespace meilisearch.NET.Services.ProcessManagement;
/// <summary>
/// Lifecycle manager for an external child process: start, stop, liveness,
/// and resource usage. Disposable so the process can be torn down with the
/// owning service.
/// </summary>
public interface IProcessManager : IDisposable
{
// Starts (or restarts) the managed process.
Task StartProcess();
// Terminates the managed process.
void StopProcess();
// Whether the managed process is currently alive.
bool IsProcessRunning();
// Point-in-time snapshot of the process's resource consumption.
ProcessResourceStats GetResourceUsage();
}
/// <summary>
/// Immutable snapshot of a process's resource consumption, as returned by
/// <see cref="IProcessManager.GetResourceUsage"/>. All fields default to 0
/// when the process is not running.
/// </summary>
public record ProcessResourceStats
{
// CPU utilization in percent (semantics defined by the implementation).
public double CpuPercentage { get; init; }
// Working-set size in bytes.
public long MemoryUsageBytes { get; init; }
// Bytes read from disk — NOTE(review): implementations may not be able to
// report this portably; confirm against the concrete manager.
public long DiskReadBytes { get; init; }
// Bytes written to disk — same caveat as DiskReadBytes.
public long DiskWriteBytes { get; init; }
// OS process id.
public int ProcessId { get; init; }
// Number of OS threads in the process.
public int ThreadCount { get; init; }
}

View File

@ -0,0 +1,83 @@
using System.Diagnostics;
using System.Runtime.InteropServices;
using Microsoft.Extensions.Logging;
using meilisearch.NET.Configurations;
namespace meilisearch.NET.Services.ProcessManagement;
/// <summary>
/// Process manager for the bundled Meilisearch binary: selects the right
/// binary for the current OS/architecture, ensures it is executable on
/// Unix-like systems, and launches it with the configured port.
/// </summary>
public class MeiliSearchProcessManager : BaseProcessManager
{
    private readonly MeiliSearchConfiguration _configuration;
    private readonly string _binaryPath;

    public MeiliSearchProcessManager(
        ILogger<MeiliSearchProcessManager> logger,
        MeiliSearchConfiguration configuration) : base(logger)
    {
        _configuration = configuration;
        _binaryPath = Path.Combine(AppContext.BaseDirectory, GetMeilisearchBinaryName());
    }

    protected override string GetProcessName() => "Meilisearch";

    /// <summary>
    /// Builds the start info: binds to the configured port and stores the
    /// database under the application's base directory.
    /// </summary>
    protected override ProcessStartInfo CreateProcessStartInfo()
    {
        if (!File.Exists(_binaryPath))
        {
            throw new FileNotFoundException($"Could not find Meilisearch binary: {_binaryPath}");
        }
        SetExecutePermissionsIfNeeded();
        var host = RuntimeInformation.IsOSPlatform(OSPlatform.Windows)
            ? "localhost"
            : "127.0.0.1";
        var args = $"--http-addr {host}:{_configuration.MeiliPort} " +
                   "--env development " +
                   $"--db-path {Path.Combine(AppContext.BaseDirectory, "db")}";
        return new ProcessStartInfo
        {
            FileName = _binaryPath,
            Arguments = args,
            UseShellExecute = false,
            RedirectStandardOutput = false,
            RedirectStandardError = false,
            CreateNoWindow = false,
        };
    }

    // Maps the current OS/architecture to the bundled binary's file name.
    private string GetMeilisearchBinaryName()
    {
        if (RuntimeInformation.IsOSPlatform(OSPlatform.Windows))
            return "meilisearch-windows.exe";
        if (RuntimeInformation.IsOSPlatform(OSPlatform.OSX))
            return RuntimeInformation.ProcessArchitecture == Architecture.Arm64
                ? "meilisearch-macos-arm"
                : "meilisearch-macos-x64";
        if (RuntimeInformation.IsOSPlatform(OSPlatform.Linux))
            return RuntimeInformation.ProcessArchitecture == Architecture.Arm64
                ? "meilisearch-linux-arm"
                : "meilisearch-linux-x64";
        throw new PlatformNotSupportedException("Current platform and architecture combination is not supported");
    }

    // Extracted binaries may lack the execute bit on Unix-like systems.
    private void SetExecutePermissionsIfNeeded()
    {
        if (RuntimeInformation.IsOSPlatform(OSPlatform.Windows)) return;
        try
        {
            // BUGFIX: quote the path so a base directory containing spaces
            // does not split into multiple chmod arguments.
            var chmod = Process.Start("chmod", $"+x \"{_binaryPath}\"");
            chmod?.WaitForExit();
        }
        catch (Exception ex)
        {
            Logger.LogWarning($"Failed to set execute permissions on binary: {ex.Message}");
        }
    }
}