Compare commits

...

14 Commits

Author SHA1 Message Date
ef6ff45f40 feat!:comments for documentation in IDE
All checks were successful
build-packages / run-tests (push) Successful in 6m16s
build-packages / nuget-package (push) Successful in 6m26s
2025-03-01 14:43:13 -05:00
3d5b7db864 Merge branch 'master' of gitea.d4m13n.dev:damien/meilisearch.NET 2025-03-01 14:42:52 -05:00
9f29202764 fix:ci 2025-03-01 14:42:26 -05:00
47abd5b838 Update README.md
All checks were successful
build-packages / meilisearch-dotnet-packages (push) Successful in 5m58s
2025-03-01 19:13:25 +00:00
430e1d8617 feat: exceptions
Some checks failed
build-packages / meilisearch-dotnet-packages (push) Has been cancelled
2025-03-01 14:09:25 -05:00
ab8c4398f8 Merge branch 'master' of gitea.d4m13n.dev:damien/meilisearch.NET
All checks were successful
build-packages / meilisearch-dotnet-packages (push) Successful in 8m57s
2025-03-01 13:50:14 -05:00
38444d5cd5 fix: new exception classes 2025-03-01 13:49:57 -05:00
44ac877aae Update README.md
All checks were successful
build-packages / meilisearch-dotnet-packages (push) Successful in 8m51s
2025-03-01 18:37:35 +00:00
5417df81ea Update README.md
Some checks failed
build-packages / meilisearch-dotnet-packages (push) Has been cancelled
2025-03-01 18:36:56 +00:00
be6b8504be Update README.md
Some checks failed
build-packages / meilisearch-dotnet-packages (push) Has been cancelled
2025-03-01 18:35:49 +00:00
d3c4be572b fix: readme
Some checks failed
build-packages / meilisearch-dotnet-packages (push) Has been cancelled
2025-03-01 13:32:06 -05:00
845d0a332b fix: bugs with split up classes
All checks were successful
build-packages / meilisearch-dotnet-packages (push) Successful in 6m3s
2025-03-01 13:21:25 -05:00
f21b92bc1f fix: split up monolithic class 2025-03-01 13:08:01 -05:00
c83b0068e3 fix: readme
All checks were successful
build-packages / meilisearch-dotnet-packages (push) Successful in 9m22s
2025-03-01 12:10:16 -05:00
32 changed files with 1736 additions and 760 deletions

View File

@ -1,7 +1,7 @@
name: build-packages
on: pull_request
jobs:
connectors-packages:
build-check:
runs-on: ubuntu-latest
env:
DOTNET_INSTALL_DIR: "/home/runner"
@ -24,3 +24,29 @@ jobs:
fetch-depth: 0
- run: dotnet build --configuration Release
run-tests:
runs-on: ubuntu-latest
env:
DOTNET_INSTALL_DIR: "/home/runner"
permissions:
contents: write
packages: write
steps:
- name: Cleanup build folder
run: |
rm -rf ./* ./.??* || true
- name: Set up .NET
uses: actions/setup-dotnet@v3
with:
dotnet-version: 8.0.x
- name: Checkout repository
uses: actions/checkout@v3
with:
fetch-depth: 0
- name: Run Tests
run: |
dotnet test ./meilisearch.NET.Tests/meilisearch.NET.Tests.csproj

View File

@ -6,7 +6,7 @@ on:
- 'master'
jobs:
meilisearch-dotnet-packages:
nuget-package:
runs-on: ubuntu-latest
env:
DOTNET_INSTALL_DIR: "/home/runner"
@ -70,4 +70,30 @@ jobs:
Automated release of MeiliSearch.NET version ${{ steps.gitversion.outputs.SemVer }}."
files: nuget-packages/*.nupkg
draft: false
prerelease: false
prerelease: true
run-tests:
runs-on: ubuntu-latest
env:
DOTNET_INSTALL_DIR: "/home/runner"
permissions:
contents: write
packages: write
steps:
- name: Cleanup build folder
run: |
rm -rf ./* ./.??* || true
- name: Set up .NET
uses: actions/setup-dotnet@v3
with:
dotnet-version: 8.0.x
- name: Checkout repository
uses: actions/checkout@v3
with:
fetch-depth: 0
- name: Run Tests
run: |
dotnet test ./meilisearch.NET.Tests/meilisearch.NET.Tests.csproj

34
.github/workflows/on-release.yml vendored Normal file
View File

@ -0,0 +1,34 @@
name: update-release-status
on:
workflow_dispatch:
jobs:
update-release:
runs-on: ubuntu-latest
permissions:
contents: write
steps:
- name: Get latest release
id: latest_release
uses: actions/github-script@v6
with:
script: |
const release = await github.rest.repos.getLatestRelease({
owner: context.repo.owner,
repo: context.repo.repo
});
return release.data;
- name: Update release
uses: actions/github-script@v6
with:
script: |
const release = ${{ steps.latest_release.outputs.result }};
await github.rest.repos.updateRelease({
owner: context.repo.owner,
repo: context.repo.repo,
release_id: release.id,
prerelease: false
});

267
README.md
View File

@ -6,192 +6,181 @@
![Ollama](https://img.shields.io/badge/Ollama-Powered-orange)
[![Gitea](https://img.shields.io/badge/Gitea-6eaa5b?logo=gitea&logoColor=fff)](#)
[![License](https://img.shields.io/badge/license-MIT-blue.svg)](LICENSE)
[![Discord](https://img.shields.io/discord/1195961063183765575?label=chat&logo=discord)](https://discord.gg/8dHnaarghJ)
## Overview
MeiliSearch .NET Integration is a NuGet package that seamlessly embeds MeiliSearch into your C# application. It manages the background process and health checks for you, simplifying the integration of full-text search capabilities. In future updates, it will also handle automatic compression and decompression of indexes to help manage local storage usage effectively.
MeiliSearch .NET Embedded is a powerful NuGet package that seamlessly integrates MeiliSearch into your .NET applications. It provides a robust wrapper around MeiliSearch, handling process management, health monitoring, and advanced features like index compression - all while maintaining compatibility with the native MeiliSearch SDK.
`You can use the default SDK for everything, but indexes that are disabled through the SDK won't appear until they are re-enabled with the SDK.`
## Key Features
## Features
- [x] **Embedded MeiliSearch**: Integrate MeiliSearch directly into your application.
- [x] **Manage Indexes**: Manage your indexes and documents through the SDK; you can still use the default Meilisearch SDK.
- [x] **Add Documents**: Add documents with validation that the target index is loaded.
- [x] **Background Process Management**: Automatically handles the lifecycle of the MeiliSearch process.
- [x] **Health Monitoring**: Regular checks on the health of the MeiliSearch instance to ensure it stays running.
- [x] **API Key Management**: An API key is automatically regenerated every time the MeiliSearch service starts unless one is specified in the configuration.
- [x] **Resource Monitoring**: Monitor the resources used by your MeiliSearch instance, including storage.
- [x] **Future Index Management**: Upcoming feature to automatically compress and decompress indexes for optimized local storage.
- [x] **Caching Mechanism**: Cache the compressed indexes so they are still returned when you ask for a list of all indexes.
- [ ] **Search Capabilities**: Use Meilisearch's native search capabilities with validation that the index is loaded.
- [ ] **Embedded Ollama**: Integrate Ollama directly into your application with a configured model.
- [ ] **AI Search Capabilities**: Use Meilisearch's native AI search capabilities with validation that the index is loaded.
- **Embedded MeiliSearch Engine**: Run MeiliSearch directly within your application
- **Automatic Process Management**: Handles startup, shutdown, and health monitoring
- **Smart Index Management**:
- Create and manage indexes with type safety
- Enable/disable indexes on demand
- Automatic compression for optimized storage
- **Efficient Document Management**:
- Batch processing system with configurable thresholds
- Automatic validation of index availability
- **Resource Monitoring**:
- Track memory and CPU usage
- Monitor storage utilization
- Index-specific metrics
- **Native SDK Compatibility**: Full support for the official MeiliSearch SDK
## Installation
To add the MeiliSearch .NET Integration package to your project, you can install it directly from NuGet. Follow the steps below based on your preferred method:
### Package Manager Console
Open the Package Manager Console in Visual Studio and run the following command:
### Via Package Manager Console
```bash
Install-Package meilisearch.NET
```
### .NET CLI
If you're using the .NET CLI, run the following command in your terminal:
### Via .NET CLI
```bash
dotnet add package meilisearch.NET
```
## AppSettings Options
## Quick Start
- **Port**: The port on which MeiliSearch will run (default is `7700`).
- **UiEnabled**: A boolean value to enable or disable the MeiliSearch UI (default is `true`).
- **ApiKey**: An optional API key. If specified, this key will be used; otherwise, a new key will be generated each time the service starts.
### 1. Basic Setup
Add MeiliSearch service to your dependency injection container:
## Configuration
```csharp
var builder = Host.CreateApplicationBuilder();
builder.Services.AddMeiliSearchService();
```
The MeiliSearch service can be configured using the `MeiliSearchConfiguration` class. The following options are available:
- **Port**: The port on which MeiliSearch will run (default is `7700`).
- **UiEnabled**: A boolean value to enable or disable the MeiliSearch UI (default is `true`).
- **ApiKey**: An optional API key. If specified, this key will be used; otherwise, a new key will be generated each time the service starts.
You can configure these options in your `appsettings.json` file as follows:
### 2. Configuration
Configure MeiliSearch in your `appsettings.json`:
```json
{
"MeiliSearch": {
"Port": 7700,
"UiEnabled": true,
"ApiKey": "your_api_key"
"UiEnabled": true
}
}
```
## Usage
To set up the MeiliSearch service in your application, configure dependency injection as shown below:
### 3. Basic Usage
#### Define Your Document Model
```csharp
using System.Net;
using Microsoft.Extensions.Hosting;
using Microsoft.Extensions.Logging;
ServicePointManager.SecurityProtocol = SecurityProtocolType.SystemDefault;
var builder = Host.CreateApplicationBuilder();
builder.Configuration.AddJsonFile("appsettings.json", optional: false, reloadOnChange: true);
builder.Services.AddMeiliSearchService();
builder.Logging.ClearProviders();
builder.Logging.AddConsole();
builder.Logging.SetMinimumLevel(LogLevel.Information);
builder.Services.AddLogging();
var app = builder.Build();
app.Run();
Console.ReadLine();
```
## MeiliSearchService Class Usage Guide
### Methods
#### Start
Starts the MeiliSearch process. Logs the start of the process, sets the status to **Starting**, and attempts to start the process.
```csharp
MeiliSearchService service = new MeiliSearchService();
service.Start();
```
#### Stop
Stops the MeiliSearch process. Logs the stop of the process, sets the status to **Stopping**, and attempts to stop the process.
```csharp
service.Stop();
```
#### Restart
Restarts the MeiliSearch process. Stops the process using the **Stop** method and starts it using the **Start** method.
```csharp
service.Restart();
```
#### CreateIndex
Creates a new index with the specified name.
```csharp
service.CreateIndex("my_index");
```
#### DeleteIndex
Deletes an existing index with the specified name.
```csharp
service.DeleteIndex("my_index");
```
#### AddDocument
Adds a document to the specified index.
```csharp
public class MyDocument : IDocument
public class Product : IDocument
{
public string Id { get; set; }
public string Title { get; set; }
public string Name { get; set; }
public string Description { get; set; }
public decimal Price { get; set; }
}
var document = new MyDocument { Id = "1", Title = "My Document" };
service.AddDocument("my_index", document);
```
#### GetAllIndexes
Retrieves a list of all existing indexes.
#### Create and Manage Indexes
```csharp
List<string> indexes = service.GetAllIndexes();
public class SearchService
{
private readonly MeiliSearchService _searchService;
public SearchService(MeiliSearchService searchService)
{
_searchService = searchService;
}
public async Task InitializeProductIndex()
{
// Create index
await _searchService.CreateIndex<Product>("products");
// Add documents
var product = new Product
{
Id = "1",
Name = "Gaming Laptop",
Description = "High-performance gaming laptop",
Price = 1299.99m
};
_searchService.AddDocument("products", product);
//_searchService.AddDocument("products", product, true); // passing true for the third parameter (autoCommit) bypasses batching
}
}
```
### Status
Indicates the current status of the MeiliSearch process.
### 4. Use MeiliSearch SDK
### Using Native MeiliSearch SDK
```csharp
MeiliSearchStatus status = service.Status;
await _searchService.SDK("products", async client =>
{
var index = await client.GetIndex("products");
var searchResults = await index.SearchAsync<Product>("laptop");
return searchResults;
});
```
## Notes
https://github.com/Mozilla-Ocho/llamafile
## Advanced Usage
### Resource Monitoring
```csharp
var usage = _searchService.GetResourceUsage();
Console.WriteLine($"Memory Usage: {usage.MemoryUsageBytes} bytes");
Console.WriteLine($"CPU Usage: {usage.CpuPercentage}%");
Console.WriteLine($"Storage Usage: {_searchService.GetTotalStorageUsage()} bytes");
```
### Index Management
```csharp
// Disable an index (automatically compresses)
await _searchService.SetIndexEnabled("products", false);
// Enable an index (automatically decompresses)
await _searchService.SetIndexEnabled("products", true);
// Get all indexes
var indexes = await _searchService.GetAllIndexes();
```
## Best Practices
1. **Resource Management**
- Always dispose of the `MeiliSearchService` when your application shuts down
- Monitor resource usage in production environments
2. **Index Management**
- Disable unused indexes to save resources
- Use type-safe index creation with generic parameters
3. **Document Management**
- Utilize batch processing for bulk operations
- Handle exceptions when adding documents (see the sketch below)
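The following sketch illustrates the disposal and exception-handling practices above. It assumes the `Product` model and `_searchService` field from the Quick Start; the exception types come from the `meilisearch.NET.Exceptions` namespace introduced in this change, and the retry-after-start behavior is only an illustration, not a library guarantee.
```csharp
using meilisearch.NET.Exceptions;

public async Task IndexProductSafelyAsync(Product product)
{
    try
    {
        // Queued for batching; pass autoCommit: true to push immediately.
        _searchService.AddDocument("products", product);
    }
    catch (ProcessNotRunningException)
    {
        // The embedded MeiliSearch process is not running: start it and retry once.
        await _searchService.Start();
        _searchService.AddDocument("products", product);
    }
    catch (MeiliSearchException ex)
    {
        // Base type for all Meilisearch.NET exceptions.
        Console.WriteLine($"Failed to queue document: {ex.Message}");
    }
}
```
When your application shuts down, call `_searchService.Dispose()` so the embedded MeiliSearch process and its process manager are cleaned up.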
## Performance Considerations
- The batch system automatically manages document additions with a default threshold of 100 documents (see the sketch after this list)
- Compressed indexes use less storage but require decompression before use
- Monitor resource usage in production environments
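A minimal sketch of how the batching threshold behaves, assuming the `Product` model from the Quick Start; the exact flush timing is handled internally by the library.
```csharp
// Documents are queued in memory and synced to the server once the batch
// threshold (100 by default) is reached, one call per target index.
for (var i = 0; i < 250; i++)
{
    _searchService.AddDocument("products", new Product
    {
        Id = i.ToString(),
        Name = $"Product {i}",
        Description = "Bulk-loaded item",
        Price = 9.99m
    });
}

// For a single urgent document, set autoCommit to true to bypass batching.
_searchService.AddDocument("products", new Product
{
    Id = "sku-urgent",
    Name = "Limited offer",
    Description = "Pushed immediately",
    Price = 1.00m
}, autoCommit: true);
```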
## Contributing
We welcome contributions! Please follow these steps:
1. Fork the repository
2. Create a feature branch
3. Commit your changes
4. Push to the branch
5. Create a Pull Request
## License
This project is licensed under the MIT License - see the [LICENSE](LICENSE) file for details.
## Contributing
We welcome contributions! Please feel free to submit issues, pull requests, or suggestions to improve this project.
## Support
For any issues or questions, please open an issue on GitHub or contact us via [your contact method].
- Create an issue on GitHub
- Contact us on discord [![Discord](https://img.shields.io/discord/1195961063183765575?label=chat&logo=discord)](https://discord.gg/8dHnaarghJ)
- Visit our documentation at meilisearchdotnet.d4m13n.dev
---
## Acknowledgments
Feel free to customize this README as necessary for your package, especially regarding the project name and license details!
---
- Built on top of the excellent [MeiliSearch](https://www.meilisearch.com/) search engine
- Powered by [Ollama](https://ollama.ai/) for AI capabilities

View File

@ -0,0 +1,211 @@
using System;
using System.Collections.Generic;
using System.Threading.Tasks;
using Meilisearch;
using meilisearch.NET.Interfaces;
using meilisearch.NET.Models;
using meilisearch.NET.Services.DocumentManagement;
using meilisearch.NET.Services.IndexManagement;
using meilisearch.NET.Services.ProcessManagement;
using Moq;
using Xunit;
namespace meilisearch.NET.Tests
{
public class MeiliSearchServiceTests
{
private readonly Mock<IProcessManager> _mockProcessManager;
private readonly Mock<IIndexManager> _mockIndexManager;
private readonly Mock<IDocumentManager> _mockDocumentManager;
private readonly Mock<MeilisearchClient> _mockClient;
private readonly MeiliSearchService _service;
public MeiliSearchServiceTests()
{
_mockProcessManager = new Mock<IProcessManager>();
_mockIndexManager = new Mock<IIndexManager>();
_mockDocumentManager = new Mock<IDocumentManager>();
_service = new MeiliSearchService(
_mockProcessManager.Object,
_mockIndexManager.Object,
_mockDocumentManager.Object
);
}
[Fact]
public async Task Start_CallsProcessManagerStartProcess()
{
// Arrange
_mockProcessManager.Setup(x => x.StartProcess()).Returns(Task.CompletedTask);
// Act
await _service.Start();
// Assert
_mockProcessManager.Verify(x => x.StartProcess(), Times.Exactly(2));
}
[Fact]
public void GetTotalStorageUsage_CallsIndexManagerWithCorrectParameters()
{
// Arrange
const bool useCompressedSize = true;
const long expectedUsage = 1000L;
_mockIndexManager.Setup(x => x.GetTotalStorageUsage(useCompressedSize)).Returns(expectedUsage);
// Act
var result = _service.GetTotalStorageUsage(useCompressedSize);
// Assert
Assert.Equal(expectedUsage, result);
_mockIndexManager.Verify(x => x.GetTotalStorageUsage(useCompressedSize), Times.Once);
}
[Fact]
public void GetIndexStorageUsage_CallsIndexManagerWithCorrectParameters()
{
// Arrange
const string indexName = "testIndex";
const bool useCompressedSize = true;
const long expectedUsage = 500L;
_mockIndexManager.Setup(x => x.GetIndexStorageUsage(indexName, useCompressedSize)).Returns(expectedUsage);
// Act
var result = _service.GetIndexStorageUsage(indexName, useCompressedSize);
// Assert
Assert.Equal(expectedUsage, result);
_mockIndexManager.Verify(x => x.GetIndexStorageUsage(indexName, useCompressedSize), Times.Once);
}
[Fact]
public void Stop_CallsProcessManagerStopProcess()
{
// Act
_service.Stop();
// Assert
_mockProcessManager.Verify(x => x.StopProcess(), Times.Once);
}
[Fact]
public void IsRunning_ReturnsProcessManagerStatus()
{
// Arrange
const bool expectedStatus = true;
_mockProcessManager.Setup(x => x.IsProcessRunning()).Returns(expectedStatus);
// Act
var result = _service.IsRunning();
// Assert
Assert.Equal(expectedStatus, result);
_mockProcessManager.Verify(x => x.IsProcessRunning(), Times.Once);
}
[Fact]
public async Task SetIndexEnabled_CallsIndexManagerWithCorrectParameters()
{
// Arrange
const string indexName = "testIndex";
const bool enabled = true;
_mockIndexManager.Setup(x => x.SetIndexEnabledAsync(indexName, enabled)).Returns(Task.CompletedTask);
// Act
await _service.SetIndexEnabled(indexName, enabled);
// Assert
_mockIndexManager.Verify(x => x.SetIndexEnabledAsync(indexName, enabled), Times.Once);
}
[Fact]
public void GetResourceUsage_ReturnsProcessManagerStats()
{
// Arrange
var expectedStats = new ProcessResourceStats();
_mockProcessManager.Setup(x => x.GetResourceUsage()).Returns(expectedStats);
// Act
var result = _service.GetResourceUsage();
// Assert
Assert.Same(expectedStats, result);
_mockProcessManager.Verify(x => x.GetResourceUsage(), Times.Once);
}
[Fact]
public async Task GetAllIndexes_ReturnsIndexManagerResults()
{
// Arrange
var expectedIndexes = new List<string> { "index1", "index2" };
_mockIndexManager.Setup(x => x.GetAllIndexes()).ReturnsAsync(expectedIndexes);
// Act
var result = await _service.GetAllIndexes();
// Assert
Assert.Equal(expectedIndexes, result);
_mockIndexManager.Verify(x => x.GetAllIndexes(), Times.Once);
}
[Fact]
public async Task CreateIndex_CallsIndexManagerWithCorrectParameters()
{
// Arrange
const string indexName = "testIndex";
_mockIndexManager.Setup(x => x.CreateIndexAsync<TestDocument>(indexName)).Returns(Task.CompletedTask);
// Act
await _service.CreateIndex<TestDocument>(indexName);
// Assert
_mockIndexManager.Verify(x => x.CreateIndexAsync<TestDocument>(indexName), Times.Once);
}
[Fact]
public async Task DeleteIndex_CallsIndexManagerWithCorrectParameters()
{
// Arrange
const string indexName = "testIndex";
_mockIndexManager.Setup(x => x.DeleteIndexAsync(indexName)).Returns(Task.CompletedTask);
// Act
await _service.DeleteIndex(indexName);
// Assert
_mockIndexManager.Verify(x => x.DeleteIndexAsync(indexName), Times.Once);
}
[Fact]
public void AddDocument_CallsDocumentManagerWithCorrectParameters()
{
// Arrange
const string repositoryId = "testRepo";
var document = new TestDocument();
const bool autoCommit = true;
// Act
_service.AddDocument(repositoryId, document, autoCommit);
// Assert
_mockDocumentManager.Verify(x => x.AddDocument(repositoryId, document, autoCommit), Times.Once);
}
[Fact]
public void Dispose_CallsProcessManagerDispose()
{
// Act
_service.Dispose();
// Assert
_mockProcessManager.Verify(x => x.Dispose(), Times.Once);
}
// Helper class for testing
private class TestDocument : IDocument
{
public Guid Id { get; set; }
}
}
}

View File

@ -0,0 +1,28 @@
<Project Sdk="Microsoft.NET.Sdk">
<PropertyGroup>
<TargetFramework>net8.0</TargetFramework>
<ImplicitUsings>enable</ImplicitUsings>
<Nullable>enable</Nullable>
<IsPackable>false</IsPackable>
<IsTestProject>true</IsTestProject>
<RootNamespace>TestProject1</RootNamespace>
</PropertyGroup>
<ItemGroup>
<PackageReference Include="coverlet.collector" Version="6.0.0"/>
<PackageReference Include="Microsoft.NET.Test.Sdk" Version="17.8.0"/>
<PackageReference Include="Moq" Version="4.20.72" />
<PackageReference Include="xunit" Version="2.9.3" />
<PackageReference Include="xunit.extensibility.core" Version="2.9.3" />
</ItemGroup>
<ItemGroup>
</ItemGroup>
<ItemGroup>
<ProjectReference Include="..\meilisearch.NET\meilisearch.NET.csproj" />
</ItemGroup>
</Project>

View File

@ -56,13 +56,13 @@ public class test
_logger = logger ?? throw new ArgumentNullException(nameof(logger));
// Wait until Meilisearch is running
while (!service.IsMeilisearchRunning())
while (!service.IsRunning())
{
_logger.LogInformation("Waiting for Meilisearch to start...");
Task.Delay(1000).Wait(); // Wait for 1 second before checking again
}
var usage = service.GetProcessResourceUsage();
var usage = service.GetResourceUsage();
_logger.LogInformation($"Memory usage: {usage.MemoryUsageBytes} MB");
_logger.LogInformation($"CPU usage: {usage.CpuPercentage} %");
_logger.LogInformation($"Disk read: {usage.DiskReadBytes} MB");
@ -70,44 +70,61 @@ public class test
_logger.LogInformation($"Thread count: {usage.ThreadCount}");
_logger.LogInformation($"Process ID: {usage.ProcessId}");
//service.UpdateIndexStatus("test",false).Wait();
service.CreateIndex<document>("test");
service.AddDocument("test", new document()
{
Id = Guid.NewGuid(),
message = "Hello, Meilisearch!"
});
service.AddDocument("test", new document()
{
Id = Guid.NewGuid(),
message = "Hello, Meilisearch!"
});
service.AddDocument("test", new document()
{
Id = Guid.NewGuid(),
message = "Hello, Meilisearch!"
});
service.AddDocument("test", new document()
{
Id = Guid.NewGuid(),
message = "Hello, Meilisearch!"
});
service.AddDocument("test", new document()
{
Id = Guid.NewGuid(),
message = "Hello, Meilisearch!"
});
service.AddDocument("test", new document()
{
Id = Guid.NewGuid(),
message = "Hello, Meilisearch!"
});
service.AddDocument("test", new document()
{
Id = Guid.NewGuid(),
message = "Hello, Meilisearch!"
});
Task.Delay(10000).Wait();
service.SetIndexEnabled("test", false).Wait();
Task.Delay(10000).Wait();
usage = service.GetResourceUsage();
_logger.LogInformation($"Memory usage: {usage.MemoryUsageBytes} MB");
_logger.LogInformation($"CPU usage: {usage.CpuPercentage} %");
_logger.LogInformation($"Disk read: {usage.DiskReadBytes} MB");
_logger.LogInformation($"Disk write: {usage.DiskWriteBytes} MB");
_logger.LogInformation($"Thread count: {usage.ThreadCount}");
_logger.LogInformation($"Process ID: {usage.ProcessId}");
var storage = service.GetIndexStorageUsage("test");
var totalStorage = service.GetTotalStorageUsage();
_logger.LogInformation($"Index storage usage: {storage} MB");
_logger.LogInformation($"Total storage usage: {totalStorage} MB");
Task.Delay(10000).Wait();
service.SetIndexEnabled("test", false).Wait();
// service.AddDocument("test", new document()
// {
// Id = Guid.NewGuid(),
// message = "Hello, Meilisearch!"
// });
// service.AddDocument("test", new document()
// {
// Id = Guid.NewGuid(),
// message = "Hello, Meilisearch!"
// });
// service.AddDocument("test", new document()
// {
// Id = Guid.NewGuid(),
// message = "Hello, Meilisearch!"
// });
// service.AddDocument("test", new document()
// {
// Id = Guid.NewGuid(),
// message = "Hello, Meilisearch!"
// });
// service.AddDocument("test", new document()
// {
// Id = Guid.NewGuid(),
// message = "Hello, Meilisearch!"
// });
// service.AddDocument("test", new document()
// {
// Id = Guid.NewGuid(),
// message = "Hello, Meilisearch!"
// });
// service.AddDocument("test", new document()
// {
// Id = Guid.NewGuid(),
// message = "Hello, Meilisearch!"
// });
_logger.LogInformation("Test service initialized.");
}
}

View File

@ -4,6 +4,8 @@ Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "meilisearch.NET", "meilisea
EndProject
Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "meilisearch.NET.example", "meilisearch.NET.example\meilisearch.NET.example.csproj", "{E753BBD6-6ADF-4DD4-8822-7279CD55DF58}"
EndProject
Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "meilisearch.NET.Tests", "meilisearch.NET.Tests\meilisearch.NET.Tests.csproj", "{FDCF4472-79F8-4AF2-AB02-00522C399E22}"
EndProject
Global
GlobalSection(SolutionConfigurationPlatforms) = preSolution
Debug|Any CPU = Debug|Any CPU
@ -18,5 +20,9 @@ Global
{E753BBD6-6ADF-4DD4-8822-7279CD55DF58}.Debug|Any CPU.Build.0 = Debug|Any CPU
{E753BBD6-6ADF-4DD4-8822-7279CD55DF58}.Release|Any CPU.ActiveCfg = Release|Any CPU
{E753BBD6-6ADF-4DD4-8822-7279CD55DF58}.Release|Any CPU.Build.0 = Release|Any CPU
{FDCF4472-79F8-4AF2-AB02-00522C399E22}.Debug|Any CPU.ActiveCfg = Debug|Any CPU
{FDCF4472-79F8-4AF2-AB02-00522C399E22}.Debug|Any CPU.Build.0 = Debug|Any CPU
{FDCF4472-79F8-4AF2-AB02-00522C399E22}.Release|Any CPU.ActiveCfg = Release|Any CPU
{FDCF4472-79F8-4AF2-AB02-00522C399E22}.Release|Any CPU.Build.0 = Release|Any CPU
EndGlobalSection
EndGlobal

View File

@ -1,10 +0,0 @@
namespace meilisearch.NET.Enums;
public enum MeiliSearchStatus
{
Stopped,
Starting,
Running,
Stopping,
Crashed
}

View File

@ -0,0 +1,12 @@
namespace meilisearch.NET.Exceptions;
public class IndexAlreadyExistsException : IndexManagementException
{
public string IndexName { get; }
public IndexAlreadyExistsException(string indexName)
: base($"Index '{indexName}' already exists")
{
IndexName = indexName;
}
}

View File

@ -0,0 +1,12 @@
namespace meilisearch.NET.Exceptions;
public class IndexCompressionException : IndexManagementException
{
public string IndexName { get; }
public IndexCompressionException(string indexName, string operation, Exception innerException)
: base($"Failed to {operation} index '{indexName}'", innerException)
{
IndexName = indexName;
}
}

View File

@ -0,0 +1,7 @@
namespace meilisearch.NET.Exceptions;
public class IndexLimitReachedException : IndexManagementException
{
public IndexLimitReachedException()
: base("Maximum number of indexes (1000) has been reached") { }
}

View File

@ -0,0 +1,10 @@
namespace meilisearch.NET.Exceptions;
/// <summary>
/// Exception thrown when there are issues with index management
/// </summary>
public class IndexManagementException : MeiliSearchException
{
public IndexManagementException(string message) : base(message) { }
public IndexManagementException(string message, Exception innerException) : base(message, innerException) { }
}

View File

@ -0,0 +1,12 @@
namespace meilisearch.NET.Exceptions;
public class IndexNotFoundException : IndexManagementException
{
public string IndexName { get; }
public IndexNotFoundException(string indexName)
: base($"Index '{indexName}' not found")
{
IndexName = indexName;
}
}

View File

@ -0,0 +1,10 @@
namespace meilisearch.NET.Exceptions;
/// <summary>
/// Base exception class for all Meilisearch.NET exceptions
/// </summary>
public class MeiliSearchException : Exception
{
public MeiliSearchException(string message) : base(message) { }
public MeiliSearchException(string message, Exception innerException) : base(message, innerException) { }
}

View File

@ -0,0 +1,10 @@
namespace meilisearch.NET.Exceptions;
/// <summary>
/// Exception thrown when there are issues with the Meilisearch process management
/// </summary>
public class ProcessManagementException : MeiliSearchException
{
public ProcessManagementException(string message) : base(message) { }
public ProcessManagementException(string message, Exception innerException) : base(message, innerException) { }
}

View File

@ -0,0 +1,6 @@
namespace meilisearch.NET.Exceptions;
public class ProcessNotRunningException : ProcessManagementException
{
public ProcessNotRunningException() : base("Meilisearch process is not running") { }
}

View File

@ -0,0 +1,6 @@
namespace meilisearch.NET.Exceptions;
public class ProcessStartException : ProcessManagementException
{
public ProcessStartException(Exception innerException, string message) : base($"Failed to start Meilisearch process: {message}", innerException) { }
}

View File

@ -0,0 +1,6 @@
namespace meilisearch.NET.Exceptions;
public class ProcessStopException : ProcessManagementException
{
public ProcessStopException(Exception innerException, string message) : base($"Failed to stop Meilisearch process: {message}", innerException) { }
}

View File

@ -0,0 +1,68 @@
using Meilisearch;
namespace meilisearch.NET.Extensions;
/// <summary>
/// Extension methods for executing operations on MeiliSearch client.
/// </summary>
public static class MeilisearchClientExtensions
{
/// <summary>
/// Executes an action against MeiliSearch client that returns a value, automatically handling compressed index states.
/// </summary>
/// <typeparam name="T">The type of value returned by the action</typeparam>
/// <param name="service">The MeiliSearch service instance</param>
/// <param name="indexName">Name of the index to operate on</param>
/// <param name="action">The action to execute against the MeiliSearch client</param>
/// <returns>The result of the executed action</returns>
/// <exception cref="InvalidOperationException">Thrown when MeiliSearch client is not initialized</exception>
public static async Task<T> SDK<T>(this MeiliSearchService service, string indexName, Func<MeilisearchClient, Task<T>> action)
{
var client = service.Client;
var indexManager = service.IndexManager;
if (client == null)
throw new InvalidOperationException("MeilisearchClient is not initialized");
if (!string.IsNullOrEmpty(indexName))
{
var index = await client.GetIndexAsync("index_bindings");
var doc = await index.GetDocumentAsync<meilisearch.NET.Models.Index>(indexName);
if (doc.IsCompressed)
{
await indexManager.SetIndexEnabledAsync(indexName, true);
}
}
var result = await action(client);
return result;
}
/// <summary>
/// Executes an action against MeiliSearch client that doesn't return a value, automatically handling compressed index states.
/// </summary>
/// <param name="service">The MeiliSearch service instance</param>
/// <param name="indexName">Name of the index to operate on</param>
/// <param name="action">The action to execute against the MeiliSearch client</param>
/// <exception cref="InvalidOperationException">Thrown when MeiliSearch client is not initialized</exception>
public static async Task SDK(this MeiliSearchService service, string indexName, Func<MeilisearchClient, Task> action)
{
var client = service.Client;
var indexManager = service.IndexManager;
if (client == null)
throw new InvalidOperationException("MeilisearchClient is not initialized");
if (!string.IsNullOrEmpty(indexName))
{
var index = await client.GetIndexAsync("index_bindings");
var doc = await index.GetDocumentAsync<meilisearch.NET.Models.Index>(indexName);
if (doc.IsCompressed)
{
await indexManager.SetIndexEnabledAsync(indexName, true);
}
}
await action(client);
}
}

View File

@ -1,14 +1,42 @@
using Meilisearch;
using meilisearch.NET.Configurations;
using meilisearch.NET.Services.DocumentManagement;
using meilisearch.NET.Services.IndexManagement;
using meilisearch.NET.Services.ProcessManagement;
using Microsoft.Extensions.DependencyInjection;
using meilisearch.NET;
namespace meilisearch.NET.Extensions;
/// <summary>
/// Provides extension methods for IServiceCollection to configure MeiliSearch services
/// </summary>
public static class ServiceCollectionExtension
{
/// <summary>
/// Adds MeiliSearch services to the dependency injection container
/// </summary>
/// <param name="services">The IServiceCollection to add services to</param>
/// <returns>The IServiceCollection for chaining</returns>
/// <remarks>
/// Registers the following services as singletons:
/// - MeilisearchClient (configured for localhost:7700)
/// - MeiliSearchService (with HttpClient)
/// - MeiliSearchConfiguration
/// - MeiliSearchProcessManager
/// - IIndexManager (implemented by IndexManager)
/// - IDocumentManager (implemented by DocumentManager)
/// </remarks>
public static IServiceCollection AddMeiliSearchService(this IServiceCollection services)
{
services.AddSingleton<MeilisearchClient>(sp =>
{
return new MeilisearchClient("http://localhost:7700");
});
services.AddHttpClient<MeiliSearchService>();
services.AddSingleton<MeiliSearchConfiguration>();
services.AddSingleton<MeiliSearchProcessManager>();
services.AddSingleton<IIndexManager, IndexManager>();
services.AddSingleton<IDocumentManager, DocumentManager>();
services.AddSingleton<MeiliSearchService>();
return services;
}

View File

@ -1,594 +1,98 @@
using System.Collections.ObjectModel;
using System.Collections.Specialized;
using System.Diagnostics;
using System.IO.Compression;
using System.Net.Sockets;
using System.Reflection;
using System.Runtime.InteropServices;
using System.Security;
using System.Security.Cryptography;
using System.Text;
using Meilisearch;
using meilisearch.NET.Configurations;
using meilisearch.NET.Enums;
using meilisearch.NET.Extensions;
using meilisearch.NET.Interfaces;
using meilisearch.NET.Models;
using Meilisearch.QueryParameters;
using Microsoft.Extensions.Logging;
using Index = Meilisearch.Index;
using meilisearch.NET.Services.DocumentManagement;
using meilisearch.NET.Services.IndexManagement;
using meilisearch.NET.Services.ProcessManagement;
namespace meilisearch.NET;
public class MeiliSearchService:IDisposable
/// <summary>
/// Main service class for interacting with Meilisearch. Manages process, indexes and documents.
/// </summary>
public class MeiliSearchService : IDisposable
{
private readonly HttpClient _httpClient;
private readonly ILogger<MeiliSearchService> _logger;
private readonly MeilisearchClient _client;
private readonly MeiliSearchConfiguration _meiliConfiguration;
private readonly string _indexBasePath = Path.Combine(AppContext.BaseDirectory, "db", "indexes" );
private static string _apiKey = GenerateApiKey();
private const int THRESHOLD = 10000;
private const string DEFAULT_DATA_FILE_PATH = "data.mdb";
private const string DEFAULT_LOCK_FILE_PATH = "lock.mdb";
private Process? process;
private ObservableCollection<KeyValuePair<string,IDocument>> _documentCollection;
private List<Index> indexes { get; set; } = new();
internal readonly IProcessManager ProcessManager;
internal readonly IIndexManager IndexManager;
internal readonly IDocumentManager DocumentManager;
internal readonly MeilisearchClient Client;
protected virtual ObservableCollection<KeyValuePair<string, IDocument>> DocumentCollection
/// <summary>
/// Initializes a new instance of MeiliSearchService
/// </summary>
/// <param name="processManager">Manager for the Meilisearch process</param>
/// <param name="indexManager">Manager for index operations</param>
/// <param name="documentManager">Manager for document operations</param>
public MeiliSearchService(
IProcessManager processManager,
IIndexManager indexManager,
IDocumentManager documentManager)
{
get
{
return _documentCollection;
}
ProcessManager = processManager;
IndexManager = indexManager;
DocumentManager = documentManager;
ProcessManager.StartProcess().Wait();
}
public MeiliSearchService(HttpClient httpClient, ILogger<MeiliSearchService> logger, MeiliSearchConfiguration meiliConfiguration)
{
_httpClient = httpClient;
_meiliConfiguration = meiliConfiguration;
_logger = logger;
_client = new MeilisearchClient("http://localhost:"+meiliConfiguration.MeiliPort, _apiKey );
_documentCollection = new ObservableCollection<KeyValuePair<string,IDocument>>();
_documentCollection.CollectionChanged += CheckIfNeedDocumentSync;
StartMeilisearch().Wait();
EnsureRepositoryIndexExists().Wait();
_logger.LogTrace("API Key: " + _apiKey);
process = null;
}
/// <summary>Starts the Meilisearch process</summary>
public async Task Start() => await ProcessManager.StartProcess();
/// <summary>Gets the total storage usage across all indexes</summary>
/// <param name="useCompressedSize">If true, returns compressed size for compressed indexes</param>
/// <returns>Total storage usage in bytes</returns>
public long GetTotalStorageUsage(bool useCompressedSize = true)
=> IndexManager.GetTotalStorageUsage(useCompressedSize);
/// <summary>Gets the storage usage for a specific index</summary>
/// <param name="indexName">Name of the index</param>
/// <param name="useCompressedSize">If true, returns compressed size for compressed indexes</param>
/// <returns>Index storage usage in bytes</returns>
public long GetIndexStorageUsage(string indexName, bool useCompressedSize = true)
=> IndexManager.GetIndexStorageUsage(indexName, useCompressedSize);
#region Private
private async Task CompressIndex(string indexName)
{
var indexPath = await GetIndexFilePath(indexName);
if (!Directory.Exists(indexPath))
{
_logger.LogWarning($"Index directory not found at: {indexPath}");
return;
}
/// <summary>Stops the Meilisearch process</summary>
public void Stop() => ProcessManager.StopProcess();
var compressedPath = indexPath + ".zip";
_logger.LogTrace($"Compressing index '{indexName}' to {compressedPath}...");
/// <summary>Checks if the Meilisearch process is running</summary>
/// <returns>True if process is running, false otherwise</returns>
public bool IsRunning() => ProcessManager.IsProcessRunning();
try
{
var size = new DirectoryInfo(indexPath).GetFiles().Sum(f => f.Length);
// Create temp directory to ensure we don't lose data if compression fails
var tempPath = compressedPath + ".temp";
ZipFile.CreateFromDirectory(indexPath, tempPath, CompressionLevel.SmallestSize, false);
/// <summary>Enables or disables an index</summary>
/// <param name="indexName">Name of the index</param>
/// <param name="enabled">True to enable, false to disable</param>
public Task SetIndexEnabled(string indexName, bool enabled)
=> IndexManager.SetIndexEnabledAsync(indexName, enabled);
// If compression succeeded, safely replace old zip (if exists) and remove original directory
if (File.Exists(compressedPath))
{
File.Delete(compressedPath);
}
File.Move(tempPath, compressedPath);
Directory.Delete(indexPath, true);
/// <summary>Gets current resource usage statistics for the Meilisearch process</summary>
/// <returns>Process resource statistics</returns>
public ProcessResourceStats GetResourceUsage() => ProcessManager.GetResourceUsage();
// Update index metadata
var indexBindings = await _client.GetIndexAsync("index_bindings");
var doc = await _client.GetIndexAsync("index_bindings").Result.GetDocumentAsync<Models.Index>(indexName);
var document = new Models.Index
{
Name = indexName,
IsCompressed = true,
FolderId = doc.FolderId,
CreatedAt = doc.CreatedAt,
SizeBeforeCompression = size,
LastCompressedAt = DateTime.UtcNow
};
await indexBindings.UpdateDocumentsAsync(new List<Models.Index> { document });
_logger.LogInformation($"Successfully compressed index '{indexName}'");
/// <summary>Gets a list of all index names</summary>
/// <returns>List of index names</returns>
public Task<List<string>> GetAllIndexes() => IndexManager.GetAllIndexes();
Directory.CreateDirectory(indexPath);
File.Copy(Path.Combine(AppContext.BaseDirectory,DEFAULT_DATA_FILE_PATH), Path.Combine(indexPath, DEFAULT_DATA_FILE_PATH));
File.Copy(Path.Combine(AppContext.BaseDirectory,DEFAULT_LOCK_FILE_PATH), Path.Combine(indexPath, DEFAULT_LOCK_FILE_PATH));
_logger.LogInformation($"Created placeholder data file for compressed index '{indexName}'");
}
catch (Exception ex)
{
_logger.LogError($"Failed to compress index '{indexName}': {ex.Message}");
throw;
}
Stop();
}
/// <summary>Creates a new index</summary>
/// <typeparam name="T">Document type that implements IDocument</typeparam>
/// <param name="indexName">Name for the new index</param>
public Task CreateIndex<T>(string indexName) where T : IDocument
=> IndexManager.CreateIndexAsync<T>(indexName);
private async Task DecompressIndex(string indexName)
{
var compressedPath = await GetIndexFilePath(indexName) + ".zip";
var extractPath = await GetIndexFilePath(indexName);
if (!File.Exists(compressedPath))
{
_logger.LogWarning($"Compressed index not found at: {compressedPath}");
return;
}
_logger.LogTrace($"Decompressing index '{indexName}' to {extractPath}...");
try
{
if (Directory.Exists(extractPath))
{
Directory.Delete(extractPath, true);
}
// Create temp directory to ensure we don't lose data if decompression fails
var tempPath = extractPath + ".temp";
ZipFile.ExtractToDirectory(compressedPath, tempPath);
// If decompression succeeded, safely move to final location
if (Directory.Exists(extractPath))
{
Directory.Delete(extractPath, true);
}
Directory.Move(tempPath, extractPath);
File.Delete(compressedPath);
// Update index metadata
var indexBindings = await _client.GetIndexAsync("index_bindings");
var doc = await _client.GetIndexAsync("index_bindings").Result.GetDocumentAsync<Models.Index>(indexName);
var document = new Models.Index
{
Name = indexName,
FolderId = doc.FolderId,
CreatedAt = doc.CreatedAt,
SizeBeforeCompression = null,
IsCompressed = false,
LastCompressedAt = doc.LastCompressedAt
};
await indexBindings.UpdateDocumentsAsync(new List<Models.Index> { document });
_logger.LogInformation($"Successfully decompressed index '{indexName}'");
}
catch (Exception ex)
{
_logger.LogError($"Failed to decompress index '{indexName}': {ex.Message}");
throw;
}
}
private static string FormatBytes(long bytes)
{
string[] sizes = { "B", "KB", "MB", "GB", "TB" };
int order = 0;
double len = bytes;
while (len >= 1024 && order < sizes.Length - 1)
{
order++;
len = len / 1024;
}
return $"{len:0.##} {sizes[order]}";
}
private async Task<string> GetIndexFilePath(string folderId)
{
var doc = await _client.GetIndexAsync("index_bindings").Result.GetDocumentAsync<Models.Index>(folderId);
return Path.Combine(_indexBasePath, doc.FolderId);
}
private static string GenerateApiKey(int length = 64)
{
if (length <= 0)
{
throw new ArgumentException("Length must be greater than zero.", nameof(length));
}
const string allowedChars = "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789-_";
var apiKey = new StringBuilder();
var randomBytes = new byte[length];
using (var rng = RandomNumberGenerator.Create())
{
rng.GetBytes(randomBytes);
}
foreach (var randomByte in randomBytes)
{
apiKey.Append(allowedChars[randomByte % allowedChars.Length]);
}
return apiKey.ToString();
}
private async Task EnsureRepositoryIndexExists()
{
Task.Delay(5000).Wait();
var indexes = _client.GetAllIndexesAsync().Result;
if (indexes.Results.Any(x => x.Uid == "index_bindings"))
{
_logger.LogInformation("index bindings already exists, skipping creation of index.");
return;
}
_logger.LogInformation("Creating index bindings for SDK to track indexs...");
_client.CreateIndexAsync("index_bindings").Wait();
}
private string GetMeilisearchBinaryName()
{
if (RuntimeInformation.IsOSPlatform(OSPlatform.Windows))
{
return "meilisearch-windows.exe";
}
if (RuntimeInformation.IsOSPlatform(OSPlatform.OSX))
{
return RuntimeInformation.ProcessArchitecture == Architecture.Arm64
? "meilisearch-macos-arm"
: "meilisearch-macos-x64";
}
if (RuntimeInformation.IsOSPlatform(OSPlatform.Linux))
{
return RuntimeInformation.ProcessArchitecture == Architecture.Arm64
? "meilisearch-linux-arm"
: "meilisearch-linux-x64";
}
throw new PlatformNotSupportedException("Current platform and architecture combination is not supported");
}
private async Task StartMeilisearch()
{
var binaryName = GetMeilisearchBinaryName();
var binaryPath = Path.Combine(AppContext.BaseDirectory, binaryName);
if (!File.Exists(binaryPath))
{
_logger.LogError($"Meilisearch binary not found at: {binaryPath}");
throw new FileNotFoundException($"Could not find Meilisearch binary: {binaryName}");
}
// Set execute permissions on Unix-like systems
if (!RuntimeInformation.IsOSPlatform(OSPlatform.Windows))
{
try
{
var chmod = Process.Start("chmod", $"+x {binaryPath}");
chmod?.WaitForExit();
}
catch (Exception ex)
{
_logger.LogWarning($"Failed to set execute permissions on binary: {ex.Message}");
}
}
var host = RuntimeInformation.IsOSPlatform(OSPlatform.Windows)
? "localhost"
: "127.0.0.1";
var args = "--http-addr " + host + ":" + _meiliConfiguration.MeiliPort
+ " --env development --db-path "
+ Path.Combine(AppContext.BaseDirectory, "db");
//+ " --master-key " + _apiKey; note: bring back, masterkey not working when compressingi ndexs
var processStartInfo = new ProcessStartInfo
{
FileName = binaryPath,
Arguments = args,
UseShellExecute = false,
RedirectStandardOutput = false,
RedirectStandardError = false,
CreateNoWindow = false,
};
process = new Process { StartInfo = processStartInfo, EnableRaisingEvents = true};
process.Exited += (sender, e) =>
{
_logger.LogWarning("Meilisearch process has exited. Restarting...");
_ = StartMeilisearch(); // Restart the process
};
process.Disposed += (sender, eventArgs) =>
{
_logger.LogWarning("Meilisearch process has exited. Restarting...");
_ = StartMeilisearch(); // Restart the process
};
try
{
process.Start();
await Task.Delay(5000); // Wait for the process to start
_logger.LogInformation($"Started Meilisearch process using binary: {binaryName}");
}
catch (Exception ex)
{
_logger.LogError($"Failed to start Meilisearch: {ex.Message}");
throw;
}
}
private void CheckIfNeedDocumentSync(object? sender, NotifyCollectionChangedEventArgs e)
{
CheckIfNeedDocumentSync(THRESHOLD);
}
private void CheckIfNeedDocumentSync(int? threshold = null)
{
threshold = threshold ?? 0;
if(_documentCollection.Count>=threshold)
{
_logger.LogInformation("Threshold reached, syncing metadata to server.");
SyncDocumentsToServer();
}
}
private void SyncDocumentsToServer()
{
var grouped = _documentCollection.GroupBy(pair => pair.Key)
.ToDictionary(group => group.Key, group => group.Select(pair => pair.Value).ToList());
foreach (var repository in grouped)
{
var repositoryIndex = _client.GetIndexAsync(repository.Key).Result;
var documents = _documentCollection.ToList();
_documentCollection.Clear();
var result = RetryAsync(() => repositoryIndex.AddDocumentsAsync(repository.Value, "id")).Result;
}
}
private async Task<T> RetryAsync<T>(Func<Task<T>> action, int maxRetries = 3, int delayMilliseconds = 1000)
{
int retryCount = 0;
while (true)
{
try
{
return await action();
}
catch (Exception ex)
{
retryCount++;
if (retryCount >= maxRetries)
{
_logger.LogError($"Operation failed after {maxRetries} retries: {ex.Message}");
throw;
}
_logger.LogWarning($"Operation failed, retrying {retryCount}/{maxRetries}...");
await Task.Delay(delayMilliseconds);
}
}
}
private static string[] GetPropertiesInCamelCase<T>()
{
var properties = typeof(T).GetProperties(BindingFlags.Public | BindingFlags.Instance);
return properties
.Select(p => ToCamelCase(p.Name))
.ToArray();
}
private static string ToCamelCase(string input)
{
if (string.IsNullOrEmpty(input) || char.IsLower(input[0]))
{
return input;
}
return char.ToLowerInvariant(input[0]) + input.Substring(1);
}
#endregion
#region Public
public virtual MeilisearchUsageStats GetProcessResourceUsage()
{
if (process == null || process.HasExited)
{
return new MeilisearchUsageStats();
}
try
{
process.Refresh();
// CPU Usage
TimeSpan cpuUsage = process.TotalProcessorTime;
double cpuPercentage = cpuUsage.TotalMilliseconds / (Environment.ProcessorCount * process.TotalProcessorTime.TotalMilliseconds) * 100;
// Memory Usage (in bytes)
long memoryUsage = process.WorkingSet64;
// Disk Usage (in bytes) - reads and writes
long diskRead = process.StartInfo.RedirectStandardOutput ? process.StandardOutput.BaseStream.Length : 0;
long diskWrite = process.StartInfo.RedirectStandardError ? process.StandardError.BaseStream.Length : 0;
return new MeilisearchUsageStats
{
CpuPercentage = Math.Round(cpuPercentage, 2),
MemoryUsageBytes = memoryUsage,
DiskReadBytes = diskRead,
DiskWriteBytes = diskWrite,
ProcessId = process.Id,
ThreadCount = process.Threads.Count
};
}
catch (InvalidOperationException)
{
// Process has exited
return new MeilisearchUsageStats();
}
catch (Exception ex)
{
_logger.LogError($"Error getting process resource usage: {ex.Message}");
return new MeilisearchUsageStats();
}
}
public virtual bool IsMeilisearchRunning()
{
var processName = RuntimeInformation.IsOSPlatform(OSPlatform.Windows)
? "meilisearch-windows"
: "meilisearch";
var processes = Process.GetProcessesByName(processName);
return processes.Any();
}
public virtual async Task SetIndexEnabled(string indexName, bool enabled)
{
_logger.LogTrace($"Updating index '{indexName}' status to {enabled}...");
if(enabled)
{
await DecompressIndex(indexName);
}
else
{
await CompressIndex(indexName);
}
_logger.LogInformation($"Updated index '{indexName}' status to {enabled}.");
}
public virtual void CreateIndex<T>(string indexName) where T : IDocument
{
if(GetAllIndexes().Count>=1000)
{
_logger.LogWarning("Maximum number of indexes reached, cannot create new index.");
return;
}
var indexes = _client.GetAllIndexesAsync().Result;
if (indexes.Results.Any(x => x.Uid == indexName))
{
_logger.LogWarning($"Index {indexName} already exists, skipping creation of index.");
return;
}
var foldersBefore = Directory.GetDirectories(_indexBasePath);
_logger.LogTrace($"Creating index '{indexName}'...");
_client.CreateIndexAsync(indexName).Wait();
Task.Delay(5000).Wait();
var index = _client.GetIndexAsync(indexName).Result;
var test = index.GetFilterableAttributesAsync().Result;
index.UpdateFilterableAttributesAsync(GetPropertiesInCamelCase<T>()).Wait();
_logger.LogInformation($"{indexName} index created!");
var foldersAfter = Directory.GetDirectories(_indexBasePath);
var folder = Path.GetFileName(foldersAfter.Except(foldersBefore).FirstOrDefault());
if (folder != null)
{
_client.GetIndexAsync("index_bindings").Result.AddDocumentsAsync(new List<Models.Index>
{
new()
{
Name = indexName,
CreatedAt = DateTime.UtcNow,
SizeBeforeCompression = null,
FolderId = folder,
IsCompressed = false,
LastCompressedAt = null
}
}, "name").Wait();
}
}
public virtual void DeleteIndex(string indexName)
{
var indexes = _client.GetAllIndexesAsync().Result;
if (indexes.Results.Any(x => x.Uid == indexName)==false)
{
_logger.LogWarning($"Index '{indexName}' does not exist, skipping deletion of index.");
return;
}
_logger.LogTrace($"Deleting index '{indexName}'...");
_client.DeleteIndexAsync(indexName).Wait();
_client.GetIndexAsync("index_bindings").Result.DeleteOneDocumentAsync(indexName).Wait();
_logger.LogInformation($"Deleted index '{indexName}'!");
}
/// <summary>Deletes an existing index</summary>
/// <param name="indexName">Name of the index to delete</param>
public Task DeleteIndex(string indexName) => IndexManager.DeleteIndexAsync(indexName);
/// <summary>Adds a document to an index</summary>
/// <param name="repositoryId">ID of the target repository/index</param>
/// <param name="document">Document to add</param>
/// <param name="autoCommit">If true, immediately syncs to server</param>
public void AddDocument(string repositoryId, IDocument document, bool autoCommit = false)
{
_logger.LogTrace($"Adding document '{document.Id}' to repository '{repositoryId}'...");
_documentCollection.Add(new KeyValuePair<string, IDocument>(repositoryId, document));
_logger.LogInformation($"Document {document.Id} added to collection.");
if (autoCommit)
{
SyncDocumentsToServer();
}
}
public virtual List<string> GetAllIndexes()
{
_logger.LogTrace("Fetching all indexes from Meilisearch server created with the SDK...");
var result = _client.GetAllIndexesAsync().Result.Results.Select(x => x.Uid).Where(x=>x!="index_bindings").ToList();
_logger.LogInformation($"Fetched {result.Count} indexes from Meilisearch server.");
return result;
}
public virtual long GetIndexStorageUsage(string indexName, bool useCompressedSize = true)
{
var doc = _client.GetIndexAsync("index_bindings").Result.GetDocumentAsync<Models.Index>(indexName).Result;
if (doc.IsCompressed)
{
if(!useCompressedSize)
return doc.SizeBeforeCompression ?? 0;
var indexPath = GetIndexFilePath(indexName).Result+".zip";
if (!File.Exists(indexPath))
{
_logger.LogWarning($"Compressed index not found at: {indexPath}");
return 0;
}
return new FileInfo(indexPath).Length;
}
var path = Path.Combine(_indexBasePath, doc.FolderId);
if (!Directory.Exists(path))
{
_logger.LogWarning($"Index directory not found at: {path}");
return 0;
}
return new DirectoryInfo(path).GetFiles().Sum(f => f.Length);
}
public virtual long GetTotalStorageUsage(bool useCompressedSize = true)
{
var result = _client.GetIndexAsync("index_bindings").Result.GetDocumentsAsync<Models.Index>(new DocumentsQuery(){Limit = 1000}).Result;
var total = 0L;
foreach (var index in result.Results)
{
var indexPath = GetIndexFilePath(index.Name).Result+".zip";
if (index.IsCompressed)
if (useCompressedSize)
total += new FileInfo(indexPath).Length;
else
total += index.SizeBeforeCompression ?? 0;
else
total += index.IsCompressed ? index.SizeBeforeCompression ?? 0 : new DirectoryInfo(Path.Combine(_indexBasePath, index.FolderId)).GetFiles().Sum(f => f.Length);
}
return total;
}
public async void Start()
{
await StartMeilisearch();
}
public virtual void Stop()
{
process?.Kill();
}
=> DocumentManager.AddDocument(repositoryId, document, autoCommit);
/// <summary>
/// Disposes the MeiliSearchService instance and its associated process manager
/// </summary>
public void Dispose()
{
CheckIfNeedDocumentSync();
Stop();
_httpClient.Dispose();
ProcessManager.Dispose();
}
#endregion
}

View File

@ -1,16 +1,51 @@
namespace meilisearch.NET.Models;
/// <summary>
/// Represents a Meilisearch index with its metadata and compression state.
/// </summary>
public class Index
{
/// <summary>
/// Gets or sets the name of the index.
/// </summary>
public string Name { get; set; } = string.Empty;
/// <summary>
/// Gets or sets the creation timestamp of the index in UTC.
/// </summary>
public DateTime CreatedAt { get; set; } = DateTime.UtcNow;
/// <summary>
/// Gets or sets the unique folder identifier where the index data is stored.
/// </summary>
public string FolderId { get; set; } = string.Empty;
/// <summary>
/// Gets or sets whether the index is currently compressed.
/// </summary>
public bool IsCompressed { get; set; } = false;
/// <summary>
/// Gets or sets the timestamp when the index was last compressed.
/// </summary>
public DateTime? LastCompressedAt { get; set; }
/// <summary>
/// Gets or sets the size of the index in bytes before compression.
/// </summary>
public long? SizeBeforeCompression { get; set; } = 0;
/// <summary>
/// Initializes a new instance of the Index class.
/// </summary>
public Index() { }
/// <summary>
/// Initializes a new instance of the Index class with specified parameters.
/// </summary>
/// <param name="name">The name of the index.</param>
/// <param name="folderId">The folder identifier where index data is stored.</param>
/// <param name="createdAt">The creation timestamp of the index.</param>
public Index(string name, string folderId, DateTime createdAt)
{
Name = name;

View File

@ -1,11 +1,37 @@
namespace meilisearch.NET.Models;
/// <summary>
/// Represents resource usage statistics for a Meilisearch process
/// </summary>
public class MeilisearchUsageStats
{
/// <summary>
/// CPU usage as a percentage (0-100)
/// </summary>
public double CpuPercentage { get; set; }
/// <summary>
/// Memory usage in bytes
/// </summary>
public long MemoryUsageBytes { get; set; }
/// <summary>
/// Total bytes read from disk
/// </summary>
public long DiskReadBytes { get; set; }
/// <summary>
/// Total bytes written to disk
/// </summary>
public long DiskWriteBytes { get; set; }
/// <summary>
/// Number of threads used by the process
/// </summary>
public int ThreadCount { get; set; }
/// <summary>
/// Process identifier
/// </summary>
public int ProcessId { get; set; }
}

View File

@ -0,0 +1,37 @@
namespace meilisearch.NET.Models;
/// <summary>
/// Represents resource usage statistics for a process at a point in time.
/// </summary>
public record ProcessResourceStats
{
/// <summary>
/// Gets the CPU usage percentage of the process.
/// </summary>
public double CpuPercentage { get; init; }
/// <summary>
/// Gets the memory usage in bytes of the process.
/// </summary>
public long MemoryUsageBytes { get; init; }
/// <summary>
/// Gets the total number of bytes read from disk by the process.
/// </summary>
public long DiskReadBytes { get; init; }
/// <summary>
/// Gets the total number of bytes written to disk by the process.
/// </summary>
public long DiskWriteBytes { get; init; }
/// <summary>
/// Gets the process identifier.
/// </summary>
public int ProcessId { get; init; }
/// <summary>
/// Gets the number of threads currently running in the process.
/// </summary>
public int ThreadCount { get; init; }
}

View File

@ -0,0 +1,126 @@
using System.Collections.ObjectModel;
using System.Collections.Specialized;
using Meilisearch;
using meilisearch.NET.Exceptions;
using meilisearch.NET.Interfaces;
using meilisearch.NET.Services.ProcessManagement;
using Microsoft.Extensions.Logging;
namespace meilisearch.NET.Services.DocumentManagement;
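/// <summary>
/// Default <see cref="IDocumentManager"/> implementation that buffers documents in memory and pushes them to the Meilisearch server in batches.
/// </summary>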
public class DocumentManager:IDocumentManager
{
private readonly ILogger<DocumentManager> _logger;
private readonly MeiliSearchProcessManager _meiliSearchProcessManager;
private readonly MeilisearchClient _client;
private const int THRESHOLD = 100;
private ObservableCollection<KeyValuePair<string,IDocument>> _documentCollection;
public DocumentManager(MeilisearchClient client, ILogger<DocumentManager> logger, MeiliSearchProcessManager meiliSearchProcessManager)
{
_meiliSearchProcessManager = meiliSearchProcessManager;
_logger = logger;
_client = client;
_documentCollection = new ObservableCollection<KeyValuePair<string,IDocument>>();
_documentCollection.CollectionChanged += CheckIfNeedDocumentSync;
}
public async Task AddDocumentAsync(string repositoryId, IDocument document, bool autoCommit = false)
{
if (!_meiliSearchProcessManager.IsProcessRunning())
throw new ProcessNotRunningException();
_logger.LogTrace($"Adding document '{document.Id}' to repository '{repositoryId}'...");
_documentCollection.Add(new KeyValuePair<string, IDocument>(repositoryId, document));
_logger.LogInformation($"Document {document.Id} added to collection.");
if (autoCommit)
{
await SyncDocumentsToServerAsync();
}
}
public void AddDocument(string repositoryId, IDocument document, bool autoCommit = false)
{
if (!_meiliSearchProcessManager.IsProcessRunning())
throw new ProcessNotRunningException();
_logger.LogTrace($"Adding document '{document.Id}' to repository '{repositoryId}'...");
_documentCollection.Add(new KeyValuePair<string, IDocument>(repositoryId, document));
_logger.LogInformation($"Document {document.Id} added to collection.");
if (autoCommit)
{
SyncDocumentsToServer();
}
}
public void SyncDocumentsToServer()
{
if (!_meiliSearchProcessManager.IsProcessRunning())
throw new ProcessNotRunningException();
var grouped = _documentCollection.GroupBy(pair => pair.Key)
.ToDictionary(group => group.Key, group => group.Select(pair => pair.Value).ToList());
foreach (var repository in grouped)
{
var repositoryIndex = _client.GetIndexAsync(repository.Key).Result;
_documentCollection.Clear();
var result = RetryAsync(() => repositoryIndex.AddDocumentsAsync(repository.Value, "id")).Result;
}
}
public async Task SyncDocumentsToServerAsync()
{
if (!_meiliSearchProcessManager.IsProcessRunning())
throw new ProcessNotRunningException();
var grouped = _documentCollection.GroupBy(pair => pair.Key)
.ToDictionary(group => group.Key, group => group.Select(pair => pair.Value).ToList());
foreach (var repository in grouped)
{
var repositoryIndex = await _client.GetIndexAsync(repository.Key);
_documentCollection.Clear();
await RetryAsync(() => repositoryIndex.AddDocumentsAsync(repository.Value, "id"));
}
}
#region Private Methods
private void CheckIfNeedDocumentSync(object? sender, NotifyCollectionChangedEventArgs e)
{
CheckIfNeedDocumentSync(THRESHOLD);
}
private void CheckIfNeedDocumentSync(int? threshold = null)
{
threshold = threshold ?? 0;
if(_documentCollection.Count>=threshold)
{
_logger.LogInformation("Threshold reached, syncing metadata to server.");
SyncDocumentsToServer();
}
}
private async Task<T> RetryAsync<T>(Func<Task<T>> action, int maxRetries = 3, int delayMilliseconds = 1000)
{
int retryCount = 0;
while (true)
{
try
{
return await action();
}
catch (Exception ex)
{
retryCount++;
if (retryCount >= maxRetries)
{
_logger.LogError($"Operation failed after {maxRetries} retries: {ex.Message}");
throw;
}
_logger.LogWarning($"Operation failed, retrying {retryCount}/{maxRetries}...");
await Task.Delay(delayMilliseconds);
}
}
}
#endregion
}
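As a usage sketch of the batching behavior above (reusing the hypothetical ProductDocument from the earlier sketch): documents accumulate until the 100-document threshold triggers a sync, or they can be pushed with autoCommit or flushed explicitly:

using System.Threading.Tasks;
using meilisearch.NET.Services.DocumentManagement;

public static class DocumentManagerUsage
{
    public static async Task IndexProductsAsync(IDocumentManager documents)
    {
        // Buffered until the internal threshold (100) is reached...
        await documents.AddDocumentAsync("products", new ProductDocument { Id = "sku-1" });

        // ...pushed immediately with autoCommit...
        await documents.AddDocumentAsync("products", new ProductDocument { Id = "sku-2" }, autoCommit: true);

        // ...or flushed explicitly.
        await documents.SyncDocumentsToServerAsync();
    }
}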

View File

@ -0,0 +1,35 @@
using meilisearch.NET.Interfaces;
namespace meilisearch.NET.Services.DocumentManagement;
/// <summary>
/// Manages document operations for Meilisearch indexes.
/// </summary>
public interface IDocumentManager
{
/// <summary>
/// Adds a document to the specified repository.
/// </summary>
/// <param name="repositoryId">The ID of the repository to add the document to.</param>
/// <param name="document">The document to add.</param>
/// <param name="autoCommit">If true, automatically syncs the document to the server.</param>
void AddDocument(string repositoryId, IDocument document, bool autoCommit = false);
/// <summary>
/// Synchronizes all pending documents to the Meilisearch server.
/// </summary>
void SyncDocumentsToServer();
/// <summary>
/// Asynchronously adds a document to the specified repository.
/// </summary>
/// <param name="repositoryId">The ID of the repository to add the document to.</param>
/// <param name="document">The document to add.</param>
/// <param name="autoCommit">If true, automatically syncs the document to the server.</param>
Task AddDocumentAsync(string repositoryId, IDocument document, bool autoCommit = false);
/// <summary>
/// Asynchronously synchronizes all pending documents to the Meilisearch server.
/// </summary>
Task SyncDocumentsToServerAsync();
}
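Consumers would normally take a dependency on this interface rather than the concrete class. A hedged wiring sketch with Microsoft.Extensions.DependencyInjection; the lifetimes, the localhost address and the master key are assumptions, not something this changeset prescribes:

using Meilisearch;
using meilisearch.NET.Services.DocumentManagement;
using meilisearch.NET.Services.ProcessManagement;
using Microsoft.Extensions.DependencyInjection;

var services = new ServiceCollection();
services.AddLogging();
services.AddSingleton(new MeilisearchClient("http://localhost:7700", "masterKey"));
services.AddSingleton<MeiliSearchProcessManager>();   // also needs a MeiliSearchConfiguration registration
services.AddSingleton<IDocumentManager, DocumentManager>();

using var provider = services.BuildServiceProvider();
var documents = provider.GetRequiredService<IDocumentManager>();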

View File

@ -0,0 +1,85 @@
using meilisearch.NET.Interfaces;
namespace meilisearch.NET.Services.IndexManagement;
/// <summary>
/// Manages Meilisearch index operations including creation, deletion, and storage management.
/// </summary>
public interface IIndexManager
{
/// <summary>
/// Retrieves a list of all index names from the Meilisearch server.
/// </summary>
/// <returns>A list of index names.</returns>
Task<List<string>> GetAllIndexes();
/// <summary>
/// Asynchronously creates a new index with the specified name for the given document type.
/// </summary>
/// <typeparam name="T">The document type that implements IDocument.</typeparam>
/// <param name="indexName">The name of the index to create.</param>
Task CreateIndexAsync<T>(string indexName) where T : IDocument;
/// <summary>
/// Synchronously creates a new index with the specified name for the given document type.
/// </summary>
/// <typeparam name="T">The document type that implements IDocument.</typeparam>
/// <param name="indexName">The name of the index to create.</param>
void CreateIndex<T>(string indexName) where T : IDocument;
/// <summary>
/// Asynchronously deletes the specified index.
/// </summary>
/// <param name="indexName">The name of the index to delete.</param>
Task DeleteIndexAsync(string indexName);
/// <summary>
/// Synchronously deletes the specified index.
/// </summary>
/// <param name="indexName">The name of the index to delete.</param>
void DeleteIndex(string indexName);
/// <summary>
/// Asynchronously enables or disables an index by compressing or decompressing it.
/// </summary>
/// <param name="indexName">The name of the index to modify.</param>
/// <param name="enabled">True to enable (decompress), false to disable (compress).</param>
Task SetIndexEnabledAsync(string indexName, bool enabled);
/// <summary>
/// Synchronously enables or disables an index by compressing or decompressing it.
/// </summary>
/// <param name="indexName">The name of the index to modify.</param>
/// <param name="enabled">True to enable (decompress), false to disable (compress).</param>
void SetIndexEnabled(string indexName, bool enabled);
/// <summary>
/// Asynchronously gets the storage usage of a specific index.
/// </summary>
/// <param name="indexName">The name of the index.</param>
/// <param name="useCompressedSize">If true, returns compressed size for compressed indexes.</param>
/// <returns>The storage usage in bytes.</returns>
Task<long> GetIndexStorageUsageAsync(string indexName, bool useCompressedSize = true);
/// <summary>
/// Asynchronously gets the total storage usage of all indexes.
/// </summary>
/// <param name="useCompressedSize">If true, uses compressed sizes for compressed indexes.</param>
/// <returns>The total storage usage in bytes.</returns>
Task<long> GetTotalStorageUsageAsync(bool useCompressedSize = true);
/// <summary>
/// Synchronously gets the storage usage of a specific index.
/// </summary>
/// <param name="indexName">The name of the index.</param>
/// <param name="useCompressedSize">If true, returns compressed size for compressed indexes.</param>
/// <returns>The storage usage in bytes.</returns>
long GetIndexStorageUsage(string indexName, bool useCompressedSize = true);
/// <summary>
/// Synchronously gets the total storage usage of all indexes.
/// </summary>
/// <param name="useCompressedSize">If true, uses compressed sizes for compressed indexes.</param>
/// <returns>The total storage usage in bytes.</returns>
long GetTotalStorageUsage(bool useCompressedSize = true);
}
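A hedged end-to-end sketch of the contract above: create an index for a document type, inspect its on-disk footprint, and disable (compress) it when it is idle. ProductDocument is the hypothetical type from the earlier sketch, and the size formatting is only illustrative:

using System;
using System.Threading.Tasks;
using meilisearch.NET.Services.IndexManagement;

public static class IndexManagerUsage
{
    public static async Task ManageAsync(IIndexManager indexes)
    {
        await indexes.CreateIndexAsync<ProductDocument>("products");

        long bytes = await indexes.GetIndexStorageUsageAsync("products", useCompressedSize: false);
        Console.WriteLine($"products index uses {bytes / 1024.0 / 1024.0:F1} MB on disk");

        // Disabling compresses the index folder into a .zip and leaves a placeholder database in its place.
        await indexes.SetIndexEnabledAsync("products", enabled: false);
    }
}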

View File

@ -0,0 +1,401 @@
using System.IO.Compression;
using System.Reflection;
using Meilisearch;
using meilisearch.NET.Exceptions;
using meilisearch.NET.Interfaces;
using meilisearch.NET.Services.ProcessManagement;
using Meilisearch.QueryParameters;
using Microsoft.Extensions.Logging;
namespace meilisearch.NET.Services.IndexManagement;
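/// <summary>
/// Default <see cref="IIndexManager"/> implementation that creates, deletes, compresses and measures Meilisearch indexes on disk.
/// </summary>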
public class IndexManager:IIndexManager
{
private const string DefaultDataFilePath = "data.mdb";
private const string DefaultLockFilePath = "lock.mdb";
private readonly string _indexBasePath = Path.Combine(AppContext.BaseDirectory, "db", "indexes" );
private readonly ILogger<IndexManager> _logger;
private readonly MeilisearchClient _client;
private readonly MeiliSearchProcessManager _meiliSearchProcessManager;
public IndexManager(ILogger<IndexManager> logger, MeilisearchClient client, MeiliSearchProcessManager meiliSearchProcessManager)
{
_meiliSearchProcessManager = meiliSearchProcessManager;
_client = client;
_logger = logger;
}
public async Task<List<string>> GetAllIndexes()
{
_logger.LogTrace("Fetching all indexes from Meilisearch server created with the SDK...");
var result = (await _client.GetAllIndexesAsync()).Results.Select(x => x.Uid).Where(x => x != "index_bindings").ToList();
_logger.LogInformation($"Fetched {result.Count} indexes from Meilisearch server.");
return result;
}
public void CreateIndex<T>(string indexName) where T : IDocument
{
if (!_meiliSearchProcessManager.IsProcessRunning())
throw new ProcessNotRunningException();
var indexes = GetAllIndexes().Result;
if(indexes.Count>=1000)
throw new IndexLimitReachedException();
if (indexes.Any(x => x == indexName))
throw new IndexAlreadyExistsException(indexName);
var foldersBefore = Directory.GetDirectories(_indexBasePath);
_logger.LogTrace($"Creating index '{indexName}'...");
_client.CreateIndexAsync(indexName).Wait();
Task.Delay(5000).Wait();
var index = _client.GetIndexAsync(indexName).Result;
index.UpdateFilterableAttributesAsync(GetPropertiesInCamelCase<T>()).Wait();
_logger.LogInformation($"{indexName} index created!");
var foldersAfter = Directory.GetDirectories(_indexBasePath);
var folder = Path.GetFileName(foldersAfter.Except(foldersBefore).FirstOrDefault());
if (folder != null)
{
_client.GetIndexAsync("index_bindings").Result.AddDocumentsAsync(new List<Models.Index>
{
new()
{
Name = indexName,
CreatedAt = DateTime.UtcNow,
SizeBeforeCompression = null,
FolderId = folder,
IsCompressed = false,
LastCompressedAt = null
}
}, "name").Wait();
}
}
public async Task CreateIndexAsync<T>(string indexName) where T : IDocument
{
if (!_meiliSearchProcessManager.IsProcessRunning())
throw new ProcessNotRunningException();
var indexes = await GetAllIndexes();
if(indexes.Count>=1000)
throw new IndexLimitReachedException();
if (indexes.Any(x => x == indexName))
throw new IndexAlreadyExistsException(indexName);
var foldersBefore = Directory.GetDirectories(_indexBasePath);
_logger.LogTrace($"Creating index '{indexName}'...");
await _client.CreateIndexAsync(indexName);
await Task.Delay(5000);
var index = await _client.GetIndexAsync(indexName);
await index.UpdateFilterableAttributesAsync(GetPropertiesInCamelCase<T>());
_logger.LogInformation($"{indexName} index created!");
var foldersAfter = Directory.GetDirectories(_indexBasePath);
var folder = Path.GetFileName(foldersAfter.Except(foldersBefore).FirstOrDefault());
if (folder != null)
{
_client.GetIndexAsync("index_bindings").Result.AddDocumentsAsync(new List<Models.Index>
{
new()
{
Name = indexName,
CreatedAt = DateTime.UtcNow,
SizeBeforeCompression = null,
FolderId = folder,
IsCompressed = false,
LastCompressedAt = null
}
}, "name").Wait();
}
}
public void DeleteIndex(string indexName)
{
if (!_meiliSearchProcessManager.IsProcessRunning())
throw new ProcessNotRunningException();
var indexes = _client.GetAllIndexesAsync().Result;
if (indexes.Results.Any(x => x.Uid == indexName)==false)
{
throw new IndexNotFoundException(indexName);
}
_logger.LogTrace($"Deleting index '{indexName}'...");
_client.DeleteIndexAsync(indexName).Wait();
_client.GetIndexAsync("index_bindings").Result.DeleteOneDocumentAsync(indexName).Wait();
_logger.LogInformation($"Deleted index '{indexName}'!");
}
public async Task DeleteIndexAsync(string indexName)
{
if (!_meiliSearchProcessManager.IsProcessRunning())
throw new ProcessNotRunningException();
var indexes = await _client.GetAllIndexesAsync();
if (indexes.Results.Any(x => x.Uid == indexName)==false)
{
throw new IndexNotFoundException(indexName);
}
_logger.LogTrace($"Deleting index '{indexName}'...");
await _client.DeleteIndexAsync(indexName);
await _client.GetIndexAsync("index_bindings").Result.DeleteOneDocumentAsync(indexName);
_logger.LogInformation($"Deleted index '{indexName}'!");
}
public void SetIndexEnabled(string indexName, bool enabled)
{
if (!_meiliSearchProcessManager.IsProcessRunning())
throw new ProcessNotRunningException();
_logger.LogTrace($"Updating index '{indexName}' status to {enabled}...");
if(enabled)
{
DecompressIndex(indexName).Wait();
}
else
{
CompressIndex(indexName).Wait();
}
_logger.LogInformation($"Updated index '{indexName}' status to {enabled}.");
}
public async Task SetIndexEnabledAsync(string indexName, bool enabled)
{
if (!_meiliSearchProcessManager.IsProcessRunning())
throw new ProcessNotRunningException();
_logger.LogTrace($"Updating index '{indexName}' status to {enabled}...");
if(enabled)
{
await DecompressIndex(indexName);
}
else
{
await CompressIndex(indexName);
}
_logger.LogInformation($"Updated index '{indexName}' status to {enabled}.");
}
public long GetIndexStorageUsage(string indexName, bool useCompressedSize = true)
{
if (!_meiliSearchProcessManager.IsProcessRunning())
throw new ProcessNotRunningException();
var doc = _client.GetIndexAsync("index_bindings").Result.GetDocumentAsync<Models.Index>(indexName).Result;
if (doc.IsCompressed)
{
if(!useCompressedSize)
return doc.SizeBeforeCompression ?? 0;
var indexPath = GetIndexFilePath(indexName).Result+".zip";
if (!File.Exists(indexPath))
{
throw new FileNotFoundException($"Compressed index not found at: {indexPath}");
}
return new FileInfo(indexPath).Length;
}
var path = Path.Combine(_indexBasePath, doc.FolderId);
if (!Directory.Exists(path))
{
throw new DirectoryNotFoundException($"Index directory not found at: {path}");
}
return new DirectoryInfo(path).GetFiles().Sum(f => f.Length);
}
public long GetTotalStorageUsage(bool useCompressedSize = true)
{
if (!_meiliSearchProcessManager.IsProcessRunning())
throw new ProcessNotRunningException();
var result = _client.GetIndexAsync("index_bindings").Result.GetDocumentsAsync<Models.Index>(new DocumentsQuery(){Limit = 1000}).Result;
var total = 0L;
foreach (var index in result.Results)
{
var indexPath = GetIndexFilePath(index.Name).Result+".zip";
if (index.IsCompressed)
if (useCompressedSize)
total += new FileInfo(indexPath).Length;
else
total += index.SizeBeforeCompression ?? 0;
else
total += index.IsCompressed ? index.SizeBeforeCompression ?? 0 : new DirectoryInfo(Path.Combine(_indexBasePath, index.FolderId)).GetFiles().Sum(f => f.Length);
}
return total;
}
public async Task<long> GetIndexStorageUsageAsync(string indexName, bool useCompressedSize = true)
{
if (!_meiliSearchProcessManager.IsProcessRunning())
throw new ProcessNotRunningException();
var doc = _client.GetIndexAsync("index_bindings").Result.GetDocumentAsync<Models.Index>(indexName).Result;
if (doc.IsCompressed)
{
if(!useCompressedSize)
return doc.SizeBeforeCompression ?? 0;
var indexPath = await GetIndexFilePath(indexName)+".zip";
if (!File.Exists(indexPath))
{
throw new FileNotFoundException($"Compressed index not found at: {indexPath}");
}
return new FileInfo(indexPath).Length;
}
var path = Path.Combine(_indexBasePath, doc.FolderId);
if (!Directory.Exists(path))
{
throw new DirectoryNotFoundException($"Index directory not found at: {path}");
}
return new DirectoryInfo(path).GetFiles().Sum(f => f.Length);
}
public async Task<long> GetTotalStorageUsageAsync(bool useCompressedSize = true)
{
if (!_meiliSearchProcessManager.IsProcessRunning())
throw new ProcessNotRunningException();
var result = _client.GetIndexAsync("index_bindings").Result.GetDocumentsAsync<Models.Index>(new DocumentsQuery(){Limit = 1000}).Result;
var total = 0L;
foreach (var index in result.Results)
{
var indexPath = await GetIndexFilePath(index.Name)+".zip";
if (index.IsCompressed)
if (useCompressedSize)
total += new FileInfo(indexPath).Length;
else
total += index.SizeBeforeCompression ?? 0;
else
total += index.IsCompressed ? index.SizeBeforeCompression ?? 0 : new DirectoryInfo(Path.Combine(_indexBasePath, index.FolderId)).GetFiles().Sum(f => f.Length);
}
return total;
}
#region Private Methods
private static string[] GetPropertiesInCamelCase<T>()
{
var properties = typeof(T).GetProperties(BindingFlags.Public | BindingFlags.Instance);
return properties
.Select(p => ToCamelCase(p.Name))
.ToArray();
}
private static string ToCamelCase(string input)
{
if (string.IsNullOrEmpty(input) || char.IsLower(input[0]))
{
return input;
}
return char.ToLowerInvariant(input[0]) + input.Substring(1);
}
private async Task<string> GetIndexFilePath(string folderId)
{
var doc = await _client.GetIndexAsync("index_bindings").Result.GetDocumentAsync<Models.Index>(folderId);
return Path.Combine(_indexBasePath, doc.FolderId);
}
private async Task CompressIndex(string indexName)
{
var indexPath = await GetIndexFilePath(indexName);
if (!Directory.Exists(indexPath))
{
throw new DirectoryNotFoundException($"Index directory not found at: {indexPath}");
}
var compressedPath = indexPath + ".zip";
_logger.LogTrace($"Compressing index '{indexName}' to {compressedPath}...");
try
{
var size = new DirectoryInfo(indexPath).GetFiles().Sum(f => f.Length);
// Create temp directory to ensure we don't lose data if compression fails
var tempPath = compressedPath + ".temp";
ZipFile.CreateFromDirectory(indexPath, tempPath, CompressionLevel.SmallestSize, false);
// If compression succeeded, safely replace old zip (if exists) and remove original directory
if (File.Exists(compressedPath))
{
File.Delete(compressedPath);
}
File.Move(tempPath, compressedPath);
Directory.Delete(indexPath, true);
// Update index metadata
var indexBindings = await _client.GetIndexAsync("index_bindings");
var doc = await _client.GetIndexAsync("index_bindings").Result.GetDocumentAsync<Models.Index>(indexName);
var document = new Models.Index
{
Name = indexName,
IsCompressed = true,
FolderId = doc.FolderId,
CreatedAt = doc.CreatedAt,
SizeBeforeCompression = size,
LastCompressedAt = DateTime.UtcNow
};
await indexBindings.UpdateDocumentsAsync(new List<Models.Index> { document });
_logger.LogInformation($"Successfully compressed index '{indexName}'");
Directory.CreateDirectory(indexPath);
File.Copy(Path.Combine(AppContext.BaseDirectory,DefaultDataFilePath), Path.Combine(indexPath, DefaultDataFilePath));
File.Copy(Path.Combine(AppContext.BaseDirectory,DefaultLockFilePath), Path.Combine(indexPath, DefaultLockFilePath));
_logger.LogInformation($"Created placeholder data file for compressed index '{indexName}'");
}
catch (Exception ex)
{
throw new IndexCompressionException(indexName, "compress", ex);
}
_meiliSearchProcessManager.StopProcess();
}
private async Task DecompressIndex(string indexName)
{
var compressedPath = await GetIndexFilePath(indexName) + ".zip";
var extractPath = await GetIndexFilePath(indexName);
if (!File.Exists(compressedPath))
{
throw new FileNotFoundException($"Compressed index not found at: {compressedPath}");
}
_logger.LogTrace($"Decompressing index '{indexName}' to {extractPath}...");
try
{
if (Directory.Exists(extractPath))
{
Directory.Delete(extractPath, true);
}
// Create temp directory to ensure we don't lose data if decompression fails
var tempPath = extractPath + ".temp";
ZipFile.ExtractToDirectory(compressedPath, tempPath);
// If decompression succeeded, safely move to final location
if (Directory.Exists(extractPath))
{
Directory.Delete(extractPath, true);
}
Directory.Move(tempPath, extractPath);
File.Delete(compressedPath);
// Update index metadata
var indexBindings = await _client.GetIndexAsync("index_bindings");
var doc = await _client.GetIndexAsync("index_bindings").Result.GetDocumentAsync<Models.Index>(indexName);
var document = new Models.Index
{
Name = indexName,
FolderId = doc.FolderId,
CreatedAt = doc.CreatedAt,
SizeBeforeCompression = null,
IsCompressed = false,
LastCompressedAt = doc.LastCompressedAt
};
await indexBindings.UpdateDocumentsAsync(new List<Models.Index> { document });
_logger.LogInformation($"Successfully decompressed index '{indexName}'");
}
catch (Exception ex)
{
throw new IndexCompressionException(indexName, "decompress", ex);
}
}
#endregion
}
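The filterable attributes sent to Meilisearch are derived from the document type's public instance properties, lower-camel-cased by ToCamelCase above. A hypothetical document type makes the mapping concrete:

using System;
using meilisearch.NET.Interfaces;

// Hypothetical document; only the property names matter for the filterable-attribute mapping.
public class MovieDocument : IDocument
{
    public string Id { get; set; } = string.Empty;
    public string Title { get; set; } = string.Empty;
    public DateTime ReleaseDate { get; set; }
}

// CreateIndexAsync<MovieDocument>("movies") would therefore end up calling
// index.UpdateFilterableAttributesAsync(new[] { "id", "title", "releaseDate" });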

View File

@ -0,0 +1,97 @@
using System.Diagnostics;
using meilisearch.NET.Exceptions;
using meilisearch.NET.Models;
using Microsoft.Extensions.Logging;
namespace meilisearch.NET.Services.ProcessManagement;
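/// <summary>
/// Base implementation of <see cref="IProcessManager"/> that handles starting, stopping, restarting and monitoring an external process.
/// </summary>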
public abstract class BaseProcessManager : IProcessManager
{
protected Process? Process;
protected readonly ILogger Logger;
protected BaseProcessManager(ILogger logger)
{
Logger = logger;
}
protected abstract ProcessStartInfo CreateProcessStartInfo();
protected abstract string GetProcessName();
public virtual async Task StartProcess()
{
var processStartInfo = CreateProcessStartInfo();
Process = new Process { StartInfo = processStartInfo, EnableRaisingEvents = true };
Process.Exited += (sender, e) =>
{
Logger.LogWarning($"{GetProcessName()} process has exited. Restarting...");
_ = StartProcess();
};
try
{
Process.Start();
await Task.Delay(5000); // Wait for process to start
Logger.LogInformation($"Started {GetProcessName()} process");
}
catch (Exception ex)
{
throw new ProcessStartException(ex,ex.Message);
}
}
public virtual void StopProcess()
{
try
{
Process?.Kill();
}
catch (Exception ex)
{
Logger.LogError($"Error stopping {GetProcessName()} process: {ex.Message}", ex);
}
}
public virtual bool IsProcessRunning()
{
return Process is { HasExited: false };
}
public virtual ProcessResourceStats GetResourceUsage()
{
if (Process == null || Process.HasExited)
{
return new ProcessResourceStats();
}
try
{
Process.Refresh();
// Average CPU usage over the process lifetime: total processor time divided by wall-clock time per core.
TimeSpan cpuUsage = Process.TotalProcessorTime;
double wallClockMs = (DateTime.Now - Process.StartTime).TotalMilliseconds;
double cpuPercentage = wallClockMs > 0
? cpuUsage.TotalMilliseconds / (Environment.ProcessorCount * wallClockMs) * 100
: 0;
return new ProcessResourceStats
{
CpuPercentage = Math.Round(cpuPercentage, 2),
MemoryUsageBytes = Process.WorkingSet64,
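// Note: redirected stream lengths are only a rough proxy for disk I/O; they stay 0 here because
// MeiliSearchProcessManager does not redirect stdout/stderr.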
DiskReadBytes = Process.StartInfo.RedirectStandardOutput ? Process.StandardOutput.BaseStream.Length : 0,
DiskWriteBytes = Process.StartInfo.RedirectStandardError ? Process.StandardError.BaseStream.Length : 0,
ProcessId = Process.Id,
ThreadCount = Process.Threads.Count
};
}
catch (Exception ex)
{
Logger.LogError($"Error getting process resource usage: {ex.Message}");
return new ProcessResourceStats();
}
}
public void Dispose()
{
StopProcess();
Process?.Dispose();
}
}
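Because the class is abstract, a managed binary only needs a display name and a ProcessStartInfo factory. A minimal hypothetical subclass (the binary path and arguments are invented for illustration):

using System.Diagnostics;
using meilisearch.NET.Services.ProcessManagement;
using Microsoft.Extensions.Logging;

public class SidecarProcessManager : BaseProcessManager
{
    public SidecarProcessManager(ILogger<SidecarProcessManager> logger) : base(logger) { }

    protected override string GetProcessName() => "Sidecar";

    protected override ProcessStartInfo CreateProcessStartInfo() => new()
    {
        FileName = "/usr/local/bin/sidecar",   // hypothetical binary
        Arguments = "--port 9000",
        UseShellExecute = false,
        CreateNoWindow = true,
    };
}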

View File

@ -0,0 +1,33 @@
using System.Diagnostics;
using meilisearch.NET.Models;
namespace meilisearch.NET.Services.ProcessManagement;
/// <summary>
/// Defines the contract for managing a process lifecycle and monitoring its resources.
/// </summary>
public interface IProcessManager : IDisposable
{
/// <summary>
/// Starts the managed process asynchronously.
/// </summary>
/// <returns>A task representing the asynchronous operation.</returns>
Task StartProcess();
/// <summary>
/// Stops the managed process.
/// </summary>
void StopProcess();
/// <summary>
/// Checks if the managed process is currently running.
/// </summary>
/// <returns>True if the process is running, false otherwise.</returns>
bool IsProcessRunning();
/// <summary>
/// Retrieves current resource usage statistics for the managed process.
/// </summary>
/// <returns>A ProcessResourceStats object containing resource usage information.</returns>
ProcessResourceStats GetResourceUsage();
}
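A small sketch of polling the interface for resource usage; the one-second interval and the log format are arbitrary choices:

using System;
using System.Threading;
using System.Threading.Tasks;
using meilisearch.NET.Services.ProcessManagement;

public static class ProcessMonitor
{
    public static async Task WatchAsync(IProcessManager manager, CancellationToken token)
    {
        while (!token.IsCancellationRequested && manager.IsProcessRunning())
        {
            var stats = manager.GetResourceUsage();
            Console.WriteLine(
                $"PID {stats.ProcessId}: {stats.CpuPercentage}% CPU, {stats.MemoryUsageBytes / (1024 * 1024)} MB, {stats.ThreadCount} threads");
            await Task.Delay(TimeSpan.FromSeconds(1), token);
        }
    }
}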

View File

@ -0,0 +1,83 @@
using System.Diagnostics;
using System.Runtime.InteropServices;
using Microsoft.Extensions.Logging;
using meilisearch.NET.Configurations;
namespace meilisearch.NET.Services.ProcessManagement;
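/// <summary>
/// <see cref="BaseProcessManager"/> implementation that launches the bundled Meilisearch binary for the current platform and architecture.
/// </summary>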
public class MeiliSearchProcessManager : BaseProcessManager
{
private readonly MeiliSearchConfiguration _configuration;
private readonly string _binaryPath;
public MeiliSearchProcessManager(
ILogger<MeiliSearchProcessManager> logger,
MeiliSearchConfiguration configuration) : base(logger)
{
_configuration = configuration;
_binaryPath = Path.Combine(AppContext.BaseDirectory, GetMeilisearchBinaryName());
}
protected override string GetProcessName() => "Meilisearch";
protected override ProcessStartInfo CreateProcessStartInfo()
{
if (!File.Exists(_binaryPath))
{
throw new FileNotFoundException($"Could not find Meilisearch binary: {_binaryPath}");
}
SetExecutePermissionsIfNeeded();
var host = RuntimeInformation.IsOSPlatform(OSPlatform.Windows)
? "localhost"
: "127.0.0.1";
var args = $"--http-addr {host}:{_configuration.MeiliPort} " +
"--env development " +
$"--db-path {Path.Combine(AppContext.BaseDirectory, "db")}";
return new ProcessStartInfo
{
FileName = _binaryPath,
Arguments = args,
UseShellExecute = false,
RedirectStandardOutput = false,
RedirectStandardError = false,
CreateNoWindow = false,
};
}
private string GetMeilisearchBinaryName()
{
if (RuntimeInformation.IsOSPlatform(OSPlatform.Windows))
return "meilisearch-windows.exe";
if (RuntimeInformation.IsOSPlatform(OSPlatform.OSX))
return RuntimeInformation.ProcessArchitecture == Architecture.Arm64
? "meilisearch-macos-arm"
: "meilisearch-macos-x64";
if (RuntimeInformation.IsOSPlatform(OSPlatform.Linux))
return RuntimeInformation.ProcessArchitecture == Architecture.Arm64
? "meilisearch-linux-arm"
: "meilisearch-linux-x64";
throw new PlatformNotSupportedException("Current platform and architecture combination is not supported");
}
private void SetExecutePermissionsIfNeeded()
{
if (RuntimeInformation.IsOSPlatform(OSPlatform.Windows)) return;
try
{
var chmod = Process.Start("chmod", $"+x {_binaryPath}");
chmod?.WaitForExit();
}
catch (Exception ex)
{
Logger.LogWarning($"Failed to set execute permissions on binary: {ex.Message}");
}
}
}
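Putting the pieces together, a hedged bootstrap sketch: start the bundled binary, then talk to it with the official MeilisearchClient. Only the MeiliPort property of MeiliSearchConfiguration is visible in this diff, so how the configuration object is built is an assumption:

using System.Threading.Tasks;
using Meilisearch;
using meilisearch.NET.Configurations;
using meilisearch.NET.Services.ProcessManagement;
using Microsoft.Extensions.Logging;

public static class Bootstrap
{
    public static async Task<MeilisearchClient> StartAsync(ILoggerFactory loggerFactory)
    {
        // Assumed shape: the configuration at least carries the HTTP port used by CreateProcessStartInfo.
        var config = new MeiliSearchConfiguration { MeiliPort = 7700 };

        var manager = new MeiliSearchProcessManager(loggerFactory.CreateLogger<MeiliSearchProcessManager>(), config);
        await manager.StartProcess();

        // In development mode the local instance can be reached without an API key.
        return new MeilisearchClient("http://127.0.0.1:7700");
    }
}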