Channel-based work orchestration library with autoscaling, resilience, health checks, and OpenTelemetry support.
$ dotnet add package LevelUp.Bifrost.Core

A high-performance, production-ready Channel-based work orchestration library for .NET 10. Provides bounded work queues, worker pool management, autoscaling, resilience, and comprehensive observability.
ValueTask-based enqueue with no per-item allocations

| Package | Description | NuGet |
|---|---|---|
| LevelUp.Bifrost.Core | Core abstractions with zero dependencies | - |
| LevelUp.Bifrost | Main implementation with Channel-based orchestration | - |
| LevelUp.Bifrost.HealthChecks | ASP.NET Core health check integration | - |
| LevelUp.Bifrost.OpenTelemetry | OpenTelemetry metrics support | - |
| LevelUp.Bifrost.Resilience | Polly resilience integration | - |
dotnet add package LevelUp.Bifrost
dotnet add package LevelUp.Bifrost.HealthChecks # optional
dotnet add package LevelUp.Bifrost.OpenTelemetry # optional
dotnet add package LevelUp.Bifrost.Resilience # optional
// 1. Define your work type
public record EmailJob(string To, string Subject, string Body);
// 2. Implement a handler
public class EmailHandler : IWorkHandler<EmailJob>
{
private readonly IEmailService _emailService;
public EmailHandler(IEmailService emailService)
{
_emailService = emailService;
}
public async ValueTask HandleAsync(EmailJob job, CancellationToken ct)
{
await _emailService.SendAsync(job.To, job.Subject, job.Body, ct);
}
}
// 3. Register services
services.AddWorkOrchestrator<EmailJob>(options =>
{
options.Capacity = 128;
options.WorkerCount = 2;
})
.WithHandler<EmailHandler>();
// 4. Enqueue work
public class MyService
{
private readonly IWorkOrchestrator<EmailJob> _orchestrator;
public MyService(IWorkOrchestrator<EmailJob> orchestrator)
{
_orchestrator = orchestrator;
}
public async Task SendWelcomeEmailAsync(string email)
{
await _orchestrator.EnqueueAsync(
new EmailJob(email, "Welcome!", "Thanks for signing up!"));
}
}
services.AddWorkOrchestrator<EmailJob>(options =>
{
options.Capacity = 128;
options.WorkerCount = 2;
})
.WithHandler<EmailHandler>()
.WithAutoscaling(scaling =>
{
scaling.MinWorkers = 1;
scaling.MaxWorkers = 16;
scaling.HighWatermark = 0.8;
scaling.LowWatermark = 0.3;
scaling.CooldownPeriod = TimeSpan.FromSeconds(30);
});
services.AddWorkOrchestrator<EmailJob>(/* ... */)
.WithHealthChecks();
// In your health check endpoint configuration
app.MapHealthChecks("/health");
services.AddWorkOrchestrator<EmailJob>(/* ... */)
.WithOpenTelemetry();
services.AddWorkOrchestrator<EmailJob>(/* ... */)
.WithResilience(resilience =>
{
resilience.RetryCount = 3;
resilience.Timeout = TimeSpan.FromSeconds(30);
resilience.UseExponentialBackoff = true;
});
The main interface for enqueuing work:
public interface IWorkOrchestrator<TWork> : IAsyncDisposable
{
// Core operations - zero-allocation hot path
ValueTask EnqueueAsync(TWork work, CancellationToken ct = default);
bool TryEnqueue(TWork work);
// Observability (non-allocating property access)
int PendingCount { get; }
int ActiveWorkers { get; }
int Capacity { get; }
// Lifecycle
Task StopAsync(CancellationToken ct = default);
// Escape hatch for advanced scenarios
ChannelWriter<TWork> Writer { get; }
}
Implement this interface to handle work items:
public interface IWorkHandler<TWork>
{
ValueTask HandleAsync(TWork work, CancellationToken ct);
}
Subscribe to orchestrator events:
// Enable event streaming
services.AddWorkOrchestrator<EmailJob>(/* ... */)
.WithEventStream();
// Subscribe to events
var eventOrchestrator = serviceProvider
.GetRequiredService<IEventStreamOrchestrator<EmailJob>>();
await foreach (var evt in eventOrchestrator.GetEventStreamAsync<WorkCompletedEvent<EmailJob>>(ct))
{
Console.WriteLine($"Work completed in {evt.Duration}");
}
# Clone the repository
git clone https://github.com/lvlup-sw/bifrost.git
cd bifrost
# Build
dotnet build
# Run tests
dotnet test
See the design document for architectural details and implementation notes.
This project is licensed under the Apache License 2.0 - see the LICENSE file for details.
Contributions are welcome! Please feel free to submit a Pull Request.