How do you log the HTTP requests that the Kentico Kontent .NET delivery SDK (https://github.com/Kentico/kontent-delivery-sdk-net) makes?
Specifically, what I am looking for is how to log the HTTP GET requests to delivery.kentico.ai (the endpoint that the content JSON is retrieved from).
You can enrich an HttpClient with a logging DelegatingHandler and inject it into the DeliveryClient.
Enrich:
public class LoggingHandler : DelegatingHandler
{
public LoggingHandler(HttpMessageHandler innerHandler, Microsoft.Extensions.Logging.ILogger logger)
: base(innerHandler)
{
Logger = logger;
}
public Microsoft.Extensions.Logging.ILogger Logger { get; }
protected override async Task<HttpResponseMessage> SendAsync(HttpRequestMessage request, CancellationToken cancellationToken)
{
Logger.LogInformation("{Method} {RequestUri}", request.Method, request.RequestUri);
HttpResponseMessage response = await base.SendAsync(request, cancellationToken);
Logger.LogInformation("{StatusCode} {ContentHeaders}", response.StatusCode, response.Content.Headers);
return response;
}
}
Use a logging provider, e.g. Serilog:
services.AddLogging(builder =>
{
// Add Serilog
builder.AddSerilog(new LoggerConfiguration()
.MinimumLevel.Information()
.WriteTo.File("logs\\log.log", rollingInterval: RollingInterval.Day)
.CreateLogger());
});
var serviceProvider = services.BuildServiceProvider();
var logger = serviceProvider.GetRequiredService<ILogger<Startup>>();
HttpClient httpClient = new HttpClient(new LoggingHandler(new HttpClientHandler(), logger));
var deliveryOptions = new DeliveryOptions();
Configuration.GetSection(nameof(DeliveryOptions)).Bind(deliveryOptions);
Inject:
var deliveryClient = DeliveryClientBuilder
.WithOptions(_ => deliveryOptions)
.WithHttpClient(httpClient)
.Build();
Additional resources:
https://merbla.com/2018/04/25/exploring-serilog-v2---using-the-http-client-factory/
UseSerilogRequestLogging() (see the Serilog.AspNetCore approach below)
Alternative approach using HttpClientFactory:
public void ConfigureServices(IServiceCollection services)
{
services.AddLogging(builder =>
{
// Add Serilog
builder.AddSerilog(new LoggerConfiguration()
.MinimumLevel.Information()
.WriteTo.File("logs\\log.log", rollingInterval: RollingInterval.Day)
.CreateLogger());
});
services.AddTransient<LoggingHandler>();
services.AddHttpClient("FactoryClient", c => { /* Do whatever else you wish here... */ })
.AddHttpMessageHandler<LoggingHandler>()
.AddTypedClient(c => DeliveryClientBuilder.WithOptions(...).WithHttpClient(c).Build());
services.AddControllersWithViews();
}
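With the typed client registered, the built IDeliveryClient can be constructor-injected wherever it is needed. A minimal sketch (the controller, the Article model, and the GetItemsAsync call are illustrative, not part of the original answer):
// "Article" is a hypothetical strongly typed content model used only for illustration.
public class Article
{
    public string Title { get; set; }
}
public class HomeController : Controller
{
    private readonly IDeliveryClient _deliveryClient;
    // The typed client created by AddTypedClient above is resolved from DI here.
    public HomeController(IDeliveryClient deliveryClient) => _deliveryClient = deliveryClient;
    public async Task<IActionResult> Index()
    {
        // Every request this client sends to the Delivery API goes through LoggingHandler.
        var response = await _deliveryClient.GetItemsAsync<Article>();
        return View(response.Items);
    }
}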
LoggingHandler.cs
public class LoggingHandler : DelegatingHandler
{
public Microsoft.Extensions.Logging.ILogger Logger { get; }
public LoggingHandler(ILoggerFactory loggerFactory) : base()
{
Logger = loggerFactory.CreateLogger<LoggingHandler>();
}
protected override async Task<HttpResponseMessage> SendAsync(HttpRequestMessage request, CancellationToken cancellationToken)
{
Logger.LogInformation("{Method} {RequestUri}", request.Method, request.RequestUri);
HttpResponseMessage response = await base.SendAsync(request, cancellationToken);
Logger.LogInformation("{StatusCode} {ContentHeaders}", response.StatusCode, response.Content.Headers);
return response;
}
}
Yet another approach is to use the Serilog.AspNetCore NuGet package: https://github.com/serilog/serilog-aspnetcore
Program.cs
public static IHostBuilder CreateHostBuilder(string[] args) =>
Host.CreateDefaultBuilder(args)
.ConfigureWebHostDefaults(webBuilder =>
{
webBuilder.UseStartup<Startup>();
})
.UseSerilog(); // <-- Add this line
Startup.cs
public void Configure(IApplicationBuilder app)
{
app.UseSerilogRequestLogging();
}
Example code: https://github.com/Kentico/kontent-sample-app-net/commit/44f1a0e6b245b7ad0be2f0e48a1085adbf80584a
Related
Is it possible to send a form-url-encoded request with JSON in the payload without actually URL-encoding the JSON? The payload is of the form jData=json.
I have tried various combinations of form headers and BodyInserters, but it is not working; sometimes the content header is wrong, and other times the body is plain JSON, which is again not what the server API expects.
I have also tried to overwrite the request content in the onRequestContent callback (see the commented-out code below), hoping that this interception would let me override the request, but the body is still not changed.
Please help.
public class FinvasiaAuthenticationProvider implements BrokerAuthenticationProvider {
private static Logger LOGGER = LoggerFactory.getLogger(FinvasiaAuthenticationProvider.class);
private final WebClient client;
private final FinvasiaProperties properties;
private final ObjectMapper mapper;
public FinvasiaAuthenticationProvider(FinvasiaProperties properties,
ObjectMapper mapper) {
this.client = this.jettyHttpClient();
this.properties = properties;
this.mapper = mapper;
}
@Override
public Mono<BrokerAuthentication> authenticate(BrokerAuthenticationRequest req) {
if (!(req instanceof FinvasiaAuthenticationRequest)) {
return Mono.error(IllegalArgumentException::new);
}
var endpoint = String.format("%s/%s", properties.baseUrl(), FinvasiaUrls.LOGIN_URL.url());
var payload = new FinvasiaAuthenticationRequestAdapter(((FinvasiaAuthenticationRequest) req));
String json;
try {
json = mapper.writeValueAsString(payload);
} catch (JsonProcessingException e) {
return Mono.error(e);
}
var hello = "Hello";
Map<String, String> map = new HashMap<>();
map.put("jData", json);
return client.post()
.uri(endpoint)
.header(HttpHeaders.CONTENT_TYPE, MediaType.APPLICATION_FORM_URLENCODED_VALUE)
.body(BodyInserters.fromFormData("jData", json))
.retrieve()
.onStatus(HttpStatus::is4xxClientError, clientResponse -> {
clientResponse.bodyToMono(String.class).log().subscribe();
return Mono.error(IllegalArgumentException::new);
})
.bodyToMono(String.class)
.map(response -> {
return new FinvasiaAuthentication("1234", Arrays.asList());
});
}
private Request enhance(Request inboundRequest) {
StringBuilder log = new StringBuilder();
inboundRequest.onRequestBegin(request -> log.append("Request: \n")
.append("URI: ")
.append(request.getURI())
.append("\n")
.append("Method: ")
.append(request.getMethod()));
inboundRequest.onRequestHeaders(request -> {
log.append("\nRequest Headers:\n");
for (HttpField header : request.getHeaders()) {
log.append("\n" + header.getName() + ":" + header.getValue());
}
log.append("\n\n");
});
// inboundRequest.onRequestContent((request, content) -> {
//
//
// String b = StandardCharsets.UTF_8.decode(content).toString();
// String[] parts = StringUtils.split(b, '=');
// String decoded = UriUtils.decode(parts[1], StandardCharsets.UTF_8);
//
// content.clear();
// content.put(String.format("%s=%s", parts[0],decoded ).getBytes(StandardCharsets.UTF_8));
//
// request.content(n)
//
// });
inboundRequest.onRequestContent((request, content) ->
log.append("Body: \n\t")
.append(StandardCharsets.UTF_8.decode(content)));
log.append("\n");
inboundRequest.onResponseBegin(response -> {
log.append("Response:\n")
.append("Status: ")
.append(response.getStatus())
.append("\n");
});
inboundRequest.onResponseHeaders(response -> {
log.append("\nResponse Headers:\n");
for (HttpField header : response.getHeaders()) {
log.append("\n" + header.getName() + ":" + header.getValue());
}
log.append("\n\n");
});
inboundRequest.onResponseContent((response, content) -> {
var bufferAsString = StandardCharsets.UTF_8.decode(content).toString();
log.append("Response Body:\n" + bufferAsString);
});
LOGGER.info("HTTP -> \n");
inboundRequest.onRequestSuccess(request -> LOGGER.info(log.toString()));
inboundRequest.onResponseSuccess(response -> LOGGER.info(log.toString()));
inboundRequest.onResponseFailure((response, throwable) -> LOGGER.info(log.toString()));
return inboundRequest;
}
public WebClient jettyHttpClient() {
SslContextFactory.Client sslContextFactory = new SslContextFactory.Client();
HttpClient httpClient = new HttpClient(sslContextFactory) {
@Override
public Request newRequest(URI uri) {
Request request = super.newRequest(uri);
return enhance(request);
}
};
return WebClient.builder().clientConnector(new JettyClientHttpConnector(httpClient))
// .defaultHeader(HttpHeaders.CONTENT_TYPE, MediaType.APPLICATION_FORM_URLENCODED_VALUE)
.build();
}
}
I have an Azure Function based on .NET Core 3.1. It triggers when a file is uploaded to the Azure Blob container, processes the CSV file, and updates the database.
Below is a code excerpt.
FileTrigger.cs
namespace aspnetcore_azurefun_blob
{
[StorageAccount("AzureWebJobsStorage")]
public class FileTrigger
{
#region Property
private readonly IFileProcessor fileProcessor;
#endregion
#region Constructor
public FileTrigger(IFileProcessor fileProcessor)
{
this.fileProcessor = fileProcessor;
}
#endregion
[FunctionName("FileTrigger")]
public void ProcessFilesFromSamplesContainer([BlobTrigger("samples-workitems/{name}")]Stream myBlob, string name, ILogger log, ExecutionContext context)
{
log.LogInformation("Function: ProcessFilesFromSamplesContainer is called");
log.LogInformation($"C# Blob trigger function Processed blob\n Name:{name} \n Size: {myBlob.Length} Bytes");
var result = fileProcessor.ProcessAsync(myBlob, name, log, context);
log.LogInformation($"Function Completed {result.Result} on {DateTime.Now.ToLongDateString()} # {DateTime.Now.ToShortTimeString()}.\n.");
}
}
}
FileProcessor.cs
namespace ClientCore.Processor
{
public interface IFileProcessor
{
Task<string> ProcessAsync(Stream myBlob, string name, ILogger log, ExecutionContext context);
}
public class FileProcessor : IFileProcessor
{
#region Property
private readonly IFileProcessingServiceFacade fileProcessingService;
#endregion
#region Constructor
public FileProcessor(IFileProcessingServiceFacade fileProcessingService)
{
this.fileProcessingService = fileProcessingService;
}
#endregion
#region Public Methods
public async Task<string> ProcessAsync(Stream myBlob, string name, ILogger log, ExecutionContext context)
{
var processingResult = fileProcessingService.ProcessAsync(myBlob, name, log, context);
return await Task.FromResult($"{processingResult.Result}");
}
#endregion
}
}
FileProcessingService.cs
namespace BusinessService.Services
{
public interface IFileProcessingServiceFacade
{
Task<string> ProcessAsync(Stream myBlob, string name, ILogger log, ExecutionContext context);
}
partial class FileProcessingServiceFacade : IFileProcessingServiceFacade
{
#region Public Methods
public async Task<string> ProcessAsync(Stream myBlob, string name, ILogger log, ExecutionContext context)
{
// Get the Contents from the CSV File
var csvData = GetCSVData(myBlob);
AppDbContext.FileRecords.Add(new FileRecords
{
FileName = name,
IsCompleted = DefaultValues.IsCompleted
});
AppDbContext.SaveChanges();
log.LogInformation($"Reading configuration from the configuration settings file: {Configurator.AzureSQLServerConfigurator.ConnnectionString}");
log.LogInformation("Database is updated..!");
return await Task.FromResult($"success");
}
#endregion
}
partial class FileProcessingServiceFacade : ServiceBase<FileProcessingServiceFacade>
{
#region Constructor
public FileProcessingServiceFacade(AppDbContext appDbContext, IOptions<AppConfigurator> configurator)
: base(appDbContext, configurator) { }
#endregion
#region Extract Data from CSV
protected static List<dynamic> GetCSVData(Stream jobData)
{
try
{
#region CsvHelper Mapping and Conversion
var csvHelperConfig = new CsvHelper.Configuration.CsvConfiguration(CultureInfo.InvariantCulture)
{
NewLine = Environment.NewLine,
PrepareHeaderForMatch = args => args.Header.ToUpper().Replace(" ", ""),
HasHeaderRecord = true
};
using var reader = new StreamReader(jobData);
using var csvHelper = new CsvReader(reader, CultureInfo.InvariantCulture);
csvHelper.Context.RegisterClassMap<StudentDetailsMap>();
var csvHelperRecords = csvHelper.GetRecords<StudentDetails>()
.Where(x => !string.IsNullOrWhiteSpace(x.Name))
.OrderBy(x => x.ID);
return csvHelperRecords.Cast<dynamic>().ToList();
#endregion
}
catch (Exception ex)
{
throw new Exception(ex.Message);
}
}
#endregion
}
}
How do I write unit tests for the above Azure Function app? Just a head start is required.
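A minimal head start (a sketch assuming xUnit, Moq, and NullLogger from Microsoft.Extensions.Logging.Abstractions, none of which appear in the question): because FileTrigger takes IFileProcessor through its constructor, it can be unit tested as a plain class, with no Azure storage involved.
public class FileTriggerTests
{
    [Fact]
    public void ProcessFilesFromSamplesContainer_PassesBlobToProcessor()
    {
        // Arrange: mock the processor the function depends on.
        var processor = new Mock<IFileProcessor>();
        processor.Setup(p => p.ProcessAsync(It.IsAny<Stream>(), It.IsAny<string>(),
                It.IsAny<ILogger>(), It.IsAny<ExecutionContext>()))
            .ReturnsAsync("success");
        var function = new FileTrigger(processor.Object);
        using var blob = new MemoryStream(Encoding.UTF8.GetBytes("ID,Name\n1,Alice"));
        // Act: call the trigger method directly with a fake blob stream.
        function.ProcessFilesFromSamplesContainer(blob, "students.csv",
            NullLogger.Instance, new ExecutionContext());
        // Assert: the blob and name were handed to the processor exactly once.
        processor.Verify(p => p.ProcessAsync(blob, "students.csv",
            It.IsAny<ILogger>(), It.IsAny<ExecutionContext>()), Times.Once);
    }
}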
I'm having a concurrency issue with MassTransit sagas.
I'm currently working on a POC with this flow:
1. One thread produces 100 events that are published to MassTransit in sequence.
2. When the saga is instantiated it publishes another event to MassTransit.
3. The new event is picked up by a consumer that performs some business logic and publishes one of two resulting events to MassTransit.
4. The resulting event from step 3 triggers a state change in the saga.
In step 4 I sometimes get exceptions like Microsoft.EntityFrameworkCore.DbUpdateConcurrencyException: The database operation was expected to affect 1 row(s), but actually affected 0 row(s); data may have been modified or deleted since entities were loaded. and the state change is not persisted.
Here is the business logic code:
public interface IInitialSagaEvent : CorrelatedBy<Guid> { }
public interface IExternalCheckRequest : CorrelatedBy<Guid> { }
public interface IExternalCheckOk : CorrelatedBy<Guid> { }
public interface IExternalCheckNotOk : CorrelatedBy<Guid> { }
public class MySaga : SagaStateMachineInstance
{
public Guid CorrelationId { get; set; }
public string CurrentState { get; set; }
public byte[] RowVersion { get; set; }
}
public class MyStateMachine : MassTransitStateMachine<MySaga>
{
public MyStateMachine()
{
InstanceState(instance => instance.CurrentState);
Initially(
When(InitialSagaEvent)
.ThenAsync(context => context.GetPayload<ConsumeContext>().Publish<IExternalCheckRequest>(new { context.Instance.CorrelationId }))
.TransitionTo(AwaitingExternalCheck)
);
During(AwaitingExternalCheck,
Ignore(InitialSagaEvent),
When(ExternalCheckOk)
.TransitionTo(CheckedOk),
When(ExternalCheckNotOk)
.TransitionTo(CheckedNotOk)
);
During(CheckedOk,
When(InitialSagaEvent)
.ThenAsync(context => context.GetPayload<ConsumeContext>().Publish<IExternalCheckRequest>(new { context.Instance.CorrelationId }))
.TransitionTo(AwaitingExternalCheck)
);
During(CheckedNotOk,
When(InitialSagaEvent)
.ThenAsync(context => context.GetPayload<ConsumeContext>().Publish<IExternalCheckRequest>(new { context.Instance.CorrelationId }))
.TransitionTo(AwaitingExternalCheck)
);
}
public Event<IInitialSagaEvent> InitialSagaEvent { get; private set; }
public Event<IExternalCheckOk> ExternalCheckOk { get; private set; }
public Event<IExternalCheckNotOk> ExternalCheckNotOk { get; private set; }
public State AwaitingExternalCheck { get; private set; }
public State CheckedOk { get; private set; }
public State CheckedNotOk { get; private set; }
}
public class ExternalCheckRequestConsumer : IConsumer<IExternalCheckRequest>
{
private readonly IExternalChecker externalChecker;
public ExternalCheckRequestConsumer(IExternalChecker externalChecker)
{
this.externalChecker = externalChecker;
}
public async Task Consume(ConsumeContext<IExternalCheckRequest> context)
{
var ok = await externalChecker.PerformCheck(context.Message, context.CancellationToken);
if (ok)
{
await context.Publish<IExternalCheckOk>(new { context.Message.CorrelationId }, context.CancellationToken);
}
else
{
await context.Publish<IExternalCheckNotOk>(new { context.Message.CorrelationId }, context.CancellationToken);
}
}
}
public interface IExternalChecker
{
Task<bool> PerformCheck(IExternalCheckRequest request, CancellationToken cancellationToken);
}
public class Publisher
{
private readonly IPublishEndpoint publishEndpoint;
public Publisher(IPublishEndpoint publishEndpoint)
{
this.publishEndpoint = publishEndpoint;
}
public async Task Publish(Guid correlationId, CancellationToken cancellationToken)
{
await publishEndpoint.Publish<IInitialSagaEvent>(new { CorrelationId = correlationId }, cancellationToken);
}
}
Here is the configuration code:
public class MySagaDbContext : SagaDbContext
{
public MySagaDbContext(DbContextOptions<MySagaDbContext> options) : base(options) { }
protected override IEnumerable<ISagaClassMap> Configurations
{
get
{
yield return new MySagaClassMap();
}
}
}
public class MySagaClassMap : SagaClassMap<MySaga>
{
protected override void Configure(EntityTypeBuilder<MySaga> entity, ModelBuilder model)
{
entity.Property(x => x.CurrentState).HasMaxLength(128);
entity.Property(x => x.RowVersion).IsRowVersion();
}
}
public class ExternalCheckRequestConsumerDefinition : ConsumerDefinition<ExternalCheckRequestConsumer>
{
protected override void ConfigureConsumer(IReceiveEndpointConfigurator endpointConfigurator, IConsumerConfigurator<ExternalCheckRequestConsumer> consumerConfigurator) =>
endpointConfigurator.UseRetry(r =>
{
r.Handle<DbUpdateConcurrencyException>();
// This is the SQLServer error code for duplicate key
r.Handle<DbUpdateException>(y => y.InnerException is SqlException e && e.Number == 2627);
r.Interval(5, TimeSpan.FromMilliseconds(100));
});
}
public class Program
{
public static async Task Main(string[] args)
{
var services = new ServiceCollection();
services.AddDbContext<DbContext, MySagaDbContext>((provider, builder)
=> builder.UseSqlServer(connectionString, m =>
{
m.MigrationsAssembly(typeof(MySagaDbContext).Assembly.GetName().Name);
m.MigrationsHistoryTable($"__EFMigrationsHistory_Sagas");
}));
services.AddMassTransit(configureMassTransit =>
{
configureMassTransit.AddConsumer<ExternalCheckRequestConsumer, ExternalCheckRequestConsumerDefinition>();
configureMassTransit.AddSagaStateMachine<MyStateMachine, MySaga>()
.EntityFrameworkRepository(r =>
{
r.ConcurrencyMode = ConcurrencyMode.Optimistic;
r.ExistingDbContext<MySagaDbContext>();
});
configureMassTransit.SetEndpointNameFormatter(new DefaultEndpointNameFormatter(true));
configureMassTransit.UsingActiveMq((context, config) =>
{
config.Host("artemis", 61616, configureHost =>
{
configureHost.Username("admin");
configureHost.Password("admin");
});
config.UseInMemoryOutbox(); // ref https://masstransit-project.com/articles/outbox.html#the-in-memory-outbox
config.EnableArtemisCompatibility();
config.ConfigureEndpoints(context);
});
});
var serviceProvider = services.BuildServiceProvider();
var busControl = serviceProvider.GetRequiredService<IBusControl>();
await busControl.StartAsync();
await RunPoc(serviceProvider);
}
private static async Task RunPoc(IServiceProvider serviceProvider)
{
await Task.CompletedTask;
}
static string connectionString = string.Empty;
}
My guess is that I need to add a UseRetry at the correct point, so I've tried to configure AddSagaStateMachine with UseRetry like this:
configureMassTransit.AddSagaStateMachine<MyStateMachine, MySaga>(
configure =>
{
configure.UseRetry(r =>
{
r.Handle<DbUpdateConcurrencyException>();
// This is the SQLServer error code for duplicate key
r.Handle<DbUpdateException>(y => y.InnerException is SqlException e && e.Number == 2627);
r.Interval(5, TimeSpan.FromMilliseconds(100));
});
})
.EntityFrameworkRepository(r =>
{
r.ConcurrencyMode = ConcurrencyMode.Optimistic;
r.ExistingDbContext<MySagaDbContext>();
});
But with this UseRetry in AddSagaStateMachine nothing works; I just get loads of exceptions like this:
fail: MassTransit.ReceiveTransport[0]
R - FAULT activemq://artemis:61616/XXXX
System.ArgumentException: THe message could not be retrieved: IInitialSagaEvent(Parameter 'context')
at MassTransit.Saga.Pipeline.Pipes.SagaMergePipe`2.Send(SagaConsumeContext`1 context)
at GreenPipes.Filters.RetryFilter`1.GreenPipes.IFilter<TContext>.Send(TContext context, IPipe`1 next)
at GreenPipes.Filters.RetryFilter`1.GreenPipes.IFilter<TContext>.Send(TContext context, IPipe`1 next)
at MassTransit.Saga.SendSagaPipe`2.Send(SagaRepositoryContext`2 context)
at MassTransit.Saga.SendSagaPipe`2.Send(SagaRepositoryContext`2 context)
at MassTransit.EntityFrameworkCoreIntegration.Saga.Context.EntityFrameworkSagaRepositoryContextFactory`1.<>c__DisplayClass5_0`1.<<Send>b__1>d.MoveNext()
--- End of stack trace from previous location ---
at MassTransit.EntityFrameworkCoreIntegration.Saga.Context.EntityFrameworkSagaRepositoryContextFactory`1.<>c__DisplayClass8_0.<<WithinTransaction>g__Create|0>d.MoveNext()
--- End of stack trace from previous location ---
at MassTransit.EntityFrameworkCoreIntegration.Saga.Context.EntityFrameworkSagaRepositoryContextFactory`1.WithinTransaction[T](DbContext context, CancellationToken cancellationToken, Func`1 callback)
at MassTransit.EntityFrameworkCoreIntegration.Saga.Context.EntityFrameworkSagaRepositoryContextFactory`1.WithinTransaction[T](DbContext context, CancellationToken cancellationToken, Func`1 callback)
at MassTransit.EntityFrameworkCoreIntegration.Saga.Context.EntityFrameworkSagaRepositoryContextFactory`1.WithinTransaction[T](DbContext context, CancellationToken cancellationToken, Func`1 callback)
at MassTransit.EntityFrameworkCoreIntegration.Saga.Context.EntityFrameworkSagaRepositoryContextFactory`1.Send[T](ConsumeContext`1 context, IPipe`1 next)
at MassTransit.EntityFrameworkCoreIntegration.Saga.Context.EntityFrameworkSagaRepositoryContextFactory`1.Send[T](ConsumeContext`1 context, IPipe`1 next)
at MassTransit.ExtensionsDependencyInjectionIntegration.ScopeProviders.DependencyInjectionSagaRepositoryContextFactory`1.<>c__DisplayClass6_0`1.<<Send>g__CreateScope|0>d.MoveNext()
--- End of stack trace from previous location ---
at MassTransit.ExtensionsDependencyInjectionIntegration.ScopeProviders.DependencyInjectionSagaRepositoryContextFactory`1.<>c__DisplayClass6_0`1.<<Send>g__CreateScope|0>d.MoveNext()
--- End of stack trace from previous location ---
at MassTransit.Saga.Pipeline.Filters.CorrelatedSagaFilter`2.GreenPipes.IFilter<MassTransit.ConsumeContext<TMessage>>.Send(ConsumeContext`1 context, IPipe`1 next)
I'm using .NET 6 and have tried MassTransit v7.3.1 and v8.0.0-develop.391; both show the same behavior.
I've tried defining the messages as interfaces and publishing them both as anonymous classes and as actual implementations, and I have also tried defining the messages as classes, but with no luck.
My hope is that there is just some small configuration detail I'm missing, but I'm out of ideas, so any help is deeply appreciated.
The proper configuration in your consumer definition is shown below. Note the use of UseMessageRetry instead of UseRetry.
public class ExternalCheckRequestConsumerDefinition :
ConsumerDefinition<ExternalCheckRequestConsumer>
{
protected override void ConfigureConsumer(IReceiveEndpointConfigurator endpointConfigurator,
IConsumerConfigurator<ExternalCheckRequestConsumer> consumerConfigurator) =>
endpointConfigurator.UseMessageRetry(r =>
{
r.Handle<DbUpdateConcurrencyException>();
// This is the SQLServer error code for duplicate key
r.Handle<DbUpdateException>(y => y.InnerException is SqlException e && e.Number == 2627);
r.Interval(5, TimeSpan.FromMilliseconds(100));
});
}
UPDATE
The above consumer definition isn't used by the saga. You'd need to create a saga definition and specify it when adding the saga for the retry to apply to the saga, which does the same as configuring it inline when adding the saga:
.AddSagaStateMachine<MyStateMachine, MySaga, MySagaDefinition>(
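A minimal saga definition for that registration might look like the sketch below (the original answer only references MySagaDefinition, it does not show it):
public class MySagaDefinition : SagaDefinition<MySaga>
{
    protected override void ConfigureSaga(IReceiveEndpointConfigurator endpointConfigurator,
        ISagaConfigurator<MySaga> sagaConfigurator) =>
        endpointConfigurator.UseMessageRetry(r =>
        {
            r.Handle<DbUpdateConcurrencyException>();
            // This is the SQL Server error code for duplicate key
            r.Handle<DbUpdateException>(y => y.InnerException is SqlException e && e.Number == 2627);
            r.Interval(5, TimeSpan.FromMilliseconds(100));
        });
}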
Also, in your state machine, replace the overly noisy:
.ThenAsync(context => context.GetPayload<ConsumeContext>().Publish<IExternalCheckRequest>(new { context.Instance.CorrelationId }))
With:
.PublishAsync(context => context.Init<IExternalCheckRequest>(new { context.Instance.CorrelationId }))
Here is the .AddSagaStateMachine I used, per Chris Patterson's solution in the other answer.
configureMassTransit.AddSagaStateMachine<MyStateMachine, MySaga>(
configure =>
{
configure.UseMessageRetry(r =>
{
r.Handle<DbUpdateConcurrencyException>();
// This is the SQLServer error code for duplicate key
r.Handle<DbUpdateException>(y => y.InnerException is SqlException e && e.Number == 2627);
r.Interval(5, TimeSpan.FromMilliseconds(100));
});
})
.EntityFrameworkRepository(r =>
{
r.ConcurrencyMode = ConcurrencyMode.Optimistic;
r.ExistingDbContext<MySagaDbContext>();
});
My Create handler uses AutoMapper to map from command to entity.
public async Task<int> Handle(Command command, CancellationToken cancellationToken)
{
var entity = _mapper.Map<ExampleEntity>(command);
await _db.ExampleEntity.AddAsync(entity, cancellationToken);
await _db.SaveChangesAsync(cancellationToken);
return entity.Id;
}
My test looks like this
var command = new Create.Command
{
Name = "Example 1",
...
};
var id = await _fixture.SendAsync(command, mapper);
id.ShouldNotBeNull();
This is the SliceFixture
[CollectionDefinition(nameof(SliceFixture))]
public class SliceFixtureCollection : ICollectionFixture<SliceFixture> { }
public class SliceFixture : IAsyncLifetime
{
private readonly Checkpoint _checkpoint;
private readonly IConfiguration _configuration;
private readonly IServiceScopeFactory _scopeFactory;
private readonly WebApplicationFactory<Startup> _factory;
public SliceFixture()
{
_factory = new ContosoTestApplicationFactory();
_configuration = _factory.Services.GetRequiredService<IConfiguration>();
_scopeFactory = _factory.Services.GetRequiredService<IServiceScopeFactory>();
_checkpoint = new Checkpoint();
}
public class ContosoTestApplicationFactory : WebApplicationFactory<Startup>
{
protected override void ConfigureWebHost(IWebHostBuilder builder)
{
builder.ConfigureAppConfiguration((_, configBuilder) =>
{
configBuilder.AddInMemoryCollection(new Dictionary<string, string>
{
{"ConnectionStrings:DefaultConnection", _connectionString}
});
});
}
private readonly string _connectionString = "";
}
public async Task ExecuteScopeAsync(Func<IServiceProvider, Task> action)
{
using var scope = _scopeFactory.CreateScope();
var dbContext = scope.ServiceProvider.GetRequiredService<SchoolContext>();
try
{
await dbContext.BeginTransactionAsync();
await action(scope.ServiceProvider);
await dbContext.CommitTransactionAsync();
}
catch (Exception)
{
dbContext.RollbackTransaction();
throw;
}
}
public async Task<T> ExecuteScopeAsync<T>(Func<IServiceProvider, Task<T>> action)
{
using var scope = _scopeFactory.CreateScope();
var dbContext = scope.ServiceProvider.GetRequiredService<SchoolContext>();
try
{
await dbContext.BeginTransactionAsync();
var result = await action(scope.ServiceProvider);
await dbContext.CommitTransactionAsync();
return result;
}
catch (Exception)
{
dbContext.RollbackTransaction();
throw;
}
}
public Task ExecuteDbContextAsync(Func<SchoolContext, Task> action) => ExecuteScopeAsync(sp => action(sp.GetService<SchoolContext>()));
public Task ExecuteDbContextAsync(Func<SchoolContext, ValueTask> action) => ExecuteScopeAsync(sp => action(sp.GetService<SchoolContext>()).AsTask());
public Task ExecuteDbContextAsync(Func<SchoolContext, IMediator, Task> action) => ExecuteScopeAsync(sp => action(sp.GetService<SchoolContext>(), sp.GetService<IMediator>()));
public Task<T> ExecuteDbContextAsync<T>(Func<SchoolContext, Task<T>> action) => ExecuteScopeAsync(sp => action(sp.GetService<SchoolContext>()));
public Task<T> ExecuteDbContextAsync<T>(Func<SchoolContext, ValueTask<T>> action) => ExecuteScopeAsync(sp => action(sp.GetService<SchoolContext>()).AsTask());
public Task<T> ExecuteDbContextAsync<T>(Func<SchoolContext, IMediator, Task<T>> action) => ExecuteScopeAsync(sp => action(sp.GetService<SchoolContext>(), sp.GetService<IMediator>()));
public Task InsertAsync<T>(params T[] entities) where T : class
{
return ExecuteDbContextAsync(db =>
{
foreach (var entity in entities)
{
db.Set<T>().Add(entity);
}
return db.SaveChangesAsync();
});
}
public Task<T> FindAsync<T>(int id)
where T : class, IEntity
{
return ExecuteDbContextAsync(db => db.Set<T>().FindAsync(id).AsTask());
}
public Task<TResponse> SendAsync<TResponse>(IRequest<TResponse> request)
{
return ExecuteScopeAsync(sp =>
{
var mediator = sp.GetRequiredService<IMediator>();
return mediator.Send(request);
});
}
public Task SendAsync(IRequest request)
{
return ExecuteScopeAsync(sp =>
{
var mediator = sp.GetRequiredService<IMediator>();
return mediator.Send(request);
});
}
private int _courseNumber = 1;
public int NextCourseNumber() => Interlocked.Increment(ref _courseNumber);
public Task InitializeAsync() => _checkpoint.Reset(_configuration.GetConnectionString("DefaultConnection"));
public Task DisposeAsync()
{
_factory?.Dispose();
return Task.CompletedTask;
}
}
When I run the test I get
Object reference not set to an instance of an object.
because _mapper is null.
I know I probably need to modify the SliceFixture but can't figure out what to do.
I was looking to implement Jimmy's example here, with the addition of AutoMapper.
I am new to ReactiveUI and trying to test a view model that looks like this:
public interface IService
{
Task<SessionModel> GetData(string id);
}
/// Provides a group of schedulers available to be used
public interface ISchedulers
{
IScheduler Default { get; }
IScheduler Dispatcher { get; }
}
public class MyVm : ReactiveObject
{
IService service;
public MyVm(ISchedulers schedulers, IService service)
{
this.service = service;
this.session = this.WhenAnyValue(x => x.SessionId)
.SelectMany(SearchSession)
.ObserveOn(schedulers.Default)
.ToProperty(this, x => x.Session);
}
private async Task<SessionModel> SearchSession(string id)
{
return await this.service.GetData(id);
}
private string sessionId;
public string SessionId
{
get => sessionId;
set => this.RaiseAndSetIfChanged(ref sessionId, value);
}
readonly ObservableAsPropertyHelper<SessionModel> session;
public SessionModel Session
{
get { return session.Value; }
}
}
public class SessionModel { }
I'm mocking the service call to return dummy data, but I am not sure what I need to do with a TestScheduler in order to get the SelectMany to work.
Here's a test class that shows how I would create a test for the view model. The goal is to eventually be able to check that the model got set:
[TestClass]
public class MyVmTests
{
[TestMethod]
public void CreateClass()
{
var subject = new MyVm(/*pass in mocks*/);
subject.SessionId="test";
Assert.IsNotNull(subject.Session);
}
}
I don't think using TestScheduler is necessary. The following passes for me (using Moq):
var mockSchedulers = new Mock<ISchedulers>();
mockSchedulers.Setup(s => s.Default).Returns(Scheduler.Immediate);
var id = "123";
var mockService = new Mock<IService>();
var returnSession = new SessionModel();
mockService.Setup(s => s.GetData(It.Is<string>(i => i == id)))
.ReturnsAsync(returnSession);
var target = new MyVm(mockSchedulers.Object, mockService.Object);
target.SessionId = id;
Assert.IsNotNull(target.Session);
Assert.AreEqual(returnSession, target.Session);
TestScheduler is best when you're trying to test something with time (like a Delay, proving that the Delay actually happened). You're not really doing that here.
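For reference, a sketch of the kind of case where TestScheduler does earn its keep (assuming the Microsoft.Reactive.Testing package; the five-second delay is made up for illustration):
[TestMethod]
public void DelayedValueOnlyArrivesAfterVirtualTimeAdvances()
{
    var scheduler = new TestScheduler();
    SessionModel result = null;
    // Schedule the delay on the TestScheduler so it runs in virtual time.
    Observable.Return(new SessionModel())
        .Delay(TimeSpan.FromSeconds(5), scheduler)
        .Subscribe(s => result = s);
    scheduler.AdvanceBy(TimeSpan.FromSeconds(4).Ticks);
    Assert.IsNull(result);      // the delay has not elapsed in virtual time yet
    scheduler.AdvanceBy(TimeSpan.FromSeconds(2).Ticks);
    Assert.IsNotNull(result);   // virtual time has now passed the delay
}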