Producer / Consumer / Collector rewrite (#160)

* Converted metadata service to redis

* move to postgres instead

* fix global usings

* [skip ci] optimize wolverine by prebuilding static types

* [skip ci] Stop indexing macOS folder index files

* [skip ci] producer, metadata and migrations

removed mongodb
added redis cache
imdb meta in postgres
Enable pg_trgm
Create trigram index
Add search-meta Postgres function
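
For reference, a minimal Npgsql sketch of the pg_trgm setup these lines describe - the imdb_metadata table and title_gist index names match the diff further down, while the connection wiring is assumed:

// Hedged sketch: enable pg_trgm and build the trigram index used for title searches.
await using var connection = new NpgsqlConnection(connectionString); // connectionString assumed
await connection.OpenAsync();
await using var command = new NpgsqlCommand(
    """
    CREATE EXTENSION IF NOT EXISTS pg_trgm;
    CREATE INDEX IF NOT EXISTS title_gist
        ON imdb_metadata USING gist (title gist_trgm_ops);
    """, connection);
await command.ExecuteNonQueryAsync();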

* [skip ci] get rid of node folder, replace mongo with redis in consumer

also wire up postgres metadata searches

* [skip ci] change mongo to redis in the addon

* [skip ci] jackettio to redis

* Rest of mongo removed...

* Cleaner rerunning of metadata - without conflicts

* Add akas import as well as basic metadata

* Include episodes file too

* cascade truncate pre-import

* reverse order to avoid cascading

* separate the clean step out into its own handler

* Switch producer to use metadata matching when pre-processing DMM

* More work

* Still porting PTN

* PTN port, adding tests

* [skip ci] Codec tests

* [skip ci] Complete Collection handler tests

* [skip ci] container tests

* [skip ci] Convert handlers tests

* [skip ci] DateHandler tests

* [skip ci] Dual Audio matching tests

* [skip ci] episode code tests

* [skip ci] Extended handler tests

* [skip ci] group handler tests

* [skip ci] some broken stuff right now

* [skip ci] more ptn

* [skip ci] PTN is now in a separate NuGet package; rebased this on the Redis changes - I need them.

* [skip ci] Wire up PTN port. Tired - will test tomorrow

* [skip ci] Needs a lot of work - too many titles being missed now

* cleaner. done?

* Handle the date in the imdb search

- add an integer function to confirm it's a valid integer
- use the input date as a range of ±1 year
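
A hedged sketch of what that ±1-year check can look like in the search SQL; the @year parameter and the inline regex stand in for the integer-validation function actually added here:

// Hypothetical WHERE-clause fragment: only compare when the text year parses,
// then match within one year either side of the input date.
const string YearRangeClause =
    """
    AND (year ~ '^\d+$' AND year::int BETWEEN @year - 1 AND @year + 1)
    """;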

* [skip ci] Start of collector service for RD

[skip ci] WIP

Implemented the metadata saga, along with channels to process up to a maximum of 100 infohashes at a time.
The saga will retry each infohash by requeuing it up to three times, before just marking it as complete - meaning no data will be updated in the db for that torrent.
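
A rough sketch of that channel-plus-retry shape, with hypothetical type and helper names (the real saga code is not part of this diff):

// Bounded channel caps the pipeline at 100 infohashes in flight at once.
var channel = Channel.CreateBounded<InfoHashWork>(new BoundedChannelOptions(100)
{
    FullMode = BoundedChannelFullMode.Wait,
});

async Task ProcessAsync(InfoHashWork work, CancellationToken token)
{
    try
    {
        await FetchAndStoreMetadataAsync(work.InfoHash, token); // hypothetical helper
    }
    catch when (work.Attempts < 3)
    {
        work.Attempts++;
        await channel.Writer.WriteAsync(work, token); // requeue, up to three times
    }
    catch
    {
        // Fourth failure: mark the infohash complete without writing anything to the db.
    }
}

public class InfoHashWork // hypothetical
{
    public string InfoHash { get; init; } = default!;
    public int Attempts { get; set; }
}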

[skip ci] Ready to test with queue publishing

Will provision a fanout exchange if it doesn't exist, and create and bind a queue to it. Listens to the queue with a prefetch count of 50.
Still needs the PTN rewrite bringing in to parse the filename responses from Real-Debrid and extract season and episode numbers when the file is a TV show.
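
The provisioning described above roughly corresponds to this RabbitMQ.Client sketch; the exchange and queue names are assumptions:

// Hedged sketch: declare the fanout exchange if missing, bind a queue, prefetch 50.
using var connection = new ConnectionFactory { HostName = "rabbitmq" }.CreateConnection();
using var channel = connection.CreateModel();
channel.ExchangeDeclare("torrents.fanout", ExchangeType.Fanout, durable: true);
channel.QueueDeclare("debrid-collector", durable: true, exclusive: false, autoDelete: false);
channel.QueueBind("debrid-collector", "torrents.fanout", routingKey: string.Empty);
channel.BasicQos(prefetchSize: 0, prefetchCount: 50, global: false);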

[skip ci] Add Debrid Collector Build Job

Debrid Collector ready for testing

New consumer, new collector, producer has meta lookup and anti-porn measures

[skip ci] WIP - moving from wolverine to MassTransit.

Not happy that Wolverine cannot effectively control saga concurrency - we really need that.
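
The knob being reached for looks roughly like this in MassTransit's registration API - endpoint and saga names are hypothetical:

// Hedged sketch: cap concurrent saga messages on the receive endpoint.
services.AddMassTransit(bus =>
{
    bus.AddSaga<MetadataSagaState>(); // hypothetical saga type
    bus.UsingRabbitMq((context, cfg) =>
    {
        cfg.ReceiveEndpoint("metadata-saga", endpoint =>
        {
            endpoint.PrefetchCount = 50;
            endpoint.ConcurrentMessageLimit = 1; // effectively serializes the saga
            endpoint.ConfigureSaga<MetadataSagaState>(context);
        });
    });
});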

[skip ci] Producer and new Consumer moved to MassTransit

Just the debrid collector to go now, then to write the optional qbit collector.

Collector now switched to MassTransit too

Hide porn titles in logs, clean up the Redis cache name for IMDB titles

[skip ci] Allow control of queues

[skip ci] Update deployment

Remove old consumer, fix deployment files, fix dockerfiles for shared project import

fix base deployment

* Add collector missing env var

* edits to kick off builds

* Add optional qbit deployment which qbit collector will use

* Qbit collector done

* reorder compose, and bring both qbit and qbitcollector into the compose, with 0 replicas as default

* Clean up compose file

* Ensure debrid collector errors if no debrid api key
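
The fail-fast check in that last bullet presumably reuses the required-env-var helper seen elsewhere in this PR; the prefix and variable names below are assumptions:

// Hedged sketch: GetRequiredEnvironmentVariableAsString throws when the variable
// is unset, so the collector errors at startup without a debrid API key.
public class DebridCollectorConfiguration
{
    private const string Prefix = "REALDEBRID"; // assumed prefix
    private const string ApiKeyVariable = "API_KEY"; // assumed variable name

    public string ApiKey { get; init; } = Prefix.GetRequiredEnvironmentVariableAsString(ApiKeyVariable);
}
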
iPromKnight committed 2024-03-25 23:32:28 +00:00 (committed by GitHub)
parent 9c6c1ac249
commit 9a831e92d0
443 changed files with 4154 additions and 476262 deletions

View File

@@ -9,22 +9,29 @@ public static class ServiceCollectionExtensions
return services;
}
internal static IServiceCollection AddMongoDb(this IServiceCollection services)
internal static IServiceCollection AddDatabase(this IServiceCollection services)
{
services.LoadConfigurationFromEnv<MongoConfiguration>();
services.AddTransient<ImdbMongoDbService>();
services.LoadConfigurationFromEnv<PostgresConfiguration>();
services.AddScoped<ImdbDbService>();
return services;
}
internal static IServiceCollection AddJobSupport(this IServiceCollection services)
internal static IServiceCollection AddImporters(this IServiceCollection services)
{
services.LoadConfigurationFromEnv<JobConfiguration>();
services.AddScheduler()
.AddTransient<DownloadImdbDataJob>()
.AddHostedService<JobScheduler>();
services.AddScoped<IFileImport<ImdbBasicEntry>, BasicsFile>();
services.AddScoped<IFileImport<ImdbAkaEntry>, AkasFile>();
services.AddScoped<IFileImport<ImdbEpisodeEntry>, EpisodesFile>();
return services;
}
internal static IServiceCollection AddServiceConfiguration(this IServiceCollection services)
{
services.LoadConfigurationFromEnv<ServiceConfiguration>();
services.AddScoped<IImdbFileDownloader, ImdbFileDownloader>();
services.AddHostedService<DownloadImdbDataJob>();
return services;
}
}

View File

@@ -12,6 +12,8 @@ internal static class WebApplicationBuilderExtensions
options =>
{
options.DefaultExecutionTimeout = 6.Hours();
options.CodeGeneration.TypeLoadMode = TypeLoadMode.Static;
options.Services.AssertAllExpectedPreBuiltTypesExistOnStartUp();
});
return builder;

View File

@@ -0,0 +1,3 @@
namespace Metadata.Features.ClearExistingImdbData;
public record ClearExistingImdbDataRequest(string TitleBasicsFilePath, string TitleAkasFilePath, string EpisodesFilePath);

View File

@@ -0,0 +1,16 @@
namespace Metadata.Features.ClearExistingImdbData;
public class ClearExistingImdbDataRequestHandler(ILogger<ClearExistingImdbDataRequestHandler> logger, ImdbDbService dbService)
{
public async Task<ImportImdbDataRequest> Handle(ClearExistingImdbDataRequest request, CancellationToken _)
{
logger.LogInformation("Clearing existing IMDB data from database");
await dbService.DropPgtrmIndex();
await dbService.TruncateTable(TableNames.EpisodesTable);
await dbService.TruncateTable(TableNames.AkasTable);
await dbService.TruncateTable(TableNames.MetadataTable, cascade: true);
logger.LogInformation("Existing IMDB data cleared from database");
return new(request.TitleBasicsFilePath, request.TitleAkasFilePath, request.EpisodesFilePath);
}
}

View File

@@ -1,13 +0,0 @@
namespace Metadata.Features.Configuration;
public class JobConfiguration
{
private const string Prefix = "METADATA";
private const string DownloadImdbDataVariable = "DOWNLOAD_IMDB_DATA_SCHEDULE";
private const string DownloadImdbDataOnceVariable = "DOWNLOAD_IMDB_DATA_ONCE";
private const string InsertBatchSizeVariable = "INSERT_BATCH_SIZE";
public int InsertBatchSize { get; init; } = Prefix.GetEnvironmentVariableAsInt(InsertBatchSizeVariable, 25_000);
public string DownloadImdbCronSchedule { get; init; } = Prefix.GetOptionalEnvironmentVariableAsString(DownloadImdbDataVariable, CronExpressions.EveryHour);
public bool DownloadImdbOnce { get; init; } = Prefix.GetEnvironmentVariableAsBool(DownloadImdbDataOnceVariable);
}

View File

@@ -1,20 +1,19 @@
namespace Metadata.Features.Configuration;
public class MongoConfiguration
public class PostgresConfiguration
{
private const string Prefix = "MONGODB";
private const string Prefix = "POSTGRES";
private const string HostVariable = "HOST";
private const string PortVariable = "PORT";
private const string DbVariable = "DB";
private const string UsernameVariable = "USER";
private const string PasswordVariable = "PASSWORD";
private const string DatabaseVariable = "DB";
private const string PortVariable = "PORT";
private string Host { get; init; } = Prefix.GetRequiredEnvironmentVariableAsString(HostVariable);
private int Port { get; init; } = Prefix.GetEnvironmentVariableAsInt(PortVariable, 27017);
private string Username { get; init; } = Prefix.GetRequiredEnvironmentVariableAsString(UsernameVariable);
private string Password { get; init; } = Prefix.GetRequiredEnvironmentVariableAsString(PasswordVariable);
public string DbName { get; init; } = Prefix.GetRequiredEnvironmentVariableAsString(DbVariable);
private string Database { get; init; } = Prefix.GetRequiredEnvironmentVariableAsString(DatabaseVariable);
private int PORT { get; init; } = Prefix.GetEnvironmentVariableAsInt(PortVariable, 5432);
public string ConnectionString => $"mongodb://{Username}:{Password}@{Host}:{Port}/{DbName}?tls=false&directConnection=true&authSource=admin";
public string StorageConnectionString => $"Host={Host};Port={PORT};Username={Username};Password={Password};Database={Database};";
}

View File

@@ -0,0 +1,9 @@
namespace Metadata.Features.Configuration;
public class ServiceConfiguration
{
private const string Prefix = "METADATA";
private const string InsertBatchSizeVariable = "INSERT_BATCH_SIZE";
public int InsertBatchSize { get; init; } = Prefix.GetEnvironmentVariableAsInt(InsertBatchSizeVariable, 25_000);
}

View File

@@ -1,3 +1,3 @@
namespace Metadata.Features.DeleteDownloadedImdbData;
public record DeleteDownloadedImdbDataRequest(string FilePath);
public record DeleteDownloadedImdbDataRequest(string TitleBasicsFilePath, string TitleAkasFilePath, string EpisodesFilePath);

View File

@@ -1,21 +1,23 @@
namespace Metadata.Features.DeleteDownloadedImdbData;
public class DeleteDownloadedImdbDataRequestHandler(ILogger<DeleteDownloadedImdbDataRequestHandler> logger, JobConfiguration configuration)
public class DeleteDownloadedImdbDataRequestHandler(ILogger<DeleteDownloadedImdbDataRequestHandler> logger)
{
public Task Handle(DeleteDownloadedImdbDataRequest request, CancellationToken _)
{
logger.LogInformation("Deleting file {FilePath}", request.FilePath);
DeleteFile(request.TitleBasicsFilePath);
DeleteFile(request.TitleAkasFilePath);
DeleteFile(request.EpisodesFilePath);
logger.LogInformation("Processing Completed");
File.Delete(request.FilePath);
logger.LogInformation("File Deleted");
if (configuration.DownloadImdbOnce)
{
logger.LogInformation("Processing Completed: Exiting application as DownloadImdbOnce is set to true");
Environment.Exit(0);
}
Environment.Exit(0);
return Task.CompletedTask;
}
private void DeleteFile(string file)
{
logger.LogInformation("Deleting file {FilePath}", file);
File.Delete(file);
logger.LogInformation("File Deleted");
}
}

View File

@@ -1,8 +0,0 @@
namespace Metadata.Features.DownloadImdbData;
public class DownloadImdbDataJob(IMessageBus messageBus, JobConfiguration configuration) : BaseJob
{
public override bool IsScheduelable => !configuration.DownloadImdbOnce && !string.IsNullOrEmpty(configuration.DownloadImdbCronSchedule);
public override string JobName => nameof(DownloadImdbDataJob);
public override async Task Invoke() => await messageBus.SendAsync(new GetImdbDataRequest());
}

View File

@@ -1,30 +1,20 @@
namespace Metadata.Features.DownloadImdbData;
public class GetImdbDataRequestHandler(IHttpClientFactory clientFactory, ILogger<GetImdbDataRequestHandler> logger)
public class GetImdbDataRequestHandler(IHttpClientFactory clientFactory, IImdbFileDownloader downloader, ILogger<GetImdbDataRequestHandler> logger)
{
private const string TitleBasicsFileName = "title.basics.tsv";
private const string TitleAkasFileName = "title.akas.tsv";
private const string EpisodesFileName = "title.episode.tsv";
public async Task<ImportImdbDataRequest> Handle(GetImdbDataRequest _, CancellationToken cancellationToken)
public async Task<ClearExistingImdbDataRequest> Handle(GetImdbDataRequest _, CancellationToken cancellationToken)
{
logger.LogInformation("Downloading IMDB data");
var client = clientFactory.CreateClient("imdb-data");
var response = await client.GetAsync($"{TitleBasicsFileName}.gz", cancellationToken);
var tempBasicsFile = await downloader.DownloadFileToTempPath(client, TitleBasicsFileName, cancellationToken);
var tempAkasFile = await downloader.DownloadFileToTempPath(client, TitleAkasFileName, cancellationToken);
var tempEpisodesFile = await downloader.DownloadFileToTempPath(client, EpisodesFileName, cancellationToken);
var tempFile = Path.Combine(Path.GetTempPath(), TitleBasicsFileName);
response.EnsureSuccessStatusCode();
await using var stream = await response.Content.ReadAsStreamAsync(cancellationToken);
await using var gzipStream = new GZipStream(stream, CompressionMode.Decompress);
await using var fileStream = File.Create(tempFile);
await gzipStream.CopyToAsync(fileStream, cancellationToken);
logger.LogInformation("Downloaded IMDB data to {TempFile}", tempFile);
fileStream.Close();
return new(tempFile);
return new(tempBasicsFile, tempAkasFile, tempEpisodesFile);
}
}

View File

@@ -0,0 +1,6 @@
namespace Metadata.Features.DownloadImdbData;
public interface IImdbFileDownloader
{
Task<string> DownloadFileToTempPath(HttpClient client, string fileName, CancellationToken cancellationToken);
}

View File

@@ -0,0 +1,24 @@
namespace Metadata.Features.DownloadImdbData;
public class ImdbFileDownloader(ILogger<ImdbFileDownloader> logger) : IImdbFileDownloader
{
public async Task<string> DownloadFileToTempPath(HttpClient client, string fileName, CancellationToken cancellationToken)
{
var response = await client.GetAsync($"{fileName}.gz", cancellationToken);
var tempFile = Path.Combine(Path.GetTempPath(), fileName);
response.EnsureSuccessStatusCode();
await using var stream = await response.Content.ReadAsStreamAsync(cancellationToken);
await using var gzipStream = new GZipStream(stream, CompressionMode.Decompress);
await using var fileStream = File.Create(tempFile);
await gzipStream.CopyToAsync(fileStream, cancellationToken);
logger.LogInformation("Downloaded IMDB data '{Filename}' to {TempFile}", fileName, tempFile);
fileStream.Close();
return tempFile;
}
}

View File

@@ -0,0 +1,89 @@
namespace Metadata.Features.Files;
public class AkasFile(ILogger<AkasFile> logger, ImdbDbService dbService) : IFileImport<ImdbAkaEntry>
{
public async Task Import(string fileName, int batchSize, CancellationToken cancellationToken)
{
logger.LogInformation("Importing Downloaded IMDB AKAs data from {FilePath}", fileName);
var csvConfig = new CsvConfiguration(CultureInfo.InvariantCulture)
{
Delimiter = "\t",
BadDataFound = null, // Skip Bad Data from Imdb
MissingFieldFound = null, // Skip Missing Fields from Imdb
};
using var reader = new StreamReader(fileName);
using var csv = new CsvReader(reader, csvConfig);
var channel = Channel.CreateBounded<ImdbAkaEntry>(new BoundedChannelOptions(batchSize)
{
FullMode = BoundedChannelFullMode.Wait,
});
await csv.ReadAsync();
var batchInsertTask = CreateBatchOfAkaEntries(channel, batchSize, cancellationToken);
await ReadAkaEntries(csv, channel, cancellationToken);
channel.Writer.Complete();
await batchInsertTask;
}
private Task CreateBatchOfAkaEntries(Channel<ImdbAkaEntry, ImdbAkaEntry> channel, int batchSize, CancellationToken cancellationToken) =>
Task.Run(async () =>
{
await foreach (var movieData in channel.Reader.ReadAllAsync(cancellationToken))
{
if (cancellationToken.IsCancellationRequested)
{
return;
}
var batch = new List<ImdbAkaEntry>
{
movieData,
};
while (batch.Count < batchSize && channel.Reader.TryRead(out var nextMovieData))
{
batch.Add(nextMovieData);
}
if (batch.Count > 0)
{
await dbService.InsertImdbAkaEntries(batch);
logger.LogInformation("Imported batch of {BatchSize} aka entries starting with ImdbId {FirstImdbId}", batch.Count, batch.First().ImdbId);
}
}
}, cancellationToken);
private static async Task ReadAkaEntries(CsvReader csv, Channel<ImdbAkaEntry, ImdbAkaEntry> channel, CancellationToken cancellationToken)
{
while (await csv.ReadAsync())
{
var data = new ImdbAkaEntry
{
ImdbId = csv.GetField(0),
Ordering = csv.GetField<int>(1),
LocalizedTitle = csv.GetField(2),
Region = csv.GetField(3),
Language = csv.GetField(4),
Types = csv.GetField(5),
Attributes = csv.GetField(6),
};
var isOriginalTitle = int.TryParse(csv.GetField(7), out var original);
data.IsOriginalTitle = isOriginalTitle && original == 1;
if (cancellationToken.IsCancellationRequested)
{
return;
}
await channel.Writer.WriteAsync(data, cancellationToken);
}
}
}

View File

@@ -0,0 +1,86 @@
namespace Metadata.Features.Files;
public class BasicsFile(ILogger<BasicsFile> logger, ImdbDbService dbService): IFileImport<ImdbBasicEntry>
{
public async Task Import(string fileName, int batchSize, CancellationToken cancellationToken)
{
logger.LogInformation("Importing Downloaded IMDB Basics data from {FilePath}", fileName);
var csvConfig = new CsvConfiguration(CultureInfo.InvariantCulture)
{
Delimiter = "\t",
BadDataFound = null, // Skip Bad Data from Imdb
MissingFieldFound = null, // Skip Missing Fields from Imdb
};
using var reader = new StreamReader(fileName);
using var csv = new CsvReader(reader, csvConfig);
var channel = Channel.CreateBounded<ImdbBasicEntry>(new BoundedChannelOptions(batchSize)
{
FullMode = BoundedChannelFullMode.Wait,
});
await csv.ReadAsync();
var batchInsertTask = CreateBatchOfBasicEntries(channel, batchSize, cancellationToken);
await ReadBasicEntries(csv, channel, cancellationToken);
channel.Writer.Complete();
await batchInsertTask;
}
private Task CreateBatchOfBasicEntries(Channel<ImdbBasicEntry, ImdbBasicEntry> channel, int batchSize, CancellationToken cancellationToken) =>
Task.Run(async () =>
{
await foreach (var movieData in channel.Reader.ReadAllAsync(cancellationToken))
{
if (cancellationToken.IsCancellationRequested)
{
return;
}
var batch = new List<ImdbBasicEntry>
{
movieData,
};
while (batch.Count < batchSize && channel.Reader.TryRead(out var nextMovieData))
{
batch.Add(nextMovieData);
}
if (batch.Count > 0)
{
await dbService.InsertImdbEntries(batch);
logger.LogInformation("Imported batch of {BatchSize} basics starting with ImdbId {FirstImdbId}", batch.Count, batch.First().ImdbId);
}
}
}, cancellationToken);
private static async Task ReadBasicEntries(CsvReader csv, Channel<ImdbBasicEntry, ImdbBasicEntry> channel, CancellationToken cancellationToken)
{
while (await csv.ReadAsync())
{
var isAdultSet = int.TryParse(csv.GetField(4), out var adult);
var movieData = new ImdbBasicEntry
{
ImdbId = csv.GetField(0),
Category = csv.GetField(1),
Title = csv.GetField(2),
Adult = isAdultSet && adult == 1,
Year = csv.GetField(5),
};
if (cancellationToken.IsCancellationRequested)
{
return;
}
await channel.Writer.WriteAsync(movieData, cancellationToken);
}
}
}

View File

@@ -0,0 +1,83 @@
namespace Metadata.Features.Files;
public class EpisodesFile(ILogger<EpisodesFile> logger, ImdbDbService dbService): IFileImport<ImdbEpisodeEntry>
{
public async Task Import(string fileName, int batchSize, CancellationToken cancellationToken)
{
logger.LogInformation("Importing Downloaded IMDB Episodes data from {FilePath}", fileName);
var csvConfig = new CsvConfiguration(CultureInfo.InvariantCulture)
{
Delimiter = "\t",
BadDataFound = null, // Skip Bad Data from Imdb
MissingFieldFound = null, // Skip Missing Fields from Imdb
};
using var reader = new StreamReader(fileName);
using var csv = new CsvReader(reader, csvConfig);
var channel = Channel.CreateBounded<ImdbEpisodeEntry>(new BoundedChannelOptions(batchSize)
{
FullMode = BoundedChannelFullMode.Wait,
});
await csv.ReadAsync();
var batchInsertTask = CreateBatchOfAkaEntries(channel, batchSize, cancellationToken);
await ReadAkaEntries(csv, channel, cancellationToken);
channel.Writer.Complete();
await batchInsertTask;
}
private Task CreateBatchOfAkaEntries(Channel<ImdbEpisodeEntry, ImdbEpisodeEntry> channel, int batchSize, CancellationToken cancellationToken) =>
Task.Run(async () =>
{
await foreach (var movieData in channel.Reader.ReadAllAsync(cancellationToken))
{
if (cancellationToken.IsCancellationRequested)
{
return;
}
var batch = new List<ImdbEpisodeEntry>
{
movieData,
};
while (batch.Count < batchSize && channel.Reader.TryRead(out var nextMovieData))
{
batch.Add(nextMovieData);
}
if (batch.Count > 0)
{
await dbService.InsertImdbEpisodeEntries(batch);
logger.LogInformation("Imported batch of {BatchSize} episodes starting with ImdbId {FirstImdbId}", batch.Count, batch.First().EpisodeImdbId);
}
}
}, cancellationToken);
private static async Task ReadAkaEntries(CsvReader csv, Channel<ImdbEpisodeEntry, ImdbEpisodeEntry> channel, CancellationToken cancellationToken)
{
while (await csv.ReadAsync())
{
var data = new ImdbEpisodeEntry
{
EpisodeImdbId = csv.GetField(0),
ParentImdbId = csv.GetField(1),
SeasonNumber = csv.GetField(2),
EpisodeNumber = csv.GetField(3),
};
if (cancellationToken.IsCancellationRequested)
{
return;
}
await channel.Writer.WriteAsync(data, cancellationToken);
}
}
}

View File

@@ -0,0 +1,6 @@
namespace Metadata.Features.Files;
public interface IFileImport<TImportType>
{
Task Import(string fileName, int batchSize, CancellationToken cancellationToken);
}

View File

@@ -0,0 +1,13 @@
namespace Metadata.Features.Files;
public class ImdbAkaEntry
{
public string ImdbId { get; set; } = default!;
public int Ordering { get; set; }
public string? LocalizedTitle { get; set; }
public string? Region { get; set; }
public string? Language { get; set; }
public string? Types { get; set; }
public string? Attributes { get; set; }
public bool IsOriginalTitle { get; set; }
}

View File

@@ -0,0 +1,10 @@
namespace Metadata.Features.Files;
public class ImdbBasicEntry
{
public string ImdbId { get; set; } = default!;
public string? Category { get; set; }
public string? Title { get; set; }
public bool Adult { get; set; }
public string? Year { get; set; }
}

View File

@@ -0,0 +1,9 @@
namespace Metadata.Features.Files;
public class ImdbEpisodeEntry
{
public string EpisodeImdbId { get; set; } = default!;
public string? ParentImdbId { get; set; }
public string? SeasonNumber { get; set; }
public string? EpisodeNumber { get; set; }
}

View File

@@ -0,0 +1,174 @@
namespace Metadata.Features.ImportImdbData;
public class ImdbDbService(PostgresConfiguration configuration, ILogger<ImdbDbService> logger)
{
public Task InsertImdbEntries(IEnumerable<ImdbBasicEntry> entries) =>
ExecuteCommandAsync(
async connection =>
{
await using var writer = await connection.BeginBinaryImportAsync(
$"COPY {TableNames.MetadataTable} (\"imdb_id\", \"category\", \"title\", \"year\", \"adult\") FROM STDIN (FORMAT BINARY)");
foreach (var entry in entries)
{
try
{
await writer.StartRowAsync();
await writer.WriteAsync(entry.ImdbId, NpgsqlDbType.Text);
await writer.WriteAsync(entry.Category, NpgsqlDbType.Text);
await writer.WriteAsync(entry.Title, NpgsqlDbType.Text);
await writer.WriteAsync(entry.Year, NpgsqlDbType.Text);
await writer.WriteAsync(entry.Adult, NpgsqlDbType.Boolean);
}
catch (Npgsql.PostgresException e)
{
if (e.Message.Contains("duplicate key value violates unique constraint", StringComparison.OrdinalIgnoreCase))
{
continue;
}
throw;
}
}
await writer.CompleteAsync();
}, "Error while inserting imdb entries into database");
public Task InsertImdbAkaEntries(IEnumerable<ImdbAkaEntry> entries) =>
ExecuteCommandAsync(
async connection =>
{
await using var writer = await connection.BeginBinaryImportAsync(
$"COPY {TableNames.AkasTable} (\"imdb_id\", \"ordering\", \"localized_title\", \"region\", \"language\", \"types\", \"attributes\", \"is_original_title\") FROM STDIN (FORMAT BINARY)");
foreach (var entry in entries.Where(x=>x.LocalizedTitle?.Length <= 8000))
{
try
{
await writer.StartRowAsync();
await writer.WriteAsync(entry.ImdbId, NpgsqlDbType.Text);
await writer.WriteAsync(entry.Ordering, NpgsqlDbType.Integer);
await writer.WriteAsync(entry.LocalizedTitle, NpgsqlDbType.Text);
await writer.WriteAsync(entry.Region, NpgsqlDbType.Text);
await writer.WriteAsync(entry.Language, NpgsqlDbType.Text);
await writer.WriteAsync(entry.Types, NpgsqlDbType.Text);
await writer.WriteAsync(entry.Attributes, NpgsqlDbType.Text);
await writer.WriteAsync(entry.IsOriginalTitle, NpgsqlDbType.Boolean);
}
catch (PostgresException e)
{
if (e.Message.Contains("value too long for type character", StringComparison.OrdinalIgnoreCase))
{
continue;
}
throw;
}
}
await writer.CompleteAsync();
}, "Error while inserting imdb entries into database");
public Task InsertImdbEpisodeEntries(IEnumerable<ImdbEpisodeEntry> entries) =>
ExecuteCommandAsync(
async connection =>
{
await using var writer = await connection.BeginBinaryImportAsync(
$"COPY {TableNames.EpisodesTable} (\"episode_id\", \"parent_id\", \"season\", \"episode\") FROM STDIN (FORMAT BINARY)");
foreach (var entry in entries)
{
try
{
await writer.StartRowAsync();
await writer.WriteAsync(entry.EpisodeImdbId, NpgsqlDbType.Text);
await writer.WriteAsync(entry.ParentImdbId, NpgsqlDbType.Text);
await writer.WriteAsync(entry.SeasonNumber, NpgsqlDbType.Text);
await writer.WriteAsync(entry.EpisodeNumber, NpgsqlDbType.Text);
}
catch (PostgresException e)
{
if (e.Message.Contains("value too long for type character", StringComparison.OrdinalIgnoreCase))
{
continue;
}
throw;
}
}
await writer.CompleteAsync();
}, "Error while inserting imdb entries into database");
public Task TruncateTable(string table, bool cascade = false) =>
ExecuteCommandAsync(
async connection =>
{
var cascadeOption = cascade ? "CASCADE" : string.Empty;
logger.LogInformation("Truncating '{Table}' table", table);
await using var command = new NpgsqlCommand($"TRUNCATE TABLE {table} {cascadeOption}", connection);
await command.ExecuteNonQueryAsync();
}, $"Error while clearing '{table}' table");
public Task CreatePgtrmIndex() =>
ExecuteCommandAsync(
async connection =>
{
await using var command = new NpgsqlCommand($"CREATE INDEX title_gist ON {TableNames.MetadataTable} USING gist(title gist_trgm_ops)", connection);
await command.ExecuteNonQueryAsync();
}, "Error while creating index on imdb_metadata table");
public Task DropPgtrmIndex() =>
ExecuteCommandAsync(
async connection =>
{
logger.LogInformation("Dropping Trigrams index if it exists already");
await using var dropCommand = new NpgsqlCommand("DROP INDEX if exists title_gist", connection);
await dropCommand.ExecuteNonQueryAsync();
}, $"Error while dropping index on {TableNames.MetadataTable} table");
private async Task ExecuteCommandAsync(Func<NpgsqlConnection, Task> operation, string errorMessage)
{
try
{
await using var connection = CreateNpgsqlConnection();
await connection.OpenAsync();
await operation(connection);
}
catch (Exception e)
{
logger.LogError(e, errorMessage);
}
}
private NpgsqlConnection CreateNpgsqlConnection()
{
var connectionStringBuilder = new NpgsqlConnectionStringBuilder(configuration.StorageConnectionString)
{
CommandTimeout = 3000,
};
return new(connectionStringBuilder.ConnectionString);
}
private async Task ExecuteCommandWithTransactionAsync(Func<NpgsqlConnection, NpgsqlTransaction, Task> operation, NpgsqlTransaction transaction, string errorMessage)
{
try
{
await operation(transaction.Connection, transaction);
}
catch (PostgresException)
{
await transaction.RollbackAsync();
throw;
}
catch (Exception e)
{
logger.LogError(e, errorMessage);
}
}
}

View File

@@ -1,15 +0,0 @@
namespace Metadata.Features.ImportImdbData;
public class ImdbEntry
{
[BsonId]
public string ImdbId { get; set; } = default!;
public string? TitleType { get; set; }
public string? PrimaryTitle { get; set; }
public string? OriginalTitle { get; set; }
public string? IsAdult { get; set; }
public string? StartYear { get; set; }
public string? EndYear { get; set; }
public string? RuntimeMinutes { get; set; }
public string? Genres { get; set; }
}

View File

@@ -1,64 +0,0 @@
namespace Metadata.Features.ImportImdbData;
public class ImdbMongoDbService
{
private readonly ILogger<ImdbMongoDbService> _logger;
private readonly IMongoCollection<ImdbEntry> _imdbCollection;
public ImdbMongoDbService(MongoConfiguration configuration, ILogger<ImdbMongoDbService> logger)
{
_logger = logger;
var client = new MongoClient(configuration.ConnectionString);
var database = client.GetDatabase(configuration.DbName);
_imdbCollection = database.GetCollection<ImdbEntry>("imdb-entries");
}
public async Task InsertImdbEntries(IEnumerable<ImdbEntry> entries)
{
var operations = new List<WriteModel<ImdbEntry>>();
foreach (var entry in entries)
{
var filter = Builders<ImdbEntry>.Filter.Eq(e => e.ImdbId, entry.ImdbId);
var update = Builders<ImdbEntry>.Update
.SetOnInsert(e => e.TitleType, entry.TitleType)
.SetOnInsert(e => e.PrimaryTitle, entry.PrimaryTitle)
.SetOnInsert(e => e.OriginalTitle, entry.OriginalTitle)
.SetOnInsert(e => e.IsAdult, entry.IsAdult)
.SetOnInsert(e => e.StartYear, entry.StartYear)
.SetOnInsert(e => e.EndYear, entry.EndYear)
.SetOnInsert(e => e.RuntimeMinutes, entry.RuntimeMinutes)
.SetOnInsert(e => e.Genres, entry.Genres);
operations.Add(new UpdateOneModel<ImdbEntry>(filter, update) { IsUpsert = true });
}
await _imdbCollection.BulkWriteAsync(operations);
}
public bool IsDatabaseInitialized()
{
try
{
// Create compound index for PrimaryTitle, TitleType, and StartYear
var indexKeysDefinition = Builders<ImdbEntry>.IndexKeys
.Text(e => e.PrimaryTitle)
.Ascending(e => e.TitleType)
.Ascending(e => e.StartYear);
var createIndexOptions = new CreateIndexOptions { Background = true };
var indexModel = new CreateIndexModel<ImdbEntry>(indexKeysDefinition, createIndexOptions);
_imdbCollection.Indexes.CreateOne(indexModel);
return true;
}
catch (Exception e)
{
_logger.LogError(e, "Error initializing database");
return false;
}
}
}

View File

@@ -1,3 +1,3 @@
namespace Metadata.Features.ImportImdbData;
public record ImportImdbDataRequest(string FilePath);
public record ImportImdbDataRequest(string TitleBasicsFilePath, string TitleAkasFilePath, string EpisodesFilePath);

View File

@@ -1,92 +1,17 @@
namespace Metadata.Features.ImportImdbData;
public class ImportImdbDataRequestHandler(ILogger<ImportImdbDataRequestHandler> logger, ImdbMongoDbService mongoDbService, JobConfiguration configuration)
public class ImportImdbDataRequestHandler(
ServiceConfiguration configuration,
IFileImport<ImdbBasicEntry> basicsFile,
IFileImport<ImdbAkaEntry> akasFile,
IFileImport<ImdbEpisodeEntry> episodesFile)
{
public async Task<DeleteDownloadedImdbDataRequest> Handle(ImportImdbDataRequest request, CancellationToken cancellationToken)
public async Task<IndexImdbDataRequest> Handle(ImportImdbDataRequest request, CancellationToken cancellationToken)
{
logger.LogInformation("Importing Downloaded IMDB data from {FilePath}", request.FilePath);
var config = new CsvConfiguration(CultureInfo.InvariantCulture)
{
Delimiter = "\t",
BadDataFound = null, // Skip Bad Data from Imdb
MissingFieldFound = null, // Skip Missing Fields from Imdb
};
using var reader = new StreamReader(request.FilePath);
using var csv = new CsvReader(reader, config);
var channel = Channel.CreateBounded<ImdbEntry>(new BoundedChannelOptions(configuration.InsertBatchSize)
{
FullMode = BoundedChannelFullMode.Wait,
});
// Skip the header row
await csv.ReadAsync();
var batchInsertTask = CreateBatchOfEntries(channel, cancellationToken);
await ReadEntries(csv, channel, cancellationToken);
channel.Writer.Complete();
await batchInsertTask;
return new(request.FilePath);
await basicsFile.Import(request.TitleBasicsFilePath, configuration.InsertBatchSize, cancellationToken);
await akasFile.Import(request.TitleAkasFilePath, configuration.InsertBatchSize, cancellationToken);
await episodesFile.Import(request.EpisodesFilePath, configuration.InsertBatchSize, cancellationToken);
return new(request.TitleBasicsFilePath, request.TitleAkasFilePath, request.EpisodesFilePath);
}
private Task CreateBatchOfEntries(Channel<ImdbEntry, ImdbEntry> channel, CancellationToken cancellationToken) =>
Task.Run(async () =>
{
await foreach (var movieData in channel.Reader.ReadAllAsync(cancellationToken))
{
if (cancellationToken.IsCancellationRequested)
{
return;
}
var batch = new List<ImdbEntry>
{
movieData,
};
while (batch.Count < configuration.InsertBatchSize && channel.Reader.TryRead(out var nextMovieData))
{
batch.Add(nextMovieData);
}
if (batch.Count > 0)
{
await mongoDbService.InsertImdbEntries(batch);
logger.LogInformation("Imported batch of {BatchSize} starting with ImdbId {FirstImdbId}", batch.Count, batch.First().ImdbId);
}
}
}, cancellationToken);
private static async Task ReadEntries(CsvReader csv, Channel<ImdbEntry, ImdbEntry> channel, CancellationToken cancellationToken)
{
while (await csv.ReadAsync())
{
var movieData = new ImdbEntry
{
ImdbId = csv.GetField(0),
TitleType = csv.GetField(1),
PrimaryTitle = csv.GetField(2),
OriginalTitle = csv.GetField(3),
IsAdult = csv.GetField(4),
StartYear = csv.GetField(5),
EndYear = csv.GetField(6),
RuntimeMinutes = csv.GetField(7),
Genres = csv.GetField(8),
};
if (cancellationToken.IsCancellationRequested)
{
return;
}
await channel.Writer.WriteAsync(movieData, cancellationToken);
}
}
}
}

View File

@@ -0,0 +1,8 @@
namespace Metadata.Features.ImportImdbData;
public static class TableNames
{
public const string MetadataTable = "imdb_metadata";
public const string EpisodesTable = "imdb_metadata_episodes";
public const string AkasTable = "imdb_metadata_akas";
}

View File

@@ -0,0 +1,3 @@
namespace Metadata.Features.IndexImdbData;
public record IndexImdbDataRequest(string TitleBasicsFilePath, string TitleAkasFilePath, string EpisodesFilePath);

View File

@@ -0,0 +1,13 @@
namespace Metadata.Features.IndexImdbData;
public class IndexImdbDataRequestHandler(ILogger<IndexImdbDataRequestHandler> logger, ImdbDbService dbService)
{
public async Task<DeleteDownloadedImdbDataRequest> Handle(IndexImdbDataRequest request, CancellationToken _)
{
logger.LogInformation("Creating Trigram Indexes for IMDB data");
await dbService.CreatePgtrmIndex();
return new(request.TitleBasicsFilePath, request.TitleAkasFilePath, request.EpisodesFilePath);
}
}

View File

@@ -1,10 +0,0 @@
namespace Metadata.Features.Jobs;
public abstract class BaseJob : IMetadataJob
{
public abstract bool IsScheduelable { get; }
public abstract string JobName { get; }
public abstract Task Invoke();
}

View File

@@ -0,0 +1,9 @@
namespace Metadata.Features.Jobs;
public class DownloadImdbDataJob(IMessageBus messageBus) : IHostedService
{
public async Task StartAsync(CancellationToken cancellationToken) =>
await messageBus.SendAsync(new GetImdbDataRequest());
public Task StopAsync(CancellationToken cancellationToken) => Task.CompletedTask;
}

View File

@@ -1,7 +0,0 @@
namespace Metadata.Features.Jobs;
public interface IMetadataJob : IInvocable
{
bool IsScheduelable { get; }
string JobName { get; }
}

View File

@@ -1,34 +0,0 @@
namespace Metadata.Features.Jobs;
public class JobScheduler(IServiceProvider serviceProvider) : IHostedService
{
public Task StartAsync(CancellationToken cancellationToken)
{
using var scope = serviceProvider.CreateAsyncScope();
var mongoDbService = scope.ServiceProvider.GetRequiredService<ImdbMongoDbService>();
if (!mongoDbService.IsDatabaseInitialized())
{
throw new InvalidOperationException("MongoDb is not initialized");
}
var jobConfigurations = scope.ServiceProvider.GetRequiredService<JobConfiguration>();
var downloadJob = scope.ServiceProvider.GetRequiredService<DownloadImdbDataJob>();
if (!downloadJob.IsScheduelable)
{
return downloadJob.Invoke();
}
var scheduler = scope.ServiceProvider.GetRequiredService<IScheduler>();
scheduler.Schedule<DownloadImdbDataJob>()
.Cron(jobConfigurations.DownloadImdbCronSchedule)
.PreventOverlapping(nameof(downloadJob.JobName));
return Task.CompletedTask;
}
public Task StopAsync(CancellationToken cancellationToken) => Task.CompletedTask;
}

View File

@@ -4,23 +4,26 @@ global using System.Globalization;
global using System.IO.Compression;
global using System.Text.Json;
global using System.Threading.Channels;
global using Coravel;
global using Coravel.Invocable;
global using Coravel.Scheduling.Schedule.Interfaces;
global using CsvHelper;
global using CsvHelper.Configuration;
global using JasperFx.CodeGeneration;
global using JasperFx.CodeGeneration.Commands;
global using JasperFx.Core;
global using Metadata.Extensions;
global using Metadata.Features.ClearExistingImdbData;
global using Metadata.Features.Configuration;
global using Metadata.Features.DeleteDownloadedImdbData;
global using Metadata.Features.DownloadImdbData;
global using Metadata.Features.Files;
global using Metadata.Features.ImportImdbData;
global using Metadata.Features.IndexImdbData;
global using Metadata.Features.Jobs;
global using Metadata.Features.Literals;
global using Microsoft.AspNetCore.Builder;
global using Microsoft.Extensions.DependencyInjection;
global using Microsoft.Extensions.DependencyInjection.Extensions;
global using MongoDB.Bson.Serialization.Attributes;
global using MongoDB.Driver;
global using Npgsql;
global using NpgsqlTypes;
global using Oakton;
global using Serilog;
global using Wolverine;

View File

@@ -0,0 +1,47 @@
// <auto-generated/>
#pragma warning disable
using Metadata.Features.Configuration;
using Microsoft.Extensions.Logging;
namespace Internal.Generated.WolverineHandlers
{
// START: ClearExistingImdbDataRequestHandler2085209125
public class ClearExistingImdbDataRequestHandler2085209125 : Wolverine.Runtime.Handlers.MessageHandler
{
private readonly Microsoft.Extensions.Logging.ILogger<Metadata.Features.ClearExistingImdbData.ClearExistingImdbDataRequestHandler> _logger1;
private readonly Microsoft.Extensions.Logging.ILogger<Metadata.Features.ImportImdbData.ImdbDbService> _logger2;
private readonly Metadata.Features.Configuration.PostgresConfiguration _postgresConfiguration;
public ClearExistingImdbDataRequestHandler2085209125(Microsoft.Extensions.Logging.ILogger<Metadata.Features.ClearExistingImdbData.ClearExistingImdbDataRequestHandler> __logger1, Microsoft.Extensions.Logging.ILogger<Metadata.Features.ImportImdbData.ImdbDbService> __logger2, Metadata.Features.Configuration.PostgresConfiguration postgresConfiguration)
{
_logger1 = __logger1;
_logger2 = __logger2;
_postgresConfiguration = postgresConfiguration;
}
public override async System.Threading.Tasks.Task HandleAsync(Wolverine.Runtime.MessageContext context, System.Threading.CancellationToken cancellation)
{
var imdbDbService = new Metadata.Features.ImportImdbData.ImdbDbService(_postgresConfiguration, _logger2);
var clearExistingImdbDataRequestHandler = new Metadata.Features.ClearExistingImdbData.ClearExistingImdbDataRequestHandler(_logger1, imdbDbService);
// The actual message body
var clearExistingImdbDataRequest = (Metadata.Features.ClearExistingImdbData.ClearExistingImdbDataRequest)context.Envelope.Message;
// The actual message execution
var outgoing1 = await clearExistingImdbDataRequestHandler.Handle(clearExistingImdbDataRequest, cancellation).ConfigureAwait(false);
// Outgoing, cascaded message
await context.EnqueueCascadingAsync(outgoing1).ConfigureAwait(false);
}
}
// END: ClearExistingImdbDataRequestHandler2085209125
}

View File

@@ -0,0 +1,37 @@
// <auto-generated/>
#pragma warning disable
using Microsoft.Extensions.Logging;
namespace Internal.Generated.WolverineHandlers
{
// START: DeleteDownloadedImdbDataRequestHandler52235941
public class DeleteDownloadedImdbDataRequestHandler52235941 : Wolverine.Runtime.Handlers.MessageHandler
{
private readonly Microsoft.Extensions.Logging.ILogger<Metadata.Features.DeleteDownloadedImdbData.DeleteDownloadedImdbDataRequestHandler> _logger;
public DeleteDownloadedImdbDataRequestHandler52235941(Microsoft.Extensions.Logging.ILogger<Metadata.Features.DeleteDownloadedImdbData.DeleteDownloadedImdbDataRequestHandler> logger)
{
_logger = logger;
}
public override async System.Threading.Tasks.Task HandleAsync(Wolverine.Runtime.MessageContext context, System.Threading.CancellationToken cancellation)
{
var deleteDownloadedImdbDataRequestHandler = new Metadata.Features.DeleteDownloadedImdbData.DeleteDownloadedImdbDataRequestHandler(_logger);
// The actual message body
var deleteDownloadedImdbDataRequest = (Metadata.Features.DeleteDownloadedImdbData.DeleteDownloadedImdbDataRequest)context.Envelope.Message;
// The actual message execution
await deleteDownloadedImdbDataRequestHandler.Handle(deleteDownloadedImdbDataRequest, cancellation).ConfigureAwait(false);
}
}
// END: DeleteDownloadedImdbDataRequestHandler52235941
}

View File

@@ -0,0 +1,47 @@
// <auto-generated/>
#pragma warning disable
using Microsoft.Extensions.Logging;
using System.Net.Http;
namespace Internal.Generated.WolverineHandlers
{
// START: GetImdbDataRequestHandler1674247617
public class GetImdbDataRequestHandler1674247617 : Wolverine.Runtime.Handlers.MessageHandler
{
private readonly Microsoft.Extensions.Logging.ILogger<Metadata.Features.DownloadImdbData.GetImdbDataRequestHandler> _logger2;
private readonly Microsoft.Extensions.Logging.ILogger<Metadata.Features.DownloadImdbData.ImdbFileDownloader> _logger1;
private readonly System.Net.Http.IHttpClientFactory _httpClientFactory;
public GetImdbDataRequestHandler1674247617(Microsoft.Extensions.Logging.ILogger<Metadata.Features.DownloadImdbData.GetImdbDataRequestHandler> __logger2, Microsoft.Extensions.Logging.ILogger<Metadata.Features.DownloadImdbData.ImdbFileDownloader> __logger1, System.Net.Http.IHttpClientFactory httpClientFactory)
{
_logger2 = __logger2;
_logger1 = __logger1;
_httpClientFactory = httpClientFactory;
}
public override async System.Threading.Tasks.Task HandleAsync(Wolverine.Runtime.MessageContext context, System.Threading.CancellationToken cancellation)
{
var imdbFileDownloader = new Metadata.Features.DownloadImdbData.ImdbFileDownloader(_logger1);
var getImdbDataRequestHandler = new Metadata.Features.DownloadImdbData.GetImdbDataRequestHandler(_httpClientFactory, imdbFileDownloader, _logger2);
// The actual message body
var getImdbDataRequest = (Metadata.Features.DownloadImdbData.GetImdbDataRequest)context.Envelope.Message;
// The actual message execution
var outgoing1 = await getImdbDataRequestHandler.Handle(getImdbDataRequest, cancellation).ConfigureAwait(false);
// Outgoing, cascaded message
await context.EnqueueCascadingAsync(outgoing1).ConfigureAwait(false);
}
}
// END: GetImdbDataRequestHandler1674247617
}

View File

@@ -0,0 +1,56 @@
// <auto-generated/>
#pragma warning disable
using Metadata.Features.Configuration;
using Microsoft.Extensions.Logging;
namespace Internal.Generated.WolverineHandlers
{
// START: ImportImdbDataRequestHandler968947017
public class ImportImdbDataRequestHandler968947017 : Wolverine.Runtime.Handlers.MessageHandler
{
private readonly Microsoft.Extensions.Logging.ILogger<Metadata.Features.Files.EpisodesFile> _logger4;
private readonly Microsoft.Extensions.Logging.ILogger<Metadata.Features.Files.BasicsFile> _logger1;
private readonly Metadata.Features.Configuration.ServiceConfiguration _serviceConfiguration;
private readonly Microsoft.Extensions.Logging.ILogger<Metadata.Features.ImportImdbData.ImdbDbService> _logger2;
private readonly Microsoft.Extensions.Logging.ILogger<Metadata.Features.Files.AkasFile> _logger3;
private readonly Metadata.Features.Configuration.PostgresConfiguration _postgresConfiguration;
public ImportImdbDataRequestHandler968947017(Microsoft.Extensions.Logging.ILogger<Metadata.Features.Files.EpisodesFile> __logger4, Microsoft.Extensions.Logging.ILogger<Metadata.Features.Files.BasicsFile> __logger1, Metadata.Features.Configuration.ServiceConfiguration serviceConfiguration, Microsoft.Extensions.Logging.ILogger<Metadata.Features.ImportImdbData.ImdbDbService> __logger2, Microsoft.Extensions.Logging.ILogger<Metadata.Features.Files.AkasFile> __logger3, Metadata.Features.Configuration.PostgresConfiguration postgresConfiguration)
{
_logger4 = __logger4;
_logger1 = __logger1;
_serviceConfiguration = serviceConfiguration;
_logger2 = __logger2;
_logger3 = __logger3;
_postgresConfiguration = postgresConfiguration;
}
public override async System.Threading.Tasks.Task HandleAsync(Wolverine.Runtime.MessageContext context, System.Threading.CancellationToken cancellation)
{
var imdbDbService = new Metadata.Features.ImportImdbData.ImdbDbService(_postgresConfiguration, _logger2);
var inline_episodesFile = new Metadata.Features.Files.EpisodesFile(_logger4, imdbDbService);
var inline_akasFile = new Metadata.Features.Files.AkasFile(_logger3, imdbDbService);
var inline_basicsFile = new Metadata.Features.Files.BasicsFile(_logger1, imdbDbService);
var importImdbDataRequestHandler = new Metadata.Features.ImportImdbData.ImportImdbDataRequestHandler(_serviceConfiguration, inline_basicsFile, inline_akasFile, inline_episodesFile);
// The actual message body
var importImdbDataRequest = (Metadata.Features.ImportImdbData.ImportImdbDataRequest)context.Envelope.Message;
// The actual message execution
var outgoing1 = await importImdbDataRequestHandler.Handle(importImdbDataRequest, cancellation).ConfigureAwait(false);
// Outgoing, cascaded message
await context.EnqueueCascadingAsync(outgoing1).ConfigureAwait(false);
}
}
// END: ImportImdbDataRequestHandler968947017
}

View File

@@ -0,0 +1,47 @@
// <auto-generated/>
#pragma warning disable
using Metadata.Features.Configuration;
using Microsoft.Extensions.Logging;
namespace Internal.Generated.WolverineHandlers
{
// START: IndexImdbDataRequestHandler411355483
public class IndexImdbDataRequestHandler411355483 : Wolverine.Runtime.Handlers.MessageHandler
{
private readonly Microsoft.Extensions.Logging.ILogger<Metadata.Features.ImportImdbData.ImdbDbService> _logger2;
private readonly Metadata.Features.Configuration.PostgresConfiguration _postgresConfiguration;
private readonly Microsoft.Extensions.Logging.ILogger<Metadata.Features.IndexImdbData.IndexImdbDataRequestHandler> _logger1;
public IndexImdbDataRequestHandler411355483(Microsoft.Extensions.Logging.ILogger<Metadata.Features.ImportImdbData.ImdbDbService> __logger2, Metadata.Features.Configuration.PostgresConfiguration postgresConfiguration, Microsoft.Extensions.Logging.ILogger<Metadata.Features.IndexImdbData.IndexImdbDataRequestHandler> __logger1)
{
_logger2 = __logger2;
_postgresConfiguration = postgresConfiguration;
_logger1 = __logger1;
}
public override async System.Threading.Tasks.Task HandleAsync(Wolverine.Runtime.MessageContext context, System.Threading.CancellationToken cancellation)
{
var imdbDbService = new Metadata.Features.ImportImdbData.ImdbDbService(_postgresConfiguration, _logger2);
var indexImdbDataRequestHandler = new Metadata.Features.IndexImdbData.IndexImdbDataRequestHandler(_logger1, imdbDbService);
// The actual message body
var indexImdbDataRequest = (Metadata.Features.IndexImdbData.IndexImdbDataRequest)context.Envelope.Message;
// The actual message execution
var outgoing1 = await indexImdbDataRequestHandler.Handle(indexImdbDataRequest, cancellation).ConfigureAwait(false);
// Outgoing, cascaded message
await context.EnqueueCascadingAsync(outgoing1).ConfigureAwait(false);
}
}
// END: IndexImdbDataRequestHandler411355483
}

View File

@@ -9,15 +9,15 @@
</PropertyGroup>
<ItemGroup>
<PackageReference Include="Coravel" Version="5.0.3" />
<PackageReference Include="CsvHelper" Version="31.0.0" />
<PackageReference Include="Dapper" Version="2.1.35" />
<PackageReference Include="Microsoft.Extensions.Hosting" Version="8.0.0" />
<PackageReference Include="Microsoft.Extensions.Http" Version="8.0.0" />
<PackageReference Include="MongoDB.Driver" Version="2.24.0" />
<PackageReference Include="Npgsql" Version="8.0.2" />
<PackageReference Include="Serilog" Version="3.1.1" />
<PackageReference Include="Serilog.AspNetCore" Version="8.0.1" />
<PackageReference Include="Serilog.Sinks.Console" Version="5.0.1" />
<PackageReference Include="WolverineFx" Version="1.20.1" />
<PackageReference Include="WolverineFx" Version="2.1.2" />
</ItemGroup>
<ItemGroup>

View File

@@ -6,8 +6,10 @@ builder.SetupWolverine();
builder.Services
.AddHttpClients()
.AddJobSupport();
.AddServiceConfiguration()
.AddDatabase()
.AddImporters();
var host = builder.Build();
await host.RunAsync();
return await host.RunOaktonCommands(args);