Compare commits

16 Commits

| SHA1 |
|---|
| 3c8ffd5082 |
| 79e0a0f102 |
| 6181207513 |
| 684dbba2f0 |
| c75ecd2707 |
| c493ef3376 |
| 655a39e35c |
| cfeee62f6b |
| c6d4c06d70 |
| 08639a3254 |
| d430850749 |
| 82c0ea459b |
| 1e83b4c5d8 |
| 66609c2a46 |
| 2d78dc2735 |
| 527d6cdf15 |
.gitignore (vendored, +4)

@@ -612,3 +612,7 @@ fabric.properties
 # Mac directory indexes
 .DS_Store
 deployment/docker/stack.env
+
+src/producer/src/python/
+src/debrid-collector/python/
+src/qbit-collector/python/
@@ -12,8 +12,11 @@ enabled=false
program=

[BitTorrent]
Session\AnonymousModeEnabled=true
Session\BTProtocol=TCP
Session\DefaultSavePath=/downloads/
Session\ExcludedFileNames=
Session\MaxActiveCheckingTorrents=5
Session\MaxActiveDownloads=10
Session\MaxActiveTorrents=50
Session\MaxActiveUploads=50

@@ -50,9 +53,10 @@ MailNotification\req_auth=true
WebUI\Address=*
WebUI\AuthSubnetWhitelist=0.0.0.0/0
WebUI\AuthSubnetWhitelistEnabled=true
WebUI\HostHeaderValidation=false
WebUI\LocalHostAuth=false
WebUI\ServerDomains=*

[RSS]
+AutoDownloader\DownloadRepacks=true
-AutoDownloader\SmartEpisodeFilter=s(\\d+)e(\\d+), (\\d+)x(\\d+), "(\\d{4}[.\\-]\\d{1,2}[.\\-]\\d{1,2})", "(\\d{1,2}[.\\-]\\d{1,2}[.\\-]\\d{4})"
+AutoDownloader\SmartEpisodeFilter=s(\\d+)e(\\d+), (\\d+)x(\\d+), "(\\d{4}[.\\-]\\d{1,2}[.\\-]\\d{1,2})", "(\\d{1,2}[.\\-]\\d{1,2}[.\\-]\\d{4})"
@@ -9,7 +9,7 @@ networks:

volumes:
  postgres:
-  rabbitmq:
+  lavinmq:
  redis:

services:

@@ -55,28 +55,29 @@ services:
    volumes:
      - redis:/data

-  ## RabbitMQ is used as a message broker for the services.
+  ## LavinMQ is used as a message broker for the services.
+  ## It is a high performance drop in replacement for RabbitMQ.
  ## It is used to communicate between the services.
-  rabbitmq:
+  lavinmq:
    env_file: stack.env
-    healthcheck:
-      test: ["CMD-SHELL", "rabbitmq-diagnostics -q ping"]
-      timeout: 10s
-      interval: 10s
-      retries: 3
-      start_period: 10s
    # # If you need the database to be accessible from outside, please open the below port.
-    # # Furthermore, please, please, please, look at the documentation for rabbit on how to secure the service.
+    # # Furthermore, please, please, please, look at the documentation for lavinmq / rabbitmq on how to secure the service.
    # ports:
    #   - "5672:5672"
    #   - "15672:15672"
    #   - "15692:15692"
-    image: rabbitmq:3-management
+    image: cloudamqp/lavinmq:latest
+    healthcheck:
+      test: ["CMD-SHELL", "lavinmqctl status"]
+      timeout: 10s
+      interval: 10s
+      retries: 3
+      start_period: 10s
+    restart: unless-stopped
    networks:
      - knightcrawler-network
-    restart: unless-stopped
    volumes:
-      - rabbitmq:/var/lib/rabbitmq
+      - lavinmq:/var/lib/lavinmq/

  ## The addon. This is what is used in stremio
  addon:

@@ -87,13 +88,13 @@ services:
        condition: service_completed_successfully
      postgres:
        condition: service_healthy
-      rabbitmq:
+      lavinmq:
        condition: service_healthy
      redis:
        condition: service_healthy
    env_file: stack.env
    hostname: knightcrawler-addon
-    image: gabisonfire/knightcrawler-addon:2.0.8
+    image: gabisonfire/knightcrawler-addon:2.0.21
    labels:
      logging: promtail
    networks:

@@ -111,12 +112,12 @@ services:
        condition: service_completed_successfully
      postgres:
        condition: service_healthy
-      rabbitmq:
+      lavinmq:
        condition: service_healthy
      redis:
        condition: service_healthy
    env_file: stack.env
-    image: gabisonfire/knightcrawler-consumer:2.0.8
+    image: gabisonfire/knightcrawler-consumer:2.0.21
    labels:
      logging: promtail
    networks:

@@ -132,12 +133,12 @@ services:
        condition: service_completed_successfully
      postgres:
        condition: service_healthy
-      rabbitmq:
+      lavinmq:
        condition: service_healthy
      redis:
        condition: service_healthy
    env_file: stack.env
-    image: gabisonfire/knightcrawler-debrid-collector:2.0.8
+    image: gabisonfire/knightcrawler-debrid-collector:2.0.21
    labels:
      logging: promtail
    networks:

@@ -151,7 +152,7 @@ services:
      migrator:
        condition: service_completed_successfully
    env_file: stack.env
-    image: gabisonfire/knightcrawler-metadata:2.0.8
+    image: gabisonfire/knightcrawler-metadata:2.0.21
    networks:
      - knightcrawler-network
    restart: "no"

@@ -162,7 +163,7 @@ services:
      postgres:
        condition: service_healthy
    env_file: stack.env
-    image: gabisonfire/knightcrawler-migrator:2.0.8
+    image: gabisonfire/knightcrawler-migrator:2.0.21
    networks:
      - knightcrawler-network
    restart: "no"

@@ -176,12 +177,12 @@ services:
        condition: service_completed_successfully
      postgres:
        condition: service_healthy
-      rabbitmq:
+      lavinmq:
        condition: service_healthy
      redis:
        condition: service_healthy
    env_file: stack.env
-    image: gabisonfire/knightcrawler-producer:2.0.8
+    image: gabisonfire/knightcrawler-producer:2.0.21
    labels:
      logging: promtail
    networks:

@@ -191,12 +192,22 @@ services:
  ## QBit collector utilizes QBitTorrent to download metadata.
  qbitcollector:
    depends_on:
      metadata:
        condition: service_completed_successfully
      migrator:
        condition: service_completed_successfully
      postgres:
        condition: service_healthy
      lavinmq:
        condition: service_healthy
      redis:
        condition: service_healthy
      qbittorrent:
        condition: service_healthy
    deploy:
      replicas: ${QBIT_REPLICAS:-0}
    env_file: stack.env
-    image: gabisonfire/knightcrawler-qbit-collector:2.0.8
+    image: gabisonfire/knightcrawler-qbit-collector:2.0.21
    labels:
      logging: promtail
    networks:
@@ -16,7 +16,7 @@ rule_files:
scrape_configs:
  - job_name: "rabbitmq"
    static_configs:
-      - targets: ["rabbitmq:15692"]
+      - targets: ["lavinmq:15692"]
  - job_name: "postgres-exporter"
    static_configs:
      - targets: ["postgres-exporter:9187"]
@@ -4,8 +4,8 @@ x-basehealth: &base-health
  retries: 3
  start_period: 10s

-x-rabbithealth: &rabbitmq-health
-  test: rabbitmq-diagnostics -q ping
+x-lavinhealth: &lavinmq-health
+  test: [ "CMD-SHELL", "lavinmqctl status" ]
  <<: *base-health

x-redishealth: &redis-health

@@ -52,21 +52,19 @@ services:
    networks:
      - knightcrawler-network

-  rabbitmq:
-    image: rabbitmq:3-management
+  lavinmq:
    env_file: stack.env
    # # If you need the database to be accessible from outside, please open the below port.
-    # # Furthermore, please, please, please, look at the documentation for rabbit on how to secure the service.
+    # # Furthermore, please, please, please, look at the documentation for lavinmq / rabbitmq on how to secure the service.
    # ports:
    #   - "5672:5672"
    #   - "15672:15672"
    #   - "15692:15692"
    volumes:
-      - rabbitmq:/var/lib/rabbitmq
+    image: cloudamqp/lavinmq:latest
+    healthcheck: *lavinmq-health
    restart: unless-stopped
-    healthcheck: *rabbitmq-health
    env_file: ../../.env
    networks:
      - knightcrawler-network
    volumes:
+      - lavinmq:/var/lib/lavinmq/

  ## QBitTorrent is a torrent client that can be used to download torrents. In this case its used to download metadata.
  ## The QBit collector requires this.
@@ -11,7 +11,7 @@ x-depends: &knightcrawler-app-depends
      condition: service_healthy
    postgres:
      condition: service_healthy
-    rabbitmq:
+    lavinmq:
      condition: service_healthy
    migrator:
      condition: service_completed_successfully

@@ -20,7 +20,7 @@ x-depends: &knightcrawler-app-depends

services:
  metadata:
-    image: gabisonfire/knightcrawler-metadata:2.0.8
+    image: gabisonfire/knightcrawler-metadata:2.0.21
    env_file: ../../.env
    networks:
      - knightcrawler-network

@@ -30,7 +30,7 @@ services:
        condition: service_completed_successfully

  migrator:
-    image: gabisonfire/knightcrawler-migrator:2.0.8
+    image: gabisonfire/knightcrawler-migrator:2.0.21
    env_file: ../../.env
    networks:
      - knightcrawler-network

@@ -40,7 +40,7 @@ services:
        condition: service_healthy

  addon:
-    image: gabisonfire/knightcrawler-addon:2.0.8
+    image: gabisonfire/knightcrawler-addon:2.0.21
    <<: [*knightcrawler-app, *knightcrawler-app-depends]
    restart: unless-stopped
    hostname: knightcrawler-addon

@@ -48,22 +48,22 @@ services:
      - "7000:7000"

  consumer:
-    image: gabisonfire/knightcrawler-consumer:2.0.8
+    image: gabisonfire/knightcrawler-consumer:2.0.21
    <<: [*knightcrawler-app, *knightcrawler-app-depends]
    restart: unless-stopped

  debridcollector:
-    image: gabisonfire/knightcrawler-debrid-collector:2.0.8
+    image: gabisonfire/knightcrawler-debrid-collector:2.0.21
    <<: [*knightcrawler-app, *knightcrawler-app-depends]
    restart: unless-stopped

  producer:
-    image: gabisonfire/knightcrawler-producer:2.0.8
+    image: gabisonfire/knightcrawler-producer:2.0.21
    <<: [*knightcrawler-app, *knightcrawler-app-depends]
    restart: unless-stopped

  qbitcollector:
-    image: gabisonfire/knightcrawler-qbit-collector:2.0.8
+    image: gabisonfire/knightcrawler-qbit-collector:2.0.21
    <<: [*knightcrawler-app, *knightcrawler-app-depends]
    restart: unless-stopped
    depends_on:
@@ -1,4 +1,4 @@
volumes:
  postgres:
  redis:
-  rabbitmq:
+  lavinmq:
@@ -13,8 +13,8 @@ REDIS_HOST=redis
REDIS_PORT=6379
REDIS_EXTRA=abortConnect=false,allowAdmin=true

-# RabbitMQ
-RABBITMQ_HOST=rabbitmq
+# AMQP
+RABBITMQ_HOST=lavinmq
RABBITMQ_USER=guest
RABBITMQ_PASSWORD=guest
RABBITMQ_CONSUMER_QUEUE_NAME=ingested

@@ -38,6 +38,3 @@ QBIT_REPLICAS=0

# Addon
DEBUG_MODE=false
-
-# Producer
-GITHUB_PAT=
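Only the host value changes in this block: LavinMQ implements the same AMQP 0-9-1 protocol as RabbitMQ, so the existing RABBITMQ_* credentials and queue names keep working once RABBITMQ_HOST points at the new broker. A quick smoke test, reusing the same command the compose healthchecks run (service name assumed to be lavinmq, as in the compose files above):

```sh
# Verify the broker is up from the host machine.
docker compose exec lavinmq lavinmqctl status
```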
@@ -14,13 +14,12 @@ const Torrent = database.define('torrent',
    {
        infoHash: { type: Sequelize.STRING(64), primaryKey: true },
        provider: { type: Sequelize.STRING(32), allowNull: false },
-       torrentId: { type: Sequelize.STRING(128) },
+       ingestedTorrentId: { type: Sequelize.BIGINT, allowNull: false },
        title: { type: Sequelize.STRING(256), allowNull: false },
        size: { type: Sequelize.BIGINT },
        type: { type: Sequelize.STRING(16), allowNull: false },
        uploadDate: { type: Sequelize.DATE, allowNull: false },
        seeders: { type: Sequelize.SMALLINT },
-       trackers: { type: Sequelize.STRING(4096) },
        languages: { type: Sequelize.STRING(4096) },
        resolution: { type: Sequelize.STRING(16) }
    }
@@ -17,7 +17,6 @@
    <PackageReference Include="Microsoft.Extensions.Http" Version="8.0.0" />
    <PackageReference Include="Microsoft.Extensions.Http.Polly" Version="8.0.3" />
    <PackageReference Include="Polly" Version="8.3.1" />
-   <PackageReference Include="PromKnight.ParseTorrentTitle" Version="1.0.4" />
    <PackageReference Include="Serilog" Version="3.1.1" />
    <PackageReference Include="Serilog.AspNetCore" Version="8.0.1" />
    <PackageReference Include="Serilog.Sinks.Console" Version="5.0.1" />

@@ -29,10 +28,30 @@
    <None Include="Configuration\logging.json">
      <CopyToOutputDirectory>Always</CopyToOutputDirectory>
    </None>
+   <None Update="requirements.txt">
+     <CopyToOutputDirectory>Always</CopyToOutputDirectory>
+   </None>
+   <Content Remove="eng\**" />
+   <None Remove="eng\**" />
  </ItemGroup>

+ <ItemGroup Condition="'$(Configuration)' == 'Debug'">
+   <Content Remove="python\**" />
+   <None Include="python\**">
+     <CopyToOutputDirectory>Always</CopyToOutputDirectory>
+   </None>
+ </ItemGroup>
+
  <ItemGroup>
    <ProjectReference Include="..\shared\SharedContracts.csproj" />
  </ItemGroup>

+ <ItemGroup>
+   <Compile Remove="eng\**" />
+ </ItemGroup>
+
+ <ItemGroup>
+   <EmbeddedResource Remove="eng\**" />
+ </ItemGroup>
+
</Project>
@@ -6,6 +6,12 @@ Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "SharedContracts", "..\share
EndProject
Project("{2150E333-8FDC-42A3-9474-1A3956D46DE8}") = "shared", "shared", "{2C0A0F53-28E6-404F-9EFE-DADFBEF8338B}"
EndProject
+Project("{2150E333-8FDC-42A3-9474-1A3956D46DE8}") = "eng", "eng", "{72A042C3-B4F3-45C5-AC20-041FE8F41EFC}"
+	ProjectSection(SolutionItems) = preProject
+		eng\install-python-reqs.ps1 = eng\install-python-reqs.ps1
+		eng\install-python-reqs.sh = eng\install-python-reqs.sh
+	EndProjectSection
+EndProject
Global
	GlobalSection(SolutionConfigurationPlatforms) = preSolution
		Debug|Any CPU = Debug|Any CPU
@@ -9,12 +9,23 @@ RUN dotnet restore -a $TARGETARCH
RUN dotnet publish -c Release --no-restore -o /src/out -a $TARGETARCH


-FROM mcr.microsoft.com/dotnet/aspnet:8.0-alpine
+FROM mcr.microsoft.com/dotnet/aspnet:8.0-alpine3.19

WORKDIR /app

+ENV PYTHONUNBUFFERED=1
+
+RUN apk add --update --no-cache python3=~3.11.8-r0 py3-pip && ln -sf python3 /usr/bin/python
+
COPY --from=build /src/out .

+RUN rm -rf /app/python && mkdir -p /app/python
+
+RUN pip3 install -r /app/requirements.txt -t /app/python
+
+RUN addgroup -S debrid && adduser -S -G debrid debrid
+USER debrid
HEALTHCHECK --interval=30s --timeout=30s --start-period=5s --retries=3 \
    CMD pgrep -f dotnet || exit 1
+ENV PYTHONNET_PYDLL=/usr/lib/libpython3.11.so.1.0
ENTRYPOINT ["dotnet", "DebridCollector.dll"]
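The new PYTHONNET_PYDLL variable is what allows the .NET service to host CPython in-process via Python.NET, which reads that variable to locate the shared libpython. The actual wiring lives in SharedContracts.Python (the RegisterPythonEngine call, not shown in this diff); a minimal sketch of the idea, with the sys.path entry matching the /app/python target used by pip above:

```csharp
using Python.Runtime;

// Sketch only: Python.NET picks up PYTHONNET_PYDLL from the environment,
// or it can be set explicitly before the engine starts.
Runtime.PythonDLL ??= Environment.GetEnvironmentVariable("PYTHONNET_PYDLL");
PythonEngine.Initialize();

using (Py.GIL())
{
    dynamic sys = Py.Import("sys");
    sys.path.append("/app/python"); // where the Dockerfile installed rank-torrent-name
    dynamic rtn = Py.Import("RTN"); // import name of the rank-torrent-name package
}
```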
@@ -1,5 +1,3 @@
-using DebridCollector.Features.Configuration;
-
namespace DebridCollector.Extensions;

public static class ServiceCollectionExtensions

@@ -17,7 +15,8 @@ public static class ServiceCollectionExtensions
        var serviceConfiguration = services.LoadConfigurationFromEnv<DebridCollectorConfiguration>();

        services.AddRealDebridClient(serviceConfiguration);
-       services.AddSingleton<IParseTorrentTitle, ParseTorrentTitle>();
+       services.RegisterPythonEngine();
+       services.AddSingleton<IRankTorrentName, RankTorrentName>();
        services.AddHostedService<DebridRequestProcessor>();

        return services;
@@ -1,6 +1,4 @@
-using DebridCollector.Features.Configuration;
-
namespace DebridCollector.Features.Debrid;

public static class ServiceCollectionExtensions
{
@@ -3,10 +3,11 @@ namespace DebridCollector.Features.Worker;
public static class DebridMetaToTorrentMeta
{
    public static IReadOnlyList<TorrentFile> MapMetadataToFilesCollection(
-       IParseTorrentTitle torrentTitle,
+       IRankTorrentName rankTorrentName,
        Torrent torrent,
        string ImdbId,
-       FileDataDictionary Metadata)
+       FileDataDictionary Metadata,
+       ILogger<WriteMetadataConsumer> logger)
    {
        try
        {

@@ -15,34 +16,42 @@ public static class DebridMetaToTorrentMeta
            foreach (var metadataEntry in Metadata.Where(m => Filetypes.VideoFileExtensions.Any(ext => m.Value.Filename.EndsWith(ext))))
            {
                var validFileIndex = int.TryParse(metadataEntry.Key, out var fileIndex);
+               var fileIndexMinusOne = Math.Max(0, fileIndex - 1);

                var file = new TorrentFile
                {
                    ImdbId = ImdbId,
                    KitsuId = 0,
                    InfoHash = torrent.InfoHash,
-                   FileIndex = validFileIndex ? fileIndex : 0,
+                   FileIndex = validFileIndex ? fileIndexMinusOne : 0,
                    Title = metadataEntry.Value.Filename,
                    Size = metadataEntry.Value.Filesize.GetValueOrDefault(),
                };

-               var parsedTitle = torrentTitle.Parse(file.Title);
+               var parsedTitle = rankTorrentName.Parse(file.Title, false);

-               file.ImdbSeason = parsedTitle.Seasons.FirstOrDefault();
-               file.ImdbEpisode = parsedTitle.Episodes.FirstOrDefault();
+               if (!parsedTitle.Success)
+               {
+                   logger.LogWarning("Failed to parse title {Title} for metadata mapping", file.Title);
+                   continue;
+               }
+
+               file.ImdbSeason = parsedTitle.Response?.Season?.FirstOrDefault() ?? 0;
+               file.ImdbEpisode = parsedTitle.Response?.Episode?.FirstOrDefault() ?? 0;
+
                files.Add(file);
            }

            return files;
        }
-       catch (Exception)
+       catch (Exception ex)
        {
+           logger.LogWarning("Failed to map metadata to files collection: {Exception}", ex.Message);
            return [];
        }
    }

-   public static async Task<IReadOnlyList<SubtitleFile>> MapMetadataToSubtitlesCollection(IDataStorage storage, string InfoHash, FileDataDictionary Metadata)
+   public static async Task<IReadOnlyList<SubtitleFile>> MapMetadataToSubtitlesCollection(IDataStorage storage, string InfoHash, FileDataDictionary Metadata, ILogger<WriteMetadataConsumer> logger)
    {
        try
        {

@@ -58,13 +67,14 @@ public static class DebridMetaToTorrentMeta
            foreach (var metadataEntry in Metadata.Where(m => Filetypes.SubtitleFileExtensions.Any(ext => m.Value.Filename.EndsWith(ext))))
            {
                var validFileIndex = int.TryParse(metadataEntry.Key, out var fileIndex);
+               var fileIndexMinusOne = Math.Max(0, fileIndex - 1);
                var fileId = torrentFiles.FirstOrDefault(
                    t => Path.GetFileNameWithoutExtension(t.Title) == Path.GetFileNameWithoutExtension(metadataEntry.Value.Filename))?.Id ?? 0;

                var file = new SubtitleFile
                {
                    InfoHash = InfoHash,
-                   FileIndex = validFileIndex ? fileIndex : 0,
+                   FileIndex = validFileIndex ? fileIndexMinusOne : 0,
                    FileId = fileId,
                    Title = metadataEntry.Value.Filename,
                };

@@ -74,8 +84,9 @@ public static class DebridMetaToTorrentMeta

            return files;
        }
-       catch (Exception)
+       catch (Exception ex)
        {
+           logger.LogWarning("Failed to map metadata to subtitles collection: {Exception}", ex.Message);
            return [];
        }
    }
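The recurring Math.Max(0, fileIndex - 1) above compensates for an apparent off-by-one: the debrid metadata dictionary keys look 1-based, while the stored FileIndex is treated as 0-based downstream, and the clamp keeps a stray "0" key from producing -1. A small sketch of the normalisation in isolation (helper name hypothetical):

```csharp
// Hypothetical helper mirroring the index handling above:
// metadata keys such as "1", "2", ... map to 0-based file indexes.
static int NormaliseFileIndex(string metadataKey)
{
    var valid = int.TryParse(metadataKey, out var fileIndex);
    return valid ? Math.Max(0, fileIndex - 1) : 0; // clamp so "0" never yields -1
}
```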
@@ -53,6 +53,12 @@ public class InfohashMetadataSagaStateMachine : MassTransitStateMachine<Infohash
    .Then(
        context =>
        {
+           if (!context.Message.WithFiles)
+           {
+               logger.LogInformation("No files written for torrent {InfoHash} in Saga {SagaId}", context.Saga.Torrent.InfoHash, context.Saga.CorrelationId);
+               return;
+           }
+
            logger.LogInformation("Metadata Written for torrent {InfoHash} in Saga {SagaId}", context.Saga.Torrent.InfoHash, context.Saga.CorrelationId);
        })
    .TransitionTo(Completed)
@@ -1,22 +1,22 @@
namespace DebridCollector.Features.Worker;

-[EntityName("perform-metadata-request")]
+[EntityName("perform-metadata-request-debrid-collector")]
public record PerformMetadataRequest(Guid CorrelationId, string InfoHash) : CorrelatedBy<Guid>;

-[EntityName("torrent-metadata-response")]
+[EntityName("torrent-metadata-response-debrid-collector")]
public record GotMetadata(TorrentMetadataResponse Metadata) : CorrelatedBy<Guid>
{
    public Guid CorrelationId { get; init; } = Metadata.CorrelationId;
}

-[EntityName("write-metadata")]
+[EntityName("write-metadata-debrid-collector")]
public record WriteMetadata(Torrent Torrent, TorrentMetadataResponse Metadata, string ImdbId) : CorrelatedBy<Guid>
{
    public Guid CorrelationId { get; init; } = Metadata.CorrelationId;
}

-[EntityName("metadata-written")]
-public record MetadataWritten(TorrentMetadataResponse Metadata) : CorrelatedBy<Guid>
+[EntityName("metadata-written-debrid-colloctor")]
+public record MetadataWritten(TorrentMetadataResponse Metadata, bool WithFiles) : CorrelatedBy<Guid>
{
    public Guid CorrelationId { get; init; } = Metadata.CorrelationId;
}
@@ -1,25 +1,28 @@
namespace DebridCollector.Features.Worker;

-public class WriteMetadataConsumer(IParseTorrentTitle parseTorrentTitle, IDataStorage dataStorage) : IConsumer<WriteMetadata>
+public class WriteMetadataConsumer(IRankTorrentName rankTorrentName, IDataStorage dataStorage, ILogger<WriteMetadataConsumer> logger) : IConsumer<WriteMetadata>
{
    public async Task Consume(ConsumeContext<WriteMetadata> context)
    {
        var request = context.Message;

-       var torrentFiles = DebridMetaToTorrentMeta.MapMetadataToFilesCollection(parseTorrentTitle, request.Torrent, request.ImdbId, request.Metadata.Metadata);
+       var torrentFiles = DebridMetaToTorrentMeta.MapMetadataToFilesCollection(rankTorrentName, request.Torrent, request.ImdbId, request.Metadata.Metadata, logger);

-       if (torrentFiles.Any())
+       if (!torrentFiles.Any())
        {
-           await dataStorage.InsertFiles(torrentFiles);
-
-           var subtitles = await DebridMetaToTorrentMeta.MapMetadataToSubtitlesCollection(dataStorage, request.Torrent.InfoHash, request.Metadata.Metadata);
-
-           if (subtitles.Any())
-           {
-               await dataStorage.InsertSubtitles(subtitles);
-           }
+           await context.Publish(new MetadataWritten(request.Metadata, false));
+           return;
        }

-       await context.Publish(new MetadataWritten(request.Metadata));
+       await dataStorage.InsertFiles(torrentFiles);
+
+       var subtitles = await DebridMetaToTorrentMeta.MapMetadataToSubtitlesCollection(dataStorage, request.Torrent.InfoHash, request.Metadata.Metadata, logger);
+
+       if (subtitles.Any())
+       {
+           await dataStorage.InsertSubtitles(subtitles);
+       }
+
+       await context.Publish(new MetadataWritten(request.Metadata, true));
    }
}
@@ -4,17 +4,18 @@ global using System.Text.Json;
global using System.Text.Json.Serialization;
global using System.Threading.Channels;
global using DebridCollector.Extensions;
global using DebridCollector.Features.Configuration;
global using DebridCollector.Features.Debrid;
global using DebridCollector.Features.Worker;
global using MassTransit;
global using MassTransit.Mediator;
global using Microsoft.AspNetCore.Builder;
global using Microsoft.Extensions.DependencyInjection;
global using Polly;
global using Polly.Extensions.Http;
-global using PromKnight.ParseTorrentTitle;
global using SharedContracts.Configuration;
global using SharedContracts.Dapper;
global using SharedContracts.Extensions;
global using SharedContracts.Models;
+global using SharedContracts.Python;
+global using SharedContracts.Python.RTN;
global using SharedContracts.Requests;
src/debrid-collector/eng/install-python-reqs.ps1 (new file, +2)

@@ -0,0 +1,2 @@
+mkdir -p ../python
+python -m pip install -r ../requirements.txt -t ../python/

src/debrid-collector/eng/install-python-reqs.sh (new file, +5)

@@ -0,0 +1,5 @@
+#!/bin/bash
+
+rm -rf ../python
+mkdir -p ../python
+python3 -m pip install -r ../requirements.txt -t ../python/

src/debrid-collector/requirements.txt (new file, +1)

@@ -0,0 +1 @@
+rank-torrent-name==0.2.13
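For orientation, this pinned package is the Python library the collector drives through Python.NET rather than as a separate process. A hedged sketch of direct usage on the Python side (API as the rank-torrent-name 0.2.x docs describe it; verify field names against the package before relying on them):

```python
# Sketch: parsing a release name with rank-torrent-name (import name RTN).
from RTN import parse

data = parse("Show.Name.S01E02.1080p.WEB.H264-GROUP")
print(data.parsed_title, data.seasons, data.episodes)
```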
@@ -72,7 +72,7 @@ public class BasicsFile(ILogger<BasicsFile> logger, ImdbDbService dbService): IF
    Category = csv.GetField(1),
    Title = csv.GetField(2),
    Adult = isAdultSet && adult == 1,
-   Year = csv.GetField(5),
+   Year = csv.GetField(5) == @"\N" ? 0 : int.Parse(csv.GetField(5)),
};

if (cancellationToken.IsCancellationRequested)

@@ -6,5 +6,5 @@ public class ImdbBasicEntry
public string? Category { get; set; }
public string? Title { get; set; }
public bool Adult { get; set; }
-public string? Year { get; set; }
+public int Year { get; set; }
}

@@ -17,7 +17,7 @@ public class ImdbDbService(PostgresConfiguration configuration, ILogger<ImdbDbSe
await writer.WriteAsync(entry.ImdbId, NpgsqlDbType.Text);
await writer.WriteAsync(entry.Category, NpgsqlDbType.Text);
await writer.WriteAsync(entry.Title, NpgsqlDbType.Text);
-await writer.WriteAsync(entry.Year, NpgsqlDbType.Text);
+await writer.WriteAsync(entry.Year, NpgsqlDbType.Integer);
await writer.WriteAsync(entry.Adult, NpgsqlDbType.Boolean);
}
catch (Npgsql.PostgresException e)

@@ -116,7 +116,7 @@ public class ImdbDbService(PostgresConfiguration configuration, ILogger<ImdbDbSe
ExecuteCommandAsync(
    async connection =>
    {
-       await using var command = new NpgsqlCommand($"CREATE INDEX title_gist ON {TableNames.MetadataTable} USING gist(title gist_trgm_ops)", connection);
+       await using var command = new NpgsqlCommand($"CREATE INDEX title_gin ON {TableNames.MetadataTable} USING gin(title gin_trgm_ops)", connection);
        await command.ExecuteNonQueryAsync();
    }, "Error while creating index on imdb_metadata table");

@@ -125,7 +125,7 @@ public class ImdbDbService(PostgresConfiguration configuration, ILogger<ImdbDbSe
    async connection =>
    {
        logger.LogInformation("Dropping Trigrams index if it exists already");
-       await using var dropCommand = new NpgsqlCommand("DROP INDEX if exists title_gist", connection);
+       await using var dropCommand = new NpgsqlCommand("DROP INDEX if exists title_gin", connection);
        await dropCommand.ExecuteNonQueryAsync();
    }, $"Error while dropping index on {TableNames.MetadataTable} table");
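Switching the trigram index from GiST to GIN trades slower writes and a larger on-disk index for faster lookups, which suits this read-heavy title search; the PostgreSQL documentation notes that GIN trigram indexes are generally faster to query than GiST. Either variant accelerates the % similarity operator the search function relies on:

```sql
-- Both gin_trgm_ops and gist_trgm_ops indexes can serve this predicate;
-- % filters by pg_trgm.similarity_threshold.
EXPLAIN ANALYZE
SELECT imdb_id, title
FROM imdb_metadata
WHERE title % 'the matrix';
```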
src/migrator/migrations/009_imdb_year_column_int.sql (new file, +35)

@@ -0,0 +1,35 @@
+-- Purpose: Change the year column to integer and add a search function that allows for searching by year.
+ALTER TABLE imdb_metadata
+    ALTER COLUMN year TYPE integer USING (CASE WHEN year = '\N' THEN 0 ELSE year::integer END);
+
+-- Remove the old search function
+DROP FUNCTION IF EXISTS search_imdb_meta(TEXT, TEXT, TEXT, INT);
+
+-- Add the new search function that allows for searching by year with a plus/minus one year range
+CREATE OR REPLACE FUNCTION search_imdb_meta(search_term TEXT, category_param TEXT DEFAULT NULL, year_param INT DEFAULT NULL, limit_param INT DEFAULT 10)
+    RETURNS TABLE(imdb_id character varying(16), title character varying(1000), category character varying(50), year INT, score REAL) AS $$
+BEGIN
+    SET pg_trgm.similarity_threshold = 0.9;
+    RETURN QUERY
+    SELECT imdb_metadata.imdb_id, imdb_metadata.title, imdb_metadata.category, imdb_metadata.year, similarity(imdb_metadata.title, search_term) as score
+    FROM imdb_metadata
+    WHERE (imdb_metadata.title % search_term)
+      AND (imdb_metadata.adult = FALSE)
+      AND (category_param IS NULL OR imdb_metadata.category = category_param)
+      AND (year_param IS NULL OR imdb_metadata.year BETWEEN year_param - 1 AND year_param + 1)
+    ORDER BY score DESC
+    LIMIT limit_param;
+END; $$
+LANGUAGE plpgsql;
+
+-- Drop the old indexes
+DROP INDEX IF EXISTS idx_imdb_metadata_adult;
+DROP INDEX IF EXISTS idx_imdb_metadata_category;
+DROP INDEX IF EXISTS idx_imdb_metadata_year;
+DROP INDEX IF EXISTS title_gist;
+
+-- Add indexes for the new columns
+CREATE INDEX idx_imdb_metadata_adult ON imdb_metadata(adult);
+CREATE INDEX idx_imdb_metadata_category ON imdb_metadata(category);
+CREATE INDEX idx_imdb_metadata_year ON imdb_metadata(year);
+CREATE INDEX title_gin ON imdb_metadata USING gin(title gin_trgm_ops);
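With the signature above, category and year are optional and the year filter matches within plus or minus one year, so the function can be exercised directly:

```sql
-- Up to 5 movie entries similar to 'the matrix', released 1998-2000.
SELECT * FROM search_imdb_meta('the matrix', 'movie', 1999, 5);
```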
@@ -0,0 +1,40 @@
+-- Purpose: Add the jsonb column to the ingested_torrents table to store the response from RTN
+ALTER TABLE ingested_torrents
+    ADD COLUMN IF NOT EXISTS rtn_response jsonb;
+
+-- Purpose: Drop torrentId column from torrents table
+ALTER TABLE torrents
+    DROP COLUMN IF EXISTS "torrentId";
+
+-- Purpose: Drop Trackers column from torrents table
+ALTER TABLE torrents
+    DROP COLUMN IF EXISTS "trackers";
+
+-- Purpose: Create a foreign key relationsship if it does not already exist between torrents and the source table ingested_torrents, but do not cascade on delete.
+ALTER TABLE torrents
+    ADD COLUMN IF NOT EXISTS "ingestedTorrentId" bigint;
+
+DO $$
+BEGIN
+    IF EXISTS (
+        SELECT 1
+        FROM information_schema.table_constraints
+        WHERE constraint_name = 'fk_torrents_info_hash'
+    )
+    THEN
+        ALTER TABLE torrents
+            DROP CONSTRAINT fk_torrents_info_hash;
+    END IF;
+END $$;
+
+ALTER TABLE torrents
+    ADD CONSTRAINT fk_torrents_info_hash
+    FOREIGN KEY ("ingestedTorrentId")
+    REFERENCES ingested_torrents("id")
+    ON DELETE NO ACTION;
+
+UPDATE torrents
+SET "ingestedTorrentId" = ingested_torrents."id"
+FROM ingested_torrents
+WHERE torrents."infoHash" = ingested_torrents."info_hash"
+  AND torrents."provider" = ingested_torrents."source";
@@ -0,0 +1,55 @@
+DROP FUNCTION IF EXISTS kc_maintenance_reconcile_dmm_imdb_ids();
+CREATE OR REPLACE FUNCTION kc_maintenance_reconcile_dmm_imdb_ids()
+    RETURNS INTEGER AS $$
+DECLARE
+    rec RECORD;
+    imdb_rec RECORD;
+    rows_affected INTEGER := 0;
+BEGIN
+    RAISE NOTICE 'Starting Reconciliation of DMM IMDB Ids...';
+    FOR rec IN
+        SELECT
+            it."id" as "ingestion_id",
+            t."infoHash",
+            it."category" as "ingestion_category",
+            f."id" as "file_Id",
+            f."title" as "file_Title",
+            (rtn_response->>'raw_title')::text as "raw_title",
+            (rtn_response->>'parsed_title')::text as "parsed_title",
+            (rtn_response->>'year')::int as "year"
+        FROM torrents t
+        JOIN ingested_torrents it ON t."ingestedTorrentId" = it."id"
+        JOIN files f ON t."infoHash" = f."infoHash"
+        WHERE t."provider" = 'DMM'
+    LOOP
+        RAISE NOTICE 'Processing record with file_Id: %', rec."file_Id";
+        FOR imdb_rec IN
+            SELECT * FROM search_imdb_meta(
+                rec."parsed_title",
+                CASE
+                    WHEN rec."ingestion_category" = 'tv' THEN 'tvSeries'
+                    WHEN rec."ingestion_category" = 'movies' THEN 'movie'
+                END,
+                CASE
+                    WHEN rec."year" = 0 THEN NULL
+                    ELSE rec."year" END,
+                1)
+        LOOP
+            IF imdb_rec IS NOT NULL THEN
+                RAISE NOTICE 'Updating file_Id: % with imdbId: %, parsed title: %, imdb title: %', rec."file_Id", imdb_rec."imdb_id", rec."parsed_title", imdb_rec."title";
+                UPDATE "files"
+                SET "imdbId" = imdb_rec."imdb_id"
+                WHERE "id" = rec."file_Id";
+                rows_affected := rows_affected + 1;
+            ELSE
+                RAISE NOTICE 'No IMDB ID found for file_Id: %, parsed title: %, imdb title: %, setting imdbId to NULL', rec."file_Id", rec."parsed_title", imdb_rec."title";
+                UPDATE "files"
+                SET "imdbId" = NULL
+                WHERE "id" = rec."file_Id";
+            END IF;
+        END LOOP;
+    END LOOP;
+    RAISE NOTICE 'Finished reconciliation. Total rows affected: %', rows_affected;
+    RETURN rows_affected;
+END;
+$$ LANGUAGE plpgsql;
src/migrator/migrations/012_imdb_change_ratio.sql (new file, +19)

@@ -0,0 +1,19 @@
+-- Remove the old search function
+DROP FUNCTION IF EXISTS search_imdb_meta(TEXT, TEXT, INT, INT);
+
+-- Add the new search function that allows for searching by year with a plus/minus one year range
+CREATE OR REPLACE FUNCTION search_imdb_meta(search_term TEXT, category_param TEXT DEFAULT NULL, year_param INT DEFAULT NULL, limit_param INT DEFAULT 10, similarity_threshold REAL DEFAULT 0.95)
+    RETURNS TABLE(imdb_id character varying(16), title character varying(1000), category character varying(50), year INT, score REAL) AS $$
+BEGIN
+    SET pg_trgm.similarity_threshold = similarity_threshold;
+    RETURN QUERY
+    SELECT imdb_metadata.imdb_id, imdb_metadata.title, imdb_metadata.category, imdb_metadata.year, similarity(imdb_metadata.title, search_term) as score
+    FROM imdb_metadata
+    WHERE (imdb_metadata.title % search_term)
+      AND (imdb_metadata.adult = FALSE)
+      AND (category_param IS NULL OR imdb_metadata.category = category_param)
+      AND (year_param IS NULL OR imdb_metadata.year BETWEEN year_param - 1 AND year_param + 1)
+    ORDER BY score DESC
+    LIMIT limit_param;
+END; $$
+LANGUAGE plpgsql;
src/migrator/migrations/013_imdb_change_ratio_fix.sql (new file, +19)

@@ -0,0 +1,19 @@
+-- Remove the old search function
+DROP FUNCTION IF EXISTS search_imdb_meta(TEXT, TEXT, INT, INT);
+
+-- Add the new search function that allows for searching by year with a plus/minus one year range
+CREATE OR REPLACE FUNCTION search_imdb_meta(search_term TEXT, category_param TEXT DEFAULT NULL, year_param INT DEFAULT NULL, limit_param INT DEFAULT 10, similarity_threshold REAL DEFAULT 0.95)
+    RETURNS TABLE(imdb_id character varying(16), title character varying(1000), category character varying(50), year INT, score REAL) AS $$
+BEGIN
+    EXECUTE format('SET pg_trgm.similarity_threshold = %L', similarity_threshold);
+    RETURN QUERY
+    SELECT imdb_metadata.imdb_id, imdb_metadata.title, imdb_metadata.category, imdb_metadata.year, similarity(imdb_metadata.title, search_term) as score
+    FROM imdb_metadata
+    WHERE (imdb_metadata.title % search_term)
+      AND (imdb_metadata.adult = FALSE)
+      AND (category_param IS NULL OR imdb_metadata.category = category_param)
+      AND (year_param IS NULL OR imdb_metadata.year BETWEEN year_param - 1 AND year_param + 1)
+    ORDER BY score DESC
+    LIMIT limit_param;
+END; $$
+LANGUAGE plpgsql;
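Migration 013 exists because the SET in 012 is broken: SQL utility commands do not substitute PL/pgSQL variables, so SET pg_trgm.similarity_threshold = similarity_threshold never sees the parameter. Dynamic SQL via EXECUTE format(...) fixes it; set_config() is an equivalent alternative:

```sql
-- Same effect as the EXECUTE format(...) line above; the final 'true'
-- scopes the setting to the current transaction.
SELECT set_config('pg_trgm.similarity_threshold', '0.95', true);
```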
@@ -0,0 +1,43 @@
+-- Drop Duplicate Files in Files Table
+DELETE FROM public.files
+WHERE id NOT IN (
+    SELECT MAX(id)
+    FROM public.files
+    GROUP BY "infoHash", "fileIndex"
+);
+
+-- Add Index to files table
+DO $$
+BEGIN
+    IF NOT EXISTS (
+        SELECT 1
+        FROM pg_constraint
+        WHERE conname = 'files_unique_infohash_fileindex'
+    ) THEN
+        ALTER TABLE public.files
+            ADD CONSTRAINT files_unique_infohash_fileindex UNIQUE ("infoHash", "fileIndex");
+    END IF;
+END $$;
+
+
+-- Drop Duplicate subtitles in Subtitles Table
+DELETE FROM public.subtitles
+WHERE id NOT IN (
+    SELECT MAX(id)
+    FROM public.subtitles
+    GROUP BY "infoHash", "fileIndex"
+);
+
+-- Add Index to subtitles table
+DO $$
+BEGIN
+    IF NOT EXISTS (
+        SELECT 1
+        FROM pg_constraint
+        WHERE conname = 'subtitles_unique_infohash_fileindex'
+    ) THEN
+        ALTER TABLE public.subtitles
+            ADD CONSTRAINT subtitles_unique_infohash_fileindex UNIQUE ("infoHash", "fileIndex");
+    END IF;
+END $$;
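The dedupe keeps the highest id per ("infoHash", "fileIndex") pair, and the unique constraints make the cleanup stick: once they exist, writers can lean on upsert semantics instead of pre-checking for duplicates. A sketch (column list trimmed, values hypothetical):

```sql
-- The unique constraint added above lets inserts ignore duplicates.
INSERT INTO public.files ("infoHash", "fileIndex", "title")
VALUES ('abc123', 0, 'Some.File.mkv')
ON CONFLICT ("infoHash", "fileIndex") DO NOTHING;
```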
@@ -1,2 +1,3 @@
+remove-item -recurse -force ../src/python
mkdir -p ../src/python
-pip install --force-reinstall rank-torrent-name==0.1.6 -t ../src/python/
+pip install -r ../src/requirements.txt -t ../src/python/

@@ -1,4 +1,5 @@
#!/bin/bash

+rm -rf ../src/python
mkdir -p ../src/python
-pip install --force-reinstall rank-torrent-name==0.1.6 -t ../src/python/
+python3 -m pip install -r ../src/requirements.txt -t ../src/python/
@@ -28,7 +28,7 @@
    },
    {
        "Name": "SyncDmmJob",
-       "IntervalSeconds": 1800,
+       "IntervalSeconds": 10800,
        "Enabled": true
    },
    {
@@ -13,13 +13,19 @@ FROM mcr.microsoft.com/dotnet/aspnet:8.0-alpine3.19
WORKDIR /app

ENV PYTHONUNBUFFERED=1

RUN apk add --update --no-cache python3=~3.11.8-r0 py3-pip && ln -sf python3 /usr/bin/python

COPY --from=build /src/out .

+RUN rm -rf /app/python && mkdir -p /app/python
-RUN pip3 install --force-reinstall rank-torrent-name==0.1.6 -t /app/python
+
+RUN pip3 install -r /app/requirements.txt -t /app/python
+
+RUN addgroup -S producer && adduser -S -G producer producer
+
+USER producer
+
HEALTHCHECK --interval=30s --timeout=30s --start-period=5s --retries=3 \
    CMD pgrep -f dotnet || exit 1
src/producer/src/Features/Crawlers/Dmm/DMMFileDownloader.cs (new file, +70)

@@ -0,0 +1,70 @@
+namespace Producer.Features.Crawlers.Dmm;
+
+public class DMMFileDownloader(HttpClient client, ILogger<DMMFileDownloader> logger) : IDMMFileDownloader
+{
+    private const string Filename = "main.zip";
+    private readonly IReadOnlyCollection<string> _filesToIgnore = [
+        "index.html",
+        "404.html",
+        "dedupe.sh",
+        "CNAME",
+    ];
+
+    public const string ClientName = "DmmFileDownloader";
+
+    public async Task<string> DownloadFileToTempPath(CancellationToken cancellationToken)
+    {
+        logger.LogInformation("Downloading DMM Hashlists");
+
+        var response = await client.GetAsync(Filename, cancellationToken);
+
+        var tempDirectory = Path.Combine(Path.GetTempPath(), "DMMHashlists");
+
+        EnsureDirectoryIsClean(tempDirectory);
+
+        response.EnsureSuccessStatusCode();
+
+        await using var stream = await response.Content.ReadAsStreamAsync(cancellationToken);
+        using var archive = new ZipArchive(stream);
+
+        logger.LogInformation("Extracting DMM Hashlists to {TempDirectory}", tempDirectory);
+
+        foreach (var entry in archive.Entries)
+        {
+            var entryPath = Path.Combine(tempDirectory, Path.GetFileName(entry.FullName));
+            if (!entry.FullName.EndsWith('/')) // It's a file
+            {
+                entry.ExtractToFile(entryPath, true);
+            }
+        }
+
+        foreach (var file in _filesToIgnore)
+        {
+            CleanRepoExtras(tempDirectory, file);
+        }
+
+        logger.LogInformation("Downloaded and extracted Repository to {TempDirectory}", tempDirectory);
+
+        return tempDirectory;
+    }
+
+    private static void CleanRepoExtras(string tempDirectory, string fileName)
+    {
+        var repoIndex = Path.Combine(tempDirectory, fileName);
+
+        if (File.Exists(repoIndex))
+        {
+            File.Delete(repoIndex);
+        }
+    }
+
+    private static void EnsureDirectoryIsClean(string tempDirectory)
+    {
+        if (Directory.Exists(tempDirectory))
+        {
+            Directory.Delete(tempDirectory, true);
+        }
+
+        Directory.CreateDirectory(tempDirectory);
+    }
+}
src/producer/src/Features/Crawlers/Dmm/DMMHttpClient.cs (new file, +6)

@@ -0,0 +1,6 @@
+namespace Producer.Features.Crawlers.Dmm;
+
+public class DMMHttpClient
+{
+
+}
@@ -1,67 +1,99 @@
-using Microsoft.VisualBasic;
-
namespace Producer.Features.Crawlers.Dmm;

public partial class DebridMediaManagerCrawler(
-   IHttpClientFactory httpClientFactory,
+   IDMMFileDownloader dmmFileDownloader,
    ILogger<DebridMediaManagerCrawler> logger,
    IDataStorage storage,
-   GithubConfiguration githubConfiguration,
+   IRankTorrentName rankTorrentName,
    IDistributedCache cache) : BaseCrawler(logger, storage)
{
    [GeneratedRegex("""<iframe src="https:\/\/debridmediamanager.com\/hashlist#(.*)"></iframe>""")]
    private static partial Regex HashCollectionMatcher();
-   private LengthAwareRatioScorer _lengthAwareRatioScorer = new();

-   private const string DownloadBaseUrl = "https://raw.githubusercontent.com/debridmediamanager/hashlists/main";
-   protected override string Url => "";
-   protected override IReadOnlyDictionary<string, string> Mappings => new Dictionary<string, string>();
+   protected override string Url => "https://api.github.com/repos/debridmediamanager/hashlists/git/trees/main?recursive=1";
    protected override string Source => "DMM";

+   private const int ParallelismCount = 4;
+
    public override async Task Execute()
    {
-       var client = httpClientFactory.CreateClient("Scraper");
-       client.DefaultRequestHeaders.Authorization = new("Bearer", githubConfiguration.PAT);
-       client.DefaultRequestHeaders.UserAgent.ParseAdd("curl");
+       var tempDirectory = await dmmFileDownloader.DownloadFileToTempPath(CancellationToken.None);

-       var jsonBody = await client.GetStringAsync(Url);
+       var files = Directory.GetFiles(tempDirectory, "*.html", SearchOption.AllDirectories);

-       var json = JsonDocument.Parse(jsonBody);
+       logger.LogInformation("Found {Files} files to parse", files.Length);

-       var entriesArray = json.RootElement.GetProperty("tree");
-
-       logger.LogInformation("Found {Entries} total DMM pages", entriesArray.GetArrayLength());
-
-       foreach (var entry in entriesArray.EnumerateArray())
+       var options = new ParallelOptions { MaxDegreeOfParallelism = ParallelismCount };
+
+       await Parallel.ForEachAsync(files, options, async (file, token) =>
        {
-           await ParsePage(entry, client);
-       }
+           var fileName = Path.GetFileName(file);
+           var torrentDictionary = await ExtractPageContents(file, fileName);
+
+           if (torrentDictionary == null)
+           {
+               return;
+           }
+
+           await ParseTitlesWithRtn(fileName, torrentDictionary);
+           var results = await ParseTorrents(torrentDictionary);
+
+           if (results.Count <= 0)
+           {
+               return;
+           }
+
+           await InsertTorrents(results);
+           await Storage.MarkPageAsIngested(fileName, token);
+       });
    }

-   private async Task ParsePage(JsonElement entry, HttpClient client)
+   private async Task ParseTitlesWithRtn(string fileName, IDictionary<string, DmmContent> page)
    {
-       var (pageIngested, name) = await IsAlreadyIngested(entry);
+       logger.LogInformation("Parsing titles for {Page}", fileName);

-       if (string.IsNullOrEmpty(name) || pageIngested)
+       var batchProcessables = page.Select(value => new RtnBatchProcessable(value.Key, value.Value.Filename)).ToList();
+       var parsedResponses = rankTorrentName.BatchParse(
+           batchProcessables.Select<RtnBatchProcessable, string>(bp => bp.Filename).ToList(), trashGarbage: false);
+
+       // Filter out unsuccessful responses and match RawTitle to requesting title
+       var successfulResponses = parsedResponses
+           .Where(response => response != null && response.Success)
+           .GroupBy(response => response.Response.RawTitle!)
+           .ToDictionary(group => group.Key, group => group.First());
+
+       var options = new ParallelOptions { MaxDegreeOfParallelism = ParallelismCount };
+
+       await Parallel.ForEachAsync(batchProcessables.Select(t => t.InfoHash), options, (infoHash, _) =>
        {
-           return;
-       }
-
-       var pageSource = await client.GetStringAsync($"{DownloadBaseUrl}/{name}");
-
-       await ExtractPageContents(pageSource, name);
+           if (page.TryGetValue(infoHash, out var dmmContent) &&
+               successfulResponses.TryGetValue(dmmContent.Filename, out var parsedResponse))
+           {
+               page[infoHash] = dmmContent with {ParseResponse = parsedResponse};
+           }
+
+           return ValueTask.CompletedTask;
+       });
    }

-   private async Task ExtractPageContents(string pageSource, string name)
+   private async Task<ConcurrentDictionary<string, DmmContent>?> ExtractPageContents(string filePath, string filenameOnly)
    {
+       var (pageIngested, name) = await IsAlreadyIngested(filenameOnly);
+
+       if (pageIngested)
+       {
+           return [];
+       }
+
+       var pageSource = await File.ReadAllTextAsync(filePath);
+
        var match = HashCollectionMatcher().Match(pageSource);

        if (!match.Success)
        {
            logger.LogWarning("Failed to match hash collection for {Name}", name);
-           await Storage.MarkPageAsIngested(name);
-           return;
+           await Storage.MarkPageAsIngested(filenameOnly);
+           return [];
        }

        var encodedJson = match.Groups.Values.ElementAtOrDefault(1);

@@ -69,132 +101,112 @@ public partial class DebridMediaManagerCrawler(
        if (string.IsNullOrEmpty(encodedJson?.Value))
        {
            logger.LogWarning("Failed to extract encoded json for {Name}", name);
-           return;
+           return [];
        }

-       await ProcessExtractedContentsAsTorrentCollection(encodedJson.Value, name);
-   }
-
-   private async Task ProcessExtractedContentsAsTorrentCollection(string encodedJson, string name)
-   {
-       var decodedJson = LZString.DecompressFromEncodedURIComponent(encodedJson);
+       var decodedJson = LZString.DecompressFromEncodedURIComponent(encodedJson.Value);

        var json = JsonDocument.Parse(decodedJson);

+       var torrents = await json.RootElement.EnumerateArray()
+           .ToAsyncEnumerable()
+           .Select(ParsePageContent)
+           .Where(t => t is not null)
+           .ToListAsync();
+
-       await InsertTorrentsForPage(json);
-
-       var result = await Storage.MarkPageAsIngested(name);
-
-       if (!result.IsSuccess)
+       if (torrents.Count == 0)
        {
-           logger.LogWarning("Failed to mark page as ingested: [{Error}]", result.Failure.ErrorMessage);
-           return;
+           logger.LogWarning("No torrents found in {Name}", name);
+           await Storage.MarkPageAsIngested(filenameOnly);
+           return [];
        }

+       var torrentDictionary = torrents
+           .Where(x => x is not null)
+           .GroupBy(x => x.InfoHash)
+           .ToConcurrentDictionary(g => g.Key, g => new DmmContent(g.First().Filename, g.First().Bytes, null));
+
-       logger.LogInformation("Successfully marked page as ingested");
+       logger.LogInformation("Parsed {Torrents} torrents for {Name}", torrentDictionary.Count, name);
+
+       return torrentDictionary;
    }

-   private async Task<IngestedTorrent?> ParseTorrent(JsonElement item)
+   private async Task<List<IngestedTorrent>> ParseTorrents(IDictionary<string, DmmContent> page)
    {
+       var ingestedTorrents = new List<IngestedTorrent>();
+
-       if (!item.TryGetProperty("filename", out var filenameElement) ||
-           !item.TryGetProperty("bytes", out var bytesElement) ||
-           !item.TryGetProperty("hash", out var hashElement))
-       {
-           return null;
-       }
+       var options = new ParallelOptions { MaxDegreeOfParallelism = ParallelismCount };

-       var torrentTitle = filenameElement.GetString();
+       await Parallel.ForEachAsync(page, options, async (kvp, ct) =>
+       {
+           var (infoHash, dmmContent) = kvp;
+           var parsedTorrent = dmmContent.ParseResponse;
+           if (parsedTorrent is not {Success: true})
+           {
+               return;
+           }

-       if (torrentTitle.IsNullOrEmpty())
-       {
-           return null;
-       }
-
-       var parsedTorrent = rankTorrentName.Parse(torrentTitle.CleanTorrentTitleForImdb());
-
-       if (!parsedTorrent.Success)
-       {
-           return null;
-       }
-
-       var (cached, cachedResult) = await CheckIfInCacheAndReturn(parsedTorrent.ParsedTitle);
-
-       if (cached)
-       {
-           logger.LogInformation("[{ImdbId}] Found cached imdb result for {Title}", cachedResult.ImdbId, parsedTorrent.ParsedTitle);
-           return MapToTorrent(cachedResult, bytesElement, hashElement, parsedTorrent);
-       }
+           var torrentType = parsedTorrent.Response.IsMovie ? "movie" : "tvSeries";
+           var cacheKey = GetCacheKey(torrentType, parsedTorrent.Response.ParsedTitle, parsedTorrent.Response.Year);
+           var (cached, cachedResult) = await CheckIfInCacheAndReturn(cacheKey);

-       var year = parsedTorrent.Year != 0 ? parsedTorrent.Year.ToString() : null;
-       var imdbEntries = await Storage.FindImdbMetadata(parsedTorrent.ParsedTitle, parsedTorrent.IsMovie ? "movies" : "tv", year);
+           if (cached)
+           {
+               logger.LogInformation("[{ImdbId}] Found cached imdb result for {Title}", cachedResult.ImdbId, parsedTorrent.Response.ParsedTitle);
+               lock (ingestedTorrents)
+               {
+                   ingestedTorrents.Add(MapToTorrent(cachedResult, dmmContent.Bytes, infoHash, parsedTorrent));
+               }
+               return;
+           }

-       if (imdbEntries.Count == 0)
-       {
-           return null;
-       }
-
-       var scoredTitles = await ScoreTitles(parsedTorrent, imdbEntries);
-
-       if (!scoredTitles.Success)
-       {
-           return null;
-       }
-
-       logger.LogInformation("[{ImdbId}] Found best match for {Title}: {BestMatch} with score {Score}", scoredTitles.BestMatch.Value.ImdbId, parsedTorrent.ParsedTitle, scoredTitles.BestMatch.Value.Title, scoredTitles.BestMatch.Score);
+           int? year = parsedTorrent.Response.Year != 0 ? parsedTorrent.Response.Year : null;
+           var imdbEntry = await Storage.FindImdbMetadata(parsedTorrent.Response.ParsedTitle, torrentType, year, ct);

-       return MapToTorrent(scoredTitles.BestMatch.Value, bytesElement, hashElement, parsedTorrent);
+           if (imdbEntry is null)
+           {
+               return;
+           }
+
+           await AddToCache(cacheKey, imdbEntry);
+           logger.LogInformation("[{ImdbId}] Found best match for {Title}: {BestMatch} with score {Score}", imdbEntry.ImdbId, parsedTorrent.Response.ParsedTitle, imdbEntry.Title, imdbEntry.Score);
+           lock (ingestedTorrents)
+           {
+               ingestedTorrents.Add(MapToTorrent(imdbEntry, dmmContent.Bytes, infoHash, parsedTorrent));
+           }
+       });
+
+       return ingestedTorrents;
    }

-   private IngestedTorrent MapToTorrent(ImdbEntry result, JsonElement bytesElement, JsonElement hashElement, ParseTorrentTitleResponse parsedTorrent) =>
+   private IngestedTorrent MapToTorrent(ImdbEntry result, long size, string infoHash, ParseTorrentTitleResponse parsedTorrent) =>
        new()
        {
            Source = Source,
            Name = result.Title,
            Imdb = result.ImdbId,
-           Size = bytesElement.GetInt64().ToString(),
-           InfoHash = hashElement.ToString(),
+           Size = size.ToString(),
+           InfoHash = infoHash,
            Seeders = 0,
            Leechers = 0,
-           Category = parsedTorrent.IsMovie switch
-           {
-               true => "movies",
-               false => "tv",
-           },
+           Category = AssignCategory(result),
+           RtnResponse = parsedTorrent.Response.ToJson(),
        };


-   private async Task<(bool Success, ExtractedResult<ImdbEntry>? BestMatch)> ScoreTitles(ParseTorrentTitleResponse parsedTorrent, List<ImdbEntry> imdbEntries)
-   {
-       var lowerCaseTitle = parsedTorrent.ParsedTitle.ToLowerInvariant();
-
-       // Scoring directly operates on the List<ImdbEntry>, no need for lookup table.
-       var scoredResults = Process.ExtractAll(new(){Title = lowerCaseTitle}, imdbEntries, x => x.Title?.ToLowerInvariant(), scorer: _lengthAwareRatioScorer, cutoff: 90);
-
-       var best = scoredResults.MaxBy(x => x.Score);
-
-       if (best is null)
-       {
-           return (false, null);
-       }
-
-       await AddToCache(lowerCaseTitle, best);
-
-       return (true, best);
-   }
-
-   private Task AddToCache(string lowerCaseTitle, ExtractedResult<ImdbEntry> best)
+   private Task AddToCache(string cacheKey, ImdbEntry best)
    {
        var cacheOptions = new DistributedCacheEntryOptions
        {
            AbsoluteExpirationRelativeToNow = TimeSpan.FromDays(1),
        };

-       return cache.SetStringAsync(lowerCaseTitle, JsonSerializer.Serialize(best.Value), cacheOptions);
+       return cache.SetStringAsync(cacheKey, JsonSerializer.Serialize(best), cacheOptions);
    }

-   private async Task<(bool Success, ImdbEntry? Entry)> CheckIfInCacheAndReturn(string title)
+   private async Task<(bool Success, ImdbEntry? Entry)> CheckIfInCacheAndReturn(string cacheKey)
    {
-       var cachedImdbId = await cache.GetStringAsync(title.ToLowerInvariant());
+       var cachedImdbId = await cache.GetStringAsync(cacheKey);

        if (!string.IsNullOrEmpty(cachedImdbId))
        {

@@ -204,34 +216,36 @@ public partial class DebridMediaManagerCrawler(
            return (false, null);
        }

-   private async Task InsertTorrentsForPage(JsonDocument json)
+   private async Task<(bool Success, string? Name)> IsAlreadyIngested(string filename)
    {
-       var torrents = await json.RootElement.EnumerateArray()
-           .ToAsyncEnumerable()
-           .SelectAwait(async x => await ParseTorrent(x))
-           .Where(t => t is not null)
-           .ToListAsync();
+       var pageIngested = await Storage.PageIngested(filename);

-       if (torrents.Count == 0)
-       {
-           logger.LogWarning("No torrents found in {Source} response", Source);
-           return;
-       }
-
-       await InsertTorrents(torrents!);
+       return (pageIngested, filename);
    }

-   private async Task<(bool Success, string? Name)> IsAlreadyIngested(JsonElement entry)
+   private static string AssignCategory(ImdbEntry entry) =>
+       entry.Category.ToLower() switch
+       {
+           var category when string.Equals(category, "movie", StringComparison.OrdinalIgnoreCase) => "movies",
+           var category when string.Equals(category, "tvSeries", StringComparison.OrdinalIgnoreCase) => "tv",
+           _ => "unknown",
+       };
+
+   private static string GetCacheKey(string category, string title, int year) => $"{category.ToLowerInvariant()}:{year}:{title.ToLowerInvariant()}";
+
+   private static ExtractedDMMContent? ParsePageContent(JsonElement item)
    {
-       var name = entry.GetProperty("path").GetString();
-
-       if (string.IsNullOrEmpty(name))
+       if (!item.TryGetProperty("filename", out var filenameElement) ||
+           !item.TryGetProperty("bytes", out var bytesElement) ||
+           !item.TryGetProperty("hash", out var hashElement))
        {
-           return (false, null);
+           return null;
        }

-       var pageIngested = await Storage.PageIngested(name);
-
-       return (pageIngested, name);
+       return new(filenameElement.GetString(), bytesElement.GetInt64(), hashElement.GetString());
    }

+   private record DmmContent(string Filename, long Bytes, ParseTorrentTitleResponse? ParseResponse);
+   private record ExtractedDMMContent(string Filename, long Bytes, string InfoHash);
+   private record RtnBatchProcessable(string InfoHash, string Filename);
}
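One note on the accumulation pattern above: List<T> is not thread-safe, hence the lock around every Add inside Parallel.ForEachAsync. A ConcurrentBag<T> expresses the same intent without explicit locking; a self-contained sketch:

```csharp
using System.Collections.Concurrent;

// Lock-free accumulation under Parallel.ForEachAsync.
var results = new ConcurrentBag<string>();
var items = new[] { "a", "b", "c" };

await Parallel.ForEachAsync(items, async (item, ct) =>
{
    await Task.Yield(); // placeholder for real async work
    results.Add(item);  // ConcurrentBag.Add is thread-safe
});

Console.WriteLine(results.Count);
```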
@@ -1,9 +0,0 @@
-namespace Producer.Features.Crawlers.Dmm;
-
-public class GithubConfiguration
-{
-    private const string Prefix = "GITHUB";
-    private const string PatVariable = "PAT";
-
-    public string? PAT { get; init; } = Prefix.GetOptionalEnvironmentVariableAsString(PatVariable);
-}
@@ -0,0 +1,6 @@
namespace Producer.Features.Crawlers.Dmm;

public interface IDMMFileDownloader
{
    Task<string> DownloadFileToTempPath(CancellationToken cancellationToken);
}
@@ -0,0 +1,16 @@
namespace Producer.Features.Crawlers.Dmm;

public static class ServiceCollectionExtensions
{
    public static IServiceCollection AddDmmSupport(this IServiceCollection services)
    {
        services.AddHttpClient<IDMMFileDownloader, DMMFileDownloader>(DMMFileDownloader.ClientName, client =>
        {
            client.BaseAddress = new("https://github.com/debridmediamanager/hashlists/zipball/main/");
            client.DefaultRequestHeaders.Add("Accept-Encoding", "gzip");
            client.DefaultRequestHeaders.UserAgent.ParseAdd("curl");
        });

        return services;
    }
}
@@ -5,7 +5,6 @@ internal static class ServiceCollectionExtensions
internal static IServiceCollection AddQuartz(this IServiceCollection services, IConfiguration configuration)
{
    var scrapeConfiguration = services.LoadConfigurationFromConfig<ScrapeConfiguration>(configuration, ScrapeConfiguration.SectionName);
    var githubConfiguration = services.LoadConfigurationFromEnv<GithubConfiguration>();
    var rabbitConfiguration = services.LoadConfigurationFromEnv<RabbitMqConfiguration>();

    var jobTypes = Assembly.GetAssembly(typeof(BaseJob))
@@ -19,18 +18,13 @@ internal static class ServiceCollectionExtensions
        services.AddTransient(type);
    }

    if (!string.IsNullOrEmpty(githubConfiguration.PAT))
    {
        services.AddTransient<SyncDmmJob>();
    }

    var openMethod = typeof(ServiceCollectionExtensions).GetMethod(nameof(AddJobWithTrigger), BindingFlags.NonPublic | BindingFlags.Static | BindingFlags.Instance);

    services.AddQuartz(
        quartz =>
        {
            RegisterAutomaticRegistrationJobs(jobTypes, openMethod, quartz, scrapeConfiguration);
            RegisterDmmJob(githubConfiguration, quartz, scrapeConfiguration);
            RegisterDmmJob(quartz, scrapeConfiguration);
            RegisterTorrentioJob(services, quartz, configuration, scrapeConfiguration);
            RegisterPublisher(quartz, rabbitConfiguration);
        });
@@ -64,13 +58,8 @@ internal static class ServiceCollectionExtensions
    }
}

private static void RegisterDmmJob(GithubConfiguration githubConfiguration, IServiceCollectionQuartzConfigurator quartz, ScrapeConfiguration scrapeConfiguration)
{
    if (!string.IsNullOrEmpty(githubConfiguration.PAT))
    {
        AddJobWithTrigger<SyncDmmJob>(quartz, SyncDmmJob.Key, SyncDmmJob.Trigger, scrapeConfiguration);
    }
}
private static void RegisterDmmJob(IServiceCollectionQuartzConfigurator quartz, ScrapeConfiguration scrapeConfiguration) =>
    AddJobWithTrigger<SyncDmmJob>(quartz, SyncDmmJob.Key, SyncDmmJob.Trigger, scrapeConfiguration);

private static void RegisterTorrentioJob(
    IServiceCollection services,
@@ -1,12 +1,12 @@
// Global using directives

global using System.Collections.Concurrent;
global using System.IO.Compression;
global using System.Reflection;
global using System.Text;
global using System.Text.Json;
global using System.Text.RegularExpressions;
global using System.Xml.Linq;
global using FuzzySharp;
global using FuzzySharp.Extractor;
global using FuzzySharp.PreProcess;
global using FuzzySharp.SimilarityRatio.Scorer;
global using FuzzySharp.SimilarityRatio.Scorer.StrategySensitive;
@@ -33,6 +33,9 @@
    <None Include="Configuration\*.json">
      <CopyToOutputDirectory>Always</CopyToOutputDirectory>
    </None>
    <None Update="requirements.txt">
      <CopyToOutputDirectory>Always</CopyToOutputDirectory>
    </None>
  </ItemGroup>

  <ItemGroup Condition="'$(Configuration)' == 'Debug'">
@@ -12,7 +12,8 @@ builder.Services
    .RegisterMassTransit()
    .AddDataStorage()
    .AddCrawlers()
    .AddDmmSupport()
    .AddQuartz(builder.Configuration);

var app = builder.Build();
app.Run();
app.Run();
1
src/producer/src/requirements.txt
Normal file
@@ -0,0 +1 @@
rank-torrent-name==0.2.13
@@ -9,12 +9,23 @@ RUN dotnet restore -a $TARGETARCH
RUN dotnet publish -c Release --no-restore -o /src/out -a $TARGETARCH


FROM mcr.microsoft.com/dotnet/aspnet:8.0-alpine
FROM mcr.microsoft.com/dotnet/aspnet:8.0-alpine3.19

WORKDIR /app

ENV PYTHONUNBUFFERED=1

RUN apk add --update --no-cache python3=~3.11.8-r0 py3-pip && ln -sf python3 /usr/bin/python

COPY --from=build /src/out .

RUN rm -rf /app/python && mkdir -p /app/python

RUN pip3 install -r /app/requirements.txt -t /app/python

RUN addgroup -S qbit && adduser -S -G qbit qbit
USER qbit
HEALTHCHECK --interval=30s --timeout=30s --start-period=5s --retries=3 \
    CMD pgrep -f dotnet || exit 1
ENV PYTHONNET_PYDLL=/usr/lib/libpython3.11.so.1.0
ENTRYPOINT ["dotnet", "QBitCollector.dll"]
@@ -13,11 +13,13 @@ public static class ServiceCollectionExtensions
internal static IServiceCollection AddServiceConfiguration(this IServiceCollection services)
{
    services.AddQBitTorrentClient();
    services.AddSingleton<IParseTorrentTitle, ParseTorrentTitle>();
    services.RegisterPythonEngine();
    services.AddSingleton<IRankTorrentName, RankTorrentName>();
    services.AddSingleton<QbitRequestProcessor>();
    services.AddHttpClient();
    services.AddSingleton<ITrackersService, TrackersService>();
    services.AddHostedService<TrackersBackgroundService>();
    services.AddHostedService<HousekeepingBackgroundService>();

    return services;
}
@@ -0,0 +1,52 @@
namespace QBitCollector.Features.Qbit;

public class HousekeepingBackgroundService(IQBittorrentClient client, ILogger<HousekeepingBackgroundService> logger) : BackgroundService
{
    protected override async Task ExecuteAsync(CancellationToken stoppingToken)
    {
        logger.LogInformation("Service is Running.");

        await DoWork();

        using PeriodicTimer timer = new(TimeSpan.FromMinutes(2));

        try
        {
            while (await timer.WaitForNextTickAsync(stoppingToken))
            {
                await DoWork();
            }
        }
        catch (OperationCanceledException)
        {
            logger.LogInformation("Service stopping.");
        }
    }

    private async Task DoWork()
    {
        try
        {
            logger.LogInformation("Cleaning Stale Entries in Qbit...");

            var torrents = await client.GetTorrentListAsync();

            foreach (var torrentInfo in torrents)
            {
                if (!(torrentInfo.AddedOn < DateTimeOffset.UtcNow.AddMinutes(-1)))
                {
                    continue;
                }

                logger.LogInformation("Torrent [{InfoHash}] Identified as stale because was added at {AddedOn}", torrentInfo.Hash, torrentInfo.AddedOn);

                await client.DeleteAsync(new[] {torrentInfo.Hash}, deleteDownloadedData: true);
                logger.LogInformation("Cleaned up stale torrent: [{InfoHash}]", torrentInfo.Hash);
            }
        }
        catch (Exception e)
        {
            logger.LogError(e, "Error cleaning up stale torrents this interval.");
        }
    }
}
@@ -3,10 +3,11 @@ namespace QBitCollector.Features.Worker;
public static class QbitMetaToTorrentMeta
{
    public static IReadOnlyList<TorrentFile> MapMetadataToFilesCollection(
        IParseTorrentTitle torrentTitle,
        IRankTorrentName rankTorrentName,
        Torrent torrent,
        string ImdbId,
        IReadOnlyList<TorrentContent> Metadata)
        IReadOnlyList<TorrentContent> Metadata,
        ILogger<WriteQbitMetadataConsumer> logger)
    {
        try
        {
@@ -24,23 +25,31 @@ public static class QbitMetaToTorrentMeta
                Size = metadataEntry.Size,
            };

            var parsedTitle = torrentTitle.Parse(file.Title);
            var parsedTitle = rankTorrentName.Parse(file.Title, false);

            if (!parsedTitle.Success)
            {
                logger.LogWarning("Failed to parse title {Title} for metadata mapping", file.Title);
                continue;
            }

            file.ImdbSeason = parsedTitle.Seasons.FirstOrDefault();
            file.ImdbEpisode = parsedTitle.Episodes.FirstOrDefault();
            file.ImdbSeason = parsedTitle.Response?.Season?.FirstOrDefault() ?? 0;
            file.ImdbEpisode = parsedTitle.Response?.Episode?.FirstOrDefault() ?? 0;

            files.Add(file);
        }

        return files;
    }
    catch (Exception)
    catch (Exception ex)
    {
        logger.LogWarning("Failed to map metadata to files collection: {Exception}", ex.Message);
        return [];
    }
}

public static async Task<IReadOnlyList<SubtitleFile>> MapMetadataToSubtitlesCollection(IDataStorage storage, string InfoHash, IReadOnlyList<TorrentContent> Metadata)
public static async Task<IReadOnlyList<SubtitleFile>> MapMetadataToSubtitlesCollection(IDataStorage storage, string InfoHash, IReadOnlyList<TorrentContent> Metadata,
    ILogger<WriteQbitMetadataConsumer> logger)
{
    try
    {
@@ -70,8 +79,9 @@ public static class QbitMetaToTorrentMeta

        return files;
    }
    catch (Exception)
    catch (Exception ex)
    {
        logger.LogWarning("Failed to map metadata to subtitles collection: {Exception}", ex.Message);
        return [];
    }
}
@@ -53,6 +53,12 @@ public class QbitMetadataSagaStateMachine : MassTransitStateMachine<QbitMetadata
.Then(
    context =>
    {
        if (!context.Message.WithFiles)
        {
            logger.LogInformation("No files written for torrent {InfoHash} in Saga {SagaId}", context.Saga.Torrent.InfoHash, context.Saga.CorrelationId);
            return;
        }

        logger.LogInformation("Metadata Written for torrent {InfoHash} in Saga {SagaId}", context.Saga.Torrent.InfoHash, context.Saga.CorrelationId);
    })
.TransitionTo(Completed)
@@ -1,22 +1,24 @@
namespace QBitCollector.Features.Worker;

[EntityName("perform-metadata-request")]
[EntityName("perform-metadata-request-qbit-collector")]
public record PerformQbitMetadataRequest(Guid CorrelationId, string InfoHash) : CorrelatedBy<Guid>;

[EntityName("torrent-metadata-response")]
[EntityName("torrent-metadata-response-qbit-collector")]
public record GotQbitMetadata(QBitMetadataResponse Metadata) : CorrelatedBy<Guid>
{
    public Guid CorrelationId { get; init; } = Metadata.CorrelationId;
}

[EntityName("write-metadata")]
[EntityName("write-metadata-qbit-collector")]
public record WriteQbitMetadata(Torrent Torrent, QBitMetadataResponse Metadata, string ImdbId) : CorrelatedBy<Guid>
{
    public Guid CorrelationId { get; init; } = Metadata.CorrelationId;
}

[EntityName("metadata-written")]
public record QbitMetadataWritten(QBitMetadataResponse Metadata) : CorrelatedBy<Guid>
[EntityName("metadata-written-qbit-collector")]
public record QbitMetadataWritten(QBitMetadataResponse Metadata, bool WithFiles) : CorrelatedBy<Guid>
{
    public Guid CorrelationId { get; init; } = Metadata.CorrelationId;

    public QBitMetadataResponse Metadata { get; init; } = Metadata;
}
@@ -1,25 +1,30 @@
namespace QBitCollector.Features.Worker;

public class WriteQbitMetadataConsumer(IParseTorrentTitle parseTorrentTitle, IDataStorage dataStorage) : IConsumer<WriteQbitMetadata>
public class WriteQbitMetadataConsumer(IRankTorrentName rankTorrentName, IDataStorage dataStorage, ILogger<WriteQbitMetadataConsumer> logger) : IConsumer<WriteQbitMetadata>
{
    public async Task Consume(ConsumeContext<WriteQbitMetadata> context)
    {
        var request = context.Message;

        var torrentFiles = QbitMetaToTorrentMeta.MapMetadataToFilesCollection(parseTorrentTitle, request.Torrent, request.ImdbId, request.Metadata.Metadata);

        if (torrentFiles.Any())
        var torrentFiles = QbitMetaToTorrentMeta.MapMetadataToFilesCollection(
            rankTorrentName, request.Torrent, request.ImdbId, request.Metadata.Metadata, logger);

        if (!torrentFiles.Any())
        {
            await dataStorage.InsertFiles(torrentFiles);

            var subtitles = await QbitMetaToTorrentMeta.MapMetadataToSubtitlesCollection(dataStorage, request.Torrent.InfoHash, request.Metadata.Metadata);

            if (subtitles.Any())
            {
                await dataStorage.InsertSubtitles(subtitles);
            }
            await context.Publish(new QbitMetadataWritten(request.Metadata, false));
            return;
        }

        await context.Publish(new QbitMetadataWritten(request.Metadata));

        await dataStorage.InsertFiles(torrentFiles);

        var subtitles = await QbitMetaToTorrentMeta.MapMetadataToSubtitlesCollection(
            dataStorage, request.Torrent.InfoHash, request.Metadata.Metadata, logger);

        if (subtitles.Any())
        {
            await dataStorage.InsertSubtitles(subtitles);
        }

        await context.Publish(new QbitMetadataWritten(request.Metadata, true));
    }
}
@@ -1,17 +1,11 @@
// Global using directives

global using System.Text.Json;
global using System.Text.Json.Serialization;
global using System.Threading.Channels;
global using MassTransit;
global using MassTransit.Mediator;
global using Microsoft.AspNetCore.Builder;
global using Microsoft.Extensions.Caching.Distributed;
global using Microsoft.Extensions.Caching.Memory;
global using Microsoft.Extensions.DependencyInjection;
global using Polly;
global using Polly.Extensions.Http;
global using PromKnight.ParseTorrentTitle;
global using QBitCollector.Extensions;
global using QBitCollector.Features.Qbit;
global using QBitCollector.Features.Trackers;
@@ -21,4 +15,6 @@ global using SharedContracts.Configuration;
global using SharedContracts.Dapper;
global using SharedContracts.Extensions;
global using SharedContracts.Models;
global using SharedContracts.Python;
global using SharedContracts.Python.RTN;
global using SharedContracts.Requests;
@@ -18,7 +18,6 @@
    <PackageReference Include="Microsoft.Extensions.Http" Version="8.0.0" />
    <PackageReference Include="Microsoft.Extensions.Http.Polly" Version="8.0.3" />
    <PackageReference Include="Polly" Version="8.3.1" />
    <PackageReference Include="PromKnight.ParseTorrentTitle" Version="1.0.4" />
    <PackageReference Include="QBittorrent.Client" Version="1.9.23349.1" />
    <PackageReference Include="Serilog" Version="3.1.1" />
    <PackageReference Include="Serilog.AspNetCore" Version="8.0.1" />
@@ -31,10 +30,30 @@
    <None Include="Configuration\logging.json">
      <CopyToOutputDirectory>Always</CopyToOutputDirectory>
    </None>
    <Content Remove="eng\**" />
    <None Remove="eng\**" />
    <None Update="requirements.txt">
      <CopyToOutputDirectory>Always</CopyToOutputDirectory>
    </None>
  </ItemGroup>

  <ItemGroup>
    <ProjectReference Include="..\shared\SharedContracts.csproj" />
  </ItemGroup>

  <ItemGroup Condition="'$(Configuration)' == 'Debug'">
    <Content Remove="python\**" />
    <None Include="python\**">
      <CopyToOutputDirectory>Always</CopyToOutputDirectory>
    </None>
  </ItemGroup>

  <ItemGroup>
    <Compile Remove="eng\**" />
  </ItemGroup>

  <ItemGroup>
    <EmbeddedResource Remove="eng\**" />
  </ItemGroup>

</Project>
@@ -6,6 +6,12 @@ Project("{2150E333-8FDC-42A3-9474-1A3956D46DE8}") = "shared", "shared", "{2C0A0F
EndProject
Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "QBitCollector", "QBitCollector.csproj", "{1EF124BE-6EBE-4D9E-846C-FFF814999F3B}"
EndProject
Project("{2150E333-8FDC-42A3-9474-1A3956D46DE8}") = "eng", "eng", "{2F2EA33A-1303-405D-939B-E9394D262BC9}"
	ProjectSection(SolutionItems) = preProject
		eng\install-python-reqs.ps1 = eng\install-python-reqs.ps1
		eng\install-python-reqs.sh = eng\install-python-reqs.sh
	EndProjectSection
EndProject
Global
	GlobalSection(SolutionConfigurationPlatforms) = preSolution
		Debug|Any CPU = Debug|Any CPU
3
src/qbit-collector/eng/install-python-reqs.ps1
Normal file
@@ -0,0 +1,3 @@
Remove-Item -Recurse -Force ../python
mkdir -p ../python
python -m pip install -r ../requirements.txt -t ../python/
5
src/qbit-collector/eng/install-python-reqs.sh
Normal file
@@ -0,0 +1,5 @@
#!/bin/bash

rm -rf ../python
mkdir -p ../python
python3 -m pip install -r ../requirements.txt -t ../python/
1
src/qbit-collector/requirements.txt
Normal file
@@ -0,0 +1 @@
rank-torrent-name==0.2.13
@@ -9,9 +9,9 @@ public class DapperDataStorage(PostgresConfiguration configuration, RabbitMqConf
const string query =
    """
    INSERT INTO ingested_torrents
    ("name", "source", "category", "info_hash", "size", "seeders", "leechers", "imdb", "processed", "createdAt", "updatedAt")
    ("name", "source", "category", "info_hash", "size", "seeders", "leechers", "imdb", "processed", "createdAt", "updatedAt", "rtn_response")
    VALUES
    (@Name, @Source, @Category, @InfoHash, @Size, @Seeders, @Leechers, @Imdb, @Processed, @CreatedAt, @UpdatedAt)
    (@Name, @Source, @Category, @InfoHash, @Size, @Seeders, @Leechers, @Imdb, @Processed, @CreatedAt, @UpdatedAt, @RtnResponse::jsonb)
    ON CONFLICT (source, info_hash) DO NOTHING
    """;

@@ -110,21 +110,21 @@ public class DapperDataStorage(PostgresConfiguration configuration, RabbitMqConf
public async Task<List<ImdbEntry>> GetImdbEntriesForRequests(int year, int batchSize, string? stateLastProcessedImdbId, CancellationToken cancellationToken = default) =>
    await ExecuteCommandAsync(async connection =>
    {
        const string query = @"SELECT imdb_id AS ImdbId, title as Title, category as Category, year as Year, adult as Adult FROM imdb_metadata WHERE CAST(NULLIF(Year, '\N') AS INTEGER) <= @Year AND imdb_id > @LastProcessedImdbId ORDER BY ImdbId LIMIT @BatchSize";
        const string query = @"SELECT imdb_id AS ImdbId, title as Title, category as Category, year as Year, adult as Adult FROM imdb_metadata WHERE Year <= @Year AND imdb_id > @LastProcessedImdbId ORDER BY ImdbId LIMIT @BatchSize";
        var result = await connection.QueryAsync<ImdbEntry>(query, new { Year = year, LastProcessedImdbId = stateLastProcessedImdbId, BatchSize = batchSize });
        return result.ToList();
    }, "Error getting imdb metadata.", cancellationToken);

public async Task<List<ImdbEntry>> FindImdbMetadata(string? parsedTorrentTitle, string torrentType, string? year, CancellationToken cancellationToken = default) =>
public async Task<ImdbEntry?> FindImdbMetadata(string? parsedTorrentTitle, string torrentType, int? year, CancellationToken cancellationToken = default) =>
    await ExecuteCommandAsync(async connection =>
    {
        var query = $"select \"imdb_id\" as \"ImdbId\", \"title\" as \"Title\", \"year\" as \"Year\" from search_imdb_meta('{parsedTorrentTitle.Replace("'", "").Replace("\"", "")}', '{(torrentType.Equals("movie", StringComparison.OrdinalIgnoreCase) ? "movie" : "tvSeries")}'";
        query += year is not null ? $", '{year}'" : ", NULL";
        query += ", 15)";
        var query = $"select \"imdb_id\" as \"ImdbId\", \"title\" as \"Title\", \"year\" as \"Year\", \"score\" as Score, \"category\" as Category from search_imdb_meta('{parsedTorrentTitle.Replace("'", "").Replace("\"", "")}', '{torrentType}'";
        query += year is not null ? $", {year}" : ", NULL";
        query += ", 1)";

        var result = await connection.QueryAsync<ImdbEntry>(query);

        return result.ToList();
        var results = result.ToList();
        return results.FirstOrDefault();
    }, "Error finding imdb metadata.", cancellationToken);

public Task InsertTorrent(Torrent torrent, CancellationToken cancellationToken = default) =>
@@ -134,9 +134,9 @@ public class DapperDataStorage(PostgresConfiguration configuration, RabbitMqConf
const string query =
    """
    INSERT INTO "torrents"
    ("infoHash", "provider", "torrentId", "title", "size", "type", "uploadDate", "seeders", "trackers", "languages", "resolution", "reviewed", "opened", "createdAt", "updatedAt")
    ("infoHash", "ingestedTorrentId", "provider", "title", "size", "type", "uploadDate", "seeders", "languages", "resolution", "reviewed", "opened", "createdAt", "updatedAt")
    VALUES
    (@InfoHash, @Provider, @TorrentId, @Title, 0, @Type, NOW(), @Seeders, NULL, NULL, NULL, false, false, NOW(), NOW())
    (@InfoHash, @IngestedTorrentId, @Provider, @Title, 0, @Type, NOW(), @Seeders, NULL, NULL, false, false, NOW(), NOW())
    ON CONFLICT ("infoHash") DO NOTHING
    """;

@@ -152,7 +152,8 @@ public class DapperDataStorage(PostgresConfiguration configuration, RabbitMqConf
    INSERT INTO files
    ("infoHash", "fileIndex", title, "size", "imdbId", "imdbSeason", "imdbEpisode", "kitsuId", "kitsuEpisode", "createdAt", "updatedAt")
    VALUES
    (@InfoHash, @FileIndex, @Title, @Size, @ImdbId, @ImdbSeason, @ImdbEpisode, @KitsuId, @KitsuEpisode, Now(), Now());
    (@InfoHash, @FileIndex, @Title, @Size, @ImdbId, @ImdbSeason, @ImdbEpisode, @KitsuId, @KitsuEpisode, Now(), Now())
    ON CONFLICT ("infoHash", "fileIndex") DO NOTHING;
    """;

    await connection.ExecuteAsync(query, files);
@@ -168,11 +169,7 @@ public class DapperDataStorage(PostgresConfiguration configuration, RabbitMqConf
    ("infoHash", "fileIndex", "fileId", "title")
    VALUES
    (@InfoHash, @FileIndex, @FileId, @Title)
    ON CONFLICT
    ("infoHash", "fileIndex")
    DO UPDATE SET
    "fileId" = COALESCE(subtitles."fileId", EXCLUDED."fileId"),
    "title" = COALESCE(subtitles."title", EXCLUDED."title");
    ON CONFLICT ("infoHash", "fileIndex") DO NOTHING;
    """;

    await connection.ExecuteAsync(query, subtitles);
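The reworked FindImdbMetadata above asks search_imdb_meta for a single best-scoring row (the trailing ", 1)" limit) instead of up to 15 candidates, and year is now an int?. A hedged call-site sketch; the storage variable and the title/year values are illustrative, not from the commit:

// Illustrative call site for the narrowed signature: one best match or null.
ImdbEntry? match = await storage.FindImdbMetadata("the matrix", "movie", 1999);

if (match is not null)
{
    Console.WriteLine($"{match.ImdbId}: {match.Title} ({match.Year}) score={match.Score}");
}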
@@ -9,7 +9,7 @@ public interface IDataStorage
Task<DapperResult<PageIngestedResult, PageIngestedResult>> MarkPageAsIngested(string pageId, CancellationToken cancellationToken = default);
Task<DapperResult<int, int>> GetRowCountImdbMetadata(CancellationToken cancellationToken = default);
Task<List<ImdbEntry>> GetImdbEntriesForRequests(int year, int batchSize, string? stateLastProcessedImdbId, CancellationToken cancellationToken = default);
Task<List<ImdbEntry>> FindImdbMetadata(string? parsedTorrentTitle, string parsedTorrentTorrentType, string? parsedTorrentYear, CancellationToken cancellationToken = default);
Task<ImdbEntry?> FindImdbMetadata(string? parsedTorrentTitle, string parsedTorrentTorrentType, int? parsedTorrentYear, CancellationToken cancellationToken = default);
Task InsertTorrent(Torrent torrent, CancellationToken cancellationToken = default);
Task InsertFiles(IEnumerable<TorrentFile> files, CancellationToken cancellationToken = default);
Task InsertSubtitles(IEnumerable<SubtitleFile> subtitles, CancellationToken cancellationToken = default);
19
src/shared/Extensions/DictionaryExtensions.cs
Normal file
@@ -0,0 +1,19 @@
namespace SharedContracts.Extensions;

public static class DictionaryExtensions
{
    public static ConcurrentDictionary<TKey, TValue> ToConcurrentDictionary<TSource, TKey, TValue>(
        this IEnumerable<TSource> source,
        Func<TSource, TKey> keySelector,
        Func<TSource, TValue> valueSelector) where TKey : notnull
    {
        var concurrentDictionary = new ConcurrentDictionary<TKey, TValue>();

        foreach (var element in source)
        {
            concurrentDictionary.TryAdd(keySelector(element), valueSelector(element));
        }

        return concurrentDictionary;
    }
}
14
src/shared/Extensions/JsonExtensions.cs
Normal file
@@ -0,0 +1,14 @@
namespace SharedContracts.Extensions;

public static class JsonExtensions
{
    private static readonly JsonSerializerOptions JsonSerializerOptions = new()
    {
        PropertyNamingPolicy = JsonNamingPolicy.CamelCase,
        WriteIndented = false,
        ReferenceHandler = ReferenceHandler.IgnoreCycles,
        NumberHandling = JsonNumberHandling.Strict,
    };

    public static string AsJson<T>(this T obj) => JsonSerializer.Serialize(obj, JsonSerializerOptions);
}
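The helper pins one set of serializer options for the whole solution; for instance (the anonymous object is purely for illustration):

// Illustrative: AsJson applies the shared camelCase options, so
// property names serialize as "infoHash"/"seeders" rather than PascalCase.
var json = new { InfoHash = "abc123", Seeders = 42 }.AsJson();
// json == {"infoHash":"abc123","seeders":42}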
@@ -1,5 +1,3 @@
using System.Text.RegularExpressions;

namespace SharedContracts.Extensions;

public static partial class StringExtensions
@@ -1,6 +1,9 @@
// Global using directives

global using System.Collections.Concurrent;
global using System.Text.Json;
global using System.Text.Json.Serialization;
global using System.Text.RegularExpressions;
global using Dapper;
global using MassTransit;
global using Microsoft.AspNetCore.Builder;
@@ -14,4 +17,4 @@ global using Python.Runtime;
global using Serilog;
global using SharedContracts.Configuration;
global using SharedContracts.Extensions;
global using SharedContracts.Models;
global using SharedContracts.Models;
@@ -7,4 +7,5 @@ public class ImdbEntry
    public string? Category { get; set; }
    public string? Year { get; set; }
    public bool? Adult { get; set; }
    public decimal? Score { get; set; }
}
@@ -12,7 +12,9 @@ public class IngestedTorrent
    public int Leechers { get; set; }
    public string? Imdb { get; set; }

    public bool Processed { get; set; } = false;
    public bool Processed { get; set; }
    public DateTime CreatedAt { get; set; } = DateTime.UtcNow;
    public DateTime UpdatedAt { get; set; } = DateTime.UtcNow;

    public string? RtnResponse { get; set; }
}
@@ -3,6 +3,7 @@ namespace SharedContracts.Models;
public class Torrent
{
    public string? InfoHash { get; set; }
    public long? IngestedTorrentId { get; set; }
    public string? Provider { get; set; }
    public string? TorrentId { get; set; }
    public string? Title { get; set; }
13
src/shared/Python/IPythonEngineService.cs
Normal file
@@ -0,0 +1,13 @@
namespace SharedContracts.Python;

public interface IPythonEngineService
{
    ILogger<PythonEngineService> Logger { get; }

    Task InitializePythonEngine(CancellationToken cancellationToken);
    T ExecuteCommandOrScript<T>(string command, PyModule module, bool throwOnErrors);
    T ExecutePythonOperation<T>(Func<T> operation, string operationName, bool throwOnErrors);
    T ExecutePythonOperationWithDefault<T>(Func<T> operation, T? defaultValue, string operationName, bool throwOnErrors, bool logErrors);
    Task StopPythonEngine(CancellationToken cancellationToken);
    dynamic? Sys { get; }
}
8
src/shared/Python/PythonEngineManager.cs
Normal file
@@ -0,0 +1,8 @@
namespace SharedContracts.Python;

public class PythonEngineManager(IPythonEngineService pythonEngineService) : IHostedService
{
    public Task StartAsync(CancellationToken cancellationToken) => pythonEngineService.InitializePythonEngine(cancellationToken);

    public Task StopAsync(CancellationToken cancellationToken) => pythonEngineService.StopPythonEngine(cancellationToken);
}
@@ -1,24 +1,28 @@
namespace SharedContracts.Python;

public class PythonEngineService(ILogger<PythonEngineService> logger) : IHostedService
public class PythonEngineService(ILogger<PythonEngineService> logger) : IPythonEngineService
{
    private IntPtr _mainThreadState;
    private bool _isInitialized;

    public Task StartAsync(CancellationToken cancellationToken)

    public ILogger<PythonEngineService> Logger { get; } = logger;

    public dynamic? Sys { get; private set; }

    public Task InitializePythonEngine(CancellationToken cancellationToken)
    {
        if (_isInitialized)
        {
            return Task.CompletedTask;
        }

        try
        {
            var pythonDllEnv = Environment.GetEnvironmentVariable("PYTHONNET_PYDLL");

            if (string.IsNullOrWhiteSpace(pythonDllEnv))
            {
                logger.LogWarning("PYTHONNET_PYDLL env is not set. Exiting Application");
                Logger.LogWarning("PYTHONNET_PYDLL env is not set. Exiting Application");
                Environment.Exit(1);
                return Task.CompletedTask;
            }
@@ -26,24 +30,95 @@ public class PythonEngineService(ILogger<PythonEngineService> logger) : IHostedS
            Runtime.PythonDLL = pythonDllEnv;
            PythonEngine.Initialize();
            _mainThreadState = PythonEngine.BeginAllowThreads();

            _isInitialized = true;
            logger.LogInformation("Python engine initialized");
            Logger.LogInformation("Python engine initialized");
        }
        catch (Exception e)
        {
            logger.LogWarning(e, "Failed to initialize Python engine");
            Logger.LogError(e, $"Failed to initialize Python engine: {e.Message}");
            Environment.Exit(1);
        }

        return Task.CompletedTask;
    }

    public Task StopAsync(CancellationToken cancellationToken)
    public T ExecuteCommandOrScript<T>(string command, PyModule module, bool throwOnErrors) =>
        ExecutePythonOperation(
            () =>
            {
                var pyCompile = PythonEngine.Compile(command);
                var nativeResult = module.Execute(pyCompile);
                return nativeResult.As<T>();
            }, nameof(ExecuteCommandOrScript), throwOnErrors);

    public T ExecutePythonOperation<T>(Func<T> operation, string operationName, bool throwOnErrors) =>
        ExecutePythonOperationWithDefault(operation, default, operationName, throwOnErrors, true);

    public T ExecutePythonOperationWithDefault<T>(Func<T> operation, T? defaultValue, string operationName, bool throwOnErrors, bool logErrors) =>
        ExecutePythonOperationInternal(operation, defaultValue, operationName, throwOnErrors, logErrors);

    public void ExecuteOnGIL(Action act, bool throwOnErrors)
    {
        Sys ??= LoadSys();

        try
        {
            using var gil = Py.GIL();
            act();
        }
        catch (Exception ex)
        {
            Logger.LogError(ex, "Python Error: {Message} ({OperationName})", ex.Message, nameof(ExecuteOnGIL));

            if (throwOnErrors)
            {
                throw;
            }
        }
    }

    public Task StopPythonEngine(CancellationToken cancellationToken)
    {
        PythonEngine.EndAllowThreads(_mainThreadState);
        PythonEngine.Shutdown();

        return Task.CompletedTask;
    }

    private static dynamic LoadSys()
    {
        using var gil = Py.GIL();
        var sys = Py.Import("sys");

        return sys;
    }

    // ReSharper disable once EntityNameCapturedOnly.Local
    private T ExecutePythonOperationInternal<T>(Func<T> operation, T? defaultValue, string operationName, bool throwOnErrors, bool logErrors)
    {
        Sys ??= LoadSys();

        var result = defaultValue;

        try
        {
            using var gil = Py.GIL();
            result = operation();
        }
        catch (Exception ex)
        {
            if (logErrors)
            {
                Logger.LogError(ex, "Python Error: {Message} ({OperationName})", ex.Message, nameof(operationName));
            }

            if (throwOnErrors)
            {
                throw;
            }
        }

        return result;
    }
}
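A minimal consumer sketch, assuming only the interface above; the probe class and its use of sys.version are illustrative, not part of the commit:

// Illustrative only: all Python access goes through the engine service,
// which lazily imports sys, takes the GIL, and applies the
// throwOnErrors / logErrors policy in one place.
public class PythonVersionProbe(IPythonEngineService engine)
{
    public string? GetVersion() =>
        engine.ExecutePythonOperationWithDefault(
            () => (string?)engine.Sys?.version.ToString(), // runs under Py.GIL()
            defaultValue: null,
            operationName: nameof(GetVersion),
            throwOnErrors: false,
            logErrors: true);
}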
@@ -2,7 +2,6 @@ namespace SharedContracts.Python.RTN;

public interface IRankTorrentName
{
    ParseTorrentTitleResponse Parse(string title);
    bool IsTrash(string title);
    bool TitleMatch(string title, string checkTitle);
    ParseTorrentTitleResponse Parse(string title, bool trashGarbage = true, bool logErrors = false, bool throwOnErrors = false);
    List<ParseTorrentTitleResponse?> BatchParse(IReadOnlyCollection<string> titles, int chunkSize = 500, int workers = 20, bool trashGarbage = true, bool logErrors = false, bool throwOnErrors = false);
}
@@ -1,6 +1,3 @@
namespace SharedContracts.Python.RTN;

public record ParseTorrentTitleResponse(bool Success, string ParsedTitle, int Year, int[]? Season = null, int[]? Episode = null)
{
    public bool IsMovie => Season == null && Episode == null;
}
public record ParseTorrentTitleResponse(bool Success, RtnResponse? Response);
@@ -2,117 +2,119 @@ namespace SharedContracts.Python.RTN;

public class RankTorrentName : IRankTorrentName
{
    private const string SysModuleName = "sys";
    private readonly IPythonEngineService _pythonEngineService;
    private const string RtnModuleName = "RTN";

    private readonly ILogger<RankTorrentName> _logger;
    private dynamic? _sys;
    private dynamic? _rtn;

    public RankTorrentName(ILogger<RankTorrentName> logger)
    public RankTorrentName(IPythonEngineService pythonEngineService)
    {
        _logger = logger;
        _pythonEngineService = pythonEngineService;
        InitModules();
    }

    public ParseTorrentTitleResponse Parse(string title)
    public ParseTorrentTitleResponse Parse(string title, bool trashGarbage = true, bool logErrors = false, bool throwOnErrors = false)
    {
        try
        {
            using var py = Py.GIL();
            var result = _rtn?.parse(title);

            if (result == null)
            {
                return new(false, string.Empty, 0);
            }

            using var gil = Py.GIL();
            var result = _rtn?.parse(title, trashGarbage);
            return ParseResult(result);
        }
        catch (Exception e)
        catch (Exception ex)
        {
            _logger.LogError(e, "Failed to parse title");
            return new(false, string.Empty, 0);
            if (logErrors)
            {
                _pythonEngineService.Logger.LogError(ex, "Python Error: {Message} ({OperationName})", ex.Message, nameof(Parse));
            }

            if (throwOnErrors)
            {
                throw;
            }

            return new(false, null);
        }
    }

    public List<ParseTorrentTitleResponse?> BatchParse(IReadOnlyCollection<string> titles, int chunkSize = 500, int workers = 20, bool trashGarbage = true, bool logErrors = false, bool throwOnErrors = false)
    {
        var responses = new List<ParseTorrentTitleResponse?>();

    public bool IsTrash(string title)
        try
        {
            if (titles.Count == 0)
            {
                return responses;
            }

            using var gil = Py.GIL();
            var pythonList = new PyList(titles.Select(x => new PyString(x).As<PyObject>()).ToArray());
            PyList results = _rtn?.batch_parse(pythonList, trashGarbage, chunkSize, workers);

            if (results == null)
            {
                return responses;
            }

            responses.AddRange(results.Select(ParseResult));
        }
        catch (Exception ex)
        {
            if (logErrors)
            {
                _pythonEngineService.Logger.LogError(ex, "Python Error: {Message} ({OperationName})", ex.Message, nameof(Parse));
            }

            if (throwOnErrors)
            {
                throw;
            }
        }

        return responses;
    }

    private static ParseTorrentTitleResponse? ParseResult(dynamic result)
    {
        try
        {
            using var py = Py.GIL();
            var result = _rtn?.check_trash(title);

            if (result == null)
            {
                return false;
                return new(false, null);
            }

            var response = result.As<bool>() ?? false;

            return response;
        }
        catch (Exception e)
        {
            _logger.LogError(e, "Failed to parse title");
            return false;
        }
    }

    public bool TitleMatch(string title, string checkTitle)
    {
        try
        {
            using var py = Py.GIL();
            var result = _rtn?.title_match(title, checkTitle);
            var json = result.model_dump_json()?.As<string?>();

            if (result == null)
            if (json is null || string.IsNullOrEmpty(json))
            {
                return false;
                return new(false, null);
            }

            var response = result.As<bool>() ?? false;

            return response;
            var mediaType = result.GetAttr("type")?.As<string>();

            if (string.IsNullOrEmpty(mediaType))
            {
                return new(false, null);
            }

            var response = JsonSerializer.Deserialize<RtnResponse>(json);

            response.IsMovie = mediaType.Equals("movie", StringComparison.OrdinalIgnoreCase);

            return new(true, response);
        }
        catch (Exception e)
        catch
        {
            _logger.LogError(e, "Failed to parse title");
            return false;
            return new(false, null);
        }
    }

    private static ParseTorrentTitleResponse ParseResult(dynamic result)
    {
        var parsedTitle = result.GetAttr("parsed_title")?.As<string>() ?? string.Empty;
        var year = result.GetAttr("year")?.As<int>() ?? 0;
        var seasonList = result.GetAttr("season")?.As<PyList>();
        var episodeList = result.GetAttr("episode")?.As<PyList>();
        int[]? seasons = seasonList?.Length() > 0 ? seasonList.As<int[]>() : null;
        int[]? episodes = episodeList?.Length() > 0 ? episodeList.As<int[]>() : null;

        return new ParseTorrentTitleResponse(true, parsedTitle, year, seasons, episodes);
    }

    private void InitModules()
    {
        using var py = Py.GIL();
        _sys = Py.Import(SysModuleName);

        if (_sys == null)
    private void InitModules() =>
        _rtn =
            _pythonEngineService.ExecutePythonOperation(() =>
            {
                _logger.LogError($"Failed to import Python module: {SysModuleName}");
                return;
            }

            _sys.path.append(Path.Combine(AppContext.BaseDirectory, "python"));

            _rtn = Py.Import(RtnModuleName);
            if (_rtn == null)
            {
                _logger.LogError($"Failed to import Python module: {RtnModuleName}");
            }
    }
                _pythonEngineService.Sys.path.append(Path.Combine(AppContext.BaseDirectory, "python"));
                return Py.Import(RtnModuleName);
            }, nameof(InitModules), throwOnErrors: false);
}
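To make the new call shapes concrete, an illustrative driver follows; the titles are invented and the flags mirror the defaults declared on IRankTorrentName above:

// Single title: one GIL acquisition per call.
var single = rankTorrentName.Parse("Show.S01E02.1080p.WEB-DL", trashGarbage: false);

if (single.Success)
{
    var season = single.Response?.Season?.FirstOrDefault() ?? 0;   // 1
    var episode = single.Response?.Episode?.FirstOrDefault() ?? 0; // 2
}

// Batch path: hands the whole list to RTN's batch_parse, chunked 500 at a
// time across 20 workers by default, under a single GIL acquisition.
var batch = rankTorrentName.BatchParse(new[] { "Movie.2020.2160p.REMUX", "Show.S02E05.720p" });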
83
src/shared/Python/RTN/RtnResponse.cs
Normal file
@@ -0,0 +1,83 @@
namespace SharedContracts.Python.RTN;

public class RtnResponse
{
    [JsonPropertyName("raw_title")]
    public string? RawTitle { get; set; }

    [JsonPropertyName("parsed_title")]
    public string? ParsedTitle { get; set; }

    [JsonPropertyName("fetch")]
    public bool Fetch { get; set; }

    [JsonPropertyName("is_4k")]
    public bool Is4K { get; set; }

    [JsonPropertyName("is_multi_audio")]
    public bool IsMultiAudio { get; set; }

    [JsonPropertyName("is_multi_subtitle")]
    public bool IsMultiSubtitle { get; set; }

    [JsonPropertyName("is_complete")]
    public bool IsComplete { get; set; }

    [JsonPropertyName("year")]
    public int Year { get; set; }

    [JsonPropertyName("resolution")]
    public List<string>? Resolution { get; set; }

    [JsonPropertyName("quality")]
    public List<string>? Quality { get; set; }

    [JsonPropertyName("season")]
    public List<int>? Season { get; set; }

    [JsonPropertyName("episode")]
    public List<int>? Episode { get; set; }

    [JsonPropertyName("codec")]
    public List<string>? Codec { get; set; }

    [JsonPropertyName("audio")]
    public List<string>? Audio { get; set; }

    [JsonPropertyName("subtitles")]
    public List<string>? Subtitles { get; set; }

    [JsonPropertyName("language")]
    public List<string>? Language { get; set; }

    [JsonPropertyName("bit_depth")]
    public List<int>? BitDepth { get; set; }

    [JsonPropertyName("hdr")]
    public string? Hdr { get; set; }

    [JsonPropertyName("proper")]
    public bool Proper { get; set; }

    [JsonPropertyName("repack")]
    public bool Repack { get; set; }

    [JsonPropertyName("remux")]
    public bool Remux { get; set; }

    [JsonPropertyName("upscaled")]
    public bool Upscaled { get; set; }

    [JsonPropertyName("remastered")]
    public bool Remastered { get; set; }

    [JsonPropertyName("directors_cut")]
    public bool DirectorsCut { get; set; }

    [JsonPropertyName("extended")]
    public bool Extended { get; set; }

    public bool IsMovie { get; set; }

    public string ToJson() => this.AsJson();
}
@@ -4,9 +4,8 @@ public static class ServiceCollectionExtensions
{
    public static IServiceCollection RegisterPythonEngine(this IServiceCollection services)
    {
        services.AddSingleton<PythonEngineService>();

        services.AddHostedService(p => p.GetRequiredService<PythonEngineService>());
        services.AddSingleton<IPythonEngineService, PythonEngineService>();
        services.AddHostedService<PythonEngineManager>();

        return services;
    }
@@ -82,11 +82,4 @@ public static class ServiceCollectionExtensions

        x.AddConsumer<PerformIngestionConsumer>();
    }

    internal static IServiceCollection AddServiceConfiguration(this IServiceCollection services)
    {
        services.AddSingleton<IParseTorrentTitle, ParseTorrentTitle>();

        return services;
    }
}
@@ -11,6 +11,7 @@ public class PerformIngestionConsumer(IDataStorage dataStorage, ILogger<PerformI
var torrent = new Torrent
{
    InfoHash = request.IngestedTorrent.InfoHash.ToLowerInvariant(),
    IngestedTorrentId = request.IngestedTorrent.Id,
    Provider = request.IngestedTorrent.Source,
    Title = request.IngestedTorrent.Name,
    Type = request.IngestedTorrent.Category,
@@ -5,7 +5,6 @@ global using MassTransit;
global using MassTransit.Mediator;
global using Microsoft.AspNetCore.Builder;
global using Microsoft.Extensions.DependencyInjection;
global using PromKnight.ParseTorrentTitle;
global using SharedContracts.Configuration;
global using SharedContracts.Dapper;
global using SharedContracts.Extensions;
@@ -10,7 +10,6 @@ builder.Host

builder.Services
    .RegisterMassTransit()
    .AddServiceConfiguration()
    .AddDatabase();

var app = builder.Build();
@@ -16,7 +16,6 @@
    <PackageReference Include="Microsoft.Extensions.Http" Version="8.0.0" />
    <PackageReference Include="Microsoft.Extensions.Http.Polly" Version="8.0.3" />
    <PackageReference Include="Polly" Version="8.3.1" />
    <PackageReference Include="PromKnight.ParseTorrentTitle" Version="1.0.4" />
    <PackageReference Include="Serilog" Version="3.1.1" />
    <PackageReference Include="Serilog.AspNetCore" Version="8.0.1" />
    <PackageReference Include="Serilog.Sinks.Console" Version="5.0.1" />