[wip] bridge python and c# and bring in rank torrent name (#177)
* [wip] bridge python and c# and bring in rank torrent name
* Container now restores the package. Includes two dev scripts to install the python packages locally for debugging purposes.
* Introduce slightly tuned title matching scoring: by making it length aware, this should help with sequels such as Terminator 2 vs Terminator.
* Version bump. Also fixes the postgres healthcheck so that it utilises the user from the stack.env file.
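A rough worked illustration of the length-aware scoring mentioned above (a sketch, not part of the commit; the FuzzySharp ratio values are approximate): the plain ratio for "terminator" vs "terminator 2" is about 91, which clears the crawler's cutoff of 90, while the length-aware score is scaled by min/max length (10/12), landing around 75 and rejecting the sequel.

    using FuzzySharp.SimilarityRatio.Scorer.StrategySensitive;
    using Producer.Features.DataProcessing;

    // Hedged sketch using the LengthAwareRatioScorer added in this commit; values are approximate.
    var plain = new DefaultRatioScorer().Score("terminator", "terminator 2");     // ~91 -> would match at cutoff 90
    var aware = new LengthAwareRatioScorer().Score("terminator", "terminator 2"); // ~91 * (10/12) ~= 75 -> rejected
    Console.WriteLine($"default: {plain}, length-aware: {aware}");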
@@ -17,12 +17,8 @@ services:
   ## All downloaded metadata is stored in this database.
   postgres:
     env_file: stack.env
-    environment:
-      PGUSER: ${POSTGRES_USER}
     healthcheck:
-      test:
-        - CMD-SHELL
-        - pg_isready
+      test: [ "CMD", "sh", "-c", "pg_isready -h localhost -U $$POSTGRES_USER" ]
      timeout: 10s
      interval: 10s
      retries: 3
@@ -44,9 +40,7 @@ services:
   redis:
     env_file: stack.env
     healthcheck:
-      test:
-        - CMD-SHELL
-        - redis-cli ping
+      test: ["CMD-SHELL", "redis-cli ping"]
      timeout: 10s
      interval: 10s
      retries: 3
@@ -66,9 +60,7 @@ services:
   rabbitmq:
     env_file: stack.env
     healthcheck:
-      test:
-        - CMD-SHELL
-        - rabbitmq-diagnostics -q ping
+      test: ["CMD-SHELL", "rabbitmq-diagnostics -q ping"]
      timeout: 10s
      interval: 10s
      retries: 3
@@ -91,22 +83,17 @@ services:
     depends_on:
       metadata:
         condition: service_completed_successfully
-        required: true
       migrator:
         condition: service_completed_successfully
-        required: true
       postgres:
         condition: service_healthy
-        required: true
       rabbitmq:
         condition: service_healthy
-        required: true
       redis:
         condition: service_healthy
-        required: true
     env_file: stack.env
     hostname: knightcrawler-addon
-    image: gabisonfire/knightcrawler-addon:2.0.7
+    image: gabisonfire/knightcrawler-addon:2.0.8
     labels:
       logging: promtail
     networks:
@@ -120,21 +107,16 @@ services:
     depends_on:
       metadata:
         condition: service_completed_successfully
-        required: true
       migrator:
         condition: service_completed_successfully
-        required: true
       postgres:
         condition: service_healthy
-        required: true
       rabbitmq:
         condition: service_healthy
-        required: true
       redis:
         condition: service_healthy
-        required: true
     env_file: stack.env
-    image: gabisonfire/knightcrawler-consumer:2.0.7
+    image: gabisonfire/knightcrawler-consumer:2.0.8
     labels:
       logging: promtail
     networks:
@@ -146,21 +128,16 @@ services:
     depends_on:
       metadata:
         condition: service_completed_successfully
-        required: true
       migrator:
         condition: service_completed_successfully
-        required: true
       postgres:
         condition: service_healthy
-        required: true
       rabbitmq:
         condition: service_healthy
-        required: true
       redis:
         condition: service_healthy
-        required: true
     env_file: stack.env
-    image: gabisonfire/knightcrawler-debrid-collector:2.0.7
+    image: gabisonfire/knightcrawler-debrid-collector:2.0.8
     labels:
       logging: promtail
     networks:
@@ -173,9 +150,8 @@ services:
     depends_on:
       migrator:
         condition: service_completed_successfully
-        required: true
     env_file: stack.env
-    image: gabisonfire/knightcrawler-metadata:2.0.7
+    image: gabisonfire/knightcrawler-metadata:2.0.8
     networks:
       - knightcrawler-network
     restart: "no"
@@ -185,9 +161,8 @@ services:
     depends_on:
       postgres:
         condition: service_healthy
-        required: true
     env_file: stack.env
-    image: gabisonfire/knightcrawler-migrator:2.0.7
+    image: gabisonfire/knightcrawler-migrator:2.0.8
     networks:
       - knightcrawler-network
     restart: "no"
@@ -197,21 +172,16 @@ services:
     depends_on:
       metadata:
         condition: service_completed_successfully
-        required: true
       migrator:
         condition: service_completed_successfully
-        required: true
       postgres:
         condition: service_healthy
-        required: true
       rabbitmq:
         condition: service_healthy
-        required: true
       redis:
         condition: service_healthy
-        required: true
     env_file: stack.env
-    image: gabisonfire/knightcrawler-producer:2.0.7
+    image: gabisonfire/knightcrawler-producer:2.0.8
     labels:
       logging: promtail
     networks:
@@ -223,11 +193,10 @@ services:
     depends_on:
       qbittorrent:
         condition: service_healthy
-        required: true
     deploy:
       replicas: ${QBIT_REPLICAS:-0}
     env_file: stack.env
-    image: gabisonfire/knightcrawler-qbit-collector:2.0.7
+    image: gabisonfire/knightcrawler-qbit-collector:2.0.8
     labels:
       logging: promtail
     networks:
@@ -246,9 +215,7 @@ services:
       TORRENTING_PORT: "6881"
       WEBUI_PORT: "8080"
     healthcheck:
-      test:
-        - CMD-SHELL
-        - curl --fail http://localhost:8080
+      test: ["CMD-SHELL", "curl --fail http://localhost:8080"]
      timeout: 10s
      interval: 10s
      retries: 3

@@ -13,7 +13,7 @@ x-redishealth: &redis-health
   <<: *base-health

 x-postgreshealth: &postgresdb-health
-  test: pg_isready
+  test: [ "CMD", "sh", "-c", "pg_isready -h localhost -U $$POSTGRES_USER" ]
   <<: *base-health

 x-qbit: &qbit-health

@@ -20,7 +20,7 @@ x-depends: &knightcrawler-app-depends

 services:
   metadata:
-    image: gabisonfire/knightcrawler-metadata:2.0.7
+    image: gabisonfire/knightcrawler-metadata:2.0.8
     env_file: ../../.env
     networks:
       - knightcrawler-network
@@ -30,7 +30,7 @@ services:
         condition: service_completed_successfully

   migrator:
-    image: gabisonfire/knightcrawler-migrator:2.0.7
+    image: gabisonfire/knightcrawler-migrator:2.0.8
     env_file: ../../.env
     networks:
       - knightcrawler-network
@@ -40,7 +40,7 @@ services:
         condition: service_healthy

   addon:
-    image: gabisonfire/knightcrawler-addon:2.0.7
+    image: gabisonfire/knightcrawler-addon:2.0.8
     <<: [*knightcrawler-app, *knightcrawler-app-depends]
     restart: unless-stopped
     hostname: knightcrawler-addon
@@ -48,22 +48,22 @@ services:
       - "7000:7000"

   consumer:
-    image: gabisonfire/knightcrawler-consumer:2.0.7
+    image: gabisonfire/knightcrawler-consumer:2.0.8
     <<: [*knightcrawler-app, *knightcrawler-app-depends]
     restart: unless-stopped

   debridcollector:
-    image: gabisonfire/knightcrawler-debrid-collector:2.0.7
+    image: gabisonfire/knightcrawler-debrid-collector:2.0.8
     <<: [*knightcrawler-app, *knightcrawler-app-depends]
     restart: unless-stopped

   producer:
-    image: gabisonfire/knightcrawler-producer:2.0.7
+    image: gabisonfire/knightcrawler-producer:2.0.8
     <<: [*knightcrawler-app, *knightcrawler-app-depends]
     restart: unless-stopped

   qbitcollector:
-    image: gabisonfire/knightcrawler-qbit-collector:2.0.7
+    image: gabisonfire/knightcrawler-qbit-collector:2.0.8
     <<: [*knightcrawler-app, *knightcrawler-app-depends]
     restart: unless-stopped
     depends_on:

src/producer/.dockerignore (new file)
@@ -0,0 +1,2 @@
+**/python/
+.idea/

@@ -6,6 +6,12 @@ Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "SharedContracts", "..\share
 EndProject
 Project("{2150E333-8FDC-42A3-9474-1A3956D46DE8}") = "shared", "shared", "{FF5CA857-51E8-4446-8840-2A1D24ED3952}"
 EndProject
+Project("{2150E333-8FDC-42A3-9474-1A3956D46DE8}") = "eng", "eng", "{1AE7F597-24C4-4575-B59F-67A625D95C1E}"
+    ProjectSection(SolutionItems) = preProject
+        eng\install-python-reqs.ps1 = eng\install-python-reqs.ps1
+        eng\install-python-reqs.sh = eng\install-python-reqs.sh
+    EndProjectSection
+EndProject
 Global
     GlobalSection(SolutionConfigurationPlatforms) = preSolution
         Debug|Any CPU = Debug|Any CPU

src/producer/eng/install-python-reqs.ps1 (new file)
@@ -0,0 +1,2 @@
+mkdir -p ../src/python
+pip install --force-reinstall rank-torrent-name==0.1.6 -t ../src/python/

src/producer/eng/install-python-reqs.sh (new file)
@@ -0,0 +1,4 @@
+#!/bin/bash
+
+mkdir -p ../src/python
+pip install --force-reinstall rank-torrent-name==0.1.6 -t ../src/python/

src/producer/src/.dockerignore (new file)
@@ -0,0 +1,2 @@
+**/python/
+.idea/
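
Usage note: the install scripts resolve paths relative to the eng folder, so they appear intended to be run from src/producer/eng, e.g. `cd src/producer/eng && ./install-python-reqs.sh`, which drops rank-torrent-name into src/producer/src/python for local debugging; the new .dockerignore entries keep that folder out of the image build context.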

@@ -8,13 +8,21 @@ WORKDIR /src/producer/src
 RUN dotnet restore -a $TARGETARCH
 RUN dotnet publish -c Release --no-restore -o /src/out -a $TARGETARCH

-FROM mcr.microsoft.com/dotnet/aspnet:8.0-alpine
+FROM mcr.microsoft.com/dotnet/aspnet:8.0-alpine3.19

 WORKDIR /app

+ENV PYTHONUNBUFFERED=1
+RUN apk add --update --no-cache python3=~3.11.8-r0 py3-pip && ln -sf python3 /usr/bin/python
+
 COPY --from=build /src/out .
+RUN rm -rf /app/python && mkdir -p /app/python
+RUN pip3 install --force-reinstall rank-torrent-name==0.1.6 -t /app/python
 RUN addgroup -S producer && adduser -S -G producer producer
 USER producer
 HEALTHCHECK --interval=30s --timeout=30s --start-period=5s --retries=3 \
     CMD pgrep -f dotnet || exit 1
+
+ENV PYTHONNET_PYDLL=/usr/lib/libpython3.11.so.1.0
+
 ENTRYPOINT ["dotnet", "Producer.dll"]

@@ -1,3 +1,5 @@
+using Microsoft.VisualBasic;
+
 namespace Producer.Features.Crawlers.Dmm;

 public partial class DebridMediaManagerCrawler(
@@ -5,11 +7,12 @@ public partial class DebridMediaManagerCrawler(
     ILogger<DebridMediaManagerCrawler> logger,
     IDataStorage storage,
     GithubConfiguration githubConfiguration,
-    IParseTorrentTitle parseTorrentTitle,
+    IRankTorrentName rankTorrentName,
     IDistributedCache cache) : BaseCrawler(logger, storage)
 {
     [GeneratedRegex("""<iframe src="https:\/\/debridmediamanager.com\/hashlist#(.*)"></iframe>""")]
     private static partial Regex HashCollectionMatcher();
+    private LengthAwareRatioScorer _lengthAwareRatioScorer = new();

     private const string DownloadBaseUrl = "https://raw.githubusercontent.com/debridmediamanager/hashlists/main";
     protected override IReadOnlyDictionary<string, string> Mappings => new Dictionary<string, string>();

@@ -107,74 +110,65 @@ public partial class DebridMediaManagerCrawler(
        {
            return null;
        }

-       var parsedTorrent = parseTorrentTitle.Parse(torrentTitle.CleanTorrentTitleForImdb());
-
-       var (cached, cachedResult) = await CheckIfInCacheAndReturn(parsedTorrent.Title);
-
-       if (cached)
-       {
-           logger.LogInformation("[{ImdbId}] Found cached imdb result for {Title}", cachedResult.ImdbId, parsedTorrent.Title);
-           return new()
-           {
-               Source = Source,
-               Name = cachedResult.Title,
-               Imdb = cachedResult.ImdbId,
-               Size = bytesElement.GetInt64().ToString(),
-               InfoHash = hashElement.ToString(),
-               Seeders = 0,
-               Leechers = 0,
-               Category = parsedTorrent.TorrentType switch
-               {
-                   TorrentType.Movie => "movies",
-                   TorrentType.Tv => "tv",
-                   _ => "unknown",
-               },
-           };
-       }
-
-       var imdbEntry = await Storage.FindImdbMetadata(parsedTorrent.Title, parsedTorrent.TorrentType, parsedTorrent.Year);
-
-       if (imdbEntry.Count == 0)
+       var parsedTorrent = rankTorrentName.Parse(torrentTitle.CleanTorrentTitleForImdb());
+
+       if (!parsedTorrent.Success)
        {
            return null;
        }

-       var scoredTitles = await ScoreTitles(parsedTorrent, imdbEntry);
+       var (cached, cachedResult) = await CheckIfInCacheAndReturn(parsedTorrent.ParsedTitle);
+
+       if (cached)
+       {
+           logger.LogInformation("[{ImdbId}] Found cached imdb result for {Title}", cachedResult.ImdbId, parsedTorrent.ParsedTitle);
+           return MapToTorrent(cachedResult, bytesElement, hashElement, parsedTorrent);
+       }
+
+       var year = parsedTorrent.Year != 0 ? parsedTorrent.Year.ToString() : null;
+       var imdbEntries = await Storage.FindImdbMetadata(parsedTorrent.ParsedTitle, parsedTorrent.IsMovie ? "movies" : "tv", year);
+
+       if (imdbEntries.Count == 0)
+       {
+           return null;
+       }
+
+       var scoredTitles = await ScoreTitles(parsedTorrent, imdbEntries);

        if (!scoredTitles.Success)
        {
            return null;
        }

-       logger.LogInformation("[{ImdbId}] Found best match for {Title}: {BestMatch} with score {Score}", scoredTitles.BestMatch.Value.ImdbId, parsedTorrent.Title, scoredTitles.BestMatch.Value.Title, scoredTitles.BestMatch.Score);
+       logger.LogInformation("[{ImdbId}] Found best match for {Title}: {BestMatch} with score {Score}", scoredTitles.BestMatch.Value.ImdbId, parsedTorrent.ParsedTitle, scoredTitles.BestMatch.Value.Title, scoredTitles.BestMatch.Score);

-       var torrent = new IngestedTorrent
+       return MapToTorrent(scoredTitles.BestMatch.Value, bytesElement, hashElement, parsedTorrent);
+   }
+
+   private IngestedTorrent MapToTorrent(ImdbEntry result, JsonElement bytesElement, JsonElement hashElement, ParseTorrentTitleResponse parsedTorrent) =>
+       new()
        {
            Source = Source,
-           Name = scoredTitles.BestMatch.Value.Title,
-           Imdb = scoredTitles.BestMatch.Value.ImdbId,
+           Name = result.Title,
+           Imdb = result.ImdbId,
            Size = bytesElement.GetInt64().ToString(),
            InfoHash = hashElement.ToString(),
            Seeders = 0,
            Leechers = 0,
-           Category = parsedTorrent.TorrentType switch
+           Category = parsedTorrent.IsMovie switch
            {
-               TorrentType.Movie => "movies",
-               TorrentType.Tv => "tv",
-               _ => "unknown",
+               true => "movies",
+               false => "tv",
            },
        };

-       return torrent;
-   }
-
-   private async Task<(bool Success, ExtractedResult<ImdbEntry>? BestMatch)> ScoreTitles(TorrentMetadata parsedTorrent, List<ImdbEntry> imdbEntries)
+   private async Task<(bool Success, ExtractedResult<ImdbEntry>? BestMatch)> ScoreTitles(ParseTorrentTitleResponse parsedTorrent, List<ImdbEntry> imdbEntries)
    {
-       var lowerCaseTitle = parsedTorrent.Title.ToLowerInvariant();
+       var lowerCaseTitle = parsedTorrent.ParsedTitle.ToLowerInvariant();

        // Scoring directly operates on the List<ImdbEntry>, no need for lookup table.
-       var scoredResults = Process.ExtractAll(new(){Title = lowerCaseTitle}, imdbEntries, x => x.Title?.ToLowerInvariant(), scorer: new DefaultRatioScorer(), cutoff: 90);
+       var scoredResults = Process.ExtractAll(new(){Title = lowerCaseTitle}, imdbEntries, x => x.Title?.ToLowerInvariant(), scorer: _lengthAwareRatioScorer, cutoff: 90);

        var best = scoredResults.MaxBy(x => x.Score);

@@ -192,7 +186,7 @@ public partial class DebridMediaManagerCrawler(
    {
        var cacheOptions = new DistributedCacheEntryOptions
        {
-           AbsoluteExpirationRelativeToNow = TimeSpan.FromMinutes(15),
+           AbsoluteExpirationRelativeToNow = TimeSpan.FromDays(1),
        };

        return cache.SetStringAsync(lowerCaseTitle, JsonSerializer.Serialize(best.Value), cacheOptions);

@@ -0,0 +1,24 @@
+namespace Producer.Features.DataProcessing
+{
+    public class LengthAwareRatioScorer : IRatioScorer
+    {
+        private readonly IRatioScorer _defaultScorer = new DefaultRatioScorer();
+
+        public int Score(string input1, string input2)
+        {
+            var score = _defaultScorer.Score(input1, input2);
+            var lengthRatio = (double)Math.Min(input1.Length, input2.Length) / Math.Max(input1.Length, input2.Length);
+            var result = (int)(score * lengthRatio);
+            return result > 100 ? 100 : result;
+        }
+
+        public int Score(string input1, string input2, PreprocessMode preprocessMode)
+        {
+            var score = _defaultScorer.Score(input1, input2, preprocessMode);
+            var lengthRatio = (double)Math.Min(input1.Length, input2.Length) / Math.Max(input1.Length, input2.Length);
+            var result = (int)(score * lengthRatio);
+
+            return result > 100 ? 100 : result;
+        }
+    }
+}

@@ -9,7 +9,8 @@ internal static class ServiceCollectionExtensions

        services.AddTransient<IDataStorage, DapperDataStorage>();
        services.AddTransient<IMessagePublisher, TorrentPublisher>();
-       services.AddSingleton<IParseTorrentTitle, ParseTorrentTitle>();
+       services.RegisterPythonEngine();
+       services.AddSingleton<IRankTorrentName, RankTorrentName>();
        services.AddStackExchangeRedisCache(options =>
        {
            options.Configuration = redisConfiguration.ConnectionString;

@@ -7,6 +7,8 @@ global using System.Text.RegularExpressions;
 global using System.Xml.Linq;
 global using FuzzySharp;
 global using FuzzySharp.Extractor;
+global using FuzzySharp.PreProcess;
+global using FuzzySharp.SimilarityRatio.Scorer;
 global using FuzzySharp.SimilarityRatio.Scorer.StrategySensitive;
 global using LZStringCSharp;
 global using MassTransit;
@@ -23,11 +25,10 @@ global using Producer.Features.Crawlers.Torrentio;
 global using Producer.Features.CrawlerSupport;
 global using Producer.Features.DataProcessing;
 global using Producer.Features.JobSupport;
-global using PromKnight.ParseTorrentTitle;
-global using Serilog;
 global using SharedContracts.Configuration;
 global using SharedContracts.Dapper;
 global using SharedContracts.Extensions;
 global using SharedContracts.Models;
-global using SharedContracts.Requests;
-global using StackExchange.Redis;
+global using SharedContracts.Python;
+global using SharedContracts.Python.RTN;
+global using SharedContracts.Requests;

@@ -19,6 +19,7 @@
     <PackageReference Include="Microsoft.Extensions.Hosting" Version="8.0.0" />
     <PackageReference Include="Microsoft.Extensions.Http" Version="8.0.0" />
     <PackageReference Include="Polly" Version="8.3.0" />
+    <PackageReference Include="pythonnet" Version="3.0.3" />
     <PackageReference Include="Quartz.Extensions.DependencyInjection" Version="3.8.0" />
     <PackageReference Include="Quartz.Extensions.Hosting" Version="3.8.0" />
     <PackageReference Include="Serilog" Version="3.1.1" />
@@ -34,9 +35,9 @@
     </None>
   </ItemGroup>

-  <ItemGroup>
-    <Content Remove="Data\**" />
-    <None Include="Data\**">
+  <ItemGroup Condition="'$(Configuration)' == 'Debug'">
+    <Content Remove="python\**" />
+    <None Include="python\**">
       <CopyToOutputDirectory>Always</CopyToOutputDirectory>
     </None>
   </ItemGroup>

@@ -115,10 +115,10 @@ public class DapperDataStorage(PostgresConfiguration configuration, RabbitMqConf
            return result.ToList();
        }, "Error getting imdb metadata.", cancellationToken);

-   public async Task<List<ImdbEntry>> FindImdbMetadata(string? parsedTorrentTitle, TorrentType torrentType, string? year, CancellationToken cancellationToken = default) =>
+   public async Task<List<ImdbEntry>> FindImdbMetadata(string? parsedTorrentTitle, string torrentType, string? year, CancellationToken cancellationToken = default) =>
        await ExecuteCommandAsync(async connection =>
        {
-           var query = $"select \"imdb_id\" as \"ImdbId\", \"title\" as \"Title\", \"year\" as \"Year\" from search_imdb_meta('{parsedTorrentTitle.Replace("'", "").Replace("\"", "")}', '{(torrentType == TorrentType.Movie ? "movie" : "tvSeries")}'";
+           var query = $"select \"imdb_id\" as \"ImdbId\", \"title\" as \"Title\", \"year\" as \"Year\" from search_imdb_meta('{parsedTorrentTitle.Replace("'", "").Replace("\"", "")}', '{(torrentType.Equals("movie", StringComparison.OrdinalIgnoreCase) ? "movie" : "tvSeries")}'";
            query += year is not null ? $", '{year}'" : ", NULL";
            query += ", 15)";

@@ -9,7 +9,7 @@ public interface IDataStorage
    Task<DapperResult<PageIngestedResult, PageIngestedResult>> MarkPageAsIngested(string pageId, CancellationToken cancellationToken = default);
    Task<DapperResult<int, int>> GetRowCountImdbMetadata(CancellationToken cancellationToken = default);
    Task<List<ImdbEntry>> GetImdbEntriesForRequests(int year, int batchSize, string? stateLastProcessedImdbId, CancellationToken cancellationToken = default);
-   Task<List<ImdbEntry>> FindImdbMetadata(string? parsedTorrentTitle, TorrentType parsedTorrentTorrentType, string? parsedTorrentYear, CancellationToken cancellationToken = default);
+   Task<List<ImdbEntry>> FindImdbMetadata(string? parsedTorrentTitle, string parsedTorrentTorrentType, string? parsedTorrentYear, CancellationToken cancellationToken = default);
    Task InsertTorrent(Torrent torrent, CancellationToken cancellationToken = default);
    Task InsertFiles(IEnumerable<TorrentFile> files, CancellationToken cancellationToken = default);
    Task InsertSubtitles(IEnumerable<SubtitleFile> subtitles, CancellationToken cancellationToken = default);

@@ -1,4 +1,3 @@
-using Microsoft.Extensions.DependencyInjection;
 using Microsoft.Extensions.DependencyInjection.Extensions;

 namespace SharedContracts.Extensions;

@@ -6,10 +6,11 @@ global using MassTransit;
 global using Microsoft.AspNetCore.Builder;
 global using Microsoft.AspNetCore.Hosting;
 global using Microsoft.Extensions.Configuration;
+global using Microsoft.Extensions.DependencyInjection;
 global using Microsoft.Extensions.Hosting;
 global using Microsoft.Extensions.Logging;
 global using Npgsql;
-global using PromKnight.ParseTorrentTitle;
+global using Python.Runtime;
 global using Serilog;
 global using SharedContracts.Configuration;
 global using SharedContracts.Extensions;

src/shared/Python/PythonEngineService.cs (new file)
@@ -0,0 +1,49 @@
+namespace SharedContracts.Python;
+
+public class PythonEngineService(ILogger<PythonEngineService> logger) : IHostedService
+{
+    private IntPtr _mainThreadState;
+    private bool _isInitialized;
+
+    public Task StartAsync(CancellationToken cancellationToken)
+    {
+        if (_isInitialized)
+        {
+            return Task.CompletedTask;
+        }
+
+        try
+        {
+            var pythonDllEnv = Environment.GetEnvironmentVariable("PYTHONNET_PYDLL");
+
+            if (string.IsNullOrWhiteSpace(pythonDllEnv))
+            {
+                logger.LogWarning("PYTHONNET_PYDLL env is not set. Exiting Application");
+                Environment.Exit(1);
+                return Task.CompletedTask;
+            }
+
+            Runtime.PythonDLL = pythonDllEnv;
+            PythonEngine.Initialize();
+            _mainThreadState = PythonEngine.BeginAllowThreads();
+
+            _isInitialized = true;
+            logger.LogInformation("Python engine initialized");
+        }
+        catch (Exception e)
+        {
+            logger.LogWarning(e, "Failed to initialize Python engine");
+            Environment.Exit(1);
+        }
+
+        return Task.CompletedTask;
+    }
+
+    public Task StopAsync(CancellationToken cancellationToken)
+    {
+        PythonEngine.EndAllowThreads(_mainThreadState);
+        PythonEngine.Shutdown();
+
+        return Task.CompletedTask;
+    }
+}

src/shared/Python/RTN/IRankTorrentName.cs (new file)
@@ -0,0 +1,8 @@
+namespace SharedContracts.Python.RTN;
+
+public interface IRankTorrentName
+{
+    ParseTorrentTitleResponse Parse(string title);
+    bool IsTrash(string title);
+    bool TitleMatch(string title, string checkTitle);
+}

src/shared/Python/RTN/ParseTorrentTitleResponse.cs (new file)
@@ -0,0 +1,6 @@
+namespace SharedContracts.Python.RTN;
+
+public record ParseTorrentTitleResponse(bool Success, string ParsedTitle, int Year, int[]? Season = null, int[]? Episode = null)
+{
+    public bool IsMovie => Season == null && Episode == null;
+}

src/shared/Python/RTN/RankTorrentName.cs (new file)
@@ -0,0 +1,118 @@
+namespace SharedContracts.Python.RTN;
+
+public class RankTorrentName : IRankTorrentName
+{
+    private const string SysModuleName = "sys";
+    private const string RtnModuleName = "RTN";
+
+    private readonly ILogger<RankTorrentName> _logger;
+    private dynamic? _sys;
+    private dynamic? _rtn;
+
+    public RankTorrentName(ILogger<RankTorrentName> logger)
+    {
+        _logger = logger;
+        InitModules();
+    }
+
+    public ParseTorrentTitleResponse Parse(string title)
+    {
+        try
+        {
+            using var py = Py.GIL();
+            var result = _rtn?.parse(title);
+
+            if (result == null)
+            {
+                return new(false, string.Empty, 0);
+            }
+
+            return ParseResult(result);
+        }
+        catch (Exception e)
+        {
+            _logger.LogError(e, "Failed to parse title");
+            return new(false, string.Empty, 0);
+        }
+    }
+
+    public bool IsTrash(string title)
+    {
+        try
+        {
+            using var py = Py.GIL();
+            var result = _rtn?.check_trash(title);
+
+            if (result == null)
+            {
+                return false;
+            }
+
+            var response = result.As<bool>() ?? false;
+
+            return response;
+        }
+        catch (Exception e)
+        {
+            _logger.LogError(e, "Failed to parse title");
+            return false;
+        }
+    }
+
+    public bool TitleMatch(string title, string checkTitle)
+    {
+        try
+        {
+            using var py = Py.GIL();
+            var result = _rtn?.title_match(title, checkTitle);
+
+            if (result == null)
+            {
+                return false;
+            }
+
+            var response = result.As<bool>() ?? false;
+
+            return response;
+        }
+        catch (Exception e)
+        {
+            _logger.LogError(e, "Failed to parse title");
+            return false;
+        }
+    }
+
+    private static ParseTorrentTitleResponse ParseResult(dynamic result)
+    {
+        var parsedTitle = result.GetAttr("parsed_title")?.As<string>() ?? string.Empty;
+        var year = result.GetAttr("year")?.As<int>() ?? 0;
+        var seasonList = result.GetAttr("season")?.As<PyList>();
+        var episodeList = result.GetAttr("episode")?.As<PyList>();
+        int[]? seasons = seasonList?.Length() > 0 ? seasonList.As<int[]>() : null;
+        int[]? episodes = episodeList?.Length() > 0 ? episodeList.As<int[]>() : null;
+
+        return new ParseTorrentTitleResponse(true, parsedTitle, year, seasons, episodes);
+    }
+
+    private void InitModules()
+    {
+        using var py = Py.GIL();
+        _sys = Py.Import(SysModuleName);
+
+        if (_sys == null)
+        {
+            _logger.LogError($"Failed to import Python module: {SysModuleName}");
+            return;
+        }
+
+        _sys.path.append(Path.Combine(AppContext.BaseDirectory, "python"));
+
+        _rtn = Py.Import(RtnModuleName);
+        if (_rtn == null)
+        {
+            _logger.LogError($"Failed to import Python module: {RtnModuleName}");
+        }
+    }
+}
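
Taken together, PythonEngineService owns the interpreter lifecycle while RankTorrentName does the per-call work under the GIL. A minimal standalone sketch of that pythonnet flow (assumptions: PYTHONNET_PYDLL points at the container's libpython and rank-torrent-name has been installed into ./python, as the producer Dockerfile and eng scripts do; the sample title and printed output are illustrative only):

    using System;
    using System.IO;
    using Python.Runtime;

    // One-time engine setup, as PythonEngineService.StartAsync does.
    Runtime.PythonDLL = Environment.GetEnvironmentVariable("PYTHONNET_PYDLL");
    PythonEngine.Initialize();

    using (Py.GIL()) // every call into Python must hold the GIL, as RankTorrentName does
    {
        dynamic sys = Py.Import("sys");
        sys.path.append(Path.Combine(AppContext.BaseDirectory, "python")); // where pip dropped RTN

        dynamic rtn = Py.Import("RTN");
        dynamic parsed = rtn.parse("Terminator.2.Judgment.Day.1991.1080p.BluRay.x264"); // hypothetical title
        Console.WriteLine(parsed.GetAttr("parsed_title").As<string>()); // cleaned title string
    }

    PythonEngine.Shutdown(); // mirrored by PythonEngineService.StopAsync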

src/shared/Python/ServiceCollectionExtensions.cs (new file)
@@ -0,0 +1,13 @@
+namespace SharedContracts.Python;
+
+public static class ServiceCollectionExtensions
+{
+    public static IServiceCollection RegisterPythonEngine(this IServiceCollection services)
+    {
+        services.AddSingleton<PythonEngineService>();
+
+        services.AddHostedService(p => p.GetRequiredService<PythonEngineService>());
+
+        return services;
+    }
+}

@@ -16,7 +16,7 @@
     <PackageReference Include="MassTransit.Abstractions" Version="8.2.0" />
     <PackageReference Include="MassTransit.RabbitMQ" Version="8.2.0" />
     <PackageReference Include="Npgsql" Version="8.0.2" />
-    <PackageReference Include="PromKnight.ParseTorrentTitle" Version="1.0.4" />
+    <PackageReference Include="pythonnet" Version="3.0.3" />
     <PackageReference Include="Serilog" Version="3.1.1" />
     <PackageReference Include="Serilog.Extensions.Hosting" Version="8.0.0" />
     <PackageReference Include="Serilog.Settings.Configuration" Version="8.0.0" />