Compare commits
11 Commits
| Author | SHA1 | Date |
|---|---|---|
| | 9344531b34 | |
| | 723aa6b6a0 | |
| | e17b476801 | |
| | 2a414d8bc0 | |
| | 9b5f454e6e | |
| | ad9549c695 | |
| | 1e85cb00ff | |
| | da640a4071 | |
| | e6a63fd72e | |
| | 02101ac50a | |
| | 3c8ffd5082 | |
@@ -14,12 +14,14 @@ program=
 [BitTorrent]
 Session\AnonymousModeEnabled=true
 Session\BTProtocol=TCP
+Session\ConnectionSpeed=150
 Session\DefaultSavePath=/downloads/
 Session\ExcludedFileNames=
-Session\MaxActiveCheckingTorrents=5
-Session\MaxActiveDownloads=10
+Session\MaxActiveCheckingTorrents=20
+Session\MaxActiveDownloads=20
 Session\MaxActiveTorrents=50
 Session\MaxActiveUploads=50
+Session\MaxConcurrentHTTPAnnounces=1000
 Session\MaxConnections=2000
 Session\Port=6881
 Session\QueueingSystemEnabled=true
@@ -94,7 +94,7 @@ services:
         condition: service_healthy
     env_file: stack.env
     hostname: knightcrawler-addon
-    image: gabisonfire/knightcrawler-addon:2.0.20
+    image: gabisonfire/knightcrawler-addon:2.0.26
     labels:
       logging: promtail
     networks:
@@ -117,7 +117,7 @@ services:
       redis:
         condition: service_healthy
     env_file: stack.env
-    image: gabisonfire/knightcrawler-consumer:2.0.20
+    image: gabisonfire/knightcrawler-consumer:2.0.26
     labels:
       logging: promtail
     networks:
@@ -138,7 +138,7 @@ services:
       redis:
         condition: service_healthy
     env_file: stack.env
-    image: gabisonfire/knightcrawler-debrid-collector:2.0.20
+    image: gabisonfire/knightcrawler-debrid-collector:2.0.26
     labels:
       logging: promtail
     networks:
@@ -152,7 +152,7 @@ services:
       migrator:
         condition: service_completed_successfully
     env_file: stack.env
-    image: gabisonfire/knightcrawler-metadata:2.0.20
+    image: gabisonfire/knightcrawler-metadata:2.0.26
     networks:
       - knightcrawler-network
     restart: "no"
@@ -163,7 +163,7 @@ services:
       postgres:
         condition: service_healthy
     env_file: stack.env
-    image: gabisonfire/knightcrawler-migrator:2.0.20
+    image: gabisonfire/knightcrawler-migrator:2.0.26
     networks:
       - knightcrawler-network
     restart: "no"
@@ -182,7 +182,7 @@ services:
       redis:
         condition: service_healthy
     env_file: stack.env
-    image: gabisonfire/knightcrawler-producer:2.0.20
+    image: gabisonfire/knightcrawler-producer:2.0.26
     labels:
       logging: promtail
     networks:
@@ -207,7 +207,7 @@ services:
     deploy:
       replicas: ${QBIT_REPLICAS:-0}
     env_file: stack.env
-    image: gabisonfire/knightcrawler-qbit-collector:2.0.20
+    image: gabisonfire/knightcrawler-qbit-collector:2.0.26
     labels:
       logging: promtail
     networks:
@@ -20,7 +20,7 @@ x-depends: &knightcrawler-app-depends
 
 services:
   metadata:
-    image: gabisonfire/knightcrawler-metadata:2.0.20
+    image: gabisonfire/knightcrawler-metadata:2.0.26
     env_file: ../../.env
     networks:
       - knightcrawler-network
@@ -30,7 +30,7 @@ services:
         condition: service_completed_successfully
 
   migrator:
-    image: gabisonfire/knightcrawler-migrator:2.0.20
+    image: gabisonfire/knightcrawler-migrator:2.0.26
     env_file: ../../.env
     networks:
       - knightcrawler-network
@@ -40,7 +40,7 @@ services:
         condition: service_healthy
 
   addon:
-    image: gabisonfire/knightcrawler-addon:2.0.20
+    image: gabisonfire/knightcrawler-addon:2.0.26
     <<: [*knightcrawler-app, *knightcrawler-app-depends]
     restart: unless-stopped
     hostname: knightcrawler-addon
@@ -48,22 +48,22 @@ services:
       - "7000:7000"
 
   consumer:
-    image: gabisonfire/knightcrawler-consumer:2.0.20
+    image: gabisonfire/knightcrawler-consumer:2.0.26
     <<: [*knightcrawler-app, *knightcrawler-app-depends]
     restart: unless-stopped
 
   debridcollector:
-    image: gabisonfire/knightcrawler-debrid-collector:2.0.20
+    image: gabisonfire/knightcrawler-debrid-collector:2.0.26
     <<: [*knightcrawler-app, *knightcrawler-app-depends]
     restart: unless-stopped
 
   producer:
-    image: gabisonfire/knightcrawler-producer:2.0.20
+    image: gabisonfire/knightcrawler-producer:2.0.26
     <<: [*knightcrawler-app, *knightcrawler-app-depends]
     restart: unless-stopped
 
   qbitcollector:
-    image: gabisonfire/knightcrawler-qbit-collector:2.0.20
+    image: gabisonfire/knightcrawler-qbit-collector:2.0.26
     <<: [*knightcrawler-app, *knightcrawler-app-depends]
     restart: unless-stopped
     depends_on:
@@ -32,6 +32,7 @@ COLLECTOR_DEBRID_ENABLED=true
 COLLECTOR_REAL_DEBRID_API_KEY=
 QBIT_HOST=http://qbittorrent:8080
 QBIT_TRACKERS_URL=https://raw.githubusercontent.com/ngosang/trackerslist/master/trackers_all_http.txt
+QBIT_CONCURRENCY=8
 
 # Number of replicas for the qBittorrent collector and qBitTorrent client. Should be 0 or 1.
 QBIT_REPLICAS=0
@@ -3,6 +3,7 @@ import { addonBuilder } from 'stremio-addon-sdk';
 import { cacheWrapStream } from './lib/cache.js';
 import { dummyManifest } from './lib/manifest.js';
 import * as repository from './lib/repository.js';
+import applyFilters from "./lib/filter.js";
 import applySorting from './lib/sort.js';
 import { toStreamInfo, applyStaticInfo } from './lib/streamInfo.js';
 import { Type } from './lib/types.js';
@@ -32,6 +33,7 @@ builder.defineStreamHandler((args) => {
       .then(records => records
           .sort((a, b) => b.torrent.seeders - a.torrent.seeders || b.torrent.uploadDate - a.torrent.uploadDate)
           .map(record => toStreamInfo(record)))))
+      .then(streams => applyFilters(streams, args.extra))
      .then(streams => applySorting(streams, args.extra))
      .then(streams => applyStaticInfo(streams))
      .then(streams => applyMochs(streams, args.extra))
@@ -84,7 +84,7 @@ export function getImdbIdMovieEntries(imdbId) {
     where: {
       imdbId: { [Op.eq]: imdbId }
     },
-    include: [Torrent],
+    include: { model: Torrent, required: true },
     limit: 500,
     order: [
       [Torrent, 'size', 'DESC']
@@ -99,7 +99,7 @@ export function getImdbIdSeriesEntries(imdbId, season, episode) {
       imdbSeason: { [Op.eq]: season },
       imdbEpisode: { [Op.eq]: episode }
     },
-    include: [Torrent],
+    include: { model: Torrent, required: true },
     limit: 500,
     order: [
       [Torrent, 'size', 'DESC']
@@ -112,7 +112,7 @@ export function getKitsuIdMovieEntries(kitsuId) {
     where: {
       kitsuId: { [Op.eq]: kitsuId }
     },
-    include: [Torrent],
+    include: { model: Torrent, required: true },
     limit: 500,
     order: [
       [Torrent, 'size', 'DESC']
@@ -126,7 +126,7 @@ export function getKitsuIdSeriesEntries(kitsuId, episode) {
       kitsuId: { [Op.eq]: kitsuId },
       kitsuEpisode: { [Op.eq]: episode }
     },
-    include: [Torrent],
+    include: { model: Torrent, required: true },
     limit: 500,
     order: [
       [Torrent, 'size', 'DESC']
@@ -20,7 +20,7 @@ export function toStreamInfo(record) {
   const title = joinDetailParts(
       [
         joinDetailParts([record.torrent.title.replace(/[, ]+/g, ' ')]),
-        joinDetailParts([!sameInfo && record.title || undefined]),
+        joinDetailParts([record.title || undefined]),
         joinDetailParts([
           joinDetailParts([formatSize(record.size)], '💾 ')
         ]),
@@ -15,7 +15,7 @@ WORKDIR /app
 
 ENV PYTHONUNBUFFERED=1
 
-RUN apk add --update --no-cache python3=~3.11.8-r0 py3-pip && ln -sf python3 /usr/bin/python
+RUN apk add --update --no-cache python3=~3.11.9-r0 py3-pip && ln -sf python3 /usr/bin/python
 
 COPY --from=build /src/out .
 
@@ -0,0 +1,43 @@
+-- Drop Duplicate Files in Files Table
+DELETE FROM public.files
+WHERE id NOT IN (
+    SELECT MAX(id)
+    FROM public.files
+    GROUP BY "infoHash", "fileIndex"
+);
+
+-- Add Index to files table
+DO $$
+BEGIN
+    IF NOT EXISTS (
+        SELECT 1
+        FROM pg_constraint
+        WHERE conname = 'files_unique_infohash_fileindex'
+    ) THEN
+        ALTER TABLE public.files
+            ADD CONSTRAINT files_unique_infohash_fileindex UNIQUE ("infoHash", "fileIndex");
+    END IF;
+END $$;
+
+
+-- Drop Duplicate subtitles in Subtitles Table
+DELETE FROM public.subtitles
+WHERE id NOT IN (
+    SELECT MAX(id)
+    FROM public.subtitles
+    GROUP BY "infoHash", "fileIndex"
+);
+
+-- Add Index to subtitles table
+DO $$
+BEGIN
+    IF NOT EXISTS (
+        SELECT 1
+        FROM pg_constraint
+        WHERE conname = 'subtitles_unique_infohash_fileindex'
+    ) THEN
+        ALTER TABLE public.subtitles
+            ADD CONSTRAINT subtitles_unique_infohash_fileindex UNIQUE ("infoHash", "fileIndex");
+    END IF;
+END $$;
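The script deletes duplicate rows first and only then adds the unique constraints, guarded by `IF NOT EXISTS`, so re-running it is safe. Below is a minimal sketch of applying such a script from .NET with Npgsql; the file name and connection string are placeholders, not values from this repository:

```csharp
using System.IO;
using Npgsql;

// Apply the idempotent dedupe/constraint script against PostgreSQL.
var connectionString = "Host=postgres;Username=postgres;Password=postgres;Database=knightcrawler"; // placeholder
var sql = File.ReadAllText("dedupe_files_and_subtitles.sql");                                       // placeholder file name

await using var connection = new NpgsqlConnection(connectionString);
await connection.OpenAsync();
await using var command = new NpgsqlCommand(sql, connection);
await command.ExecuteNonQueryAsync(); // safe to rerun: the DELETEs find nothing, the DO $$ blocks skip existing constraints
```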
@@ -4,27 +4,34 @@
     {
       "Name": "SyncEzTvJob",
       "IntervalSeconds": 60,
-      "Enabled": true
+      "Enabled": true,
+      "Url": "https://eztvx.to/ezrss.xml",
+      "XmlNamespace": "http://xmlns.ezrss.it/0.1/"
     },
     {
       "Name": "SyncNyaaJob",
       "IntervalSeconds": 60,
-      "Enabled": true
+      "Enabled": true,
+      "Url": "https://nyaa.si/?page=rss&c=1_2&f=0",
+      "XmlNamespace": "https://nyaa.si/xmlns/nyaa"
     },
     {
       "Name": "SyncTpbJob",
       "IntervalSeconds": 60,
-      "Enabled": true
+      "Enabled": true,
+      "Url": "https://apibay.org/precompiled/data_top100_recent.json"
     },
     {
       "Name": "SyncYtsJob",
       "IntervalSeconds": 60,
-      "Enabled": true
+      "Enabled": true,
+      "Url": "https://yts.am/rss"
     },
     {
       "Name": "SyncTgxJob",
       "IntervalSeconds": 60,
-      "Enabled": true
+      "Enabled": true,
+      "Url": "https://tgx.rs/rss"
     },
     {
       "Name": "SyncDmmJob",
@@ -14,7 +14,7 @@ WORKDIR /app
 
 ENV PYTHONUNBUFFERED=1
 
-RUN apk add --update --no-cache python3=~3.11.8-r0 py3-pip && ln -sf python3 /usr/bin/python
+RUN apk add --update --no-cache python3=~3.11.9-r0 py3-pip && ln -sf python3 /usr/bin/python
 
 COPY --from=build /src/out .
 
@@ -6,6 +6,12 @@ public abstract class BaseJsonCrawler(IHttpClientFactory httpClientFactory, ILog
 
     protected virtual async Task Execute(string collectionName)
     {
+        if (string.IsNullOrWhiteSpace(Url))
+        {
+            logger.LogWarning("No URL provided for {Source} crawl", Source);
+            return;
+        }
+
         logger.LogInformation("Starting {Source} crawl", Source);
 
         using var client = httpClientFactory.CreateClient("Scraper");
@@ -4,6 +4,12 @@ public abstract class BaseXmlCrawler(IHttpClientFactory httpClientFactory, ILogg
 {
     public override async Task Execute()
     {
+        if (string.IsNullOrWhiteSpace(Url))
+        {
+            logger.LogWarning("No URL provided for {Source} crawl", Source);
+            return;
+        }
+
         logger.LogInformation("Starting {Source} crawl", Source);
 
         using var client = httpClientFactory.CreateClient(Literals.CrawlerClient);
@@ -7,4 +7,8 @@ public class Scraper
     public int IntervalSeconds { get; set; } = 60;
 
     public bool Enabled { get; set; } = true;
+
+    public string? Url { get; set; }
+
+    public string? XmlNamespace { get; set; }
 }
@@ -9,6 +9,7 @@ public static class ServiceCollectionExtensions
             client.BaseAddress = new("https://github.com/debridmediamanager/hashlists/zipball/main/");
             client.DefaultRequestHeaders.Add("Accept-Encoding", "gzip");
             client.DefaultRequestHeaders.UserAgent.ParseAdd("curl");
+            client.Timeout = TimeSpan.FromMinutes(10); // 10 minute timeout, #217
         });
 
     return services;
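The header and timeout tweaks above sit inside an HttpClient configuration lambda; the registration itself is outside this hunk. A hedged sketch of how a named-client registration with these settings typically looks with Microsoft.Extensions.Http, where the method name and client name are invented for illustration:

```csharp
using System;
using Microsoft.Extensions.DependencyInjection;

public static class DmmHttpClientRegistration
{
    public static IServiceCollection AddDmmHashlistClient(this IServiceCollection services) // hypothetical method name
    {
        services.AddHttpClient("DmmHashlists", client => // client name invented for illustration
        {
            client.BaseAddress = new("https://github.com/debridmediamanager/hashlists/zipball/main/");
            client.DefaultRequestHeaders.Add("Accept-Encoding", "gzip");
            client.DefaultRequestHeaders.UserAgent.ParseAdd("curl");
            client.Timeout = TimeSpan.FromMinutes(10); // generous timeout for large zipballs (#217)
        });

        return services;
    }
}
```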
@@ -1,11 +1,10 @@
 namespace Producer.Features.Crawlers.EzTv;
 
-public class EzTvCrawler(IHttpClientFactory httpClientFactory, ILogger<EzTvCrawler> logger, IDataStorage storage) : BaseXmlCrawler(httpClientFactory, logger, storage)
+public class EzTvCrawler(IHttpClientFactory httpClientFactory, ILogger<EzTvCrawler> logger, IDataStorage storage, ScrapeConfiguration scrapeConfiguration) : BaseXmlCrawler(httpClientFactory, logger, storage)
 {
-    protected override string Url => "https://eztv1.xyz/ezrss.xml";
+    protected override string Url => scrapeConfiguration.Scrapers.FirstOrDefault(x => x.Name.Equals("SyncEzTvJob", StringComparison.OrdinalIgnoreCase))?.Url ?? string.Empty;
     protected override string Source => "EZTV";
-    private static readonly XNamespace XmlNamespace = "http://xmlns.ezrss.it/0.1/";
+    private XNamespace XmlNamespace => scrapeConfiguration.Scrapers.FirstOrDefault(x => x.Name.Equals("SyncEzTvJob", StringComparison.OrdinalIgnoreCase))?.XmlNamespace ?? string.Empty;
 
     protected override IReadOnlyDictionary<string, string> Mappings =>
         new Dictionary<string, string>
@@ -1,11 +1,10 @@
 namespace Producer.Features.Crawlers.Nyaa;
 
-public class NyaaCrawler(IHttpClientFactory httpClientFactory, ILogger<NyaaCrawler> logger, IDataStorage storage) : BaseXmlCrawler(httpClientFactory, logger, storage)
+public class NyaaCrawler(IHttpClientFactory httpClientFactory, ILogger<NyaaCrawler> logger, IDataStorage storage, ScrapeConfiguration scrapeConfiguration) : BaseXmlCrawler(httpClientFactory, logger, storage)
 {
-    protected override string Url => "https://nyaa.si/?page=rss&c=1_2&f=0";
+    protected override string Url => scrapeConfiguration.Scrapers.FirstOrDefault(x => x.Name.Equals("SyncNyaaJob", StringComparison.OrdinalIgnoreCase))?.Url ?? string.Empty;
     protected override string Source => "Nyaa";
-    private static readonly XNamespace XmlNamespace = "https://nyaa.si/xmlns/nyaa";
+    private XNamespace XmlNamespace => scrapeConfiguration.Scrapers.FirstOrDefault(x => x.Name.Equals("SyncNyaaJob", StringComparison.OrdinalIgnoreCase))?.XmlNamespace ?? string.Empty;
 
     protected override IReadOnlyDictionary<string, string> Mappings =>
         new Dictionary<string, string>
@@ -1,13 +1,13 @@
 namespace Producer.Features.Crawlers.Tgx;
 
-public partial class TgxCrawler(IHttpClientFactory httpClientFactory, ILogger<TgxCrawler> logger, IDataStorage storage) : BaseXmlCrawler(httpClientFactory, logger, storage)
+public partial class TgxCrawler(IHttpClientFactory httpClientFactory, ILogger<TgxCrawler> logger, IDataStorage storage, ScrapeConfiguration scrapeConfiguration) : BaseXmlCrawler(httpClientFactory, logger, storage)
 {
     [GeneratedRegex(@"Size:\s+(.+?)\s+Added")]
     private static partial Regex SizeStringExtractor();
     [GeneratedRegex(@"(?i)\b(\d+(\.\d+)?)\s*([KMGT]?B)\b", RegexOptions.None, "en-GB")]
     private static partial Regex SizeStringParser();
 
-    protected override string Url => "https://tgx.rs/rss";
+    protected override string Url => scrapeConfiguration.Scrapers.FirstOrDefault(x => x.Name.Equals("SyncTgxJob", StringComparison.OrdinalIgnoreCase))?.Url ?? string.Empty;
 
     protected override string Source => "TorrentGalaxy";
     protected override IReadOnlyDictionary<string, string> Mappings
@@ -1,8 +1,8 @@
 namespace Producer.Features.Crawlers.Tpb;
 
-public class TpbCrawler(IHttpClientFactory httpClientFactory, ILogger<TpbCrawler> logger, IDataStorage storage) : BaseJsonCrawler(httpClientFactory, logger, storage)
+public class TpbCrawler(IHttpClientFactory httpClientFactory, ILogger<TpbCrawler> logger, IDataStorage storage, ScrapeConfiguration scrapeConfiguration) : BaseJsonCrawler(httpClientFactory, logger, storage)
 {
-    protected override string Url => "https://apibay.org/precompiled/data_top100_recent.json";
+    protected override string Url => scrapeConfiguration.Scrapers.FirstOrDefault(x => x.Name.Equals("SyncTpbJob", StringComparison.OrdinalIgnoreCase))?.Url ?? string.Empty;
 
     protected override string Source => "TPB";
 
@@ -1,9 +1,8 @@
 namespace Producer.Features.Crawlers.Yts;
 
-public class YtsCrawler(IHttpClientFactory httpClientFactory, ILogger<YtsCrawler> logger, IDataStorage storage) : BaseXmlCrawler(httpClientFactory, logger, storage)
+public class YtsCrawler(IHttpClientFactory httpClientFactory, ILogger<YtsCrawler> logger, IDataStorage storage, ScrapeConfiguration scrapeConfiguration) : BaseXmlCrawler(httpClientFactory, logger, storage)
 {
-    protected override string Url => "https://yts.am/rss";
+    protected override string Url => scrapeConfiguration.Scrapers.FirstOrDefault(x => x.Name.Equals("SyncYtsJob", StringComparison.OrdinalIgnoreCase))?.Url ?? string.Empty;
-
     protected override string Source => "YTS";
     protected override IReadOnlyDictionary<string, string> Mappings
         => new Dictionary<string, string>
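Each crawler above now repeats the same `Scrapers.FirstOrDefault(...)` lookup against its own job name. A hypothetical extension method (not code from this repository) could centralise that lookup; it assumes only what the diffs already use, namely a `ScrapeConfiguration.Scrapers` list of `Scraper` entries carrying a `Name`:

```csharp
using System;
using System.Linq;

public static class ScrapeConfigurationExtensions
{
    // Look up the Scraper entry for a job name, e.g. "SyncEzTvJob", ignoring case.
    public static Scraper? ForJob(this ScrapeConfiguration configuration, string jobName) =>
        configuration.Scrapers.FirstOrDefault(x => x.Name.Equals(jobName, StringComparison.OrdinalIgnoreCase));
}

// A crawler could then read its settings as:
// protected override string Url => scrapeConfiguration.ForJob("SyncEzTvJob")?.Url ?? string.Empty;
```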
@@ -15,7 +15,7 @@ WORKDIR /app
 
 ENV PYTHONUNBUFFERED=1
 
-RUN apk add --update --no-cache python3=~3.11.8-r0 py3-pip && ln -sf python3 /usr/bin/python
+RUN apk add --update --no-cache python3=~3.11.9-r0 py3-pip && ln -sf python3 /usr/bin/python
 
 COPY --from=build /src/out .
 
@@ -44,6 +44,7 @@ public static class ServiceCollectionExtensions
     {
         var rabbitConfiguration = services.LoadConfigurationFromEnv<RabbitMqConfiguration>();
         var redisConfiguration = services.LoadConfigurationFromEnv<RedisConfiguration>();
+        var qbitConfiguration = services.LoadConfigurationFromEnv<QbitConfiguration>();
 
         services.AddStackExchangeRedisCache(
             option =>
@@ -80,8 +81,8 @@ public static class ServiceCollectionExtensions
                     e.ConfigureConsumer<WriteQbitMetadataConsumer>(context);
                     e.ConfigureConsumer<PerformQbitMetadataRequestConsumer>(context);
                     e.ConfigureSaga<QbitMetadataSagaState>(context);
-                    e.ConcurrentMessageLimit = 5;
-                    e.PrefetchCount = 5;
+                    e.ConcurrentMessageLimit = qbitConfiguration.Concurrency;
+                    e.PrefetchCount = qbitConfiguration.Concurrency;
                 });
             });
         });
@@ -98,7 +99,7 @@ public static class ServiceCollectionExtensions
                 cfg.UseTimeout(
                     timeout =>
                     {
-                        timeout.Timeout = TimeSpan.FromMinutes(1);
+                        timeout.Timeout = TimeSpan.FromMinutes(3);
                     });
             })
             .RedisRepository(redisConfiguration.ConnectionString, options =>
@@ -110,7 +111,7 @@ public static class ServiceCollectionExtensions
     {
         var qbitConfiguration = services.LoadConfigurationFromEnv<QbitConfiguration>();
         var client = new QBittorrentClient(new(qbitConfiguration.Host));
-        client.Timeout = TimeSpan.FromSeconds(10);
+        client.Timeout = TimeSpan.FromSeconds(20);
 
         services.AddSingleton<IQBittorrentClient>(client);
     }
@@ -1,6 +1,6 @@
 namespace QBitCollector.Features.Qbit;
 
-public class QbitRequestProcessor(IQBittorrentClient client, ITrackersService trackersService, ILogger<QbitRequestProcessor> logger)
+public class QbitRequestProcessor(IQBittorrentClient client, ITrackersService trackersService, ILogger<QbitRequestProcessor> logger, QbitConfiguration configuration)
 {
     public async Task<IReadOnlyList<TorrentContent>?> ProcessAsync(string infoHash, CancellationToken cancellationToken = default)
     {
@@ -14,7 +14,7 @@ public class QbitRequestProcessor(IQBittorrentClient client, ITrackersService tr
 
         using var timeoutCts = CancellationTokenSource.CreateLinkedTokenSource(cancellationToken);
 
-        timeoutCts.CancelAfter(TimeSpan.FromSeconds(30));
+        timeoutCts.CancelAfter(TimeSpan.FromSeconds(60));
 
         try
         {
@@ -30,7 +30,7 @@ public class QbitRequestProcessor(IQBittorrentClient client, ITrackersService tr
                     break;
                 }
 
-                await Task.Delay(TimeSpan.FromSeconds(1), timeoutCts.Token);
+                await Task.Delay(TimeSpan.FromMilliseconds(200), timeoutCts.Token);
             }
         }
         catch (OperationCanceledException) when (timeoutCts.IsCancellationRequested)
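The processor polls qBittorrent for metadata until it appears or a linked cancellation deadline fires; this compare raises the deadline from 30 to 60 seconds and shortens the poll interval from 1 second to 200 ms. A stripped-down sketch of that pattern, assuming a placeholder `fetch` delegate rather than the repository's actual qBittorrent call:

```csharp
using System;
using System.Threading;
using System.Threading.Tasks;

public static class MetadataPolling
{
    // Poll until fetch returns a value or the deadline passes; mirrors the CancelAfter + Task.Delay loop above.
    public static async Task<T?> PollUntilAsync<T>(Func<Task<T?>> fetch, TimeSpan deadline, TimeSpan interval, CancellationToken cancellationToken)
        where T : class
    {
        using var timeoutCts = CancellationTokenSource.CreateLinkedTokenSource(cancellationToken);
        timeoutCts.CancelAfter(deadline);
        try
        {
            while (true)
            {
                var result = await fetch();
                if (result is not null)
                {
                    return result; // metadata arrived
                }
                await Task.Delay(interval, timeoutCts.Token); // e.g. 200 ms between polls
            }
        }
        catch (OperationCanceledException) when (timeoutCts.IsCancellationRequested)
        {
            return null; // deadline (e.g. 60 s) reached without metadata
        }
    }
}

// Usage sketch (GetFilesAsync is a placeholder for the real qBittorrent call):
// var files = await MetadataPolling.PollUntilAsync(() => GetFilesAsync(infoHash), TimeSpan.FromSeconds(60), TimeSpan.FromMilliseconds(200), ct);
```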
@@ -5,7 +5,10 @@ public class QbitConfiguration
     private const string Prefix = "QBIT";
     private const string HOST_VARIABLE = "HOST";
     private const string TRACKERS_URL_VARIABLE = "TRACKERS_URL";
+    private const string CONCURRENCY_VARIABLE = "CONCURRENCY";
 
     public string? Host { get; init; } = Prefix.GetRequiredEnvironmentVariableAsString(HOST_VARIABLE);
     public string? TrackersUrl { get; init; } = Prefix.GetRequiredEnvironmentVariableAsString(TRACKERS_URL_VARIABLE);
+
+    public int Concurrency { get; init; } = Prefix.GetEnvironmentVariableAsInt(CONCURRENCY_VARIABLE, 8);
 }
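`GetEnvironmentVariableAsInt` itself is not part of this compare. Assuming it mirrors `GetRequiredEnvironmentVariableAsString`, joining the prefix and variable name with an underscore (so `QBIT` plus `CONCURRENCY` reads `QBIT_CONCURRENCY`, matching the new .env entry) and falling back to the supplied default, it could look roughly like this:

```csharp
using System;

public static class EnvironmentVariableExtensions
{
    // Hypothetical sketch: read "<prefix>_<name>" (e.g. "QBIT_CONCURRENCY") and fall back to a default.
    public static int GetEnvironmentVariableAsInt(this string prefix, string name, int fallback)
    {
        var raw = Environment.GetEnvironmentVariable($"{prefix}_{name}");
        return int.TryParse(raw, out var value) ? value : fallback;
    }
}
```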
@@ -5,6 +5,12 @@ public class WriteQbitMetadataConsumer(IRankTorrentName rankTorrentName, IDataSt
     public async Task Consume(ConsumeContext<WriteQbitMetadata> context)
     {
         var request = context.Message;
 
+        if (request.Metadata.Metadata.Count == 0)
+        {
+            await context.Publish(new QbitMetadataWritten(request.Metadata, false));
+            return;
+        }
+
         var torrentFiles = QbitMetaToTorrentMeta.MapMetadataToFilesCollection(
             rankTorrentName, request.Torrent, request.ImdbId, request.Metadata.Metadata, logger);
@@ -152,7 +152,8 @@ public class DapperDataStorage(PostgresConfiguration configuration, RabbitMqConf
                      INSERT INTO files
                      ("infoHash", "fileIndex", title, "size", "imdbId", "imdbSeason", "imdbEpisode", "kitsuId", "kitsuEpisode", "createdAt", "updatedAt")
                      VALUES
-                     (@InfoHash, @FileIndex, @Title, @Size, @ImdbId, @ImdbSeason, @ImdbEpisode, @KitsuId, @KitsuEpisode, Now(), Now());
+                     (@InfoHash, @FileIndex, @Title, @Size, @ImdbId, @ImdbSeason, @ImdbEpisode, @KitsuId, @KitsuEpisode, Now(), Now())
+                     ON CONFLICT ("infoHash", "fileIndex") DO NOTHING;
                      """;
 
         await connection.ExecuteAsync(query, files);
@@ -167,7 +168,8 @@ public class DapperDataStorage(PostgresConfiguration configuration, RabbitMqConf
                      INSERT INTO subtitles
                      ("infoHash", "fileIndex", "fileId", "title")
                      VALUES
-                     (@InfoHash, @FileIndex, @FileId, @Title);
+                     (@InfoHash, @FileIndex, @FileId, @Title)
+                     ON CONFLICT ("infoHash", "fileIndex") DO NOTHING;
                      """;
 
         await connection.ExecuteAsync(query, subtitles);
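Combined with the unique constraints created by the SQL migration earlier in this compare, `ON CONFLICT ... DO NOTHING` makes these inserts idempotent: re-processing a torrent can no longer create duplicate `files` or `subtitles` rows. A minimal sketch of the same pattern with Dapper and Npgsql, where the parameter values and connection string are placeholders:

```csharp
using Dapper;
using Npgsql;

var connectionString = "Host=postgres;Username=postgres;Password=postgres;Database=knightcrawler"; // placeholder

const string query =
    """
    INSERT INTO subtitles ("infoHash", "fileIndex", "fileId", "title")
    VALUES (@InfoHash, @FileIndex, @FileId, @Title)
    ON CONFLICT ("infoHash", "fileIndex") DO NOTHING;
    """;

await using var connection = new NpgsqlConnection(connectionString);
// Inserting the same ("infoHash", "fileIndex") pair twice leaves a single row behind.
var rows = await connection.ExecuteAsync(query, new { InfoHash = "abc123", FileIndex = 0, FileId = 1, Title = "example.srt" });
```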