Compare commits
3 Commits
| Author | SHA1 | Date |
|---|---|---|
| | ad9549c695 | |
| | 1e85cb00ff | |
| | da640a4071 | |
@@ -94,7 +94,7 @@ services:
 condition: service_healthy
 env_file: stack.env
 hostname: knightcrawler-addon
-image: gabisonfire/knightcrawler-addon:2.0.23
+image: gabisonfire/knightcrawler-addon:2.0.25
 labels:
 logging: promtail
 networks:
@@ -117,7 +117,7 @@ services:
 redis:
 condition: service_healthy
 env_file: stack.env
-image: gabisonfire/knightcrawler-consumer:2.0.23
+image: gabisonfire/knightcrawler-consumer:2.0.25
 labels:
 logging: promtail
 networks:
@@ -138,7 +138,7 @@ services:
 redis:
 condition: service_healthy
 env_file: stack.env
-image: gabisonfire/knightcrawler-debrid-collector:2.0.23
+image: gabisonfire/knightcrawler-debrid-collector:2.0.25
 labels:
 logging: promtail
 networks:
@@ -152,7 +152,7 @@ services:
 migrator:
 condition: service_completed_successfully
 env_file: stack.env
-image: gabisonfire/knightcrawler-metadata:2.0.23
+image: gabisonfire/knightcrawler-metadata:2.0.25
 networks:
 - knightcrawler-network
 restart: "no"
@@ -163,7 +163,7 @@ services:
 postgres:
 condition: service_healthy
 env_file: stack.env
-image: gabisonfire/knightcrawler-migrator:2.0.23
+image: gabisonfire/knightcrawler-migrator:2.0.25
 networks:
 - knightcrawler-network
 restart: "no"
@@ -182,7 +182,7 @@ services:
 redis:
 condition: service_healthy
 env_file: stack.env
-image: gabisonfire/knightcrawler-producer:2.0.23
+image: gabisonfire/knightcrawler-producer:2.0.25
 labels:
 logging: promtail
 networks:
@@ -207,7 +207,7 @@ services:
 deploy:
 replicas: ${QBIT_REPLICAS:-0}
 env_file: stack.env
-image: gabisonfire/knightcrawler-qbit-collector:2.0.23
+image: gabisonfire/knightcrawler-qbit-collector:2.0.25
 labels:
 logging: promtail
 networks:
@@ -20,7 +20,7 @@ x-depends: &knightcrawler-app-depends

 services:
 metadata:
-image: gabisonfire/knightcrawler-metadata:2.0.23
+image: gabisonfire/knightcrawler-metadata:2.0.25
 env_file: ../../.env
 networks:
 - knightcrawler-network
@@ -30,7 +30,7 @@ services:
 condition: service_completed_successfully

 migrator:
-image: gabisonfire/knightcrawler-migrator:2.0.23
+image: gabisonfire/knightcrawler-migrator:2.0.25
 env_file: ../../.env
 networks:
 - knightcrawler-network
@@ -40,7 +40,7 @@ services:
 condition: service_healthy

 addon:
-image: gabisonfire/knightcrawler-addon:2.0.23
+image: gabisonfire/knightcrawler-addon:2.0.25
 <<: [*knightcrawler-app, *knightcrawler-app-depends]
 restart: unless-stopped
 hostname: knightcrawler-addon
@@ -48,22 +48,22 @@ services:
 - "7000:7000"

 consumer:
-image: gabisonfire/knightcrawler-consumer:2.0.23
+image: gabisonfire/knightcrawler-consumer:2.0.25
 <<: [*knightcrawler-app, *knightcrawler-app-depends]
 restart: unless-stopped

 debridcollector:
-image: gabisonfire/knightcrawler-debrid-collector:2.0.23
+image: gabisonfire/knightcrawler-debrid-collector:2.0.25
 <<: [*knightcrawler-app, *knightcrawler-app-depends]
 restart: unless-stopped

 producer:
-image: gabisonfire/knightcrawler-producer:2.0.23
+image: gabisonfire/knightcrawler-producer:2.0.25
 <<: [*knightcrawler-app, *knightcrawler-app-depends]
 restart: unless-stopped

 qbitcollector:
-image: gabisonfire/knightcrawler-qbit-collector:2.0.23
+image: gabisonfire/knightcrawler-qbit-collector:2.0.25
 <<: [*knightcrawler-app, *knightcrawler-app-depends]
 restart: unless-stopped
 depends_on:
@@ -84,7 +84,7 @@ export function getImdbIdMovieEntries(imdbId) {
 where: {
 imdbId: { [Op.eq]: imdbId }
 },
-include: [Torrent],
+include: { model: Torrent, required: true },
 limit: 500,
 order: [
 [Torrent, 'size', 'DESC']
@@ -99,7 +99,7 @@ export function getImdbIdSeriesEntries(imdbId, season, episode) {
 imdbSeason: { [Op.eq]: season },
 imdbEpisode: { [Op.eq]: episode }
 },
-include: [Torrent],
+include: { model: Torrent, required: true },
 limit: 500,
 order: [
 [Torrent, 'size', 'DESC']
@@ -112,7 +112,7 @@ export function getKitsuIdMovieEntries(kitsuId) {
 where: {
 kitsuId: { [Op.eq]: kitsuId }
 },
-include: [Torrent],
+include: { model: Torrent, required: true },
 limit: 500,
 order: [
 [Torrent, 'size', 'DESC']
@@ -126,7 +126,7 @@ export function getKitsuIdSeriesEntries(kitsuId, episode) {
 kitsuId: { [Op.eq]: kitsuId },
 kitsuEpisode: { [Op.eq]: episode }
 },
-include: [Torrent],
+include: { model: Torrent, required: true },
 limit: 500,
 order: [
 [Torrent, 'size', 'DESC']
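The `include` change above alters the join behaviour in Sequelize: a bare `include: [Torrent]` defaults to `required: false`, i.e. a LEFT OUTER JOIN, so entries with no associated Torrent row can still be returned; `{ model: Torrent, required: true }` makes Sequelize emit an INNER JOIN, so only entries that actually have a matching torrent come back. A minimal sketch of the difference, assuming a hypothetical `File` model and import path (the diff does not show which model `findAll` is called on):

```js
// Sketch only: `File`, `Torrent` and the import path are assumptions for illustration.
import { Op } from 'sequelize';
import { File, Torrent } from './models.js'; // hypothetical

// Before: plain include defaults to required: false (LEFT OUTER JOIN),
// so files without a matching torrent are still returned with Torrent = null.
const withOrphans = await File.findAll({
  where: { imdbId: { [Op.eq]: 'tt0111161' } },
  include: [Torrent],
  limit: 500,
});

// After: required: true switches to an INNER JOIN,
// so only files that have an associated torrent are returned.
const onlyMatched = await File.findAll({
  where: { imdbId: { [Op.eq]: 'tt0111161' } },
  include: { model: Torrent, required: true },
  limit: 500,
  order: [[Torrent, 'size', 'DESC']],
});
```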
@@ -4,7 +4,7 @@ public class EzTvCrawler(IHttpClientFactory httpClientFactory, ILogger<EzTvCrawl
 {
 protected override string Url => scrapeConfiguration.Scrapers.FirstOrDefault(x => x.Name.Equals("SyncEzTvJob", StringComparison.OrdinalIgnoreCase))?.Url ?? string.Empty;
 protected override string Source => "EZTV";
-private XNamespace XmlNamespace => scrapeConfiguration.Scrapers.FirstOrDefault(x => x.Name.Equals("SyncEzTvJob", StringComparison.OrdinalIgnoreCase))?.Url ?? string.Empty;
+private XNamespace XmlNamespace => scrapeConfiguration.Scrapers.FirstOrDefault(x => x.Name.Equals("SyncEzTvJob", StringComparison.OrdinalIgnoreCase))?.XmlNamespace ?? string.Empty;

 protected override IReadOnlyDictionary<string, string> Mappings =>
 new Dictionary<string, string>
@@ -4,7 +4,7 @@ public class NyaaCrawler(IHttpClientFactory httpClientFactory, ILogger<NyaaCrawl
 {
 protected override string Url => scrapeConfiguration.Scrapers.FirstOrDefault(x => x.Name.Equals("SyncNyaaJob", StringComparison.OrdinalIgnoreCase))?.Url ?? string.Empty;
 protected override string Source => "Nyaa";
-private XNamespace XmlNamespace => scrapeConfiguration.Scrapers.FirstOrDefault(x => x.Name.Equals("SyncNyaaJob", StringComparison.OrdinalIgnoreCase))?.Url ?? string.Empty;
+private XNamespace XmlNamespace => scrapeConfiguration.Scrapers.FirstOrDefault(x => x.Name.Equals("SyncNyaaJob", StringComparison.OrdinalIgnoreCase))?.XmlNamespace ?? string.Empty;

 protected override IReadOnlyDictionary<string, string> Mappings =>
 new Dictionary<string, string>