8 Commits

Author SHA1 Message Date
FunkeCoder23
9344531b34 Add longer timeout for DMM (#218) 2024-07-01 11:40:09 +01:00
FunkeCoder23
723aa6b6a0 update eztv url (#220)
Co-authored-by: funkecoder23 <funkecoder@DESKTOP-AMVOBPG>
2024-05-18 21:39:14 -04:00
Davide Marcoli
e17b476801 Re-add filter step in the processing of the streams (#215) 2024-05-15 12:00:02 +01:00
David Young
2a414d8bc0 Always return title and filename (#210)
Signed-off-by: David Young <davidy@funkypenguin.co.nz>
2024-05-15 11:55:42 +01:00
iPromKnight
9b5f454e6e Python version bump in alpine (#209) 2024-04-22 12:49:59 +01:00
iPromKnight
ad9549c695 Version bump for release (#208) 2024-04-22 12:46:02 +01:00
David Young
1e85cb00ff INNER JOIN when selecting files and torrents to avoid null results (#207)
* INNER JOIN when selecting files and torrents to avoid null results

Signed-off-by: David Young <davidy@funkypenguin.co.nz>

* Extend fix to all torrent types

Signed-off-by: David Young <davidy@funkypenguin.co.nz>

---------

Signed-off-by: David Young <davidy@funkypenguin.co.nz>
2024-04-22 12:43:57 +01:00
iPromKnight
da640a4071 Fix namespaces on extracted scraper info (#204)
* Fix namespaces on extracted scrapers

* version bump
2024-04-11 18:56:29 +01:00
12 changed files with 28 additions and 25 deletions

View File

@@ -94,7 +94,7 @@ services:
condition: service_healthy
env_file: stack.env
hostname: knightcrawler-addon
-image: gabisonfire/knightcrawler-addon:2.0.23
+image: gabisonfire/knightcrawler-addon:2.0.26
labels:
logging: promtail
networks:
@@ -117,7 +117,7 @@ services:
redis:
condition: service_healthy
env_file: stack.env
-image: gabisonfire/knightcrawler-consumer:2.0.23
+image: gabisonfire/knightcrawler-consumer:2.0.26
labels:
logging: promtail
networks:
@@ -138,7 +138,7 @@ services:
redis:
condition: service_healthy
env_file: stack.env
-image: gabisonfire/knightcrawler-debrid-collector:2.0.23
+image: gabisonfire/knightcrawler-debrid-collector:2.0.26
labels:
logging: promtail
networks:
@@ -152,7 +152,7 @@ services:
migrator:
condition: service_completed_successfully
env_file: stack.env
-image: gabisonfire/knightcrawler-metadata:2.0.23
+image: gabisonfire/knightcrawler-metadata:2.0.26
networks:
- knightcrawler-network
restart: "no"
@@ -163,7 +163,7 @@ services:
postgres:
condition: service_healthy
env_file: stack.env
-image: gabisonfire/knightcrawler-migrator:2.0.23
+image: gabisonfire/knightcrawler-migrator:2.0.26
networks:
- knightcrawler-network
restart: "no"
@@ -182,7 +182,7 @@ services:
redis:
condition: service_healthy
env_file: stack.env
-image: gabisonfire/knightcrawler-producer:2.0.23
+image: gabisonfire/knightcrawler-producer:2.0.26
labels:
logging: promtail
networks:
@@ -207,7 +207,7 @@ services:
deploy:
replicas: ${QBIT_REPLICAS:-0}
env_file: stack.env
-image: gabisonfire/knightcrawler-qbit-collector:2.0.23
+image: gabisonfire/knightcrawler-qbit-collector:2.0.26
labels:
logging: promtail
networks:

View File

@@ -20,7 +20,7 @@ x-depends: &knightcrawler-app-depends
services:
metadata:
-image: gabisonfire/knightcrawler-metadata:2.0.23
+image: gabisonfire/knightcrawler-metadata:2.0.26
env_file: ../../.env
networks:
- knightcrawler-network
@@ -30,7 +30,7 @@ services:
condition: service_completed_successfully
migrator:
-image: gabisonfire/knightcrawler-migrator:2.0.23
+image: gabisonfire/knightcrawler-migrator:2.0.26
env_file: ../../.env
networks:
- knightcrawler-network
@@ -40,7 +40,7 @@ services:
condition: service_healthy
addon:
-image: gabisonfire/knightcrawler-addon:2.0.23
+image: gabisonfire/knightcrawler-addon:2.0.26
<<: [*knightcrawler-app, *knightcrawler-app-depends]
restart: unless-stopped
hostname: knightcrawler-addon
@@ -48,22 +48,22 @@ services:
- "7000:7000"
consumer:
-image: gabisonfire/knightcrawler-consumer:2.0.23
+image: gabisonfire/knightcrawler-consumer:2.0.26
<<: [*knightcrawler-app, *knightcrawler-app-depends]
restart: unless-stopped
debridcollector:
-image: gabisonfire/knightcrawler-debrid-collector:2.0.23
+image: gabisonfire/knightcrawler-debrid-collector:2.0.26
<<: [*knightcrawler-app, *knightcrawler-app-depends]
restart: unless-stopped
producer:
-image: gabisonfire/knightcrawler-producer:2.0.23
+image: gabisonfire/knightcrawler-producer:2.0.26
<<: [*knightcrawler-app, *knightcrawler-app-depends]
restart: unless-stopped
qbitcollector:
-image: gabisonfire/knightcrawler-qbit-collector:2.0.23
+image: gabisonfire/knightcrawler-qbit-collector:2.0.26
<<: [*knightcrawler-app, *knightcrawler-app-depends]
restart: unless-stopped
depends_on:

View File

@@ -3,6 +3,7 @@ import { addonBuilder } from 'stremio-addon-sdk';
import { cacheWrapStream } from './lib/cache.js';
import { dummyManifest } from './lib/manifest.js';
import * as repository from './lib/repository.js';
+import applyFilters from "./lib/filter.js";
import applySorting from './lib/sort.js';
import { toStreamInfo, applyStaticInfo } from './lib/streamInfo.js';
import { Type } from './lib/types.js';
@@ -32,6 +33,7 @@ builder.defineStreamHandler((args) => {
.then(records => records
.sort((a, b) => b.torrent.seeders - a.torrent.seeders || b.torrent.uploadDate - a.torrent.uploadDate)
.map(record => toStreamInfo(record)))))
+.then(streams => applyFilters(streams, args.extra))
.then(streams => applySorting(streams, args.extra))
.then(streams => applyStaticInfo(streams))
.then(streams => applyMochs(streams, args.extra))

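Note on the change above: commit e17b476801 (#215) restores the applyFilters step that had been dropped from the stream pipeline, running it before sorting so excluded streams never reach the sort, static-info, or moch steps. lib/filter.js itself is not part of this diff; the sketch below is a hypothetical illustration of such a filter step, assuming extra.qualityfilter is a comma-separated list of qualities to exclude (the option name and stream shape are assumptions, not the project's actual contract).

// Hypothetical sketch of a lib/filter.js-style module — not the real implementation.
// Assumes each stream has a display `name` that mentions its quality (e.g. "1080p")
// and that extra.qualityfilter lists qualities the user wants dropped.
export default function applyFilters(streams, extra = {}) {
  const excluded = (extra.qualityfilter || '').split(',').filter(Boolean);
  if (!excluded.length) {
    return streams; // nothing to filter — pass the list through unchanged
  }
  // Keep only streams whose name mentions none of the excluded qualities.
  return streams.filter(stream =>
      !excluded.some(quality => stream.name?.includes(quality)));
}
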
View File

@@ -84,7 +84,7 @@ export function getImdbIdMovieEntries(imdbId) {
where: {
imdbId: { [Op.eq]: imdbId }
},
-include: [Torrent],
+include: { model: Torrent, required: true },
limit: 500,
order: [
[Torrent, 'size', 'DESC']
@@ -99,7 +99,7 @@ export function getImdbIdSeriesEntries(imdbId, season, episode) {
imdbSeason: { [Op.eq]: season },
imdbEpisode: { [Op.eq]: episode }
},
-include: [Torrent],
+include: { model: Torrent, required: true },
limit: 500,
order: [
[Torrent, 'size', 'DESC']
@@ -112,7 +112,7 @@ export function getKitsuIdMovieEntries(kitsuId) {
where: {
kitsuId: { [Op.eq]: kitsuId }
},
-include: [Torrent],
+include: { model: Torrent, required: true },
limit: 500,
order: [
[Torrent, 'size', 'DESC']
@@ -126,7 +126,7 @@ export function getKitsuIdSeriesEntries(kitsuId, episode) {
kitsuId: { [Op.eq]: kitsuId },
kitsuEpisode: { [Op.eq]: episode }
},
-include: [Torrent],
+include: { model: Torrent, required: true },
limit: 500,
order: [
[Torrent, 'size', 'DESC']

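The four include changes above all come from commit 1e85cb00ff (#207). Sequelize's array shorthand include: [Torrent] generates a LEFT OUTER JOIN, so a files row whose torrent has been removed still comes back with torrent: null and surfaces as a null result downstream; the object form with required: true switches the join to an INNER JOIN, so unmatched rows are filtered out by the database. A minimal standalone sketch of the difference (the connection string, model shapes, and imdbId value are placeholders, not the project's real schema):

import { Sequelize, DataTypes, Op } from 'sequelize';

// Placeholder DSN — point this at any Postgres instance to try it.
const sequelize = new Sequelize('postgres://user:pass@localhost:5432/knightcrawler');

const Torrent = sequelize.define('torrent', {
  infoHash: { type: DataTypes.STRING, primaryKey: true },
});
const File = sequelize.define('file', { imdbId: DataTypes.STRING });
File.belongsTo(Torrent);

// Array shorthand: LEFT OUTER JOIN — files without a matching torrent
// are returned with `torrent: null`.
await File.findAll({
  where: { imdbId: { [Op.eq]: 'tt0111161' } },
  include: [Torrent],
});

// Object form with required: true — INNER JOIN, so unmatched files
// never leave the database as null results.
await File.findAll({
  where: { imdbId: { [Op.eq]: 'tt0111161' } },
  include: { model: Torrent, required: true },
});
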
View File

@@ -20,7 +20,7 @@ export function toStreamInfo(record) {
const title = joinDetailParts(
[
joinDetailParts([record.torrent.title.replace(/[, ]+/g, ' ')]),
-joinDetailParts([!sameInfo && record.title || undefined]),
+joinDetailParts([record.title || undefined]),
joinDetailParts([
joinDetailParts([formatSize(record.size)], '💾 ')
]),

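The one-line change above is commit 2a414d8bc0 (#210): previously the file-title line was suppressed whenever sameInfo was true (presumably set when the torrent title and file title carry the same information), so some streams rendered without a filename; now the line is emitted whenever record.title is set. A small illustration of the two expressions (sample values made up):

// Before: with sameInfo true, the subexpression collapsed to undefined
// and joinDetailParts dropped the line entirely.
const sameInfo = true;
const record = { title: 'Show.S01E01.1080p.mkv' };

const before = !sameInfo && record.title || undefined; // undefined — line dropped
const after = record.title || undefined;               // 'Show.S01E01.1080p.mkv' — always kept
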
View File

@@ -15,7 +15,7 @@ WORKDIR /app
ENV PYTHONUNBUFFERED=1
-RUN apk add --update --no-cache python3=~3.11.8-r0 py3-pip && ln -sf python3 /usr/bin/python
+RUN apk add --update --no-cache python3=~3.11.9-r0 py3-pip && ln -sf python3 /usr/bin/python
COPY --from=build /src/out .

View File

@@ -5,7 +5,7 @@
"Name": "SyncEzTvJob",
"IntervalSeconds": 60,
"Enabled": true,
-"Url": "https://eztv1.xyz/ezrss.xml",
+"Url": "https://eztvx.to/ezrss.xml",
"XmlNamespace": "http://xmlns.ezrss.it/0.1/"
},
{

View File

@@ -14,7 +14,7 @@ WORKDIR /app
ENV PYTHONUNBUFFERED=1
-RUN apk add --update --no-cache python3=~3.11.8-r0 py3-pip && ln -sf python3 /usr/bin/python
+RUN apk add --update --no-cache python3=~3.11.9-r0 py3-pip && ln -sf python3 /usr/bin/python
COPY --from=build /src/out .

View File

@@ -9,6 +9,7 @@ public static class ServiceCollectionExtensions
client.BaseAddress = new("https://github.com/debridmediamanager/hashlists/zipball/main/");
client.DefaultRequestHeaders.Add("Accept-Encoding", "gzip");
client.DefaultRequestHeaders.UserAgent.ParseAdd("curl");
+client.Timeout = TimeSpan.FromMinutes(10); // 10 minute timeout, #217
});
return services;

View File

@@ -4,7 +4,7 @@ public class EzTvCrawler(IHttpClientFactory httpClientFactory, ILogger<EzTvCrawl
{
protected override string Url => scrapeConfiguration.Scrapers.FirstOrDefault(x => x.Name.Equals("SyncEzTvJob", StringComparison.OrdinalIgnoreCase))?.Url ?? string.Empty;
protected override string Source => "EZTV";
-private XNamespace XmlNamespace => scrapeConfiguration.Scrapers.FirstOrDefault(x => x.Name.Equals("SyncEzTvJob", StringComparison.OrdinalIgnoreCase))?.Url ?? string.Empty;
+private XNamespace XmlNamespace => scrapeConfiguration.Scrapers.FirstOrDefault(x => x.Name.Equals("SyncEzTvJob", StringComparison.OrdinalIgnoreCase))?.XmlNamespace ?? string.Empty;
protected override IReadOnlyDictionary<string, string> Mappings =>
new Dictionary<string, string>

View File

@@ -4,7 +4,7 @@ public class NyaaCrawler(IHttpClientFactory httpClientFactory, ILogger<NyaaCrawl
{
protected override string Url => scrapeConfiguration.Scrapers.FirstOrDefault(x => x.Name.Equals("SyncNyaaJob", StringComparison.OrdinalIgnoreCase))?.Url ?? string.Empty;
protected override string Source => "Nyaa";
-private XNamespace XmlNamespace => scrapeConfiguration.Scrapers.FirstOrDefault(x => x.Name.Equals("SyncNyaaJob", StringComparison.OrdinalIgnoreCase))?.Url ?? string.Empty;
+private XNamespace XmlNamespace => scrapeConfiguration.Scrapers.FirstOrDefault(x => x.Name.Equals("SyncNyaaJob", StringComparison.OrdinalIgnoreCase))?.XmlNamespace ?? string.Empty;
protected override IReadOnlyDictionary<string, string> Mappings =>
new Dictionary<string, string>

View File

@@ -15,7 +15,7 @@ WORKDIR /app
ENV PYTHONUNBUFFERED=1
-RUN apk add --update --no-cache python3=~3.11.8-r0 py3-pip && ln -sf python3 /usr/bin/python
+RUN apk add --update --no-cache python3=~3.11.9-r0 py3-pip && ln -sf python3 /usr/bin/python
COPY --from=build /src/out .