Compare commits
8 commits
moch-updat...v2.0.25
| Author | SHA1 | Date |
|---|---|---|
| | ad9549c695 | |
| | 1e85cb00ff | |
| | da640a4071 | |
| | e6a63fd72e | |
| | 02101ac50a | |
| | 3c8ffd5082 | |
| | 79e0a0f102 | |
| | 6181207513 | |
```diff
@@ -14,12 +14,14 @@ program=
 [BitTorrent]
 Session\AnonymousModeEnabled=true
 Session\BTProtocol=TCP
+Session\ConnectionSpeed=150
 Session\DefaultSavePath=/downloads/
 Session\ExcludedFileNames=
-Session\MaxActiveCheckingTorrents=5
-Session\MaxActiveDownloads=10
+Session\MaxActiveCheckingTorrents=20
+Session\MaxActiveDownloads=20
 Session\MaxActiveTorrents=50
 Session\MaxActiveUploads=50
+Session\MaxConcurrentHTTPAnnounces=1000
 Session\MaxConnections=2000
 Session\Port=6881
 Session\QueueingSystemEnabled=true
```
```diff
@@ -94,7 +94,7 @@ services:
         condition: service_healthy
     env_file: stack.env
     hostname: knightcrawler-addon
-    image: gabisonfire/knightcrawler-addon:2.0.18
+    image: gabisonfire/knightcrawler-addon:2.0.25
     labels:
       logging: promtail
     networks:
```
```diff
@@ -117,7 +117,7 @@ services:
       redis:
         condition: service_healthy
     env_file: stack.env
-    image: gabisonfire/knightcrawler-consumer:2.0.18
+    image: gabisonfire/knightcrawler-consumer:2.0.25
     labels:
       logging: promtail
     networks:
```
```diff
@@ -138,7 +138,7 @@ services:
       redis:
         condition: service_healthy
     env_file: stack.env
-    image: gabisonfire/knightcrawler-debrid-collector:2.0.18
+    image: gabisonfire/knightcrawler-debrid-collector:2.0.25
     labels:
       logging: promtail
     networks:
```
```diff
@@ -152,7 +152,7 @@ services:
       migrator:
         condition: service_completed_successfully
     env_file: stack.env
-    image: gabisonfire/knightcrawler-metadata:2.0.18
+    image: gabisonfire/knightcrawler-metadata:2.0.25
     networks:
       - knightcrawler-network
     restart: "no"
```
```diff
@@ -163,7 +163,7 @@ services:
       postgres:
         condition: service_healthy
     env_file: stack.env
-    image: gabisonfire/knightcrawler-migrator:2.0.18
+    image: gabisonfire/knightcrawler-migrator:2.0.25
     networks:
       - knightcrawler-network
     restart: "no"
```
```diff
@@ -182,7 +182,7 @@ services:
       redis:
         condition: service_healthy
     env_file: stack.env
-    image: gabisonfire/knightcrawler-producer:2.0.18
+    image: gabisonfire/knightcrawler-producer:2.0.25
     labels:
       logging: promtail
     networks:
```
```diff
@@ -207,7 +207,7 @@ services:
     deploy:
       replicas: ${QBIT_REPLICAS:-0}
     env_file: stack.env
-    image: gabisonfire/knightcrawler-qbit-collector:2.0.18
+    image: gabisonfire/knightcrawler-qbit-collector:2.0.25
     labels:
       logging: promtail
     networks:
```
```diff
@@ -20,7 +20,7 @@ x-depends: &knightcrawler-app-depends
 
 services:
   metadata:
-    image: gabisonfire/knightcrawler-metadata:2.0.18
+    image: gabisonfire/knightcrawler-metadata:2.0.25
     env_file: ../../.env
     networks:
       - knightcrawler-network
```
```diff
@@ -30,7 +30,7 @@ services:
         condition: service_completed_successfully
 
   migrator:
-    image: gabisonfire/knightcrawler-migrator:2.0.18
+    image: gabisonfire/knightcrawler-migrator:2.0.25
     env_file: ../../.env
     networks:
       - knightcrawler-network
```
```diff
@@ -40,7 +40,7 @@ services:
         condition: service_healthy
 
   addon:
-    image: gabisonfire/knightcrawler-addon:2.0.18
+    image: gabisonfire/knightcrawler-addon:2.0.25
     <<: [*knightcrawler-app, *knightcrawler-app-depends]
     restart: unless-stopped
     hostname: knightcrawler-addon
```
```diff
@@ -48,22 +48,22 @@ services:
       - "7000:7000"
 
   consumer:
-    image: gabisonfire/knightcrawler-consumer:2.0.18
+    image: gabisonfire/knightcrawler-consumer:2.0.25
     <<: [*knightcrawler-app, *knightcrawler-app-depends]
     restart: unless-stopped
 
   debridcollector:
-    image: gabisonfire/knightcrawler-debrid-collector:2.0.18
+    image: gabisonfire/knightcrawler-debrid-collector:2.0.25
     <<: [*knightcrawler-app, *knightcrawler-app-depends]
     restart: unless-stopped
 
   producer:
-    image: gabisonfire/knightcrawler-producer:2.0.18
+    image: gabisonfire/knightcrawler-producer:2.0.25
     <<: [*knightcrawler-app, *knightcrawler-app-depends]
     restart: unless-stopped
 
   qbitcollector:
-    image: gabisonfire/knightcrawler-qbit-collector:2.0.18
+    image: gabisonfire/knightcrawler-qbit-collector:2.0.25
     <<: [*knightcrawler-app, *knightcrawler-app-depends]
     restart: unless-stopped
     depends_on:
```
```diff
@@ -32,12 +32,10 @@ COLLECTOR_DEBRID_ENABLED=true
 COLLECTOR_REAL_DEBRID_API_KEY=
 QBIT_HOST=http://qbittorrent:8080
 QBIT_TRACKERS_URL=https://raw.githubusercontent.com/ngosang/trackerslist/master/trackers_all_http.txt
+QBIT_CONCURRENCY=8
 
 # Number of replicas for the qBittorrent collector and qBitTorrent client. Should be 0 or 1.
 QBIT_REPLICAS=0
 
 # Addon
 DEBUG_MODE=false
-
-# Producer
-GITHUB_PAT=
```
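The new `QBIT_CONCURRENCY` variable is only declared in this hunk; how the qBittorrent collector consumes it is not part of the compare. A minimal, hypothetical sketch of the usual pattern for such a numeric setting (the variable name comes from the diff, the parsing code is assumed):

```js
// Hypothetical consumer-side parsing; the collector's real code is not shown
// in this diff. Falls back to 8, matching the default declared in stack.env.
const concurrency = Number.parseInt(process.env.QBIT_CONCURRENCY ?? '8', 10);
console.log(`running with ${concurrency} concurrent qBittorrent jobs`);
```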
```diff
@@ -84,7 +84,7 @@ export function getImdbIdMovieEntries(imdbId) {
     where: {
       imdbId: { [Op.eq]: imdbId }
     },
-    include: [Torrent],
+    include: { model: Torrent, required: true },
     limit: 500,
     order: [
       [Torrent, 'size', 'DESC']
```
```diff
@@ -99,7 +99,7 @@ export function getImdbIdSeriesEntries(imdbId, season, episode) {
       imdbSeason: { [Op.eq]: season },
       imdbEpisode: { [Op.eq]: episode }
     },
-    include: [Torrent],
+    include: { model: Torrent, required: true },
     limit: 500,
     order: [
       [Torrent, 'size', 'DESC']
```
```diff
@@ -112,7 +112,7 @@ export function getKitsuIdMovieEntries(kitsuId) {
     where: {
       kitsuId: { [Op.eq]: kitsuId }
     },
-    include: [Torrent],
+    include: { model: Torrent, required: true },
     limit: 500,
     order: [
       [Torrent, 'size', 'DESC']
```
```diff
@@ -126,7 +126,7 @@ export function getKitsuIdSeriesEntries(kitsuId, episode) {
       kitsuId: { [Op.eq]: kitsuId },
       kitsuEpisode: { [Op.eq]: episode }
     },
-    include: [Torrent],
+    include: { model: Torrent, required: true },
     limit: 500,
     order: [
       [Torrent, 'size', 'DESC']
```
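All four query helpers above swap `include: [Torrent]` for `include: { model: Torrent, required: true }`. In Sequelize, `required: true` on an include turns the default LEFT OUTER JOIN into an INNER JOIN, so file rows whose parent torrent is missing no longer come back. A minimal sketch under assumed model wiring (the repo's actual model definitions are not in this diff):

```js
import { Sequelize, DataTypes } from 'sequelize';

// Assumed minimal models mirroring the queries above, not the repo's schema.
const sequelize = new Sequelize('sqlite::memory:');
const Torrent = sequelize.define('torrent', { infoHash: { type: DataTypes.STRING, primaryKey: true } });
const File = sequelize.define('file', { title: DataTypes.STRING, imdbId: DataTypes.STRING });
File.belongsTo(Torrent); // assumed association

await sequelize.sync();
// LEFT OUTER JOIN: files whose torrent row is gone still match.
await File.findAll({ include: [Torrent] });
// INNER JOIN: orphaned files are filtered out — the effect of this change.
await File.findAll({ include: { model: Torrent, required: true } });
```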
```diff
@@ -9,187 +9,187 @@ const KEY = 'alldebrid';
 const AGENT = 'knightcrawler';
 
 export async function getCachedStreams(streams, apiKey) {
   const options = await getDefaultOptions();
   const AD = new AllDebridClient(apiKey, options);
   const hashes = streams.map(stream => stream.infoHash);
   const available = await AD.magnet.instant(hashes)
       .catch(error => {
         if (toCommonError(error)) {
           return Promise.reject(error);
         }
         console.warn(`Failed AllDebrid cached [${hashes[0]}] torrent availability request:`, error);
         return undefined;
       });
   return available?.data?.magnets && streams
       .reduce((mochStreams, stream) => {
         const cachedEntry = available.data.magnets.find(magnet => stream.infoHash === magnet.hash.toLowerCase());
         const streamTitleParts = stream.title.replace(/\n👤.*/s, '').split('\n');
         const fileName = streamTitleParts[streamTitleParts.length - 1];
         const fileIndex = streamTitleParts.length === 2 ? stream.fileIdx : null;
         const encodedFileName = encodeURIComponent(fileName);
-        mochStreams[`${stream.infoHash}@${stream.fileIdx}`] = {
+        mochStreams[stream.infoHash] = {
           url: `${apiKey}/${stream.infoHash}/${encodedFileName}/${fileIndex}`,
           cached: cachedEntry?.instant
         }
         return mochStreams;
       }, {})
 }
 
 export async function getCatalog(apiKey, offset = 0) {
   if (offset > 0) {
     return [];
   }
   const options = await getDefaultOptions();
   const AD = new AllDebridClient(apiKey, options);
   return AD.magnet.status()
       .then(response => response.data.magnets)
       .then(torrents => (torrents || [])
           .filter(torrent => torrent && statusReady(torrent.statusCode))
           .map(torrent => ({
             id: `${KEY}:${torrent.id}`,
             type: Type.OTHER,
             name: torrent.filename
           })));
 }
 
 export async function getItemMeta(itemId, apiKey) {
   const options = await getDefaultOptions();
   const AD = new AllDebridClient(apiKey, options);
   return AD.magnet.status(itemId)
       .then(response => response.data.magnets)
       .then(torrent => ({
         id: `${KEY}:${torrent.id}`,
         type: Type.OTHER,
         name: torrent.filename,
         infoHash: torrent.hash.toLowerCase(),
         videos: torrent.links
             .filter(file => isVideo(file.filename))
             .map((file, index) => ({
               id: `${KEY}:${torrent.id}:${index}`,
               title: file.filename,
               released: new Date(torrent.uploadDate * 1000 - index).toISOString(),
               streams: [{ url: `${apiKey}/${torrent.hash.toLowerCase()}/${encodeURIComponent(file.filename)}/${index}` }]
             }))
       }))
 }
 
 export async function resolve({ ip, apiKey, infoHash, cachedEntryInfo, fileIndex }) {
   console.log(`Unrestricting AllDebrid ${infoHash} [${fileIndex}]`);
   const options = await getDefaultOptions(ip);
   const AD = new AllDebridClient(apiKey, options);
 
   return _resolve(AD, infoHash, cachedEntryInfo, fileIndex)
       .catch(error => {
         if (errorExpiredSubscriptionError(error)) {
           console.log(`Access denied to AllDebrid ${infoHash} [${fileIndex}]`);
           return StaticResponse.FAILED_ACCESS;
         } else if (error.code === 'MAGNET_TOO_MANY') {
           console.log(`Deleting and retrying adding to AllDebrid ${infoHash} [${fileIndex}]...`);
           return _deleteAndRetry(AD, infoHash, cachedEntryInfo, fileIndex);
         }
         return Promise.reject(`Failed AllDebrid adding torrent ${JSON.stringify(error)}`);
       });
 }
 
 async function _resolve(AD, infoHash, cachedEntryInfo, fileIndex) {
   const torrent = await _createOrFindTorrent(AD, infoHash);
   if (torrent && statusReady(torrent.statusCode)) {
     return _unrestrictLink(AD, torrent, cachedEntryInfo, fileIndex);
   } else if (torrent && statusDownloading(torrent.statusCode)) {
     console.log(`Downloading to AllDebrid ${infoHash} [${fileIndex}]...`);
     return StaticResponse.DOWNLOADING;
   } else if (torrent && statusHandledError(torrent.statusCode)) {
     console.log(`Retrying downloading to AllDebrid ${infoHash} [${fileIndex}]...`);
     return _retryCreateTorrent(AD, infoHash, cachedEntryInfo, fileIndex);
   }
 
   return Promise.reject(`Failed AllDebrid adding torrent ${JSON.stringify(torrent)}`);
 }
 
 async function _createOrFindTorrent(AD, infoHash) {
   return _findTorrent(AD, infoHash)
       .catch(() => _createTorrent(AD, infoHash));
 }
 
 async function _retryCreateTorrent(AD, infoHash, encodedFileName, fileIndex) {
   const newTorrent = await _createTorrent(AD, infoHash);
   return newTorrent && statusReady(newTorrent.statusCode)
       ? _unrestrictLink(AD, newTorrent, encodedFileName, fileIndex)
       : StaticResponse.FAILED_DOWNLOAD;
 }
 
 async function _deleteAndRetry(AD, infoHash, encodedFileName, fileIndex) {
   const torrents = await AD.magnet.status().then(response => response.data.magnets);
   const lastTorrent = torrents[torrents.length - 1];
   return AD.magnet.delete(lastTorrent.id)
       .then(() => _retryCreateTorrent(AD, infoHash, encodedFileName, fileIndex));
 }
 
 async function _findTorrent(AD, infoHash) {
   const torrents = await AD.magnet.status().then(response => response.data.magnets);
   const foundTorrents = torrents.filter(torrent => torrent.hash.toLowerCase() === infoHash);
   const nonFailedTorrent = foundTorrents.find(torrent => !statusError(torrent.statusCode));
   const foundTorrent = nonFailedTorrent || foundTorrents[0];
   return foundTorrent || Promise.reject('No recent torrent found');
 }
 
 async function _createTorrent(AD, infoHash) {
   const magnetLink = await getMagnetLink(infoHash);
   const uploadResponse = await AD.magnet.upload(magnetLink);
   const torrentId = uploadResponse.data.magnets[0].id;
   return AD.magnet.status(torrentId).then(statusResponse => statusResponse.data.magnets);
 }
 
 async function _unrestrictLink(AD, torrent, encodedFileName, fileIndex) {
   const targetFileName = decodeURIComponent(encodedFileName);
-  const videos = torrent.links.filter(link => isVideo(link.filename)).sort((a, b) => b.size - a.size);
+  const videos = torrent.links.filter(link => isVideo(link.filename));
   const targetVideo = Number.isInteger(fileIndex)
       ? videos.find(video => sameFilename(targetFileName, video.filename))
-      : videos[0];
+      : videos.sort((a, b) => b.size - a.size)[0];
 
   if (!targetVideo && torrent.links.every(link => isArchive(link.filename))) {
     console.log(`Only AllDebrid archive is available for [${torrent.hash}] ${encodedFileName}`)
     return StaticResponse.FAILED_RAR;
   }
   if (!targetVideo || !targetVideo.link || !targetVideo.link.length) {
     return Promise.reject(`No AllDebrid links found for [${torrent.hash}] ${encodedFileName}`);
   }
   const unrestrictedLink = await AD.link.unlock(targetVideo.link).then(response => response.data.link);
   console.log(`Unrestricted AllDebrid ${torrent.hash} [${fileIndex}] to ${unrestrictedLink}`);
   return unrestrictedLink;
 }
 
 async function getDefaultOptions(ip) {
   return { base_agent: AGENT, timeout: 10000 };
 }
 
 export function toCommonError(error) {
   if (error && error.code === 'AUTH_BAD_APIKEY') {
     return BadTokenError;
   }
   if (error && error.code === 'AUTH_USER_BANNED') {
     return AccessDeniedError;
   }
   return undefined;
 }
 
 function statusError(statusCode) {
   return [5, 6, 7, 8, 9, 10, 11].includes(statusCode);
 }
 
 function statusHandledError(statusCode) {
   return [5, 7, 9, 10, 11].includes(statusCode);
 }
 
 function statusDownloading(statusCode) {
   return [0, 1, 2, 3].includes(statusCode);
 }
 
 function statusReady(statusCode) {
   return statusCode === 4;
 }
 
 function errorExpiredSubscriptionError(error) {
   return ['AUTH_BAD_APIKEY', 'MUST_BE_PREMIUM', 'MAGNET_MUST_BE_PREMIUM', 'FREE_TRIAL_LIMIT_REACHED', 'AUTH_USER_BANNED']
       .includes(error.code);
 }
```
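Besides the cache-key change, `_unrestrictLink` now defers the size sort to the fallback branch. `Array.prototype.sort` mutates in place, so the old up-front sort also reordered `videos` before the `sameFilename` lookup; after the change a requested file is searched in the original link order, and the largest video is picked only when no file index was supplied. A small sketch of the fallback selection (illustrative data only):

```js
// Illustrative link objects; real AllDebrid links carry more fields.
const videos = [
  { filename: 'sample.mkv', size: 50 },
  { filename: 'movie.mkv', size: 900 },
];
// Fallback branch: no fileIndex given, pick the largest video.
const largest = videos.sort((a, b) => b.size - a.size)[0];
console.log(largest.filename); // 'movie.mkv'
```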
```diff
@@ -8,148 +8,148 @@ import StaticResponse from './static.js';
 const KEY = 'debridlink';
 
 export async function getCachedStreams(streams, apiKey) {
   const options = await getDefaultOptions();
   const DL = new DebridLinkClient(apiKey, options);
   const hashBatches = chunkArray(streams.map(stream => stream.infoHash), 50)
       .map(batch => batch.join(','));
   const available = await Promise.all(hashBatches.map(hashes => DL.seedbox.cached(hashes)))
       .then(results => results.map(result => result.value))
       .then(results => results.reduce((all, result) => Object.assign(all, result), {}))
       .catch(error => {
         if (toCommonError(error)) {
           return Promise.reject(error);
         }
         console.warn('Failed DebridLink cached torrent availability request:', error);
         return undefined;
       });
   return available && streams
       .reduce((mochStreams, stream) => {
         const cachedEntry = available[stream.infoHash];
-        mochStreams[`${stream.infoHash}@${stream.fileIdx}`] = {
+        mochStreams[stream.infoHash] = {
           url: `${apiKey}/${stream.infoHash}/null/${stream.fileIdx}`,
           cached: !!cachedEntry
         };
         return mochStreams;
       }, {})
 }
 
 export async function getCatalog(apiKey, offset = 0) {
   if (offset > 0) {
     return [];
   }
   const options = await getDefaultOptions();
   const DL = new DebridLinkClient(apiKey, options);
   return DL.seedbox.list()
       .then(response => response.value)
       .then(torrents => (torrents || [])
           .filter(torrent => torrent && statusReady(torrent))
           .map(torrent => ({
             id: `${KEY}:${torrent.id}`,
             type: Type.OTHER,
             name: torrent.name
           })));
 }
 
 export async function getItemMeta(itemId, apiKey, ip) {
   const options = await getDefaultOptions(ip);
   const DL = new DebridLinkClient(apiKey, options);
   return DL.seedbox.list(itemId)
       .then(response => response.value[0])
       .then(torrent => ({
         id: `${KEY}:${torrent.id}`,
         type: Type.OTHER,
         name: torrent.name,
         infoHash: torrent.hashString.toLowerCase(),
         videos: torrent.files
             .filter(file => isVideo(file.name))
             .map((file, index) => ({
               id: `${KEY}:${torrent.id}:${index}`,
               title: file.name,
               released: new Date(torrent.created * 1000 - index).toISOString(),
               streams: [{ url: file.downloadUrl }]
             }))
       }))
 }
 
 export async function resolve({ ip, apiKey, infoHash, fileIndex }) {
   console.log(`Unrestricting DebridLink ${infoHash} [${fileIndex}]`);
   const options = await getDefaultOptions(ip);
   const DL = new DebridLinkClient(apiKey, options);
 
   return _resolve(DL, infoHash, fileIndex)
       .catch(error => {
         if (errorExpiredSubscriptionError(error)) {
           console.log(`Access denied to DebridLink ${infoHash} [${fileIndex}]`);
           return StaticResponse.FAILED_ACCESS;
         }
         return Promise.reject(`Failed DebridLink adding torrent ${JSON.stringify(error)}`);
       });
 }
 
 async function _resolve(DL, infoHash, fileIndex) {
   const torrent = await _createOrFindTorrent(DL, infoHash);
   if (torrent && statusReady(torrent)) {
     return _unrestrictLink(DL, torrent, fileIndex);
   } else if (torrent && statusDownloading(torrent)) {
     console.log(`Downloading to DebridLink ${infoHash} [${fileIndex}]...`);
     return StaticResponse.DOWNLOADING;
   }
 
   return Promise.reject(`Failed DebridLink adding torrent ${JSON.stringify(torrent)}`);
 }
 
 async function _createOrFindTorrent(DL, infoHash) {
   return _findTorrent(DL, infoHash)
       .catch(() => _createTorrent(DL, infoHash));
 }
 
 async function _findTorrent(DL, infoHash) {
   const torrents = await DL.seedbox.list().then(response => response.value);
   const foundTorrents = torrents.filter(torrent => torrent.hashString.toLowerCase() === infoHash);
   return foundTorrents[0] || Promise.reject('No recent torrent found');
 }
 
 async function _createTorrent(DL, infoHash) {
   const magnetLink = await getMagnetLink(infoHash);
   const uploadResponse = await DL.seedbox.add(magnetLink, null, true);
   return uploadResponse.value;
 }
 
 async function _unrestrictLink(DL, torrent, fileIndex) {
   const targetFile = Number.isInteger(fileIndex)
       ? torrent.files[fileIndex]
       : torrent.files.filter(file => file.downloadPercent === 100).sort((a, b) => b.size - a.size)[0];
 
   if (!targetFile && torrent.files.every(file => isArchive(file.downloadUrl))) {
     console.log(`Only DebridLink archive is available for [${torrent.hashString}] ${fileIndex}`)
     return StaticResponse.FAILED_RAR;
   }
   if (!targetFile || !targetFile.downloadUrl) {
     return Promise.reject(`No DebridLink links found for index ${fileIndex} in: ${JSON.stringify(torrent)}`);
   }
   console.log(`Unrestricted DebridLink ${torrent.hashString} [${fileIndex}] to ${targetFile.downloadUrl}`);
   return targetFile.downloadUrl;
 }
 
 async function getDefaultOptions(ip) {
   return { ip, timeout: 10000 };
 }
 
 export function toCommonError(error) {
   if (error === 'badToken') {
     return BadTokenError;
   }
   return undefined;
 }
 
 function statusDownloading(torrent) {
   return torrent.downloadPercent < 100
 }
 
 function statusReady(torrent) {
   return torrent.downloadPercent === 100;
 }
 
 function errorExpiredSubscriptionError(error) {
   return ['freeServerOverload', 'maxTorrent', 'maxLink', 'maxLinkHost', 'maxData', 'maxDataHost'].includes(error);
 }
```
```diff
@@ -15,226 +15,226 @@ const RESOLVE_TIMEOUT = 2 * 60 * 1000; // 2 minutes
 const MIN_API_KEY_SYMBOLS = 15;
 const TOKEN_BLACKLIST = [];
 export const MochOptions = {
   realdebrid: {
     key: 'realdebrid',
     instance: realdebrid,
     name: "RealDebrid",
     shortName: 'RD',
     catalog: true
   },
   premiumize: {
     key: 'premiumize',
     instance: premiumize,
     name: 'Premiumize',
     shortName: 'PM',
     catalog: true
   },
   alldebrid: {
     key: 'alldebrid',
     instance: alldebrid,
     name: 'AllDebrid',
     shortName: 'AD',
     catalog: true
   },
   debridlink: {
     key: 'debridlink',
     instance: debridlink,
     name: 'DebridLink',
     shortName: 'DL',
     catalog: true
   },
   offcloud: {
     key: 'offcloud',
     instance: offcloud,
     name: 'Offcloud',
     shortName: 'OC',
     catalog: true
   },
   putio: {
     key: 'putio',
     instance: putio,
     name: 'Put.io',
     shortName: 'Putio',
     catalog: true
   }
 };
 
 const unrestrictQueues = {}
 Object.values(MochOptions)
     .map(moch => moch.key)
     .forEach(mochKey => unrestrictQueues[mochKey] = new namedQueue((task, callback) => task.method()
         .then(result => callback(false, result))
         .catch((error => callback(error))), 200));
 
 export function hasMochConfigured(config) {
   return Object.keys(MochOptions).find(moch => config?.[moch])
 }
 
 export async function applyMochs(streams, config) {
   if (!streams?.length || !hasMochConfigured(config)) {
     return streams;
   }
   return Promise.all(Object.keys(config)
       .filter(configKey => MochOptions[configKey])
       .map(configKey => MochOptions[configKey])
       .map(moch => {
         if (isInvalidToken(config[moch.key], moch.key)) {
           return { moch, error: BadTokenError };
         }
         return moch.instance.getCachedStreams(streams, config[moch.key])
             .then(mochStreams => ({ moch, mochStreams }))
             .catch(rawError => {
               const error = moch.instance.toCommonError(rawError) || rawError;
               if (error === BadTokenError) {
                 blackListToken(config[moch.key], moch.key);
               }
               return { moch, error };
             })
       }))
       .then(results => processMochResults(streams, config, results));
 }
 
 export async function resolve(parameters) {
   const moch = MochOptions[parameters.mochKey];
   if (!moch) {
     return Promise.reject(new Error(`Not a valid moch provider: ${parameters.mochKey}`));
   }
 
   if (!parameters.apiKey || !parameters.infoHash || !parameters.cachedEntryInfo) {
     return Promise.reject(new Error("No valid parameters passed"));
   }
   const id = `${parameters.ip}_${parameters.mochKey}_${parameters.apiKey}_${parameters.infoHash}_${parameters.fileIndex}`;
   const method = () => timeout(RESOLVE_TIMEOUT, cacheWrapResolvedUrl(id, () => moch.instance.resolve(parameters)))
       .catch(error => {
         console.warn(error);
         return StaticResponse.FAILED_UNEXPECTED;
       })
       .then(url => isStaticUrl(url) ? `${parameters.host}/${url}` : url);
   const unrestrictQueue = unrestrictQueues[moch.key];
   return new Promise(((resolve, reject) => {
     unrestrictQueue.push({ id, method }, (error, result) => result ? resolve(result) : reject(error));
   }));
 }
 
 export async function getMochCatalog(mochKey, config) {
   const moch = MochOptions[mochKey];
   if (!moch) {
     return Promise.reject(new Error(`Not a valid moch provider: ${mochKey}`));
   }
   if (isInvalidToken(config[mochKey], mochKey)) {
     return Promise.reject(new Error(`Invalid API key for moch provider: ${mochKey}`));
   }
   return moch.instance.getCatalog(config[moch.key], config.skip, config.ip)
       .catch(rawError => {
         const commonError = moch.instance.toCommonError(rawError);
         if (commonError === BadTokenError) {
           blackListToken(config[moch.key], moch.key);
         }
         return commonError ? [] : Promise.reject(rawError);
       });
 }
 
 export async function getMochItemMeta(mochKey, itemId, config) {
   const moch = MochOptions[mochKey];
   if (!moch) {
     return Promise.reject(new Error(`Not a valid moch provider: ${mochKey}`));
   }
 
   return moch.instance.getItemMeta(itemId, config[moch.key], config.ip)
       .then(meta => enrichMeta(meta))
       .then(meta => {
         meta.videos.forEach(video => video.streams.forEach(stream => {
           if (!stream.url.startsWith('http')) {
             stream.url = `${config.host}/${moch.key}/${stream.url}/${streamFilename(video)}`
           }
           stream.behaviorHints = { bingeGroup: itemId }
         }))
         return meta;
       });
 }
 
 function processMochResults(streams, config, results) {
   const errorResults = results
       .map(result => errorStreamResponse(result.moch.key, result.error, config))
       .filter(errorResponse => errorResponse);
   if (errorResults.length) {
     return errorResults;
   }
 
   const includeTorrentLinks = options.includeTorrentLinks(config);
   const excludeDownloadLinks = options.excludeDownloadLinks(config);
   const mochResults = results.filter(result => result?.mochStreams);
 
   const cachedStreams = mochResults
       .reduce((resultStreams, mochResult) => populateCachedLinks(resultStreams, mochResult, config), streams);
   const resultStreams = excludeDownloadLinks ? cachedStreams : populateDownloadLinks(cachedStreams, mochResults, config);
   return includeTorrentLinks ? resultStreams : resultStreams.filter(stream => stream.url);
 }
 
 function populateCachedLinks(streams, mochResult, config) {
   return streams.map(stream => {
-    const cachedEntry = stream.infoHash && mochResult.mochStreams[`${stream.infoHash}@${stream.fileIdx}`];
+    const cachedEntry = stream.infoHash && mochResult.mochStreams[stream.infoHash];
     if (cachedEntry?.cached) {
       return {
         name: `[${mochResult.moch.shortName}+] ${stream.name}`,
         title: stream.title,
         url: `${config.host}/${mochResult.moch.key}/${cachedEntry.url}/${streamFilename(stream)}`,
         behaviorHints: stream.behaviorHints
       };
     }
     return stream;
   });
 }
 
 function populateDownloadLinks(streams, mochResults, config) {
   const torrentStreams = streams.filter(stream => stream.infoHash);
   const seededStreams = streams.filter(stream => !stream.title.includes('👤 0'));
   torrentStreams.forEach(stream => mochResults.forEach(mochResult => {
-    const cachedEntry = mochResult.mochStreams[`${stream.infoHash}@${stream.fileIdx}`];
+    const cachedEntry = mochResult.mochStreams[stream.infoHash];
     const isCached = cachedEntry?.cached;
     if (!isCached && isHealthyStreamForDebrid(seededStreams, stream)) {
       streams.push({
         name: `[${mochResult.moch.shortName} download] ${stream.name}`,
         title: stream.title,
         url: `${config.host}/${mochResult.moch.key}/${cachedEntry.url}/${streamFilename(stream)}`,
         behaviorHints: stream.behaviorHints
       })
     }
   }));
   return streams;
 }
 
 function isHealthyStreamForDebrid(streams, stream) {
   const isZeroSeeders = stream.title.includes('👤 0');
   const is4kStream = stream.name.includes('4k');
   const isNotEnoughOptions = streams.length <= 5;
   return !isZeroSeeders || is4kStream || isNotEnoughOptions;
 }
 
 function isInvalidToken(token, mochKey) {
   return token.length < MIN_API_KEY_SYMBOLS || TOKEN_BLACKLIST.includes(`${mochKey}|${token}`);
 }
 
 function blackListToken(token, mochKey) {
   const tokenKey = `${mochKey}|${token}`;
   console.log(`Blacklisting invalid token: ${tokenKey}`)
   TOKEN_BLACKLIST.push(tokenKey);
 }
 
 function errorStreamResponse(mochKey, error, config) {
   if (error === BadTokenError) {
     return {
       name: `KnightCrawler\n${MochOptions[mochKey].shortName} error`,
       title: `Invalid ${MochOptions[mochKey].name} ApiKey/Token!`,
       url: `${config.host}/${StaticResponse.FAILED_ACCESS}`
     };
   }
   if (error === AccessDeniedError) {
     return {
       name: `KnightCrawler\n${MochOptions[mochKey].shortName} error`,
       title: `Expired/invalid ${MochOptions[mochKey].name} subscription!`,
       url: `${config.host}/${StaticResponse.FAILED_ACCESS}`
     };
   }
   return undefined;
 }
```
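The same key change runs through every provider in this compare: the `getCachedStreams` maps are now keyed by the bare info hash, and both `populateCachedLinks` and `populateDownloadLinks` look entries up the same way, so a torrent's cache entry is shared across its files instead of being keyed per file index. A minimal sketch of the lookup before and after (illustrative values):

```js
const stream = { infoHash: 'abc123', fileIdx: 1 };
const mochStreams = { abc123: { url: 'apikey/abc123/file/1', cached: true } };

// Old key: misses unless the entry was stored for this exact file index.
const oldEntry = mochStreams[`${stream.infoHash}@${stream.fileIdx}`]; // undefined
// New key: one entry per torrent, found regardless of the file wanted.
const newEntry = mochStreams[stream.infoHash];
console.log(Boolean(oldEntry), Boolean(newEntry)); // false true
```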
@@ -1,63 +1,63 @@
|
|||||||
import * as repository from '../lib/repository.js';
|
import * as repository from '../lib/repository.js';
|
||||||
|
|
||||||
const METAHUB_URL = 'https://images.metahub.space'
|
const METAHUB_URL = 'https://images.metahub.space'
|
||||||
export const BadTokenError = { code: 'BAD_TOKEN' }
|
export const BadTokenError = { code: 'BAD_TOKEN' }
|
||||||
export const AccessDeniedError = { code: 'ACCESS_DENIED' }
|
export const AccessDeniedError = { code: 'ACCESS_DENIED' }
|
||||||
|
|
||||||
export function chunkArray(arr, size) {
|
export function chunkArray(arr, size) {
|
||||||
return arr.length > size
|
return arr.length > size
|
||||||
? [arr.slice(0, size), ...chunkArray(arr.slice(size), size)]
|
? [arr.slice(0, size), ...chunkArray(arr.slice(size), size)]
|
||||||
: [arr];
|
: [arr];
|
||||||
}
|
}
|
||||||
|
|
||||||
export function streamFilename(stream) {
|
export function streamFilename(stream) {
|
||||||
const titleParts = stream.title.replace(/\n👤.*/s, '').split('\n');
|
const titleParts = stream.title.replace(/\n👤.*/s, '').split('\n');
|
||||||
const filename = titleParts.pop().split('/').pop();
|
const filename = titleParts.pop().split('/').pop();
|
||||||
return encodeURIComponent(filename)
|
return encodeURIComponent(filename)
|
||||||
}
|
}
|
||||||
|
|
||||||
export async function enrichMeta(itemMeta) {
|
export async function enrichMeta(itemMeta) {
|
||||||
const infoHashes = [...new Set([itemMeta.infoHash]
|
const infoHashes = [...new Set([itemMeta.infoHash]
|
||||||
.concat(itemMeta.videos.map(video => video.infoHash))
|
.concat(itemMeta.videos.map(video => video.infoHash))
|
||||||
.filter(infoHash => infoHash))];
|
.filter(infoHash => infoHash))];
|
||||||
const files = infoHashes.length ? await repository.getFiles(infoHashes).catch(() => []) : [];
|
const files = infoHashes.length ? await repository.getFiles(infoHashes).catch(() => []) : [];
|
||||||
const commonImdbId = itemMeta.infoHash && mostCommonValue(files.map(file => file.imdbId));
|
const commonImdbId = itemMeta.infoHash && mostCommonValue(files.map(file => file.imdbId));
|
||||||
if (files.length) {
|
if (files.length) {
|
||||||
return {
|
return {
|
||||||
...itemMeta,
|
...itemMeta,
|
||||||
logo: commonImdbId && `${METAHUB_URL}/logo/medium/${commonImdbId}/img`,
|
logo: commonImdbId && `${METAHUB_URL}/logo/medium/${commonImdbId}/img`,
|
||||||
poster: commonImdbId && `${METAHUB_URL}/poster/medium/${commonImdbId}/img`,
|
poster: commonImdbId && `${METAHUB_URL}/poster/medium/${commonImdbId}/img`,
|
||||||
background: commonImdbId && `${METAHUB_URL}/background/medium/${commonImdbId}/img`,
|
background: commonImdbId && `${METAHUB_URL}/background/medium/${commonImdbId}/img`,
|
||||||
videos: itemMeta.videos.map(video => {
|
videos: itemMeta.videos.map(video => {
|
||||||
const file = files.find(file => sameFilename(video.title, file.title));
|
const file = files.find(file => sameFilename(video.title, file.title));
|
||||||
if (file?.imdbId) {
|
if (file?.imdbId) {
|
||||||
if (file.imdbSeason && file.imdbEpisode) {
|
if (file.imdbSeason && file.imdbEpisode) {
|
||||||
video.id = `${file.imdbId}:${file.imdbSeason}:${file.imdbEpisode}`;
|
video.id = `${file.imdbId}:${file.imdbSeason}:${file.imdbEpisode}`;
|
||||||
video.season = file.imdbSeason;
|
video.season = file.imdbSeason;
|
||||||
video.episode = file.imdbEpisode;
|
video.episode = file.imdbEpisode;
|
||||||
video.thumbnail = `https://episodes.metahub.space/${file.imdbId}/${video.season}/${video.episode}/w780.jpg`
|
video.thumbnail = `https://episodes.metahub.space/${file.imdbId}/${video.season}/${video.episode}/w780.jpg`
|
||||||
} else {
|
} else {
|
||||||
video.id = file.imdbId;
|
video.id = file.imdbId;
|
||||||
video.thumbnail = `${METAHUB_URL}/background/small/${file.imdbId}/img`;
|
video.thumbnail = `${METAHUB_URL}/background/small/${file.imdbId}/img`;
|
||||||
}
|
}
|
||||||
}
|
|
||||||
return video;
|
|
||||||
})
|
|
||||||
}
|
}
|
||||||
|
return video;
|
||||||
|
})
|
||||||
}
|
}
|
||||||
return itemMeta
|
}
|
||||||
|
return itemMeta
|
||||||
}
|
}
|
||||||
|
|
||||||
export function sameFilename(filename, expectedFilename) {
|
export function sameFilename(filename, expectedFilename) {
|
||||||
const offset = filename.length - expectedFilename.length;
|
const offset = filename.length - expectedFilename.length;
|
||||||
for (let i = 0; i < expectedFilename.length; i++) {
|
for (let i = 0; i < expectedFilename.length; i++) {
|
||||||
if (filename[offset + i] !== expectedFilename[i] && expectedFilename[i] !== '<27>') {
|
if (filename[offset + i] !== expectedFilename[i] && expectedFilename[i] !== '<27>') {
|
||||||
return false;
|
return false;
|
||||||
}
|
|
||||||
}
|
}
|
||||||
return true;
|
}
|
||||||
|
return true;
|
||||||
}
|
}
|
||||||
|
|
||||||
function mostCommonValue(array) {
|
function mostCommonValue(array) {
|
||||||
return array.sort((a, b) => array.filter(v => v === a).length - array.filter(v => v === b).length).pop();
|
return array.sort((a, b) => array.filter(v => v === a).length - array.filter(v => v === b).length).pop();
|
||||||
}
|
}
|
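A quick illustrative sketch of how these helpers behave (the sample inputs below are invented, not taken from the diff):

chunkArray([1, 2, 3, 4, 5], 2);            // => [[1, 2], [3, 4], [5]]

// sameFilename compares only the tail of `filename` against `expectedFilename`,
// treating the replacement character '�' as a single-character wildcard, so names
// whose non-ASCII bytes were mangled in transit still match:
sameFilename('/movies/Caf�.2020.mkv', 'Caf�.2020.mkv'); // => true (tail match)
sameFilename('Cafe.2020.mkv', 'Caf�.2020.mkv');         // => true ('�' matches 'e')
sameFilename('Cafe.2021.mkv', 'Caf�.2020.mkv');         // => false ('1' vs '0')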
@@ -9,178 +9,178 @@ import StaticResponse from './static.js';
 const KEY = 'offcloud';

 export async function getCachedStreams(streams, apiKey) {
   const options = await getDefaultOptions();
   const OC = new OffcloudClient(apiKey, options);
   const hashBatches = chunkArray(streams.map(stream => stream.infoHash), 100);
   const available = await Promise.all(hashBatches.map(hashes => OC.instant.cache(hashes)))
       .then(results => results.map(result => result.cachedItems))
       .then(results => results.reduce((all, result) => all.concat(result), []))
       .catch(error => {
         if (toCommonError(error)) {
           return Promise.reject(error);
         }
         console.warn('Failed Offcloud cached torrent availability request:', error);
         return undefined;
       });
   return available && streams
       .reduce((mochStreams, stream) => {
         const isCached = available.includes(stream.infoHash);
         const streamTitleParts = stream.title.replace(/\n👤.*/s, '').split('\n');
         const fileName = streamTitleParts[streamTitleParts.length - 1];
+        const fileIndex = streamTitleParts.length === 2 ? stream.fileIdx : null;
         const encodedFileName = encodeURIComponent(fileName);
-        mochStreams[`${stream.infoHash}@${stream.fileIdx}`] = {
-          url: `${apiKey}/${stream.infoHash}/${encodedFileName}/${stream.fileIdx}`,
+        mochStreams[stream.infoHash] = {
+          url: `${apiKey}/${stream.infoHash}/${encodedFileName}/${fileIndex}`,
           cached: isCached
         };
         return mochStreams;
       }, {})
 }

 export async function getCatalog(apiKey, offset = 0) {
   if (offset > 0) {
     return [];
   }
   const options = await getDefaultOptions();
   const OC = new OffcloudClient(apiKey, options);
   return OC.cloud.history()
       .then(torrents => torrents)
       .then(torrents => (torrents || [])
           .map(torrent => ({
             id: `${KEY}:${torrent.requestId}`,
             type: Type.OTHER,
             name: torrent.fileName
           })));
 }

 export async function getItemMeta(itemId, apiKey, ip) {
   const options = await getDefaultOptions(ip);
   const OC = new OffcloudClient(apiKey, options);
   const torrents = await OC.cloud.history();
   const torrent = torrents.find(torrent => torrent.requestId === itemId)
   const infoHash = torrent && magnet.decode(torrent.originalLink).infoHash
   const createDate = torrent ? new Date(torrent.createdOn) : new Date();
   return _getFileUrls(OC, torrent)
       .then(files => ({
         id: `${KEY}:${itemId}`,
         type: Type.OTHER,
         name: torrent.name,
         infoHash: infoHash,
         videos: files
             .filter(file => isVideo(file))
             .map((file, index) => ({
               id: `${KEY}:${itemId}:${index}`,
               title: file.split('/').pop(),
               released: new Date(createDate.getTime() - index).toISOString(),
               streams: [{ url: file }]
             }))
       }))
 }

 export async function resolve({ ip, apiKey, infoHash, cachedEntryInfo, fileIndex }) {
   console.log(`Unrestricting Offcloud ${infoHash} [${fileIndex}]`);
   const options = await getDefaultOptions(ip);
   const OC = new OffcloudClient(apiKey, options);

   return _resolve(OC, infoHash, cachedEntryInfo, fileIndex)
       .catch(error => {
         if (errorExpiredSubscriptionError(error)) {
           console.log(`Access denied to Offcloud ${infoHash} [${fileIndex}]`);
           return StaticResponse.FAILED_ACCESS;
         }
         return Promise.reject(`Failed Offcloud adding torrent ${JSON.stringify(error)}`);
       });
 }

 async function _resolve(OC, infoHash, cachedEntryInfo, fileIndex) {
   const torrent = await _createOrFindTorrent(OC, infoHash)
       .then(info => info.requestId ? OC.cloud.status(info.requestId) : Promise.resolve(info))
       .then(info => info.status || info);
   if (torrent && statusReady(torrent)) {
     return _unrestrictLink(OC, infoHash, torrent, cachedEntryInfo, fileIndex);
   } else if (torrent && statusDownloading(torrent)) {
     console.log(`Downloading to Offcloud ${infoHash} [${fileIndex}]...`);
     return StaticResponse.DOWNLOADING;
   } else if (torrent && statusError(torrent)) {
     console.log(`Retry failed download in Offcloud ${infoHash} [${fileIndex}]...`);
     return _retryCreateTorrent(OC, infoHash, cachedEntryInfo, fileIndex);
   }

   return Promise.reject(`Failed Offcloud adding torrent ${JSON.stringify(torrent)}`);
 }

 async function _createOrFindTorrent(OC, infoHash) {
   return _findTorrent(OC, infoHash)
       .catch(() => _createTorrent(OC, infoHash));
 }

 async function _findTorrent(OC, infoHash) {
   const torrents = await OC.cloud.history();
   const foundTorrents = torrents.filter(torrent => torrent.originalLink.toLowerCase().includes(infoHash));
   const nonFailedTorrent = foundTorrents.find(torrent => !statusError(torrent));
   const foundTorrent = nonFailedTorrent || foundTorrents[0];
   return foundTorrent || Promise.reject('No recent torrent found');
 }

 async function _createTorrent(OC, infoHash) {
   const magnetLink = await getMagnetLink(infoHash);
   return OC.cloud.download(magnetLink)
 }

 async function _retryCreateTorrent(OC, infoHash, cachedEntryInfo, fileIndex) {
   const newTorrent = await _createTorrent(OC, infoHash);
   return newTorrent && statusReady(newTorrent.status)
       ? _unrestrictLink(OC, infoHash, newTorrent, cachedEntryInfo, fileIndex)
       : StaticResponse.FAILED_DOWNLOAD;
 }

 async function _unrestrictLink(OC, infoHash, torrent, cachedEntryInfo, fileIndex) {
   const targetFileName = decodeURIComponent(cachedEntryInfo);
   const files = await _getFileUrls(OC, torrent)
-  const targetFile = files.find(file => file.includes(`/${torrent.requestId}/${fileIndex}/`))
-      || files.find(file => sameFilename(targetFileName, file.split('/').pop()))
+  const targetFile = files.find(file => sameFilename(targetFileName, file.split('/').pop()))
       || files.find(file => isVideo(file))
       || files.pop();

   if (!targetFile) {
     return Promise.reject(`No Offcloud links found for index ${fileIndex} in: ${JSON.stringify(torrent)}`);
   }
   console.log(`Unrestricted Offcloud ${infoHash} [${fileIndex}] to ${targetFile}`);
   return targetFile;
 }

 async function _getFileUrls(OC, torrent) {
   return OC.cloud.explore(torrent.requestId)
       .catch(error => {
         if (error === 'Bad archive') {
           return [`https://${torrent.server}.offcloud.com/cloud/download/${torrent.requestId}/${torrent.fileName}`];
         }
         throw error;
       })
 }

 async function getDefaultOptions(ip) {
   return { ip, timeout: 10000 };
 }

 export function toCommonError(error) {
   if (error?.error === 'NOAUTH' || error?.message?.startsWith('Cannot read property')) {
     return BadTokenError;
   }
   return undefined;
 }

 function statusDownloading(torrent) {
   return ['downloading', 'created'].includes(torrent.status);
 }

 function statusError(torrent) {
   return ['error', 'canceled'].includes(torrent.status);
 }

 function statusReady(torrent) {
   return torrent.status === 'downloaded';
 }

 function errorExpiredSubscriptionError(error) {
   return error?.includes && (error.includes('not_available') || error.includes('NOAUTH') || error.includes('premium membership'));
 }
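For illustration, a minimal sketch of what the reworked getCachedStreams now produces for a single stream (the sample stream object is invented; field names follow the code above). Entries are keyed by bare infoHash, and the file index is only forwarded when the stream title actually carries a separate filename line:

const stream = {
  infoHash: 'abc123',
  fileIdx: 1,
  title: 'Some.Show.S01.1080p\nSome.Show.S01E02.mkv\n👤 42'
};
const parts = stream.title.replace(/\n👤.*/s, '').split('\n'); // ['Some.Show.S01.1080p', 'Some.Show.S01E02.mkv']
const fileIndex = parts.length === 2 ? stream.fileIdx : null;  // 1, since a filename line is present
// => mochStreams['abc123'] = { url: `${apiKey}/abc123/Some.Show.S01E02.mkv/1`, cached: ... }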
@@ -9,187 +9,187 @@ import StaticResponse from './static.js';
 const KEY = 'premiumize';

 export async function getCachedStreams(streams, apiKey) {
   const options = await getDefaultOptions();
   const PM = new PremiumizeClient(apiKey, options);
   return Promise.all(chunkArray(streams, 100)
       .map(chunkedStreams => _getCachedStreams(PM, apiKey, chunkedStreams)))
       .then(results => results.reduce((all, result) => Object.assign(all, result), {}));
 }

 async function _getCachedStreams(PM, apiKey, streams) {
   const hashes = streams.map(stream => stream.infoHash);
   return PM.cache.check(hashes)
       .catch(error => {
         if (toCommonError(error)) {
           return Promise.reject(error);
         }
         console.warn('Failed Premiumize cached torrent availability request:', error);
         return undefined;
       })
       .then(available => streams
           .reduce((mochStreams, stream, index) => {
             const streamTitleParts = stream.title.replace(/\n👤.*/s, '').split('\n');
             const fileName = streamTitleParts[streamTitleParts.length - 1];
             const fileIndex = streamTitleParts.length === 2 ? stream.fileIdx : null;
             const encodedFileName = encodeURIComponent(fileName);
             mochStreams[`${stream.infoHash}@${stream.fileIdx}`] = {
               url: `${apiKey}/${stream.infoHash}/${encodedFileName}/${fileIndex}`,
               cached: available?.response[index]
             };
             return mochStreams;
           }, {}));
 }

 export async function getCatalog(apiKey, offset = 0) {
   if (offset > 0) {
     return [];
   }
   const options = await getDefaultOptions();
   const PM = new PremiumizeClient(apiKey, options);
   return PM.folder.list()
       .then(response => response.content)
       .then(torrents => (torrents || [])
           .filter(torrent => torrent && torrent.type === 'folder')
           .map(torrent => ({
             id: `${KEY}:${torrent.id}`,
             type: Type.OTHER,
             name: torrent.name
           })));
 }

 export async function getItemMeta(itemId, apiKey, ip) {
   const options = await getDefaultOptions();
   const PM = new PremiumizeClient(apiKey, options);
   const rootFolder = await PM.folder.list(itemId, null);
   const infoHash = await _findInfoHash(PM, itemId);
   return getFolderContents(PM, itemId, ip)
       .then(contents => ({
         id: `${KEY}:${itemId}`,
         type: Type.OTHER,
         name: rootFolder.name,
         infoHash: infoHash,
         videos: contents
             .map((file, index) => ({
               id: `${KEY}:${file.id}:${index}`,
               title: file.name,
               released: new Date(file.created_at * 1000 - index).toISOString(),
               streams: [{ url: file.link || file.stream_link }]
             }))
       }))
 }

 async function getFolderContents(PM, itemId, ip, folderPrefix = '') {
   return PM.folder.list(itemId, null, ip)
       .then(response => response.content)
       .then(contents => Promise.all(contents
           .filter(content => content.type === 'folder')
           .map(content => getFolderContents(PM, content.id, ip, [folderPrefix, content.name].join('/'))))
           .then(otherContents => otherContents.reduce((a, b) => a.concat(b), []))
           .then(otherContents => contents
               .filter(content => content.type === 'file' && isVideo(content.name))
               .map(content => ({ ...content, name: [folderPrefix, content.name].join('/') }))
               .concat(otherContents)));
 }

 export async function resolve({ ip, isBrowser, apiKey, infoHash, cachedEntryInfo, fileIndex }) {
   console.log(`Unrestricting Premiumize ${infoHash} [${fileIndex}] for IP ${ip} from browser=${isBrowser}`);
   const options = await getDefaultOptions();
   const PM = new PremiumizeClient(apiKey, options);
   return _getCachedLink(PM, infoHash, cachedEntryInfo, fileIndex, ip, isBrowser)
       .catch(() => _resolve(PM, infoHash, cachedEntryInfo, fileIndex, ip, isBrowser))
       .catch(error => {
         if (error?.message?.includes('Account not premium.')) {
           console.log(`Access denied to Premiumize ${infoHash} [${fileIndex}]`);
           return StaticResponse.FAILED_ACCESS;
         }
         return Promise.reject(`Failed Premiumize adding torrent ${JSON.stringify(error)}`);
       });
 }

 async function _resolve(PM, infoHash, cachedEntryInfo, fileIndex, ip, isBrowser) {
   const torrent = await _createOrFindTorrent(PM, infoHash);
   if (torrent && statusReady(torrent.status)) {
     return _getCachedLink(PM, infoHash, cachedEntryInfo, fileIndex, ip, isBrowser);
   } else if (torrent && statusDownloading(torrent.status)) {
     console.log(`Downloading to Premiumize ${infoHash} [${fileIndex}]...`);
     return StaticResponse.DOWNLOADING;
   } else if (torrent && statusError(torrent.status)) {
     console.log(`Retrying downloading to Premiumize ${infoHash} [${fileIndex}]...`);
     return _retryCreateTorrent(PM, infoHash, cachedEntryInfo, fileIndex);
   }
   return Promise.reject(`Failed Premiumize adding torrent ${JSON.stringify(torrent)}`);
 }

 async function _getCachedLink(PM, infoHash, encodedFileName, fileIndex, ip, isBrowser) {
   const cachedTorrent = await PM.transfer.directDownload(magnet.encode({ infoHash }), ip);
   if (cachedTorrent?.content?.length) {
     const targetFileName = decodeURIComponent(encodedFileName);
-    const videos = cachedTorrent.content.filter(file => isVideo(file.path)).sort((a, b) => b.size - a.size);
+    const videos = cachedTorrent.content.filter(file => isVideo(file.path));
     const targetVideo = Number.isInteger(fileIndex)
         ? videos.find(video => sameFilename(video.path, targetFileName))
-        : videos[0];
+        : videos.sort((a, b) => b.size - a.size)[0];
     if (!targetVideo && videos.every(video => isArchive(video.path))) {
       console.log(`Only Premiumize archive is available for [${infoHash}] ${fileIndex}`)
       return StaticResponse.FAILED_RAR;
     }
     const streamLink = isBrowser && targetVideo.transcode_status === 'finished' && targetVideo.stream_link;
     const unrestrictedLink = streamLink || targetVideo.link;
     console.log(`Unrestricted Premiumize ${infoHash} [${fileIndex}] to ${unrestrictedLink}`);
     return unrestrictedLink;
   }
   return Promise.reject('No cached entry found');
 }

 async function _createOrFindTorrent(PM, infoHash) {
   return _findTorrent(PM, infoHash)
       .catch(() => _createTorrent(PM, infoHash));
 }

 async function _findTorrent(PM, infoHash) {
   const torrents = await PM.transfer.list().then(response => response.transfers);
   const foundTorrents = torrents.filter(torrent => torrent.src.toLowerCase().includes(infoHash));
   const nonFailedTorrent = foundTorrents.find(torrent => !statusError(torrent.statusCode));
   const foundTorrent = nonFailedTorrent || foundTorrents[0];
   return foundTorrent || Promise.reject('No recent torrent found');
 }

 async function _findInfoHash(PM, itemId) {
   const torrents = await PM.transfer.list().then(response => response.transfers);
   const foundTorrent = torrents.find(torrent => `${torrent.file_id}` === itemId || `${torrent.folder_id}` === itemId);
   return foundTorrent?.src ? magnet.decode(foundTorrent.src).infoHash : undefined;
 }

 async function _createTorrent(PM, infoHash) {
   const magnetLink = await getMagnetLink(infoHash);
   return PM.transfer.create(magnetLink).then(() => _findTorrent(PM, infoHash));
 }

 async function _retryCreateTorrent(PM, infoHash, encodedFileName, fileIndex) {
   const newTorrent = await _createTorrent(PM, infoHash).then(() => _findTorrent(PM, infoHash));
   return newTorrent && statusReady(newTorrent.status)
       ? _getCachedLink(PM, infoHash, encodedFileName, fileIndex)
       : StaticResponse.FAILED_DOWNLOAD;
 }

 export function toCommonError(error) {
   if (error && error.message === 'Not logged in.') {
     return BadTokenError;
   }
   return undefined;
 }

 function statusError(status) {
   return ['deleted', 'error', 'timeout'].includes(status);
 }

 function statusDownloading(status) {
   return ['waiting', 'queued', 'running'].includes(status);
 }

 function statusReady(status) {
   return ['finished', 'seeding'].includes(status);
 }

 async function getDefaultOptions(ip) {
   return { timeout: 5000 };
 }
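A small sketch of the selection change in _getCachedLink above (the sample data is invented): when a concrete file index is requested, the video list is now left in its original order and matched purely by filename; the size sort only happens for the "no index, take the largest video" fallback:

const videos = [
  { path: 'Sample/extras/trailer.mkv', size: 50 },
  { path: 'Sample/Movie.2020.mkv', size: 4000 }
];
// file index given: match by name, order untouched
videos.find(v => sameFilename(v.path, 'Movie.2020.mkv'));  // => the 4000-byte entry
// no file index: fall back to the largest video
videos.sort((a, b) => b.size - a.size)[0];                 // => Movie.2020.mkv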
@@ -11,205 +11,205 @@ const PutioAPI = PutioClient.default;
 const KEY = 'putio';

 export async function getCachedStreams(streams, apiKey) {
   return streams
       .reduce((mochStreams, stream) => {
         const streamTitleParts = stream.title.replace(/\n👤.*/s, '').split('\n');
         const fileName = streamTitleParts[streamTitleParts.length - 1];
         const fileIndex = streamTitleParts.length === 2 ? stream.fileIdx : null;
         const encodedFileName = encodeURIComponent(fileName);
-        mochStreams[`${stream.infoHash}@${stream.fileIdx}`] = {
+        mochStreams[stream.infoHash] = {
           url: `${apiKey}/${stream.infoHash}/${encodedFileName}/${fileIndex}`,
           cached: false
         };
         return mochStreams;
       }, {});
 }

 export async function getCatalog(apiKey, offset = 0) {
   if (offset > 0) {
     return [];
   }
   const Putio = createPutioAPI(apiKey)
   return Putio.Files.Query(0)
       .then(response => response?.body?.files)
       .then(files => (files || [])
           .map(file => ({
             id: `${KEY}:${file.id}`,
             type: Type.OTHER,
             name: file.name
           })));
 }

 export async function getItemMeta(itemId, apiKey) {
   const Putio = createPutioAPI(apiKey)
   const infoHash = await _findInfoHash(Putio, itemId)
   return getFolderContents(Putio, itemId)
       .then(contents => ({
         id: `${KEY}:${itemId}`,
         type: Type.OTHER,
         name: contents.name,
         infoHash: infoHash,
         videos: contents
             .map((file, index) => ({
               id: `${KEY}:${file.id}:${index}`,
               title: file.name,
               released: new Date(file.created_at).toISOString(),
               streams: [{ url: `${apiKey}/null/null/${file.id}` }]
             }))
       }))
 }

 async function getFolderContents(Putio, itemId, folderPrefix = '') {
   return await Putio.Files.Query(itemId)
       .then(response => response?.body)
       .then(body => body?.files?.length ? body.files : [body?.parent].filter(x => x))
       .then(contents => Promise.all(contents
           .filter(content => content.file_type === 'FOLDER')
           .map(content => getFolderContents(Putio, content.id, [folderPrefix, content.name].join('/'))))
           .then(otherContents => otherContents.reduce((a, b) => a.concat(b), []))
           .then(otherContents => contents
               .filter(content => content.file_type === 'VIDEO')
               .map(content => ({ ...content, name: [folderPrefix, content.name].join('/') }))
               .concat(otherContents)));
 }

 export async function resolve({ ip, apiKey, infoHash, cachedEntryInfo, fileIndex }) {
   console.log(`Unrestricting Putio ${infoHash} [${fileIndex}]`);
   const Putio = createPutioAPI(apiKey)

   return _resolve(Putio, infoHash, cachedEntryInfo, fileIndex)
       .catch(error => {
         if (error?.data?.status_code === 401) {
           console.log(`Access denied to Putio ${infoHash} [${fileIndex}]`);
           return StaticResponse.FAILED_ACCESS;
         }
         return Promise.reject(`Failed Putio adding torrent ${JSON.stringify(error.data || error)}`);
       });
 }

 async function _resolve(Putio, infoHash, cachedEntryInfo, fileIndex) {
   if (infoHash === 'null') {
     return _unrestrictVideo(Putio, fileIndex);
   }
   const torrent = await _createOrFindTorrent(Putio, infoHash);
   if (torrent && statusReady(torrent.status)) {
     return _unrestrictLink(Putio, torrent, cachedEntryInfo, fileIndex);
   } else if (torrent && statusDownloading(torrent.status)) {
     console.log(`Downloading to Putio ${infoHash} [${fileIndex}]...`);
     return StaticResponse.DOWNLOADING;
   } else if (torrent && statusError(torrent.status)) {
     console.log(`Retrying downloading to Putio ${infoHash} [${fileIndex}]...`);
     return _retryCreateTorrent(Putio, infoHash, cachedEntryInfo, fileIndex);
   }
   return Promise.reject("Failed Putio adding torrent");
 }

 async function _createOrFindTorrent(Putio, infoHash) {
   return _findTorrent(Putio, infoHash)
       .catch(() => _createTorrent(Putio, infoHash));
 }

 async function _retryCreateTorrent(Putio, infoHash, encodedFileName, fileIndex) {
   const newTorrent = await _createTorrent(Putio, infoHash);
   return newTorrent && statusReady(newTorrent.status)
       ? _unrestrictLink(Putio, newTorrent, encodedFileName, fileIndex)
       : StaticResponse.FAILED_DOWNLOAD;
 }

 async function _findTorrent(Putio, infoHash) {
   const torrents = await Putio.Transfers.Query().then(response => response.data.transfers);
   const foundTorrents = torrents.filter(torrent => torrent.source.toLowerCase().includes(infoHash));
   const nonFailedTorrent = foundTorrents.find(torrent => !statusError(torrent.status));
   const foundTorrent = nonFailedTorrent || foundTorrents[0];
   if (foundTorrent && !foundTorrent.userfile_exists) {
     return await Putio.Transfers.Cancel(foundTorrent.id).then(() => Promise.reject())
   }
   return foundTorrent || Promise.reject('No recent torrent found in Putio');
 }

 async function _findInfoHash(Putio, fileId) {
   const torrents = await Putio.Transfers.Query().then(response => response?.data?.transfers);
   const foundTorrent = torrents.find(torrent => `${torrent.file_id}` === fileId);
   return foundTorrent?.source ? decode(foundTorrent.source).infoHash : undefined;
 }

 async function _createTorrent(Putio, infoHash) {
   const magnetLink = await getMagnetLink(infoHash);
   // Add the torrent, then give put.io a few seconds to process it before checking its status.
   return Putio.Transfers.Add({ url: magnetLink })
       .then(response => _getNewTorrent(Putio, response.data.transfer.id));
 }

 async function _getNewTorrent(Putio, torrentId, pollCounter = 0, pollRate = 2000, maxPollNumber = 15) {
   return Putio.Transfers.Get(torrentId)
       .then(response => response.data.transfer)
       .then(torrent => statusProcessing(torrent.status) && pollCounter < maxPollNumber
           ? delay(pollRate).then(() => _getNewTorrent(Putio, torrentId, pollCounter + 1))
           : torrent);
 }

 async function _unrestrictLink(Putio, torrent, encodedFileName, fileIndex) {
   const targetVideo = await _getTargetFile(Putio, torrent, encodedFileName, fileIndex);
   return _unrestrictVideo(Putio, targetVideo.id);
 }

 async function _unrestrictVideo(Putio, videoId) {
   const response = await Putio.File.GetStorageURL(videoId);
   const downloadUrl = response.data.url
   console.log(`Unrestricted Putio [${videoId}] to ${downloadUrl}`);
   return downloadUrl;
 }

 async function _getTargetFile(Putio, torrent, encodedFileName, fileIndex) {
   const targetFileName = decodeURIComponent(encodedFileName);
   let targetFile;
   let files = await _getFiles(Putio, torrent.file_id);
   let videos = [];

   while (!targetFile && files.length) {
     const folders = files.filter(file => file.file_type === 'FOLDER');
-    videos = videos.concat(files.filter(file => isVideo(file.name))).sort((a, b) => b.size - a.size);
+    videos = videos.concat(files.filter(file => isVideo(file.name)));
     // when specific file index is defined search by filename
     // when it's not defined find all videos and take the largest one
     targetFile = Number.isInteger(fileIndex)
         ? videos.find(video => sameFilename(targetFileName, video.name))
-        : !folders.length && videos[0];
+        : !folders.length && videos.sort((a, b) => b.size - a.size)[0];
     files = !targetFile
         ? await Promise.all(folders.map(folder => _getFiles(Putio, folder.id)))
             .then(results => results.reduce((a, b) => a.concat(b), []))
         : [];
   }
   return targetFile || Promise.reject(`No target file found for Putio [${torrent.hash}] ${targetFileName}`);
 }

 async function _getFiles(Putio, fileId) {
   const response = await Putio.Files.Query(fileId)
       .catch(error => Promise.reject({ ...error.data, path: error.request.path }));
   return response.data.files.length
       ? response.data.files
       : [response.data.parent];
 }

 function createPutioAPI(apiKey) {
   const clientId = apiKey.replace(/@.*/, '');
   const token = apiKey.replace(/.*@/, '');
   const Putio = new PutioAPI({ clientID: clientId });
   Putio.setToken(token);
   return Putio;
 }

 function statusError(status) {
   return ['ERROR'].includes(status);
 }

 function statusDownloading(status) {
   return ['WAITING', 'IN_QUEUE', 'DOWNLOADING'].includes(status);
 }

 function statusProcessing(status) {
   return ['WAITING', 'IN_QUEUE', 'COMPLETING'].includes(status);
 }

 function statusReady(status) {
   return ['COMPLETED', 'SEEDING'].includes(status);
 }
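The _getNewTorrent helper above is a bounded status poll. A generic sketch of the same pattern, for illustration only (all names here are invented; `delay` stands in for the promise-based sleep this file imports):

const delay = ms => new Promise(resolve => setTimeout(resolve, ms)); // stand-in helper

async function pollUntil(fetchStatus, isPending, attempt = 0, rate = 2000, maxAttempts = 15) {
  const result = await fetchStatus();
  // keep polling while the resource is still processing and attempts remain, otherwise settle
  return isPending(result) && attempt < maxAttempts
      ? delay(rate).then(() => pollUntil(fetchStatus, isPending, attempt + 1, rate, maxAttempts))
      : result;
}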
@@ -15,385 +15,385 @@ const KEY = 'realdebrid';
 const DEBRID_DOWNLOADS = 'Downloads';

 export async function getCachedStreams(streams, apiKey) {
   const hashes = streams.map(stream => stream.infoHash);
   const available = await _getInstantAvailable(hashes, apiKey);
   return available && streams
       .reduce((mochStreams, stream) => {
         const cachedEntry = available[stream.infoHash];
         const cachedIds = _getCachedFileIds(stream.fileIdx, cachedEntry);
-        mochStreams[`${stream.infoHash}@${stream.fileIdx}`] = {
+        mochStreams[stream.infoHash] = {
           url: `${apiKey}/${stream.infoHash}/null/${stream.fileIdx}`,
           cached: !!cachedIds.length
         };
         return mochStreams;
       }, {})
 }

 async function _getInstantAvailable(hashes, apiKey, retries = 3, maxChunkSize = 150) {
   const cachedResults = await getCachedAvailabilityResults(hashes);
   const missingHashes = hashes.filter(infoHash => !cachedResults[infoHash]);
   if (!missingHashes.length) {
     return cachedResults
   }
   const options = await getDefaultOptions();
   const RD = new RealDebridClient(apiKey, options);
   const hashBatches = chunkArray(missingHashes, maxChunkSize)
   return Promise.all(hashBatches.map(batch => RD.torrents.instantAvailability(batch)
       .then(response => {
         if (typeof response !== 'object') {
           return Promise.reject(new Error('RD returned non JSON response: ' + response));
         }
         return processAvailabilityResults(response);
       })))
       .then(results => results.reduce((all, result) => Object.assign(all, result), {}))
       .then(results => cacheAvailabilityResults(results))
       .then(results => Object.assign(cachedResults, results))
       .catch(error => {
         if (toCommonError(error)) {
           return Promise.reject(error);
         }
         if (!error && maxChunkSize !== 1) {
           // sometimes due to large response size RD responds with an empty body. Reduce chunk size to reduce body
           console.log(`Reducing chunk size for availability request: ${hashes[0]}`);
           return _getInstantAvailable(hashes, apiKey, retries - 1, Math.ceil(maxChunkSize / 10));
         }
         if (retries > 0 && NON_BLACKLIST_ERRORS.some(v => error?.message?.includes(v))) {
           return _getInstantAvailable(hashes, apiKey, retries - 1);
         }
         console.warn(`Failed RealDebrid cached [${hashes[0]}] torrent availability request:`, error.message);
         return undefined;
       });
 }
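As a back-of-the-envelope illustration of the shrinking-chunk retry in _getInstantAvailable (values follow the defaults above): an empty RD response retries the same hashes with a tenth of the chunk size, trading one oversized response body for several smaller requests before giving up.

let chunk = 150;
for (let retry = 3; retry > 0 && chunk !== 1; retry--) {
  chunk = Math.ceil(chunk / 10); // 150 -> 15 -> 2 -> 1
}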

 function processAvailabilityResults(availabilityResults) {
   const processedResults = {};
   Object.entries(availabilityResults)
       .forEach(([infoHash, hosterResults]) => processedResults[infoHash] = getCachedIds(hosterResults));
   return processedResults;
 }

 function getCachedIds(hosterResults) {
   if (!hosterResults || Array.isArray(hosterResults)) {
     return [];
   }
   // if not all cached files are videos, then the torrent will be zipped to a rar
   return Object.values(hosterResults)
       .reduce((a, b) => a.concat(b), [])
       .filter(cached => Object.keys(cached).length && Object.values(cached).every(file => isVideo(file.filename)))
       .map(cached => Object.keys(cached))
       .sort((a, b) => b.length - a.length)
       .filter((cached, index, array) => index === 0 || cached.some(id => !array[0].includes(id)));
 }
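A worked example of what getCachedIds keeps (the hoster payload below is invented, shaped like RD's instantAvailability response, and assumes isVideo matches video extensions such as .mkv): only all-video variants survive, the largest file-id set is sorted first, and smaller sets are kept only if they contain ids the first set lacks:

const hosterResults = {
  rd: [
    { 1: { filename: 'movie.mkv' }, 2: { filename: 'sample.mkv' } }, // all videos -> kept
    { 1: { filename: 'movie.rar' } },                                // not a video -> dropped
    { 2: { filename: 'sample.mkv' } }                                // subset of ['1', '2'] -> dropped
  ]
};
getCachedIds(hosterResults); // => [['1', '2']]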

 function _getCachedFileIds(fileIndex, cachedResults) {
   if (!cachedResults || !Array.isArray(cachedResults)) {
     return [];
   }

   const cachedIds = Number.isInteger(fileIndex)
       ? cachedResults.find(ids => ids.includes(`${fileIndex + 1}`))
       : cachedResults[0];
   return cachedIds || [];
 }

 export async function getCatalog(apiKey, offset, ip) {
   if (offset > 0) {
     return [];
   }
   const options = await getDefaultOptions(ip);
   const RD = new RealDebridClient(apiKey, options);
   const downloadsMeta = {
     id: `${KEY}:${DEBRID_DOWNLOADS}`,
     type: Type.OTHER,
     name: DEBRID_DOWNLOADS
   };
   const torrentMetas = await _getAllTorrents(RD)
       .then(torrents => Array.isArray(torrents) ? torrents : [])
       .then(torrents => torrents
           .filter(torrent => torrent && statusReady(torrent.status))
           .map(torrent => ({
             id: `${KEY}:${torrent.id}`,
             type: Type.OTHER,
             name: torrent.filename
           })));
   return [downloadsMeta].concat(torrentMetas)
 }

 export async function getItemMeta(itemId, apiKey, ip) {
   const options = await getDefaultOptions(ip);
   const RD = new RealDebridClient(apiKey, options);
   if (itemId === DEBRID_DOWNLOADS) {
     const videos = await _getAllDownloads(RD)
         .then(downloads => downloads
             .map(download => ({
               id: `${KEY}:${DEBRID_DOWNLOADS}:${download.id}`,
               // infoHash: allTorrents
               //     .filter(torrent => (torrent.links || []).find(link => link === download.link))
               //     .map(torrent => torrent.hash.toLowerCase())[0],
               title: download.filename,
               released: new Date(download.generated).toISOString(),
               streams: [{ url: download.download }]
             })));
     return {
       id: `${KEY}:${DEBRID_DOWNLOADS}`,
       type: Type.OTHER,
       name: DEBRID_DOWNLOADS,
       videos: videos
     };
   }
   return _getTorrentInfo(RD, itemId)
.then(torrent => ({
|
.then(torrent => ({
|
||||||
id: `${KEY}:${torrent.id}`,
|
id: `${KEY}:${torrent.id}`,
|
||||||
type: Type.OTHER,
|
type: Type.OTHER,
|
||||||
name: torrent.filename,
|
name: torrent.filename,
|
||||||
infoHash: torrent.hash.toLowerCase(),
|
infoHash: torrent.hash.toLowerCase(),
|
||||||
videos: torrent.files
|
videos: torrent.files
|
||||||
.filter(file => file.selected)
|
.filter(file => file.selected)
|
||||||
.filter(file => isVideo(file.path))
|
.filter(file => isVideo(file.path))
|
||||||
.map((file, index) => ({
|
.map((file, index) => ({
|
||||||
id: `${KEY}:${torrent.id}:${file.id}`,
|
id: `${KEY}:${torrent.id}:${file.id}`,
|
||||||
title: file.path,
|
title: file.path,
|
||||||
released: new Date(new Date(torrent.added).getTime() - index).toISOString(),
|
released: new Date(new Date(torrent.added).getTime() - index).toISOString(),
|
||||||
streams: [{ url: `${apiKey}/${torrent.hash.toLowerCase()}/null/${file.id - 1}` }]
|
streams: [{ url: `${apiKey}/${torrent.hash.toLowerCase()}/null/${file.id - 1}` }]
|
||||||
}))
|
}))
|
||||||
}))
|
}))
|
||||||
}
|
}
|
||||||
|
|
||||||
async function _getAllTorrents(RD, page = 1) {
|
async function _getAllTorrents(RD, page = 1) {
|
||||||
return RD.torrents.get(page - 1, page, CATALOG_PAGE_SIZE)
|
return RD.torrents.get(page - 1, page, CATALOG_PAGE_SIZE)
|
||||||
.then(torrents => torrents && torrents.length === CATALOG_PAGE_SIZE && page < CATALOG_MAX_PAGE
|
.then(torrents => torrents && torrents.length === CATALOG_PAGE_SIZE && page < CATALOG_MAX_PAGE
|
||||||
? _getAllTorrents(RD, page + 1)
|
? _getAllTorrents(RD, page + 1)
|
||||||
.then(nextTorrents => torrents.concat(nextTorrents))
|
.then(nextTorrents => torrents.concat(nextTorrents))
|
||||||
.catch(() => torrents)
|
.catch(() => torrents)
|
||||||
: torrents)
|
: torrents)
|
||||||
}
|
}
|
||||||
|
|
||||||
async function _getAllDownloads(RD, page = 1) {
|
async function _getAllDownloads(RD, page = 1) {
|
||||||
return RD.downloads.get(page - 1, page, CATALOG_PAGE_SIZE);
|
return RD.downloads.get(page - 1, page, CATALOG_PAGE_SIZE);
|
||||||
}
|
}
|
||||||
|
|
||||||
export async function resolve({ ip, isBrowser, apiKey, infoHash, fileIndex }) {
|
export async function resolve({ ip, isBrowser, apiKey, infoHash, fileIndex }) {
|
||||||
console.log(`Unrestricting RealDebrid ${infoHash} [${fileIndex}]`);
|
console.log(`Unrestricting RealDebrid ${infoHash} [${fileIndex}]`);
|
||||||
const options = await getDefaultOptions(ip);
|
const options = await getDefaultOptions(ip);
|
||||||
const RD = new RealDebridClient(apiKey, options);
|
const RD = new RealDebridClient(apiKey, options);
|
||||||
const cachedFileIds = await _resolveCachedFileIds(infoHash, fileIndex, apiKey);
|
const cachedFileIds = await _resolveCachedFileIds(infoHash, fileIndex, apiKey);
|
||||||
|
|
||||||
return _resolve(RD, infoHash, cachedFileIds, fileIndex, isBrowser)
|
return _resolve(RD, infoHash, cachedFileIds, fileIndex, isBrowser)
|
||||||
.catch(error => {
|
.catch(error => {
|
||||||
if (accessDeniedError(error)) {
|
if (accessDeniedError(error)) {
|
||||||
console.log(`Access denied to RealDebrid ${infoHash} [${fileIndex}]`);
|
console.log(`Access denied to RealDebrid ${infoHash} [${fileIndex}]`);
|
||||||
return StaticResponse.FAILED_ACCESS;
|
return StaticResponse.FAILED_ACCESS;
|
||||||
}
|
}
|
||||||
if (infringingFile(error)) {
|
if (infringingFile(error)) {
|
||||||
console.log(`Infringing file removed from RealDebrid ${infoHash} [${fileIndex}]`);
|
console.log(`Infringing file removed from RealDebrid ${infoHash} [${fileIndex}]`);
|
||||||
return StaticResponse.FAILED_INFRINGEMENT;
|
return StaticResponse.FAILED_INFRINGEMENT;
|
||||||
}
|
}
|
||||||
return Promise.reject(`Failed RealDebrid adding torrent ${JSON.stringify(error)}`);
|
return Promise.reject(`Failed RealDebrid adding torrent ${JSON.stringify(error)}`);
|
||||||
});
|
});
|
||||||
}
|
}
|
||||||
|
|
||||||
async function _resolveCachedFileIds(infoHash, fileIndex, apiKey) {
|
async function _resolveCachedFileIds(infoHash, fileIndex, apiKey) {
|
||||||
const available = await _getInstantAvailable([infoHash], apiKey);
|
const available = await _getInstantAvailable([infoHash], apiKey);
|
||||||
const cachedEntry = available?.[infoHash];
|
const cachedEntry = available?.[infoHash];
|
||||||
const cachedIds = _getCachedFileIds(fileIndex, cachedEntry);
|
const cachedIds = _getCachedFileIds(fileIndex, cachedEntry);
|
||||||
return cachedIds?.join(',');
|
return cachedIds?.join(',');
|
||||||
}
|
}
|
||||||
|
|
||||||
async function _resolve(RD, infoHash, cachedFileIds, fileIndex, isBrowser) {
|
async function _resolve(RD, infoHash, cachedFileIds, fileIndex, isBrowser) {
|
||||||
const torrentId = await _createOrFindTorrentId(RD, infoHash, cachedFileIds, fileIndex);
|
const torrentId = await _createOrFindTorrentId(RD, infoHash, cachedFileIds, fileIndex);
|
||||||
const torrent = await _getTorrentInfo(RD, torrentId);
|
const torrent = await _getTorrentInfo(RD, torrentId);
|
||||||
if (torrent && statusReady(torrent.status)) {
|
if (torrent && statusReady(torrent.status)) {
|
||||||
return _unrestrictLink(RD, torrent, fileIndex, isBrowser);
|
return _unrestrictLink(RD, torrent, fileIndex, isBrowser);
|
||||||
} else if (torrent && statusDownloading(torrent.status)) {
|
} else if (torrent && statusDownloading(torrent.status)) {
|
||||||
console.log(`Downloading to RealDebrid ${infoHash} [${fileIndex}]...`);
|
console.log(`Downloading to RealDebrid ${infoHash} [${fileIndex}]...`);
|
||||||
return StaticResponse.DOWNLOADING;
|
return StaticResponse.DOWNLOADING;
|
||||||
} else if (torrent && statusMagnetError(torrent.status)) {
|
} else if (torrent && statusMagnetError(torrent.status)) {
|
||||||
console.log(`Failed RealDebrid opening torrent ${infoHash} [${fileIndex}] due to magnet error`);
|
console.log(`Failed RealDebrid opening torrent ${infoHash} [${fileIndex}] due to magnet error`);
|
||||||
return StaticResponse.FAILED_OPENING;
|
return StaticResponse.FAILED_OPENING;
|
||||||
} else if (torrent && statusError(torrent.status)) {
|
} else if (torrent && statusError(torrent.status)) {
|
||||||
return _retryCreateTorrent(RD, infoHash, fileIndex);
|
return _retryCreateTorrent(RD, infoHash, fileIndex);
|
||||||
} else if (torrent && (statusWaitingSelection(torrent.status) || statusOpening(torrent.status))) {
|
} else if (torrent && (statusWaitingSelection(torrent.status) || statusOpening(torrent.status))) {
|
||||||
console.log(`Trying to select files on RealDebrid ${infoHash} [${fileIndex}]...`);
|
console.log(`Trying to select files on RealDebrid ${infoHash} [${fileIndex}]...`);
|
||||||
return _selectTorrentFiles(RD, torrent)
|
return _selectTorrentFiles(RD, torrent)
|
||||||
.then(() => {
|
.then(() => {
|
||||||
console.log(`Downloading to RealDebrid ${infoHash} [${fileIndex}]...`);
|
console.log(`Downloading to RealDebrid ${infoHash} [${fileIndex}]...`);
|
||||||
return StaticResponse.DOWNLOADING
|
return StaticResponse.DOWNLOADING
|
||||||
})
|
})
|
||||||
.catch(error => {
|
.catch(error => {
|
||||||
console.log(`Failed RealDebrid opening torrent ${infoHash} [${fileIndex}]:`, error);
|
console.log(`Failed RealDebrid opening torrent ${infoHash} [${fileIndex}]:`, error);
|
||||||
return StaticResponse.FAILED_OPENING;
|
return StaticResponse.FAILED_OPENING;
|
||||||
});
|
});
|
||||||
}
|
}
|
||||||
return Promise.reject(`Failed RealDebrid adding torrent ${JSON.stringify(torrent)}`);
|
return Promise.reject(`Failed RealDebrid adding torrent ${JSON.stringify(torrent)}`);
|
||||||
}
|
}
|
||||||
|
|
||||||
async function _createOrFindTorrentId(RD, infoHash, cachedFileIds, fileIndex) {
|
async function _createOrFindTorrentId(RD, infoHash, cachedFileIds, fileIndex) {
|
||||||
return _findTorrent(RD, infoHash, fileIndex)
|
return _findTorrent(RD, infoHash, fileIndex)
|
||||||
.catch(() => _createTorrentId(RD, infoHash, cachedFileIds));
|
.catch(() => _createTorrentId(RD, infoHash, cachedFileIds));
|
||||||
}
|
}
|
||||||
|
|
||||||
async function _findTorrent(RD, infoHash, fileIndex) {
|
async function _findTorrent(RD, infoHash, fileIndex) {
|
||||||
const torrents = await RD.torrents.get(0, 1) || [];
|
const torrents = await RD.torrents.get(0, 1) || [];
|
||||||
const foundTorrents = torrents
|
const foundTorrents = torrents
|
||||||
.filter(torrent => torrent.hash.toLowerCase() === infoHash)
|
.filter(torrent => torrent.hash.toLowerCase() === infoHash)
|
||||||
.filter(torrent => !statusError(torrent.status));
|
.filter(torrent => !statusError(torrent.status));
|
||||||
const foundTorrent = await _findBestFitTorrent(RD, foundTorrents, fileIndex);
|
const foundTorrent = await _findBestFitTorrent(RD, foundTorrents, fileIndex);
|
||||||
return foundTorrent?.id || Promise.reject('No recent torrent found');
|
return foundTorrent?.id || Promise.reject('No recent torrent found');
|
||||||
}
|
}
|
||||||
|
|
||||||
async function _findBestFitTorrent(RD, torrents, fileIndex) {
|
async function _findBestFitTorrent(RD, torrents, fileIndex) {
|
||||||
if (torrents.length === 1) {
|
if (torrents.length === 1) {
|
||||||
return torrents[0];
|
return torrents[0];
|
||||||
}
|
}
|
||||||
const torrentInfos = await Promise.all(torrents.map(torrent => _getTorrentInfo(RD, torrent.id)));
|
const torrentInfos = await Promise.all(torrents.map(torrent => _getTorrentInfo(RD, torrent.id)));
|
||||||
const bestFitTorrents = torrentInfos
|
const bestFitTorrents = torrentInfos
|
||||||
.filter(torrent => torrent.files.find(f => f.id === fileIndex + 1 && f.selected))
|
.filter(torrent => torrent.files.find(f => f.id === fileIndex + 1 && f.selected))
|
||||||
.sort((a, b) => b.links.length - a.links.length);
|
.sort((a, b) => b.links.length - a.links.length);
|
||||||
return bestFitTorrents[0] || torrents[0];
|
return bestFitTorrents[0] || torrents[0];
|
||||||
}
|
}
|
||||||
|
|
||||||
async function _getTorrentInfo(RD, torrentId) {
|
async function _getTorrentInfo(RD, torrentId) {
|
||||||
if (!torrentId || typeof torrentId === 'object') {
|
if (!torrentId || typeof torrentId === 'object') {
|
||||||
return torrentId || Promise.reject('No RealDebrid torrentId provided')
|
return torrentId || Promise.reject('No RealDebrid torrentId provided')
|
||||||
}
|
}
|
||||||
return RD.torrents.info(torrentId);
|
return RD.torrents.info(torrentId);
|
||||||
}
|
}
|
||||||
|
|
||||||
async function _createTorrentId(RD, infoHash, cachedFileIds) {
|
async function _createTorrentId(RD, infoHash, cachedFileIds) {
|
||||||
const magnetLink = await getMagnetLink(infoHash);
|
const magnetLink = await getMagnetLink(infoHash);
|
||||||
const addedMagnet = await RD.torrents.addMagnet(magnetLink);
|
const addedMagnet = await RD.torrents.addMagnet(magnetLink);
|
||||||
if (cachedFileIds && !['null', 'undefined'].includes(cachedFileIds)) {
|
if (cachedFileIds && !['null', 'undefined'].includes(cachedFileIds)) {
|
||||||
await RD.torrents.selectFiles(addedMagnet.id, cachedFileIds);
|
await RD.torrents.selectFiles(addedMagnet.id, cachedFileIds);
|
||||||
}
|
}
|
||||||
return addedMagnet.id;
|
return addedMagnet.id;
|
||||||
}
|
}
|
||||||
|
|
||||||
async function _recreateTorrentId(RD, infoHash, fileIndex) {
|
async function _recreateTorrentId(RD, infoHash, fileIndex) {
|
||||||
const newTorrentId = await _createTorrentId(RD, infoHash);
|
const newTorrentId = await _createTorrentId(RD, infoHash);
|
||||||
await _selectTorrentFiles(RD, { id: newTorrentId }, fileIndex);
|
await _selectTorrentFiles(RD, { id: newTorrentId }, fileIndex);
|
||||||
return newTorrentId;
|
return newTorrentId;
|
||||||
}
|
}
|
||||||
|
|
||||||
async function _retryCreateTorrent(RD, infoHash, fileIndex) {
|
async function _retryCreateTorrent(RD, infoHash, fileIndex) {
|
||||||
console.log(`Retry failed download in RealDebrid ${infoHash} [${fileIndex}]...`);
|
console.log(`Retry failed download in RealDebrid ${infoHash} [${fileIndex}]...`);
|
||||||
const newTorrentId = await _recreateTorrentId(RD, infoHash, fileIndex);
|
const newTorrentId = await _recreateTorrentId(RD, infoHash, fileIndex);
|
||||||
const newTorrent = await _getTorrentInfo(RD, newTorrentId);
|
const newTorrent = await _getTorrentInfo(RD, newTorrentId);
|
||||||
return newTorrent && statusReady(newTorrent.status)
|
return newTorrent && statusReady(newTorrent.status)
|
||||||
? _unrestrictLink(RD, newTorrent, fileIndex)
|
? _unrestrictLink(RD, newTorrent, fileIndex)
|
||||||
: StaticResponse.FAILED_DOWNLOAD;
|
: StaticResponse.FAILED_DOWNLOAD;
|
||||||
}
|
}
|
||||||
|
|
||||||
async function _selectTorrentFiles(RD, torrent, fileIndex) {
|
async function _selectTorrentFiles(RD, torrent, fileIndex) {
|
||||||
torrent = statusWaitingSelection(torrent.status) ? torrent : await _openTorrent(RD, torrent.id);
|
torrent = statusWaitingSelection(torrent.status) ? torrent : await _openTorrent(RD, torrent.id);
|
||||||
if (torrent?.files && statusWaitingSelection(torrent.status)) {
|
if (torrent?.files && statusWaitingSelection(torrent.status)) {
|
||||||
const videoFileIds = Number.isInteger(fileIndex) ? `${fileIndex + 1}` : torrent.files
|
const videoFileIds = Number.isInteger(fileIndex) ? `${fileIndex + 1}` : torrent.files
|
||||||
.filter(file => isVideo(file.path))
|
.filter(file => isVideo(file.path))
|
||||||
.filter(file => file.bytes > MIN_SIZE)
|
.filter(file => file.bytes > MIN_SIZE)
|
||||||
.map(file => file.id)
|
.map(file => file.id)
|
||||||
.join(',');
|
.join(',');
|
||||||
return RD.torrents.selectFiles(torrent.id, videoFileIds);
|
return RD.torrents.selectFiles(torrent.id, videoFileIds);
|
||||||
}
|
}
|
||||||
return Promise.reject('Failed RealDebrid torrent file selection')
|
return Promise.reject('Failed RealDebrid torrent file selection')
|
||||||
}
|
}
|
||||||
|
|
||||||
async function _openTorrent(RD, torrentId, pollCounter = 0, pollRate = 2000, maxPollNumber = 15) {
|
async function _openTorrent(RD, torrentId, pollCounter = 0, pollRate = 2000, maxPollNumber = 15) {
|
||||||
return _getTorrentInfo(RD, torrentId)
|
return _getTorrentInfo(RD, torrentId)
|
||||||
.then(torrent => torrent && statusOpening(torrent.status) && pollCounter < maxPollNumber
|
.then(torrent => torrent && statusOpening(torrent.status) && pollCounter < maxPollNumber
|
||||||
? delay(pollRate).then(() => _openTorrent(RD, torrentId, pollCounter + 1))
|
? delay(pollRate).then(() => _openTorrent(RD, torrentId, pollCounter + 1))
|
||||||
: torrent);
|
: torrent);
|
||||||
}
|
}
|
||||||
|
|
||||||
async function _unrestrictLink(RD, torrent, fileIndex, isBrowser) {
|
async function _unrestrictLink(RD, torrent, fileIndex, isBrowser) {
|
||||||
const targetFile = torrent.files.find(file => file.id === fileIndex + 1)
|
const targetFile = torrent.files.find(file => file.id === fileIndex + 1)
|
||||||
|| torrent.files.filter(file => file.selected).sort((a, b) => b.bytes - a.bytes)[0];
|
|| torrent.files.filter(file => file.selected).sort((a, b) => b.bytes - a.bytes)[0];
|
||||||
if (!targetFile.selected) {
|
if (!targetFile.selected) {
|
||||||
console.log(`Target RealDebrid file is not downloaded: ${JSON.stringify(targetFile)}`);
|
console.log(`Target RealDebrid file is not downloaded: ${JSON.stringify(targetFile)}`);
|
||||||
await _recreateTorrentId(RD, torrent.hash.toLowerCase(), fileIndex);
|
await _recreateTorrentId(RD, torrent.hash.toLowerCase(), fileIndex);
|
||||||
return StaticResponse.DOWNLOADING;
|
return StaticResponse.DOWNLOADING;
|
||||||
}
|
}
|
||||||
|
|
||||||
const selectedFiles = torrent.files.filter(file => file.selected);
|
const selectedFiles = torrent.files.filter(file => file.selected);
|
||||||
const fileLink = torrent.links.length === 1
|
const fileLink = torrent.links.length === 1
|
||||||
? torrent.links[0]
|
? torrent.links[0]
|
||||||
: torrent.links[selectedFiles.indexOf(targetFile)];
|
: torrent.links[selectedFiles.indexOf(targetFile)];
|
||||||
|
|
||||||
if (!fileLink?.length) {
|
if (!fileLink?.length) {
|
||||||
console.log(`No RealDebrid links found for ${torrent.hash} [${fileIndex}]`);
|
console.log(`No RealDebrid links found for ${torrent.hash} [${fileIndex}]`);
|
||||||
return _retryCreateTorrent(RD, torrent.hash, fileIndex)
|
return _retryCreateTorrent(RD, torrent.hash, fileIndex)
|
||||||
}
|
}
|
||||||
|
|
||||||
return _unrestrictFileLink(RD, fileLink, torrent, fileIndex, isBrowser);
|
return _unrestrictFileLink(RD, fileLink, torrent, fileIndex, isBrowser);
|
||||||
}
|
}
|
||||||
|
|
||||||
async function _unrestrictFileLink(RD, fileLink, torrent, fileIndex, isBrowser) {
|
async function _unrestrictFileLink(RD, fileLink, torrent, fileIndex, isBrowser) {
|
||||||
return RD.unrestrict.link(fileLink)
|
return RD.unrestrict.link(fileLink)
|
||||||
.then(response => {
|
.then(response => {
|
||||||
if (isArchive(response.download)) {
|
if (isArchive(response.download)) {
|
||||||
if (torrent.files.filter(file => file.selected).length > 1) {
|
if (torrent.files.filter(file => file.selected).length > 1) {
|
||||||
return _retryCreateTorrent(RD, torrent.hash, fileIndex)
|
return _retryCreateTorrent(RD, torrent.hash, fileIndex)
|
||||||
}
|
}
|
||||||
return StaticResponse.FAILED_RAR;
|
return StaticResponse.FAILED_RAR;
|
||||||
}
|
}
|
||||||
// if (isBrowser && response.streamable) {
|
// if (isBrowser && response.streamable) {
|
||||||
// return RD.streaming.transcode(response.id)
|
// return RD.streaming.transcode(response.id)
|
||||||
// .then(streamResponse => streamResponse.apple.full)
|
// .then(streamResponse => streamResponse.apple.full)
|
||||||
// }
|
// }
|
||||||
return response.download;
|
return response.download;
|
||||||
})
|
})
|
||||||
.then(unrestrictedLink => {
|
.then(unrestrictedLink => {
|
||||||
console.log(`Unrestricted RealDebrid ${torrent.hash} [${fileIndex}] to ${unrestrictedLink}`);
|
console.log(`Unrestricted RealDebrid ${torrent.hash} [${fileIndex}] to ${unrestrictedLink}`);
|
||||||
return unrestrictedLink;
|
return unrestrictedLink;
|
||||||
})
|
})
|
||||||
.catch(error => {
|
.catch(error => {
|
||||||
if (error.code === 19) {
|
if (error.code === 19) {
|
||||||
return _retryCreateTorrent(RD, torrent.hash.toLowerCase(), fileIndex);
|
return _retryCreateTorrent(RD, torrent.hash.toLowerCase(), fileIndex);
|
||||||
}
|
}
|
||||||
return Promise.reject(error);
|
return Promise.reject(error);
|
||||||
});
|
});
|
||||||
}
|
}
|
||||||
|
|
||||||
export function toCommonError(error) {
|
export function toCommonError(error) {
|
||||||
if (error && error.code === 8) {
|
if (error && error.code === 8) {
|
||||||
return BadTokenError;
|
return BadTokenError;
|
||||||
}
|
}
|
||||||
if (error && accessDeniedError(error)) {
|
if (error && accessDeniedError(error)) {
|
||||||
return AccessDeniedError;
|
return AccessDeniedError;
|
||||||
}
|
}
|
||||||
return undefined;
|
return undefined;
|
||||||
}
|
}
|
||||||
|
|
||||||
function statusError(status) {
|
function statusError(status) {
|
||||||
return ['error', 'magnet_error'].includes(status);
|
return ['error', 'magnet_error'].includes(status);
|
||||||
}
|
}
|
||||||
|
|
||||||
function statusMagnetError(status) {
|
function statusMagnetError(status) {
|
||||||
return status === 'magnet_error';
|
return status === 'magnet_error';
|
||||||
}
|
}
|
||||||
|
|
||||||
function statusOpening(status) {
|
function statusOpening(status) {
|
||||||
return status === 'magnet_conversion';
|
return status === 'magnet_conversion';
|
||||||
}
|
}
|
||||||
|
|
||||||
function statusWaitingSelection(status) {
|
function statusWaitingSelection(status) {
|
||||||
return status === 'waiting_files_selection';
|
return status === 'waiting_files_selection';
|
||||||
}
|
}
|
||||||
|
|
||||||
function statusDownloading(status) {
|
function statusDownloading(status) {
|
||||||
return ['downloading', 'uploading', 'queued'].includes(status);
|
return ['downloading', 'uploading', 'queued'].includes(status);
|
||||||
}
|
}
|
||||||
|
|
||||||
function statusReady(status) {
|
function statusReady(status) {
|
||||||
return ['downloaded', 'dead'].includes(status);
|
return ['downloaded', 'dead'].includes(status);
|
||||||
}
|
}
|
||||||
|
|
||||||
function accessDeniedError(error) {
|
function accessDeniedError(error) {
|
||||||
return [9, 20].includes(error?.code);
|
return [9, 20].includes(error?.code);
|
||||||
}
|
}
|
||||||
|
|
||||||
function infringingFile(error) {
|
function infringingFile(error) {
|
||||||
return error && error.code === 35;
|
return error && error.code === 35;
|
||||||
}
|
}
|
||||||
|
|
||||||
async function getDefaultOptions(ip) {
|
async function getDefaultOptions(ip) {
|
||||||
return { ip, timeout: 15000 };
|
return { ip, timeout: 10000 };
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -16,13 +16,14 @@ public static class DebridMetaToTorrentMeta
        foreach (var metadataEntry in Metadata.Where(m => Filetypes.VideoFileExtensions.Any(ext => m.Value.Filename.EndsWith(ext))))
        {
            var validFileIndex = int.TryParse(metadataEntry.Key, out var fileIndex);
+           var fileIndexMinusOne = Math.Max(0, fileIndex - 1);

            var file = new TorrentFile
            {
                ImdbId = ImdbId,
                KitsuId = 0,
                InfoHash = torrent.InfoHash,
-               FileIndex = validFileIndex ? fileIndex : 0,
+               FileIndex = validFileIndex ? fileIndexMinusOne : 0,
                Title = metadataEntry.Value.Filename,
                Size = metadataEntry.Value.Filesize.GetValueOrDefault(),
            };
@@ -66,13 +67,14 @@ public static class DebridMetaToTorrentMeta
        foreach (var metadataEntry in Metadata.Where(m => Filetypes.SubtitleFileExtensions.Any(ext => m.Value.Filename.EndsWith(ext))))
        {
            var validFileIndex = int.TryParse(metadataEntry.Key, out var fileIndex);
+           var fileIndexMinusOne = Math.Max(0, fileIndex - 1);
            var fileId = torrentFiles.FirstOrDefault(
                t => Path.GetFileNameWithoutExtension(t.Title) == Path.GetFileNameWithoutExtension(metadataEntry.Value.Filename))?.Id ?? 0;

            var file = new SubtitleFile
            {
                InfoHash = InfoHash,
-               FileIndex = validFileIndex ? fileIndex : 0,
+               FileIndex = validFileIndex ? fileIndexMinusOne : 0,
                FileId = fileId,
                Title = metadataEntry.Value.Filename,
            };

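The Math.Max(0, fileIndex - 1) above converts what appear to be 1-based file indexes in the debrid metadata keys into the 0-based FileIndex that the torrent and subtitle records store, clamping a malformed "0" key rather than letting it go negative. A minimal sketch of the conversion in isolation (helper name hypothetical, not part of the change):

    // Hypothetical helper; mirrors the conversion used for both TorrentFile and SubtitleFile.
    static int ToZeroBasedFileIndex(string metadataKey) =>
        int.TryParse(metadataKey, out var fileIndex)
            ? Math.Max(0, fileIndex - 1)   // 1-based key -> 0-based index, clamped at 0
            : 0;                           // unparsable key falls back to the first file
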
@@ -1 +1 @@
-rank-torrent-name==0.2.5
+rank-torrent-name==0.2.13

@@ -0,0 +1,43 @@
+-- Drop Duplicate Files in Files Table
+DELETE FROM public.files
+WHERE id NOT IN (
+    SELECT MAX(id)
+    FROM public.files
+    GROUP BY "infoHash", "fileIndex"
+);
+
+-- Add Index to files table
+DO $$
+BEGIN
+    IF NOT EXISTS (
+        SELECT 1
+        FROM pg_constraint
+        WHERE conname = 'files_unique_infohash_fileindex'
+    ) THEN
+        ALTER TABLE public.files
+            ADD CONSTRAINT files_unique_infohash_fileindex UNIQUE ("infoHash", "fileIndex");
+    END IF;
+END $$;
+
+
+-- Drop Duplicate subtitles in Subtitles Table
+DELETE FROM public.subtitles
+WHERE id NOT IN (
+    SELECT MAX(id)
+    FROM public.subtitles
+    GROUP BY "infoHash", "fileIndex"
+);
+
+-- Add Index to subtitles table
+DO $$
+BEGIN
+    IF NOT EXISTS (
+        SELECT 1
+        FROM pg_constraint
+        WHERE conname = 'subtitles_unique_infohash_fileindex'
+    ) THEN
+        ALTER TABLE public.subtitles
+            ADD CONSTRAINT subtitles_unique_infohash_fileindex UNIQUE ("infoHash", "fileIndex");
+    END IF;
+END $$;

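Note the ordering in this migration: the DELETEs must run before the DO blocks, because Postgres will not accept a UNIQUE constraint while duplicate ("infoHash", "fileIndex") pairs still exist. Keeping MAX(id) per group retains the newest duplicate, assuming id is a monotonically increasing key, and the pg_constraint existence check keeps the script safe to re-run. Once the constraints are in place, writers can insert idempotently, e.g. with INSERT ... ON CONFLICT ("infoHash", "fileIndex") DO NOTHING.
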
@@ -4,31 +4,38 @@
    {
      "Name": "SyncEzTvJob",
      "IntervalSeconds": 60,
-     "Enabled": true
+     "Enabled": true,
+     "Url": "https://eztv1.xyz/ezrss.xml",
+     "XmlNamespace": "http://xmlns.ezrss.it/0.1/"
    },
    {
      "Name": "SyncNyaaJob",
      "IntervalSeconds": 60,
-     "Enabled": true
+     "Enabled": true,
+     "Url": "https://nyaa.si/?page=rss&c=1_2&f=0",
+     "XmlNamespace": "https://nyaa.si/xmlns/nyaa"
    },
    {
      "Name": "SyncTpbJob",
      "IntervalSeconds": 60,
-     "Enabled": true
+     "Enabled": true,
+     "Url": "https://apibay.org/precompiled/data_top100_recent.json"
    },
    {
      "Name": "SyncYtsJob",
      "IntervalSeconds": 60,
-     "Enabled": true
+     "Enabled": true,
+     "Url": "https://yts.am/rss"
    },
    {
      "Name": "SyncTgxJob",
      "IntervalSeconds": 60,
-     "Enabled": true
+     "Enabled": true,
+     "Url": "https://tgx.rs/rss"
    },
    {
      "Name": "SyncDmmJob",
-     "IntervalSeconds": 1800,
+     "IntervalSeconds": 10800,
      "Enabled": true
    },
    {

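Each entry above is keyed by its Quartz job name, so the Url and XmlNamespace a crawler reads can be changed without a rebuild; the JSON-based TPB job simply omits the namespace. The SyncDmmJob interval of 10800 seconds works out to one DMM crawl every 3 hours, up from every 30 minutes.
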
@@ -6,6 +6,12 @@ public abstract class BaseJsonCrawler(IHttpClientFactory httpClientFactory, ILog
    protected virtual async Task Execute(string collectionName)
    {
+       if (string.IsNullOrWhiteSpace(Url))
+       {
+           logger.LogWarning("No URL provided for {Source} crawl", Source);
+           return;
+       }
+
        logger.LogInformation("Starting {Source} crawl", Source);

        using var client = httpClientFactory.CreateClient("Scraper");

@@ -4,6 +4,12 @@ public abstract class BaseXmlCrawler(IHttpClientFactory httpClientFactory, ILogg
    public override async Task Execute()
    {
+       if (string.IsNullOrWhiteSpace(Url))
+       {
+           logger.LogWarning("No URL provided for {Source} crawl", Source);
+           return;
+       }
+
        logger.LogInformation("Starting {Source} crawl", Source);

        using var client = httpClientFactory.CreateClient(Literals.CrawlerClient);

@@ -7,4 +7,8 @@ public class Scraper
    public int IntervalSeconds { get; set; } = 60;

    public bool Enabled { get; set; } = true;
+
+   public string? Url { get; set; }
+
+   public string? XmlNamespace { get; set; }
}

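These two optional properties are what the per-crawler lookups later in this diff read. A minimal sketch of that lookup, assuming scrapeConfiguration.Scrapers is bound from the scrapers config section shown earlier (illustrative only; it mirrors the pattern the crawler classes below use):

    // Requires System.Xml.Linq for XNamespace; string converts to XNamespace implicitly.
    var scraper = scrapeConfiguration.Scrapers
        .FirstOrDefault(x => x.Name.Equals("SyncEzTvJob", StringComparison.OrdinalIgnoreCase));
    var url = scraper?.Url ?? string.Empty;                  // empty URL makes the base crawlers warn and skip
    XNamespace ns = scraper?.XmlNamespace ?? string.Empty;
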
src/producer/src/Features/Crawlers/Dmm/DMMFileDownloader.cs (new file, 70 lines)
@@ -0,0 +1,70 @@
+namespace Producer.Features.Crawlers.Dmm;
+
+public class DMMFileDownloader(HttpClient client, ILogger<DMMFileDownloader> logger) : IDMMFileDownloader
+{
+    private const string Filename = "main.zip";
+    private readonly IReadOnlyCollection<string> _filesToIgnore = [
+        "index.html",
+        "404.html",
+        "dedupe.sh",
+        "CNAME",
+    ];
+
+    public const string ClientName = "DmmFileDownloader";
+
+    public async Task<string> DownloadFileToTempPath(CancellationToken cancellationToken)
+    {
+        logger.LogInformation("Downloading DMM Hashlists");
+
+        var response = await client.GetAsync(Filename, cancellationToken);
+
+        var tempDirectory = Path.Combine(Path.GetTempPath(), "DMMHashlists");
+
+        EnsureDirectoryIsClean(tempDirectory);
+
+        response.EnsureSuccessStatusCode();
+
+        await using var stream = await response.Content.ReadAsStreamAsync(cancellationToken);
+        using var archive = new ZipArchive(stream);
+
+        logger.LogInformation("Extracting DMM Hashlists to {TempDirectory}", tempDirectory);
+
+        foreach (var entry in archive.Entries)
+        {
+            var entryPath = Path.Combine(tempDirectory, Path.GetFileName(entry.FullName));
+            if (!entry.FullName.EndsWith('/')) // It's a file
+            {
+                entry.ExtractToFile(entryPath, true);
+            }
+        }
+
+        foreach (var file in _filesToIgnore)
+        {
+            CleanRepoExtras(tempDirectory, file);
+        }
+
+        logger.LogInformation("Downloaded and extracted Repository to {TempDirectory}", tempDirectory);
+
+        return tempDirectory;
+    }
+
+    private static void CleanRepoExtras(string tempDirectory, string fileName)
+    {
+        var repoIndex = Path.Combine(tempDirectory, fileName);
+
+        if (File.Exists(repoIndex))
+        {
+            File.Delete(repoIndex);
+        }
+    }
+
+    private static void EnsureDirectoryIsClean(string tempDirectory)
+    {
+        if (Directory.Exists(tempDirectory))
+        {
+            Directory.Delete(tempDirectory, true);
+        }
+
+        Directory.CreateDirectory(tempDirectory);
+    }
+}

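A sketch of how this downloader is consumed (the actual call site is the DMM crawler further down this diff): the returned path is a flattened temp directory of extracted hashlist pages.

    // Illustrative only; mirrors the crawler's usage below.
    var tempDirectory = await dmmFileDownloader.DownloadFileToTempPath(CancellationToken.None);
    var files = Directory.GetFiles(tempDirectory, "*.html", SearchOption.AllDirectories);

Worth noting: extraction flattens the archive via Path.GetFileName(entry.FullName), so identically named entries in different folders would overwrite one another; that appears acceptable for the hashlists repository layout.
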
src/producer/src/Features/Crawlers/Dmm/DMMHttpClient.cs (new file, 6 lines)
@@ -0,0 +1,6 @@
+namespace Producer.Features.Crawlers.Dmm;
+
+public class DMMHttpClient
+{
+
+}

@@ -1,64 +1,99 @@
 namespace Producer.Features.Crawlers.Dmm;

 public partial class DebridMediaManagerCrawler(
-    IHttpClientFactory httpClientFactory,
+    IDMMFileDownloader dmmFileDownloader,
     ILogger<DebridMediaManagerCrawler> logger,
     IDataStorage storage,
-    GithubConfiguration githubConfiguration,
     IRankTorrentName rankTorrentName,
     IDistributedCache cache) : BaseCrawler(logger, storage)
 {
     [GeneratedRegex("""<iframe src="https:\/\/debridmediamanager.com\/hashlist#(.*)"></iframe>""")]
     private static partial Regex HashCollectionMatcher();
+    protected override string Url => "";
-    private const string DownloadBaseUrl = "https://raw.githubusercontent.com/debridmediamanager/hashlists/main";
     protected override IReadOnlyDictionary<string, string> Mappings => new Dictionary<string, string>();
-    protected override string Url => "https://api.github.com/repos/debridmediamanager/hashlists/git/trees/main?recursive=1";
     protected override string Source => "DMM";

+    private const int ParallelismCount = 4;
+
     public override async Task Execute()
     {
-        var client = httpClientFactory.CreateClient("Scraper");
-        client.DefaultRequestHeaders.Authorization = new("Bearer", githubConfiguration.PAT);
-        client.DefaultRequestHeaders.UserAgent.ParseAdd("curl");
-
-        var jsonBody = await client.GetStringAsync(Url);
-
-        var json = JsonDocument.Parse(jsonBody);
-
-        var entriesArray = json.RootElement.GetProperty("tree");
-
-        logger.LogInformation("Found {Entries} total DMM pages", entriesArray.GetArrayLength());
-
-        foreach (var entry in entriesArray.EnumerateArray())
-        {
-            await ParsePage(entry, client);
-        }
+        var tempDirectory = await dmmFileDownloader.DownloadFileToTempPath(CancellationToken.None);
+
+        var files = Directory.GetFiles(tempDirectory, "*.html", SearchOption.AllDirectories);
+
+        logger.LogInformation("Found {Files} files to parse", files.Length);
+
+        var options = new ParallelOptions { MaxDegreeOfParallelism = ParallelismCount };
+
+        await Parallel.ForEachAsync(files, options, async (file, token) =>
+        {
+            var fileName = Path.GetFileName(file);
+            var torrentDictionary = await ExtractPageContents(file, fileName);
+
+            if (torrentDictionary == null)
+            {
+                return;
+            }
+
+            await ParseTitlesWithRtn(fileName, torrentDictionary);
+            var results = await ParseTorrents(torrentDictionary);
+
+            if (results.Count <= 0)
+            {
+                return;
+            }
+
+            await InsertTorrents(results);
+            await Storage.MarkPageAsIngested(fileName, token);
+        });
     }

-    private async Task ParsePage(JsonElement entry, HttpClient client)
+    private async Task ParseTitlesWithRtn(string fileName, IDictionary<string, DmmContent> page)
     {
-        var (pageIngested, name) = await IsAlreadyIngested(entry);
-
-        if (string.IsNullOrEmpty(name) || pageIngested)
-        {
-            return;
-        }
-
-        var pageSource = await client.GetStringAsync($"{DownloadBaseUrl}/{name}");
-
-        await ExtractPageContents(pageSource, name);
+        logger.LogInformation("Parsing titles for {Page}", fileName);
+
+        var batchProcessables = page.Select(value => new RtnBatchProcessable(value.Key, value.Value.Filename)).ToList();
+        var parsedResponses = rankTorrentName.BatchParse(
+            batchProcessables.Select<RtnBatchProcessable, string>(bp => bp.Filename).ToList(), trashGarbage: false);
+
+        // Filter out unsuccessful responses and match RawTitle to requesting title
+        var successfulResponses = parsedResponses
+            .Where(response => response != null && response.Success)
+            .GroupBy(response => response.Response.RawTitle!)
+            .ToDictionary(group => group.Key, group => group.First());
+
+        var options = new ParallelOptions { MaxDegreeOfParallelism = ParallelismCount };
+
+        await Parallel.ForEachAsync(batchProcessables.Select(t => t.InfoHash), options, (infoHash, _) =>
+        {
+            if (page.TryGetValue(infoHash, out var dmmContent) &&
+                successfulResponses.TryGetValue(dmmContent.Filename, out var parsedResponse))
+            {
+                page[infoHash] = dmmContent with {ParseResponse = parsedResponse};
+            }
+
+            return ValueTask.CompletedTask;
+        });
     }

-    private async Task ExtractPageContents(string pageSource, string name)
+    private async Task<ConcurrentDictionary<string, DmmContent>?> ExtractPageContents(string filePath, string filenameOnly)
     {
+        var (pageIngested, name) = await IsAlreadyIngested(filenameOnly);
+
+        if (pageIngested)
+        {
+            return [];
+        }
+
+        var pageSource = await File.ReadAllTextAsync(filePath);
+
         var match = HashCollectionMatcher().Match(pageSource);

         if (!match.Success)
         {
             logger.LogWarning("Failed to match hash collection for {Name}", name);
-            await Storage.MarkPageAsIngested(name);
-            return;
+            await Storage.MarkPageAsIngested(filenameOnly);
+            return [];
         }

         var encodedJson = match.Groups.Values.ElementAtOrDefault(1);
@@ -66,90 +101,92 @@ public partial class DebridMediaManagerCrawler(
         if (string.IsNullOrEmpty(encodedJson?.Value))
         {
             logger.LogWarning("Failed to extract encoded json for {Name}", name);
-            return;
+            return [];
         }

-        await ProcessExtractedContentsAsTorrentCollection(encodedJson.Value, name);
-    }
-
-    private async Task ProcessExtractedContentsAsTorrentCollection(string encodedJson, string name)
-    {
-        var decodedJson = LZString.DecompressFromEncodedURIComponent(encodedJson);
+        var decodedJson = LZString.DecompressFromEncodedURIComponent(encodedJson.Value);

         var json = JsonDocument.Parse(decodedJson);

+        var torrents = await json.RootElement.EnumerateArray()
+            .ToAsyncEnumerable()
+            .Select(ParsePageContent)
+            .Where(t => t is not null)
+            .ToListAsync();

-        await InsertTorrentsForPage(json);
-
-        var result = await Storage.MarkPageAsIngested(name);
-
-        if (!result.IsSuccess)
+        if (torrents.Count == 0)
         {
-            logger.LogWarning("Failed to mark page as ingested: [{Error}]", result.Failure.ErrorMessage);
-            return;
+            logger.LogWarning("No torrents found in {Name}", name);
+            await Storage.MarkPageAsIngested(filenameOnly);
+            return [];
         }

+        var torrentDictionary = torrents
+            .Where(x => x is not null)
+            .GroupBy(x => x.InfoHash)
+            .ToConcurrentDictionary(g => g.Key, g => new DmmContent(g.First().Filename, g.First().Bytes, null));

-        logger.LogInformation("Successfully marked page as ingested");
+        logger.LogInformation("Parsed {Torrents} torrents for {Name}", torrentDictionary.Count, name);
+
+        return torrentDictionary;
     }

-    private async Task<IngestedTorrent?> ParseTorrent(JsonElement item)
+    private async Task<List<IngestedTorrent>> ParseTorrents(IDictionary<string, DmmContent> page)
     {
-        if (!item.TryGetProperty("filename", out var filenameElement) ||
-            !item.TryGetProperty("bytes", out var bytesElement) ||
-            !item.TryGetProperty("hash", out var hashElement))
-        {
-            return null;
-        }
-
-        var torrentTitle = filenameElement.GetString();
-
-        if (torrentTitle.IsNullOrEmpty())
-        {
-            return null;
-        }
-
-        var parsedTorrent = rankTorrentName.Parse(torrentTitle);
-
-        if (!parsedTorrent.Success)
-        {
-            return null;
-        }
-
-        var torrentType = parsedTorrent.Response.IsMovie ? "movie" : "tvSeries";
-
-        var cacheKey = GetCacheKey(torrentType, parsedTorrent.Response.ParsedTitle, parsedTorrent.Response.Year);
-
-        var (cached, cachedResult) = await CheckIfInCacheAndReturn(cacheKey);
-
-        if (cached)
-        {
-            logger.LogInformation("[{ImdbId}] Found cached imdb result for {Title}", cachedResult.ImdbId, parsedTorrent.Response.ParsedTitle);
-            return MapToTorrent(cachedResult, bytesElement, hashElement, parsedTorrent);
-        }
+        var ingestedTorrents = new List<IngestedTorrent>();
+
+        var options = new ParallelOptions { MaxDegreeOfParallelism = ParallelismCount };
+
+        await Parallel.ForEachAsync(page, options, async (kvp, ct) =>
+        {
+            var (infoHash, dmmContent) = kvp;
+            var parsedTorrent = dmmContent.ParseResponse;
+            if (parsedTorrent is not {Success: true})
+            {
+                return;
+            }
+
+            var torrentType = parsedTorrent.Response.IsMovie ? "movie" : "tvSeries";
+            var cacheKey = GetCacheKey(torrentType, parsedTorrent.Response.ParsedTitle, parsedTorrent.Response.Year);
+            var (cached, cachedResult) = await CheckIfInCacheAndReturn(cacheKey);
+
+            if (cached)
+            {
+                logger.LogInformation("[{ImdbId}] Found cached imdb result for {Title}", cachedResult.ImdbId, parsedTorrent.Response.ParsedTitle);
+                lock (ingestedTorrents)
+                {
+                    ingestedTorrents.Add(MapToTorrent(cachedResult, dmmContent.Bytes, infoHash, parsedTorrent));
+                }
+                return;
+            }

-        int? year = parsedTorrent.Response.Year != 0 ? parsedTorrent.Response.Year : null;
-        var imdbEntry = await Storage.FindImdbMetadata(parsedTorrent.Response.ParsedTitle, torrentType, year);
+            int? year = parsedTorrent.Response.Year != 0 ? parsedTorrent.Response.Year : null;
+            var imdbEntry = await Storage.FindImdbMetadata(parsedTorrent.Response.ParsedTitle, torrentType, year, ct);

-        if (imdbEntry is null)
-        {
-            return null;
-        }
+            if (imdbEntry is null)
+            {
+                return;
+            }

-        await AddToCache(cacheKey, imdbEntry);
-
-        logger.LogInformation("[{ImdbId}] Found best match for {Title}: {BestMatch} with score {Score}", imdbEntry.ImdbId, parsedTorrent.Response.ParsedTitle, imdbEntry.Title, imdbEntry.Score);
-
-        return MapToTorrent(imdbEntry, bytesElement, hashElement, parsedTorrent);
+            await AddToCache(cacheKey, imdbEntry);
+            logger.LogInformation("[{ImdbId}] Found best match for {Title}: {BestMatch} with score {Score}", imdbEntry.ImdbId, parsedTorrent.Response.ParsedTitle, imdbEntry.Title, imdbEntry.Score);
+            lock (ingestedTorrents)
+            {
+                ingestedTorrents.Add(MapToTorrent(imdbEntry, dmmContent.Bytes, infoHash, parsedTorrent));
+            }
+        });
+
+        return ingestedTorrents;
     }

-    private IngestedTorrent MapToTorrent(ImdbEntry result, JsonElement bytesElement, JsonElement hashElement, ParseTorrentTitleResponse parsedTorrent) =>
+    private IngestedTorrent MapToTorrent(ImdbEntry result, long size, string infoHash, ParseTorrentTitleResponse parsedTorrent) =>
         new()
         {
             Source = Source,
             Name = result.Title,
             Imdb = result.ImdbId,
-            Size = bytesElement.GetInt64().ToString(),
-            InfoHash = hashElement.ToString(),
+            Size = size.ToString(),
+            InfoHash = infoHash,
             Seeders = 0,
             Leechers = 0,
             Category = AssignCategory(result),
@@ -179,35 +216,11 @@ public partial class DebridMediaManagerCrawler(
         return (false, null);
     }

-    private async Task InsertTorrentsForPage(JsonDocument json)
+    private async Task<(bool Success, string? Name)> IsAlreadyIngested(string filename)
     {
-        var torrents = await json.RootElement.EnumerateArray()
-            .ToAsyncEnumerable()
-            .SelectAwait(async x => await ParseTorrent(x))
-            .Where(t => t is not null)
-            .ToListAsync();
-
-        if (torrents.Count == 0)
-        {
-            logger.LogWarning("No torrents found in {Source} response", Source);
-            return;
-        }
-
-        await InsertTorrents(torrents!);
-    }
-
-    private async Task<(bool Success, string? Name)> IsAlreadyIngested(JsonElement entry)
-    {
-        var name = entry.GetProperty("path").GetString();
-
-        if (string.IsNullOrEmpty(name))
-        {
-            return (false, null);
-        }
-
-        var pageIngested = await Storage.PageIngested(name);
-
-        return (pageIngested, name);
+        var pageIngested = await Storage.PageIngested(filename);
+
+        return (pageIngested, filename);
     }

     private static string AssignCategory(ImdbEntry entry) =>
@@ -219,4 +232,20 @@ public partial class DebridMediaManagerCrawler(
         };

     private static string GetCacheKey(string category, string title, int year) => $"{category.ToLowerInvariant()}:{year}:{title.ToLowerInvariant()}";
+
+    private static ExtractedDMMContent? ParsePageContent(JsonElement item)
+    {
+        if (!item.TryGetProperty("filename", out var filenameElement) ||
+            !item.TryGetProperty("bytes", out var bytesElement) ||
+            !item.TryGetProperty("hash", out var hashElement))
+        {
+            return null;
+        }
+
+        return new(filenameElement.GetString(), bytesElement.GetInt64(), hashElement.GetString());
+    }
+
+    private record DmmContent(string Filename, long Bytes, ParseTorrentTitleResponse? ParseResponse);
+    private record ExtractedDMMContent(string Filename, long Bytes, string InfoHash);
+    private record RtnBatchProcessable(string InfoHash, string Filename);
 }

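One pattern worth noting in the rewritten ParseTorrents: results from the Parallel.ForEachAsync workers are collected into a plain List<IngestedTorrent> guarded by lock (ingestedTorrents), which is needed because List<T> is not thread-safe. A lock-free equivalent of the same shape, for comparison (illustrative sketch only; TryMapAsync is a hypothetical helper standing in for the cache/IMDb lookup logic above):

    // Requires System.Collections.Concurrent; ConcurrentBag tolerates concurrent Add calls.
    var results = new ConcurrentBag<IngestedTorrent>();
    await Parallel.ForEachAsync(page, options, async (kvp, ct) =>
    {
        var torrent = await TryMapAsync(kvp, ct); // hypothetical helper
        if (torrent is not null)
        {
            results.Add(torrent);
        }
    });
    return results.ToList();
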
@@ -1,9 +0,0 @@
-namespace Producer.Features.Crawlers.Dmm;
-
-public class GithubConfiguration
-{
-    private const string Prefix = "GITHUB";
-    private const string PatVariable = "PAT";
-
-    public string? PAT { get; init; } = Prefix.GetOptionalEnvironmentVariableAsString(PatVariable);
-}

@@ -0,0 +1,6 @@
+namespace Producer.Features.Crawlers.Dmm;
+
+public interface IDMMFileDownloader
+{
+    Task<string> DownloadFileToTempPath(CancellationToken cancellationToken);
+}

@@ -0,0 +1,16 @@
+namespace Producer.Features.Crawlers.Dmm;
+
+public static class ServiceCollectionExtensions
+{
+    public static IServiceCollection AddDmmSupport(this IServiceCollection services)
+    {
+        services.AddHttpClient<IDMMFileDownloader, DMMFileDownloader>(DMMFileDownloader.ClientName, client =>
+        {
+            client.BaseAddress = new("https://github.com/debridmediamanager/hashlists/zipball/main/");
+            client.DefaultRequestHeaders.Add("Accept-Encoding", "gzip");
+            client.DefaultRequestHeaders.UserAgent.ParseAdd("curl");
+        });
+
+        return services;
+    }
+}

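This extension registers a typed HttpClient whose BaseAddress the downloader's relative "main.zip" request resolves against. Presumably it is wired up once at the producer's composition root; a sketch of that call, assuming the usual host builder (the exact bootstrap may differ):

    // Hypothetical wiring at startup; only the AddDmmSupport call comes from this diff.
    services.AddDmmSupport();
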
@@ -1,11 +1,10 @@
 namespace Producer.Features.Crawlers.EzTv;

-public class EzTvCrawler(IHttpClientFactory httpClientFactory, ILogger<EzTvCrawler> logger, IDataStorage storage) : BaseXmlCrawler(httpClientFactory, logger, storage)
+public class EzTvCrawler(IHttpClientFactory httpClientFactory, ILogger<EzTvCrawler> logger, IDataStorage storage, ScrapeConfiguration scrapeConfiguration) : BaseXmlCrawler(httpClientFactory, logger, storage)
 {
-    protected override string Url => "https://eztv1.xyz/ezrss.xml";
+    protected override string Url => scrapeConfiguration.Scrapers.FirstOrDefault(x => x.Name.Equals("SyncEzTvJob", StringComparison.OrdinalIgnoreCase))?.Url ?? string.Empty;
     protected override string Source => "EZTV";
+    private XNamespace XmlNamespace => scrapeConfiguration.Scrapers.FirstOrDefault(x => x.Name.Equals("SyncEzTvJob", StringComparison.OrdinalIgnoreCase))?.XmlNamespace ?? string.Empty;
-    private static readonly XNamespace XmlNamespace = "http://xmlns.ezrss.it/0.1/";

     protected override IReadOnlyDictionary<string, string> Mappings =>
         new Dictionary<string, string>

@@ -1,11 +1,10 @@
 namespace Producer.Features.Crawlers.Nyaa;

-public class NyaaCrawler(IHttpClientFactory httpClientFactory, ILogger<NyaaCrawler> logger, IDataStorage storage) : BaseXmlCrawler(httpClientFactory, logger, storage)
+public class NyaaCrawler(IHttpClientFactory httpClientFactory, ILogger<NyaaCrawler> logger, IDataStorage storage, ScrapeConfiguration scrapeConfiguration) : BaseXmlCrawler(httpClientFactory, logger, storage)
 {
-    protected override string Url => "https://nyaa.si/?page=rss&c=1_2&f=0";
+    protected override string Url => scrapeConfiguration.Scrapers.FirstOrDefault(x => x.Name.Equals("SyncNyaaJob", StringComparison.OrdinalIgnoreCase))?.Url ?? string.Empty;
     protected override string Source => "Nyaa";
+    private XNamespace XmlNamespace => scrapeConfiguration.Scrapers.FirstOrDefault(x => x.Name.Equals("SyncNyaaJob", StringComparison.OrdinalIgnoreCase))?.XmlNamespace ?? string.Empty;
-    private static readonly XNamespace XmlNamespace = "https://nyaa.si/xmlns/nyaa";

     protected override IReadOnlyDictionary<string, string> Mappings =>
         new Dictionary<string, string>

@@ -1,13 +1,13 @@
 namespace Producer.Features.Crawlers.Tgx;

-public partial class TgxCrawler(IHttpClientFactory httpClientFactory, ILogger<TgxCrawler> logger, IDataStorage storage) : BaseXmlCrawler(httpClientFactory, logger, storage)
+public partial class TgxCrawler(IHttpClientFactory httpClientFactory, ILogger<TgxCrawler> logger, IDataStorage storage, ScrapeConfiguration scrapeConfiguration) : BaseXmlCrawler(httpClientFactory, logger, storage)
 {
     [GeneratedRegex(@"Size:\s+(.+?)\s+Added")]
     private static partial Regex SizeStringExtractor();
     [GeneratedRegex(@"(?i)\b(\d+(\.\d+)?)\s*([KMGT]?B)\b", RegexOptions.None, "en-GB")]
     private static partial Regex SizeStringParser();

-    protected override string Url => "https://tgx.rs/rss";
+    protected override string Url => scrapeConfiguration.Scrapers.FirstOrDefault(x => x.Name.Equals("SyncTgxJob", StringComparison.OrdinalIgnoreCase))?.Url ?? string.Empty;

     protected override string Source => "TorrentGalaxy";
     protected override IReadOnlyDictionary<string, string> Mappings

@@ -1,8 +1,8 @@
|
|||||||
namespace Producer.Features.Crawlers.Tpb;
|
namespace Producer.Features.Crawlers.Tpb;
|
||||||
|
|
||||||
public class TpbCrawler(IHttpClientFactory httpClientFactory, ILogger<TpbCrawler> logger, IDataStorage storage) : BaseJsonCrawler(httpClientFactory, logger, storage)
|
public class TpbCrawler(IHttpClientFactory httpClientFactory, ILogger<TpbCrawler> logger, IDataStorage storage, ScrapeConfiguration scrapeConfiguration) : BaseJsonCrawler(httpClientFactory, logger, storage)
|
||||||
{
|
{
|
||||||
protected override string Url => "https://apibay.org/precompiled/data_top100_recent.json";
|
protected override string Url => scrapeConfiguration.Scrapers.FirstOrDefault(x => x.Name.Equals("SyncTpbJob", StringComparison.OrdinalIgnoreCase))?.Url ?? string.Empty;
|
||||||
|
|
||||||
protected override string Source => "TPB";
|
protected override string Source => "TPB";
|
||||||
|
|
||||||
@@ -1,9 +1,8 @@
 namespace Producer.Features.Crawlers.Yts;

-public class YtsCrawler(IHttpClientFactory httpClientFactory, ILogger<YtsCrawler> logger, IDataStorage storage) : BaseXmlCrawler(httpClientFactory, logger, storage)
+public class YtsCrawler(IHttpClientFactory httpClientFactory, ILogger<YtsCrawler> logger, IDataStorage storage, ScrapeConfiguration scrapeConfiguration) : BaseXmlCrawler(httpClientFactory, logger, storage)
 {
-    protected override string Url => "https://yts.am/rss";
+    protected override string Url => scrapeConfiguration.Scrapers.FirstOrDefault(x => x.Name.Equals("SyncYtsJob", StringComparison.OrdinalIgnoreCase))?.Url ?? string.Empty;

     protected override string Source => "YTS";
     protected override IReadOnlyDictionary<string, string> Mappings
         => new Dictionary<string, string>
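All four crawlers now resolve their feed URL (and, for Nyaa, the XML namespace) from `ScrapeConfiguration` instead of hard-coded constants, keyed by job name. A minimal sketch of the configuration shape those lookups assume; everything beyond the `Scrapers`, `Name`, `Url`, and `XmlNamespace` members visible in the diff is illustrative:

```csharp
// Illustrative only: the real ScrapeConfiguration lives elsewhere in the repo;
// these classes simply mirror the members the crawler lookups above rely on.
public class ScrapeConfiguration
{
    public const string SectionName = "ScrapeConfiguration"; // assumed section key
    public List<ScraperDefinition> Scrapers { get; init; } = [];
}

public class ScraperDefinition
{
    public string Name { get; init; } = string.Empty;         // e.g. "SyncNyaaJob"
    public string Url { get; init; } = string.Empty;          // feed endpoint
    public string XmlNamespace { get; init; } = string.Empty; // only used by XML feeds such as Nyaa
}
```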
@@ -5,7 +5,6 @@ internal static class ServiceCollectionExtensions
     internal static IServiceCollection AddQuartz(this IServiceCollection services, IConfiguration configuration)
     {
         var scrapeConfiguration = services.LoadConfigurationFromConfig<ScrapeConfiguration>(configuration, ScrapeConfiguration.SectionName);
-        var githubConfiguration = services.LoadConfigurationFromEnv<GithubConfiguration>();
         var rabbitConfiguration = services.LoadConfigurationFromEnv<RabbitMqConfiguration>();

         var jobTypes = Assembly.GetAssembly(typeof(BaseJob))
@@ -19,18 +18,13 @@ internal static class ServiceCollectionExtensions
             services.AddTransient(type);
         }

-        if (!string.IsNullOrEmpty(githubConfiguration.PAT))
-        {
-            services.AddTransient<SyncDmmJob>();
-        }
-
         var openMethod = typeof(ServiceCollectionExtensions).GetMethod(nameof(AddJobWithTrigger), BindingFlags.NonPublic | BindingFlags.Static | BindingFlags.Instance);

         services.AddQuartz(
             quartz =>
             {
                 RegisterAutomaticRegistrationJobs(jobTypes, openMethod, quartz, scrapeConfiguration);
-                RegisterDmmJob(githubConfiguration, quartz, scrapeConfiguration);
+                RegisterDmmJob(quartz, scrapeConfiguration);
                 RegisterTorrentioJob(services, quartz, configuration, scrapeConfiguration);
                 RegisterPublisher(quartz, rabbitConfiguration);
             });
@@ -64,13 +58,8 @@ internal static class ServiceCollectionExtensions
         }
     }

-    private static void RegisterDmmJob(GithubConfiguration githubConfiguration, IServiceCollectionQuartzConfigurator quartz, ScrapeConfiguration scrapeConfiguration)
-    {
-        if (!string.IsNullOrEmpty(githubConfiguration.PAT))
-        {
-            AddJobWithTrigger<SyncDmmJob>(quartz, SyncDmmJob.Key, SyncDmmJob.Trigger, scrapeConfiguration);
-        }
-    }
+    private static void RegisterDmmJob(IServiceCollectionQuartzConfigurator quartz, ScrapeConfiguration scrapeConfiguration) =>
+        AddJobWithTrigger<SyncDmmJob>(quartz, SyncDmmJob.Key, SyncDmmJob.Trigger, scrapeConfiguration);

     private static void RegisterTorrentioJob(
         IServiceCollection services,
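For the auto-registered jobs, `RegisterAutomaticRegistrationJobs` presumably closes the open generic `AddJobWithTrigger` method fetched above for each discovered job type. A sketch of that reflection pattern; the static `Key`/`Trigger` fields are assumptions modeled on `SyncDmmJob.Key` and `SyncDmmJob.Trigger` in the diff:

```csharp
// Sketch only, not the repo's actual implementation.
private static void RegisterAutomaticRegistrationJobsSketch(
    IEnumerable<Type> jobTypes,
    MethodInfo openMethod,
    IServiceCollectionQuartzConfigurator quartz,
    ScrapeConfiguration scrapeConfiguration)
{
    foreach (var jobType in jobTypes)
    {
        // Key/Trigger statics are assumed to exist on every job type.
        var key = jobType.GetField("Key", BindingFlags.Public | BindingFlags.Static)?.GetValue(null);
        var trigger = jobType.GetField("Trigger", BindingFlags.Public | BindingFlags.Static)?.GetValue(null);

        // Close AddJobWithTrigger<TJob> for this concrete job and invoke it statically.
        openMethod.MakeGenericMethod(jobType).Invoke(null, [quartz, key, trigger, scrapeConfiguration]);
    }
}
```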
@@ -1,12 +1,12 @@
 // Global using directives

+global using System.Collections.Concurrent;
+global using System.IO.Compression;
 global using System.Reflection;
 global using System.Text;
 global using System.Text.Json;
 global using System.Text.RegularExpressions;
 global using System.Xml.Linq;
-global using FuzzySharp;
-global using FuzzySharp.Extractor;
 global using FuzzySharp.PreProcess;
 global using FuzzySharp.SimilarityRatio.Scorer;
 global using FuzzySharp.SimilarityRatio.Scorer.StrategySensitive;
@@ -12,7 +12,8 @@ builder.Services
     .RegisterMassTransit()
     .AddDataStorage()
     .AddCrawlers()
+    .AddDmmSupport()
     .AddQuartz(builder.Configuration);

 var app = builder.Build();
 app.Run();
@@ -1 +1 @@
-rank-torrent-name==0.2.5
+rank-torrent-name==0.2.13
@@ -44,6 +44,7 @@ public static class ServiceCollectionExtensions
 {
     var rabbitConfiguration = services.LoadConfigurationFromEnv<RabbitMqConfiguration>();
     var redisConfiguration = services.LoadConfigurationFromEnv<RedisConfiguration>();
+    var qbitConfiguration = services.LoadConfigurationFromEnv<QbitConfiguration>();

     services.AddStackExchangeRedisCache(
         option =>
@@ -80,8 +81,8 @@ public static class ServiceCollectionExtensions
             e.ConfigureConsumer<WriteQbitMetadataConsumer>(context);
             e.ConfigureConsumer<PerformQbitMetadataRequestConsumer>(context);
             e.ConfigureSaga<QbitMetadataSagaState>(context);
-            e.ConcurrentMessageLimit = 5;
-            e.PrefetchCount = 5;
+            e.ConcurrentMessageLimit = qbitConfiguration.Concurrency;
+            e.PrefetchCount = qbitConfiguration.Concurrency;
         });
     });
 });
@@ -98,7 +99,7 @@ public static class ServiceCollectionExtensions
         cfg.UseTimeout(
             timeout =>
             {
-                timeout.Timeout = TimeSpan.FromMinutes(1);
+                timeout.Timeout = TimeSpan.FromMinutes(3);
             });
     })
     .RedisRepository(redisConfiguration.ConnectionString, options =>
@@ -110,7 +111,7 @@ public static class ServiceCollectionExtensions
 {
     var qbitConfiguration = services.LoadConfigurationFromEnv<QbitConfiguration>();
     var client = new QBittorrentClient(new(qbitConfiguration.Host));
-    client.Timeout = TimeSpan.FromSeconds(10);
+    client.Timeout = TimeSpan.FromSeconds(20);

     services.AddSingleton<IQBittorrentClient>(client);
 }
@@ -1,6 +1,6 @@
 namespace QBitCollector.Features.Qbit;

-public class QbitRequestProcessor(IQBittorrentClient client, ITrackersService trackersService, ILogger<QbitRequestProcessor> logger)
+public class QbitRequestProcessor(IQBittorrentClient client, ITrackersService trackersService, ILogger<QbitRequestProcessor> logger, QbitConfiguration configuration)
 {
     public async Task<IReadOnlyList<TorrentContent>?> ProcessAsync(string infoHash, CancellationToken cancellationToken = default)
     {
@@ -14,7 +14,7 @@ public class QbitRequestProcessor(IQBittorrentClient client, ITrackersService tr

         using var timeoutCts = CancellationTokenSource.CreateLinkedTokenSource(cancellationToken);

-        timeoutCts.CancelAfter(TimeSpan.FromSeconds(30));
+        timeoutCts.CancelAfter(TimeSpan.FromSeconds(60));

         try
         {
@@ -30,7 +30,7 @@ public class QbitRequestProcessor(IQBittorrentClient client, ITrackersService tr
                 break;
             }

-            await Task.Delay(TimeSpan.FromSeconds(1), timeoutCts.Token);
+            await Task.Delay(TimeSpan.FromMilliseconds(200), timeoutCts.Token);
         }
     }
     catch (OperationCanceledException) when (timeoutCts.IsCancellationRequested)
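The processor now waits up to 60 seconds overall but probes every 200 ms instead of once a second, so hits return sooner. Stripped to its bones, this is a linked-token poll loop; the standalone sketch below substitutes a hypothetical `probe` delegate for the real qBittorrent lookup:

```csharp
using System;
using System.Threading;
using System.Threading.Tasks;

static async Task<bool> PollUntilReadyAsync(Func<Task<bool>> probe, CancellationToken cancellationToken)
{
    // Link the caller's token so either source can stop the loop, then add a local budget.
    using var timeoutCts = CancellationTokenSource.CreateLinkedTokenSource(cancellationToken);
    timeoutCts.CancelAfter(TimeSpan.FromSeconds(60));

    try
    {
        while (true)
        {
            if (await probe())
            {
                return true; // the awaited state arrived within the budget
            }

            await Task.Delay(TimeSpan.FromMilliseconds(200), timeoutCts.Token);
        }
    }
    catch (OperationCanceledException) when (timeoutCts.IsCancellationRequested && !cancellationToken.IsCancellationRequested)
    {
        return false; // local 60s budget exhausted; the caller did not cancel
    }
}
```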
@@ -5,7 +5,10 @@ public class QbitConfiguration
     private const string Prefix = "QBIT";
     private const string HOST_VARIABLE = "HOST";
     private const string TRACKERS_URL_VARIABLE = "TRACKERS_URL";
+    private const string CONCURRENCY_VARIABLE = "CONCURRENCY";

     public string? Host { get; init; } = Prefix.GetRequiredEnvironmentVariableAsString(HOST_VARIABLE);
     public string? TrackersUrl { get; init; } = Prefix.GetRequiredEnvironmentVariableAsString(TRACKERS_URL_VARIABLE);
+
+    public int Concurrency { get; init; } = Prefix.GetEnvironmentVariableAsInt(CONCURRENCY_VARIABLE, 8);
 }
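Assuming the repo's helpers compose variable names as `{Prefix}_{NAME}` (consistent with `QBIT` + `HOST`), the new knob is read from `QBIT_CONCURRENCY` and defaults to 8, then flows into the MassTransit `ConcurrentMessageLimit`/`PrefetchCount` above. A plain-BCL equivalent of that lookup:

```csharp
// Hedged equivalent of Prefix.GetEnvironmentVariableAsInt("CONCURRENCY", 8),
// assuming the QBIT_ prefix convention used by the other variables.
var raw = Environment.GetEnvironmentVariable("QBIT_CONCURRENCY");
var concurrency = int.TryParse(raw, out var parsed) ? parsed : 8;
```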
@@ -5,6 +5,12 @@ public class WriteQbitMetadataConsumer(IRankTorrentName rankTorrentName, IDataSt
     public async Task Consume(ConsumeContext<WriteQbitMetadata> context)
     {
         var request = context.Message;

+        if (request.Metadata.Metadata.Count == 0)
+        {
+            await context.Publish(new QbitMetadataWritten(request.Metadata, false));
+            return;
+        }
+
         var torrentFiles = QbitMetaToTorrentMeta.MapMetadataToFilesCollection(
             rankTorrentName, request.Torrent, request.ImdbId, request.Metadata.Metadata, logger);
@@ -1 +1 @@
-rank-torrent-name==0.2.5
+rank-torrent-name==0.2.13
@@ -152,7 +152,8 @@ public class DapperDataStorage(PostgresConfiguration configuration, RabbitMqConf
             INSERT INTO files
             ("infoHash", "fileIndex", title, "size", "imdbId", "imdbSeason", "imdbEpisode", "kitsuId", "kitsuEpisode", "createdAt", "updatedAt")
             VALUES
-            (@InfoHash, @FileIndex, @Title, @Size, @ImdbId, @ImdbSeason, @ImdbEpisode, @KitsuId, @KitsuEpisode, Now(), Now());
+            (@InfoHash, @FileIndex, @Title, @Size, @ImdbId, @ImdbSeason, @ImdbEpisode, @KitsuId, @KitsuEpisode, Now(), Now())
+            ON CONFLICT ("infoHash", "fileIndex") DO NOTHING;
             """;

         await connection.ExecuteAsync(query, files);
@@ -167,7 +168,8 @@ public class DapperDataStorage(PostgresConfiguration configuration, RabbitMqConf
             INSERT INTO subtitles
             ("infoHash", "fileIndex", "fileId", "title")
             VALUES
-            (@InfoHash, @FileIndex, @FileId, @Title);
+            (@InfoHash, @FileIndex, @FileId, @Title)
+            ON CONFLICT ("infoHash", "fileIndex") DO NOTHING;
             """;

         await connection.ExecuteAsync(query, subtitles);
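`ON CONFLICT … DO NOTHING` turns replayed inserts into no-ops instead of unique-constraint failures; note it presupposes a unique index over `("infoHash", "fileIndex")`. A minimal standalone Dapper sketch of the same pattern, with the connection string and row values as placeholders:

```csharp
using Dapper;
using Npgsql;

await using var connection = new NpgsqlConnection("Host=postgres;Database=knightcrawler;..."); // placeholder

const string query =
    """
    INSERT INTO files ("infoHash", "fileIndex", title)
    VALUES (@InfoHash, @FileIndex, @Title)
    ON CONFLICT ("infoHash", "fileIndex") DO NOTHING;
    """;

// Re-running this with the same key pair affects zero rows instead of throwing
// a 23505 unique-violation error.
await connection.ExecuteAsync(query, new { InfoHash = "abc123", FileIndex = 0, Title = "example.mkv" });
```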
src/shared/Extensions/DictionaryExtensions.cs (new file, 19 lines)
@@ -0,0 +1,19 @@
+namespace SharedContracts.Extensions;
+
+public static class DictionaryExtensions
+{
+    public static ConcurrentDictionary<TKey, TValue> ToConcurrentDictionary<TSource, TKey, TValue>(
+        this IEnumerable<TSource> source,
+        Func<TSource, TKey> keySelector,
+        Func<TSource, TValue> valueSelector) where TKey : notnull
+    {
+        var concurrentDictionary = new ConcurrentDictionary<TKey, TValue>();
+
+        foreach (var element in source)
+        {
+            concurrentDictionary.TryAdd(keySelector(element), valueSelector(element));
+        }
+
+        return concurrentDictionary;
+    }
+}
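Unlike LINQ's `ToDictionary`, this helper silently keeps the first value seen for a duplicate key (`TryAdd` returns false rather than throwing), which suits merging scraped data. A hypothetical usage:

```csharp
// Hypothetical input; the duplicate key is dropped rather than raising ArgumentException.
var trackers = new[]
{
    (Url: "udp://tracker.example:1337", Enabled: true),
    (Url: "udp://tracker.example:1337", Enabled: false), // ignored: key already present
}.ToConcurrentDictionary(t => t.Url, t => t.Enabled);

Console.WriteLine(trackers["udp://tracker.example:1337"]); // True
```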
@@ -1,5 +1,6 @@
 // Global using directives

+global using System.Collections.Concurrent;
 global using System.Text.Json;
 global using System.Text.Json.Serialization;
 global using System.Text.RegularExpressions;
@@ -2,5 +2,6 @@ namespace SharedContracts.Python.RTN;

 public interface IRankTorrentName
 {
-    ParseTorrentTitleResponse Parse(string title, bool trashGarbage = true);
+    ParseTorrentTitleResponse Parse(string title, bool trashGarbage = true, bool logErrors = false, bool throwOnErrors = false);
+    List<ParseTorrentTitleResponse?> BatchParse(IReadOnlyCollection<string> titles, int chunkSize = 500, int workers = 20, bool trashGarbage = true, bool logErrors = false, bool throwOnErrors = false);
 }
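A hedged usage sketch of the widened contract; `rankTorrentName` is assumed to be an injected `IRankTorrentName`, and the response member names (`Success`, `Response`) are inferred from the `new(false, null)` / `new(true, response)` constructions in the implementation below:

```csharp
// Sketch: parse one page of scraped titles in a single Python round-trip.
IReadOnlyCollection<string> titles = ["Show.S01E01.1080p.WEB-DL.x264", "Movie.2023.2160p.BluRay"];
var results = rankTorrentName.BatchParse(titles, chunkSize: 500, workers: 20);

foreach (var result in results)
{
    if (result is { Success: true, Response: not null }) // member names assumed
    {
        Console.WriteLine(result.Response.IsMovie);
    }
}
```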
@@ -12,41 +12,102 @@ public class RankTorrentName : IRankTorrentName
         _pythonEngineService = pythonEngineService;
         InitModules();
     }

-    public ParseTorrentTitleResponse Parse(string title, bool trashGarbage = true) =>
-        _pythonEngineService.ExecutePythonOperationWithDefault(
-            () =>
-            {
-                var result = _rtn?.parse(title, trashGarbage);
-                return ParseResult(result);
-            }, new ParseTorrentTitleResponse(false, null), nameof(Parse), throwOnErrors: false, logErrors: false);

-    private static ParseTorrentTitleResponse ParseResult(dynamic result)
+    public ParseTorrentTitleResponse Parse(string title, bool trashGarbage = true, bool logErrors = false, bool throwOnErrors = false)
     {
-        if (result == null)
+        try
+        {
+            using var gil = Py.GIL();
+            var result = _rtn?.parse(title, trashGarbage);
+            return ParseResult(result);
+        }
+        catch (Exception ex)
+        {
+            if (logErrors)
+            {
+                _pythonEngineService.Logger.LogError(ex, "Python Error: {Message} ({OperationName})", ex.Message, nameof(Parse));
+            }
+
+            if (throwOnErrors)
+            {
+                throw;
+            }
+
+            return new(false, null);
+        }
+    }
+
+    public List<ParseTorrentTitleResponse?> BatchParse(IReadOnlyCollection<string> titles, int chunkSize = 500, int workers = 20, bool trashGarbage = true, bool logErrors = false, bool throwOnErrors = false)
+    {
+        var responses = new List<ParseTorrentTitleResponse?>();
+
+        try
+        {
+            if (titles.Count == 0)
+            {
+                return responses;
+            }
+
+            using var gil = Py.GIL();
+            var pythonList = new PyList(titles.Select(x => new PyString(x).As<PyObject>()).ToArray());
+            PyList results = _rtn?.batch_parse(pythonList, trashGarbage, chunkSize, workers);
+
+            if (results == null)
+            {
+                return responses;
+            }
+
+            responses.AddRange(results.Select(ParseResult));
+        }
+        catch (Exception ex)
+        {
+            if (logErrors)
+            {
+                _pythonEngineService.Logger.LogError(ex, "Python Error: {Message} ({OperationName})", ex.Message, nameof(Parse));
+            }
+
+            if (throwOnErrors)
+            {
+                throw;
+            }
+        }
+
+        return responses;
+    }
+
+    private static ParseTorrentTitleResponse? ParseResult(dynamic result)
+    {
+        try
+        {
+            if (result == null)
+            {
+                return new(false, null);
+            }
+
+            var json = result.model_dump_json()?.As<string?>();
+
+            if (json is null || string.IsNullOrEmpty(json))
+            {
+                return new(false, null);
+            }
+
+            var mediaType = result.GetAttr("type")?.As<string>();
+
+            if (string.IsNullOrEmpty(mediaType))
+            {
+                return new(false, null);
+            }
+
+            var response = JsonSerializer.Deserialize<RtnResponse>(json);
+
+            response.IsMovie = mediaType.Equals("movie", StringComparison.OrdinalIgnoreCase);
+
+            return new(true, response);
+        }
+        catch
         {
             return new(false, null);
         }
-
-        var json = result.model_dump_json()?.As<string?>();
-
-        if (json is null || string.IsNullOrEmpty(json))
-        {
-            return new(false, null);
-        }
-
-        var mediaType = result.GetAttr("type")?.As<string>();
-
-        if (string.IsNullOrEmpty(mediaType))
-        {
-            return new(false, null);
-        }
-
-        var response = JsonSerializer.Deserialize<RtnResponse>(json);
-
-        response.IsMovie = mediaType.Equals("movie", StringComparison.OrdinalIgnoreCase);
-
-        return new(true, response);
     }

     private void InitModules() =>
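The switch from `ExecutePythonOperationWithDefault` to explicit `try`/`catch` plus `Py.GIL()` makes the locking visible: Python.NET requires the GIL to be held for any interaction with a Python object. A minimal sketch of that contract; the `RTN` module name matches the `rank-torrent-name` package, and the title is illustrative:

```csharp
using Python.Runtime;

// Assumes PythonEngine.Initialize() has already run (the repo's PythonEngineService
// is presumed to handle that). Py.GIL() acquires the interpreter lock and the
// using-dispose releases it deterministically.
using (Py.GIL())
{
    dynamic rtn = Py.Import("RTN");
    dynamic parsed = rtn.parse("Show.S01E01.1080p.WEB-DL.x264", true);
    Console.WriteLine(parsed.model_dump_json().As<string>());
}
```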