10 Commits

Author SHA1 Message Date
purple_emily
2e65ff9276 Pull changes from Torrentio 2024-04-01 17:20:33 +01:00
iPromKnight
684dbba2f0 RTN-025 and title category parsing (#195)
* update rtn to 025

* Implement movie / show type parsing

* switch to RTN in collectors

* ensure env for pythonnet is loaded, and that requirements copy for qbit

* version bump
2024-03-31 22:01:09 +01:00
iPromKnight
c75ecd2707 add qbit housekeeping service to remove stale torrents (#193)
* Add housekeeping service to clean stale torrents

* version bump
2024-03-30 11:52:23 +00:00
iPromKnight
c493ef3376 Hotfix category, and roll back RTN to 0.1.8 (#192)
* Hotfix categories

Also roll back RTN to 0.1.8 as regression introduced in 0.2

* bump version
2024-03-30 04:47:36 +00:00
iPromKnight
655a39e35c patch the query with execute (#191) 2024-03-30 01:54:06 +00:00
iPromKnight
cfeee62f6b patch ratio (#190)
* add configurable threshold, default 0.95

* version bump
2024-03-30 01:43:21 +00:00
iPromKnight
c6d4c06d70 hotfix categories from imdb result instead (#189)
* category mapping from imdb

* version bump
2024-03-30 01:26:02 +00:00
iPromKnight
08639a3254 Patch isMovie (#188)
* fix is movie

* version bump
2024-03-30 00:28:35 +00:00
iPromKnight
d430850749 Patch message contract names (#187)
* ensure unique message contract names per collector type

* version bump
2024-03-30 00:09:13 +00:00
iPromKnight
82c0ea459b change qbittorrent settings (#186) 2024-03-29 23:35:27 +00:00
52 changed files with 1472 additions and 1245 deletions

.gitignore

@@ -612,3 +612,7 @@ fabric.properties
# Mac directory indexes
.DS_Store
deployment/docker/stack.env
+src/producer/src/python/
+src/debrid-collector/python/
+src/qbit-collector/python/


@@ -12,8 +12,11 @@ enabled=false
program=
[BitTorrent]
Session\AnonymousModeEnabled=true
Session\BTProtocol=TCP
Session\DefaultSavePath=/downloads/
Session\ExcludedFileNames=
Session\MaxActiveCheckingTorrents=5
Session\MaxActiveDownloads=10
Session\MaxActiveTorrents=50
Session\MaxActiveUploads=50
@@ -50,9 +53,10 @@ MailNotification\req_auth=true
WebUI\Address=*
WebUI\AuthSubnetWhitelist=0.0.0.0/0
WebUI\AuthSubnetWhitelistEnabled=true
WebUI\HostHeaderValidation=false
WebUI\LocalHostAuth=false
WebUI\ServerDomains=*
[RSS]
AutoDownloader\DownloadRepacks=true
AutoDownloader\SmartEpisodeFilter=s(\\d+)e(\\d+), (\\d+)x(\\d+), "(\\d{4}[.\\-]\\d{1,2}[.\\-]\\d{1,2})", "(\\d{1,2}[.\\-]\\d{1,2}[.\\-]\\d{4})"
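The doubled backslashes in the SmartEpisodeFilter value are INI escaping, so each comma-separated entry is an ordinary regex: s(\d+)e(\d+), (\d+)x(\d+), and two quoted date forms. A quick standalone check of what the unescaped patterns match (a hypothetical demo, not code from this repo):

    // Hypothetical demo: the unescaped SmartEpisodeFilter patterns against common episode names.
    const patterns = [
      /s(\d+)e(\d+)/,                    // s01e02
      /(\d+)x(\d+)/,                     // 1x02
      /(\d{4}[.\-]\d{1,2}[.\-]\d{1,2})/, // 2024.04.01 or 2024-4-1
      /(\d{1,2}[.\-]\d{1,2}[.\-]\d{4})/, // 01.04.2024 or 1-4-2024
    ];
    for (const name of ['show.s01e02.mkv', 'show.1x02.mkv', 'show.2024.04.01.mkv']) {
      console.log(name, patterns.some(p => p.test(name))); // true for all three
    }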


@@ -94,7 +94,7 @@ services:
condition: service_healthy
env_file: stack.env
hostname: knightcrawler-addon
-image: gabisonfire/knightcrawler-addon:2.0.10
+image: gabisonfire/knightcrawler-addon:2.0.18
labels:
logging: promtail
networks:
@@ -117,7 +117,7 @@ services:
redis:
condition: service_healthy
env_file: stack.env
-image: gabisonfire/knightcrawler-consumer:2.0.10
+image: gabisonfire/knightcrawler-consumer:2.0.18
labels:
logging: promtail
networks:
@@ -138,7 +138,7 @@ services:
redis:
condition: service_healthy
env_file: stack.env
-image: gabisonfire/knightcrawler-debrid-collector:2.0.10
+image: gabisonfire/knightcrawler-debrid-collector:2.0.18
labels:
logging: promtail
networks:
@@ -152,7 +152,7 @@ services:
migrator:
condition: service_completed_successfully
env_file: stack.env
-image: gabisonfire/knightcrawler-metadata:2.0.10
+image: gabisonfire/knightcrawler-metadata:2.0.18
networks:
- knightcrawler-network
restart: "no"
@@ -163,7 +163,7 @@ services:
postgres:
condition: service_healthy
env_file: stack.env
-image: gabisonfire/knightcrawler-migrator:2.0.10
+image: gabisonfire/knightcrawler-migrator:2.0.18
networks:
- knightcrawler-network
restart: "no"
@@ -182,7 +182,7 @@ services:
redis:
condition: service_healthy
env_file: stack.env
-image: gabisonfire/knightcrawler-producer:2.0.10
+image: gabisonfire/knightcrawler-producer:2.0.18
labels:
logging: promtail
networks:
@@ -207,7 +207,7 @@ services:
deploy:
replicas: ${QBIT_REPLICAS:-0}
env_file: stack.env
-image: gabisonfire/knightcrawler-qbit-collector:2.0.10
+image: gabisonfire/knightcrawler-qbit-collector:2.0.18
labels:
logging: promtail
networks:


@@ -20,7 +20,7 @@ x-depends: &knightcrawler-app-depends
services:
metadata:
-image: gabisonfire/knightcrawler-metadata:2.0.10
+image: gabisonfire/knightcrawler-metadata:2.0.18
env_file: ../../.env
networks:
- knightcrawler-network
@@ -30,7 +30,7 @@ services:
condition: service_completed_successfully
migrator:
-image: gabisonfire/knightcrawler-migrator:2.0.10
+image: gabisonfire/knightcrawler-migrator:2.0.18
env_file: ../../.env
networks:
- knightcrawler-network
@@ -40,7 +40,7 @@ services:
condition: service_healthy
addon:
-image: gabisonfire/knightcrawler-addon:2.0.10
+image: gabisonfire/knightcrawler-addon:2.0.18
<<: [*knightcrawler-app, *knightcrawler-app-depends]
restart: unless-stopped
hostname: knightcrawler-addon
@@ -48,22 +48,22 @@ services:
- "7000:7000"
consumer:
-image: gabisonfire/knightcrawler-consumer:2.0.10
+image: gabisonfire/knightcrawler-consumer:2.0.18
<<: [*knightcrawler-app, *knightcrawler-app-depends]
restart: unless-stopped
debridcollector:
-image: gabisonfire/knightcrawler-debrid-collector:2.0.10
+image: gabisonfire/knightcrawler-debrid-collector:2.0.18
<<: [*knightcrawler-app, *knightcrawler-app-depends]
restart: unless-stopped
producer:
-image: gabisonfire/knightcrawler-producer:2.0.10
+image: gabisonfire/knightcrawler-producer:2.0.18
<<: [*knightcrawler-app, *knightcrawler-app-depends]
restart: unless-stopped
qbitcollector:
-image: gabisonfire/knightcrawler-qbit-collector:2.0.10
+image: gabisonfire/knightcrawler-qbit-collector:2.0.18
<<: [*knightcrawler-app, *knightcrawler-app-depends]
restart: unless-stopped
depends_on:


@@ -9,187 +9,187 @@ const KEY = 'alldebrid';
const AGENT = 'knightcrawler';
export async function getCachedStreams(streams, apiKey) {
const options = await getDefaultOptions();
const AD = new AllDebridClient(apiKey, options);
const hashes = streams.map(stream => stream.infoHash);
const available = await AD.magnet.instant(hashes)
.catch(error => {
if (toCommonError(error)) {
return Promise.reject(error);
}
console.warn(`Failed AllDebrid cached [${hashes[0]}] torrent availability request:`, error);
return undefined;
});
return available?.data?.magnets && streams
.reduce((mochStreams, stream) => {
const cachedEntry = available.data.magnets.find(magnet => stream.infoHash === magnet.hash.toLowerCase());
const streamTitleParts = stream.title.replace(/\n👤.*/s, '').split('\n');
const fileName = streamTitleParts[streamTitleParts.length - 1];
const fileIndex = streamTitleParts.length === 2 ? stream.fileIdx : null;
const encodedFileName = encodeURIComponent(fileName);
-mochStreams[stream.infoHash] = {
+mochStreams[`${stream.infoHash}@${stream.fileIdx}`] = {
url: `${apiKey}/${stream.infoHash}/${encodedFileName}/${fileIndex}`,
cached: cachedEntry?.instant
}
return mochStreams;
}, {})
}
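The functional change in this hunk is the map key: `${stream.infoHash}@${stream.fileIdx}` instead of the bare infoHash. Several streams for one title can come from the same torrent, so a hash-only key lets one file's entry overwrite another's; the composite key keeps one entry per file, matching the lookup change in populateCachedLinks further down. A small sketch with made-up values:

    // Sketch: two streams from one torrent (same infoHash, different files).
    const entries = [
      { infoHash: 'abc123', fileIdx: 0, cached: true },
      { infoHash: 'abc123', fileIdx: 1, cached: false },
    ];
    // Keyed by hash alone, the second entry silently overwrites the first.
    console.log(Object.fromEntries(entries.map(s => [s.infoHash, s.cached])));
    // -> { abc123: false }
    // Keyed by hash@fileIdx, each file keeps its own cached flag.
    console.log(Object.fromEntries(entries.map(s => [`${s.infoHash}@${s.fileIdx}`, s.cached])));
    // -> { 'abc123@0': true, 'abc123@1': false }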
export async function getCatalog(apiKey, offset = 0) {
if (offset > 0) {
return [];
}
const options = await getDefaultOptions();
const AD = new AllDebridClient(apiKey, options);
return AD.magnet.status()
.then(response => response.data.magnets)
.then(torrents => (torrents || [])
.filter(torrent => torrent && statusReady(torrent.statusCode))
.map(torrent => ({
id: `${KEY}:${torrent.id}`,
type: Type.OTHER,
name: torrent.filename
})));
}
export async function getItemMeta(itemId, apiKey) {
const options = await getDefaultOptions();
const AD = new AllDebridClient(apiKey, options);
return AD.magnet.status(itemId)
.then(response => response.data.magnets)
.then(torrent => ({
id: `${KEY}:${torrent.id}`,
type: Type.OTHER,
name: torrent.filename,
infoHash: torrent.hash.toLowerCase(),
videos: torrent.links
.filter(file => isVideo(file.filename))
.map((file, index) => ({
id: `${KEY}:${torrent.id}:${index}`,
title: file.filename,
released: new Date(torrent.uploadDate * 1000 - index).toISOString(),
streams: [{ url: `${apiKey}/${torrent.hash.toLowerCase()}/${encodeURIComponent(file.filename)}/${index}` }]
}))
}))
}
export async function resolve({ ip, apiKey, infoHash, cachedEntryInfo, fileIndex }) {
console.log(`Unrestricting AllDebrid ${infoHash} [${fileIndex}]`);
const options = await getDefaultOptions(ip);
const AD = new AllDebridClient(apiKey, options);
return _resolve(AD, infoHash, cachedEntryInfo, fileIndex)
.catch(error => {
if (errorExpiredSubscriptionError(error)) {
console.log(`Access denied to AllDebrid ${infoHash} [${fileIndex}]`);
return StaticResponse.FAILED_ACCESS;
} else if (error.code === 'MAGNET_TOO_MANY') {
console.log(`Deleting and retrying adding to AllDebrid ${infoHash} [${fileIndex}]...`);
return _deleteAndRetry(AD, infoHash, cachedEntryInfo, fileIndex);
}
return Promise.reject(`Failed AllDebrid adding torrent ${JSON.stringify(error)}`);
});
}
async function _resolve(AD, infoHash, cachedEntryInfo, fileIndex) {
const torrent = await _createOrFindTorrent(AD, infoHash);
if (torrent && statusReady(torrent.statusCode)) {
return _unrestrictLink(AD, torrent, cachedEntryInfo, fileIndex);
} else if (torrent && statusDownloading(torrent.statusCode)) {
console.log(`Downloading to AllDebrid ${infoHash} [${fileIndex}]...`);
return StaticResponse.DOWNLOADING;
} else if (torrent && statusHandledError(torrent.statusCode)) {
console.log(`Retrying downloading to AllDebrid ${infoHash} [${fileIndex}]...`);
return _retryCreateTorrent(AD, infoHash, cachedEntryInfo, fileIndex);
}
return Promise.reject(`Failed AllDebrid adding torrent ${JSON.stringify(torrent)}`);
}
async function _createOrFindTorrent(AD, infoHash) {
return _findTorrent(AD, infoHash)
.catch(() => _createTorrent(AD, infoHash));
}
async function _retryCreateTorrent(AD, infoHash, encodedFileName, fileIndex) {
const newTorrent = await _createTorrent(AD, infoHash);
return newTorrent && statusReady(newTorrent.statusCode)
? _unrestrictLink(AD, newTorrent, encodedFileName, fileIndex)
: StaticResponse.FAILED_DOWNLOAD;
}
async function _deleteAndRetry(AD, infoHash, encodedFileName, fileIndex) {
const torrents = await AD.magnet.status().then(response => response.data.magnets);
const lastTorrent = torrents[torrents.length - 1];
return AD.magnet.delete(lastTorrent.id)
.then(() => _retryCreateTorrent(AD, infoHash, encodedFileName, fileIndex));
}
async function _findTorrent(AD, infoHash) {
const torrents = await AD.magnet.status().then(response => response.data.magnets);
const foundTorrents = torrents.filter(torrent => torrent.hash.toLowerCase() === infoHash);
const nonFailedTorrent = foundTorrents.find(torrent => !statusError(torrent.statusCode));
const foundTorrent = nonFailedTorrent || foundTorrents[0];
return foundTorrent || Promise.reject('No recent torrent found');
}
async function _createTorrent(AD, infoHash) {
const magnetLink = await getMagnetLink(infoHash);
const uploadResponse = await AD.magnet.upload(magnetLink);
const torrentId = uploadResponse.data.magnets[0].id;
return AD.magnet.status(torrentId).then(statusResponse => statusResponse.data.magnets);
}
async function _unrestrictLink(AD, torrent, encodedFileName, fileIndex) {
const targetFileName = decodeURIComponent(encodedFileName);
-const videos = torrent.links.filter(link => isVideo(link.filename));
+const videos = torrent.links.filter(link => isVideo(link.filename)).sort((a, b) => b.size - a.size);
const targetVideo = Number.isInteger(fileIndex)
? videos.find(video => sameFilename(targetFileName, video.filename))
-: videos.sort((a, b) => b.size - a.size)[0];
+: videos[0];
if (!targetVideo && torrent.links.every(link => isArchive(link.filename))) {
console.log(`Only AllDebrid archive is available for [${torrent.hash}] ${encodedFileName}`)
return StaticResponse.FAILED_RAR;
}
if (!targetVideo || !targetVideo.link || !targetVideo.link.length) {
return Promise.reject(`No AllDebrid links found for [${torrent.hash}] ${encodedFileName}`);
}
const unrestrictedLink = await AD.link.unlock(targetVideo.link).then(response => response.data.link);
console.log(`Unrestricted AllDebrid ${torrent.hash} [${fileIndex}] to ${unrestrictedLink}`);
return unrestrictedLink;
}
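The other change in this file moves the size sort ahead of the lookup: the filtered video list is sorted largest-first once, so when fileIndex is not an integer the fallback is simply videos[0], while the filename match is unaffected by ordering. A sketch with made-up link objects (endsWith stands in for the real isVideo check):

    // Sorting largest-first up front makes the no-index fallback the biggest video file.
    const links = [
      { filename: 'sample.mkv', size: 50 },
      { filename: 'movie.mkv', size: 900 },
    ];
    const videos = links.filter(l => l.filename.endsWith('.mkv')).sort((a, b) => b.size - a.size);
    console.log(videos[0].filename); // 'movie.mkv' (picked when fileIndex is null)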
async function getDefaultOptions(ip) {
return { base_agent: AGENT, timeout: 10000 };
}
export function toCommonError(error) {
if (error && error.code === 'AUTH_BAD_APIKEY') {
return BadTokenError;
}
if (error && error.code === 'AUTH_USER_BANNED') {
return AccessDeniedError;
}
return undefined;
}
function statusError(statusCode) {
return [5, 6, 7, 8, 9, 10, 11].includes(statusCode);
}
function statusHandledError(statusCode) {
return [5, 7, 9, 10, 11].includes(statusCode);
}
function statusDownloading(statusCode) {
return [0, 1, 2, 3].includes(statusCode);
}
function statusReady(statusCode) {
return statusCode === 4;
}
function errorExpiredSubscriptionError(error) {
return ['AUTH_BAD_APIKEY', 'MUST_BE_PREMIUM', 'MAGNET_MUST_BE_PREMIUM', 'FREE_TRIAL_LIMIT_REACHED', 'AUTH_USER_BANNED']
.includes(error.code);
}


@@ -8,148 +8,148 @@ import StaticResponse from './static.js';
const KEY = 'debridlink';
export async function getCachedStreams(streams, apiKey) {
const options = await getDefaultOptions();
const DL = new DebridLinkClient(apiKey, options);
const hashBatches = chunkArray(streams.map(stream => stream.infoHash), 50)
.map(batch => batch.join(','));
const available = await Promise.all(hashBatches.map(hashes => DL.seedbox.cached(hashes)))
.then(results => results.map(result => result.value))
.then(results => results.reduce((all, result) => Object.assign(all, result), {}))
.catch(error => {
if (toCommonError(error)) {
return Promise.reject(error);
}
console.warn('Failed DebridLink cached torrent availability request:', error);
return undefined;
});
return available && streams
.reduce((mochStreams, stream) => {
const cachedEntry = available[stream.infoHash];
-mochStreams[stream.infoHash] = {
+mochStreams[`${stream.infoHash}@${stream.fileIdx}`] = {
url: `${apiKey}/${stream.infoHash}/null/${stream.fileIdx}`,
cached: !!cachedEntry
};
return mochStreams;
}, {})
}
export async function getCatalog(apiKey, offset = 0) {
if (offset > 0) {
return [];
}
const options = await getDefaultOptions();
const DL = new DebridLinkClient(apiKey, options);
return DL.seedbox.list()
.then(response => response.value)
.then(torrents => (torrents || [])
.filter(torrent => torrent && statusReady(torrent))
.map(torrent => ({
id: `${KEY}:${torrent.id}`,
type: Type.OTHER,
name: torrent.name
})));
}
export async function getItemMeta(itemId, apiKey, ip) {
const options = await getDefaultOptions(ip);
const DL = new DebridLinkClient(apiKey, options);
return DL.seedbox.list(itemId)
.then(response => response.value[0])
.then(torrent => ({
id: `${KEY}:${torrent.id}`,
type: Type.OTHER,
name: torrent.name,
infoHash: torrent.hashString.toLowerCase(),
videos: torrent.files
.filter(file => isVideo(file.name))
.map((file, index) => ({
id: `${KEY}:${torrent.id}:${index}`,
title: file.name,
released: new Date(torrent.created * 1000 - index).toISOString(),
streams: [{ url: file.downloadUrl }]
}))
}))
}
export async function resolve({ ip, apiKey, infoHash, fileIndex }) {
console.log(`Unrestricting DebridLink ${infoHash} [${fileIndex}]`);
const options = await getDefaultOptions(ip);
const DL = new DebridLinkClient(apiKey, options);
return _resolve(DL, infoHash, fileIndex)
.catch(error => {
if (errorExpiredSubscriptionError(error)) {
console.log(`Access denied to DebridLink ${infoHash} [${fileIndex}]`);
return StaticResponse.FAILED_ACCESS;
}
return Promise.reject(`Failed DebridLink adding torrent ${JSON.stringify(error)}`);
});
}
async function _resolve(DL, infoHash, fileIndex) {
const torrent = await _createOrFindTorrent(DL, infoHash);
if (torrent && statusReady(torrent)) {
return _unrestrictLink(DL, torrent, fileIndex);
} else if (torrent && statusDownloading(torrent)) {
console.log(`Downloading to DebridLink ${infoHash} [${fileIndex}]...`);
return StaticResponse.DOWNLOADING;
}
return Promise.reject(`Failed DebridLink adding torrent ${JSON.stringify(torrent)}`);
}
async function _createOrFindTorrent(DL, infoHash) {
return _findTorrent(DL, infoHash)
.catch(() => _createTorrent(DL, infoHash));
}
async function _findTorrent(DL, infoHash) {
const torrents = await DL.seedbox.list().then(response => response.value);
const foundTorrents = torrents.filter(torrent => torrent.hashString.toLowerCase() === infoHash);
return foundTorrents[0] || Promise.reject('No recent torrent found');
}
async function _createTorrent(DL, infoHash) {
const magnetLink = await getMagnetLink(infoHash);
const uploadResponse = await DL.seedbox.add(magnetLink, null, true);
return uploadResponse.value;
}
async function _unrestrictLink(DL, torrent, fileIndex) {
const targetFile = Number.isInteger(fileIndex)
? torrent.files[fileIndex]
: torrent.files.filter(file => file.downloadPercent === 100).sort((a, b) => b.size - a.size)[0];
if (!targetFile && torrent.files.every(file => isArchive(file.downloadUrl))) {
console.log(`Only DebridLink archive is available for [${torrent.hashString}] ${fileIndex}`)
return StaticResponse.FAILED_RAR;
}
if (!targetFile || !targetFile.downloadUrl) {
return Promise.reject(`No DebridLink links found for index ${fileIndex} in: ${JSON.stringify(torrent)}`);
}
console.log(`Unrestricted DebridLink ${torrent.hashString} [${fileIndex}] to ${targetFile.downloadUrl}`);
return targetFile.downloadUrl;
}
async function getDefaultOptions(ip) {
return { ip, timeout: 10000 };
}
export function toCommonError(error) {
if (error === 'badToken') {
return BadTokenError;
}
return undefined;
}
function statusDownloading(torrent) {
return torrent.downloadPercent < 100
}
function statusReady(torrent) {
return torrent.downloadPercent === 100;
}
function errorExpiredSubscriptionError(error) {
return ['freeServerOverload', 'maxTorrent', 'maxLink', 'maxLinkHost', 'maxData', 'maxDataHost'].includes(error);
}


@@ -15,226 +15,226 @@ const RESOLVE_TIMEOUT = 2 * 60 * 1000; // 2 minutes
const MIN_API_KEY_SYMBOLS = 15;
const TOKEN_BLACKLIST = [];
export const MochOptions = {
realdebrid: {
key: 'realdebrid',
instance: realdebrid,
name: "RealDebrid",
shortName: 'RD',
catalog: true
},
premiumize: {
key: 'premiumize',
instance: premiumize,
name: 'Premiumize',
shortName: 'PM',
catalog: true
},
alldebrid: {
key: 'alldebrid',
instance: alldebrid,
name: 'AllDebrid',
shortName: 'AD',
catalog: true
},
debridlink: {
key: 'debridlink',
instance: debridlink,
name: 'DebridLink',
shortName: 'DL',
catalog: true
},
offcloud: {
key: 'offcloud',
instance: offcloud,
name: 'Offcloud',
shortName: 'OC',
catalog: true
},
putio: {
key: 'putio',
instance: putio,
name: 'Put.io',
shortName: 'Putio',
catalog: true
}
};
const unrestrictQueues = {}
Object.values(MochOptions)
.map(moch => moch.key)
.forEach(mochKey => unrestrictQueues[mochKey] = new namedQueue((task, callback) => task.method()
.then(result => callback(false, result))
.catch((error => callback(error))), 200));
export function hasMochConfigured(config) {
return Object.keys(MochOptions).find(moch => config?.[moch])
}
export async function applyMochs(streams, config) {
if (!streams?.length || !hasMochConfigured(config)) {
return streams;
}
return Promise.all(Object.keys(config)
.filter(configKey => MochOptions[configKey])
.map(configKey => MochOptions[configKey])
.map(moch => {
if (isInvalidToken(config[moch.key], moch.key)) {
return { moch, error: BadTokenError };
}
return moch.instance.getCachedStreams(streams, config[moch.key])
.then(mochStreams => ({ moch, mochStreams }))
.catch(rawError => {
const error = moch.instance.toCommonError(rawError) || rawError;
if (error === BadTokenError) {
blackListToken(config[moch.key], moch.key);
}
return { moch, error };
})
}))
.then(results => processMochResults(streams, config, results));
}
export async function resolve(parameters) {
const moch = MochOptions[parameters.mochKey];
if (!moch) {
return Promise.reject(new Error(`Not a valid moch provider: ${parameters.mochKey}`));
}
if (!parameters.apiKey || !parameters.infoHash || !parameters.cachedEntryInfo) {
return Promise.reject(new Error("No valid parameters passed"));
}
const id = `${parameters.ip}_${parameters.mochKey}_${parameters.apiKey}_${parameters.infoHash}_${parameters.fileIndex}`;
const method = () => timeout(RESOLVE_TIMEOUT, cacheWrapResolvedUrl(id, () => moch.instance.resolve(parameters)))
.catch(error => {
console.warn(error);
return StaticResponse.FAILED_UNEXPECTED;
})
.then(url => isStaticUrl(url) ? `${parameters.host}/${url}` : url);
const unrestrictQueue = unrestrictQueues[moch.key];
return new Promise(((resolve, reject) => {
unrestrictQueue.push({ id, method }, (error, result) => result ? resolve(result) : reject(error));
}));
}
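resolve pushes every unrestrict through a per-provider namedQueue, keyed by an id built from ip, moch key, api key, info hash, and file index, so concurrent requests for the same id evidently collapse into a single resolution. A minimal stand-in for that coalescing idea, assuming nothing about the real named-queue API:

    // Sketch of id-keyed coalescing: calls that share an id share one promise.
    const inFlight = new Map();
    function resolveOnce(id, method) {
      if (!inFlight.has(id)) {
        inFlight.set(id, method().finally(() => inFlight.delete(id)));
      }
      return inFlight.get(id);
    }
    // Hypothetical task: both callers receive the same underlying resolution.
    const task = () => new Promise(res => setTimeout(() => res('https://host.invalid/v'), 50));
    Promise.all([resolveOnce('rd_key_hash_0', task), resolveOnce('rd_key_hash_0', task)])
      .then(([a, b]) => console.log(a === b)); // true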
export async function getMochCatalog(mochKey, config) {
const moch = MochOptions[mochKey];
if (!moch) {
return Promise.reject(new Error(`Not a valid moch provider: ${mochKey}`));
}
if (isInvalidToken(config[mochKey], mochKey)) {
return Promise.reject(new Error(`Invalid API key for moch provider: ${mochKey}`));
}
return moch.instance.getCatalog(config[moch.key], config.skip, config.ip)
.catch(rawError => {
const commonError = moch.instance.toCommonError(rawError);
if (commonError === BadTokenError) {
blackListToken(config[moch.key], moch.key);
}
return commonError ? [] : Promise.reject(rawError);
});
}
export async function getMochItemMeta(mochKey, itemId, config) {
const moch = MochOptions[mochKey];
if (!moch) {
return Promise.reject(new Error(`Not a valid moch provider: ${mochKey}`));
}
return moch.instance.getItemMeta(itemId, config[moch.key], config.ip)
.then(meta => enrichMeta(meta))
.then(meta => {
meta.videos.forEach(video => video.streams.forEach(stream => {
if (!stream.url.startsWith('http')) {
stream.url = `${config.host}/${moch.key}/${stream.url}/${streamFilename(video)}`
}
stream.behaviorHints = { bingeGroup: itemId }
}))
return meta;
});
}
function processMochResults(streams, config, results) {
const errorResults = results
.map(result => errorStreamResponse(result.moch.key, result.error, config))
.filter(errorResponse => errorResponse);
if (errorResults.length) {
return errorResults;
}
const includeTorrentLinks = options.includeTorrentLinks(config);
const excludeDownloadLinks = options.excludeDownloadLinks(config);
const mochResults = results.filter(result => result?.mochStreams);
const cachedStreams = mochResults
.reduce((resultStreams, mochResult) => populateCachedLinks(resultStreams, mochResult, config), streams);
const resultStreams = excludeDownloadLinks ? cachedStreams : populateDownloadLinks(cachedStreams, mochResults, config);
return includeTorrentLinks ? resultStreams : resultStreams.filter(stream => stream.url);
}
function populateCachedLinks(streams, mochResult, config) {
return streams.map(stream => {
-const cachedEntry = stream.infoHash && mochResult.mochStreams[stream.infoHash];
+const cachedEntry = stream.infoHash && mochResult.mochStreams[`${stream.infoHash}@${stream.fileIdx}`];
if (cachedEntry?.cached) {
return {
name: `[${mochResult.moch.shortName}+] ${stream.name}`,
title: stream.title,
url: `${config.host}/${mochResult.moch.key}/${cachedEntry.url}/${streamFilename(stream)}`,
behaviorHints: stream.behaviorHints
};
}
return stream;
});
}
function populateDownloadLinks(streams, mochResults, config) {
const torrentStreams = streams.filter(stream => stream.infoHash);
const seededStreams = streams.filter(stream => !stream.title.includes('👤 0'));
torrentStreams.forEach(stream => mochResults.forEach(mochResult => {
-const cachedEntry = mochResult.mochStreams[stream.infoHash];
+const cachedEntry = mochResult.mochStreams[`${stream.infoHash}@${stream.fileIdx}`];
const isCached = cachedEntry?.cached;
if (!isCached && isHealthyStreamForDebrid(seededStreams, stream)) {
streams.push({
name: `[${mochResult.moch.shortName} download] ${stream.name}`,
title: stream.title,
url: `${config.host}/${mochResult.moch.key}/${cachedEntry.url}/${streamFilename(stream)}`,
behaviorHints: stream.behaviorHints
})
}
}));
return streams;
}
function isHealthyStreamForDebrid(streams, stream) {
const isZeroSeeders = stream.title.includes('👤 0');
const is4kStream = stream.name.includes('4k');
const isNotEnoughOptions = streams.length <= 5;
return !isZeroSeeders || is4kStream || isNotEnoughOptions;
}
function isInvalidToken(token, mochKey) {
return token.length < MIN_API_KEY_SYMBOLS || TOKEN_BLACKLIST.includes(`${mochKey}|${token}`);
}
function blackListToken(token, mochKey) {
const tokenKey = `${mochKey}|${token}`;
console.log(`Blacklisting invalid token: ${tokenKey}`)
TOKEN_BLACKLIST.push(tokenKey);
}
function errorStreamResponse(mochKey, error, config) {
if (error === BadTokenError) {
return {
name: `KnightCrawler\n${MochOptions[mochKey].shortName} error`,
title: `Invalid ${MochOptions[mochKey].name} ApiKey/Token!`,
url: `${config.host}/${StaticResponse.FAILED_ACCESS}`
};
}
if (error === AccessDeniedError) {
return {
name: `KnightCrawler\n${MochOptions[mochKey].shortName} error`,
title: `Expired/invalid ${MochOptions[mochKey].name} subscription!`,
url: `${config.host}/${StaticResponse.FAILED_ACCESS}`
};
}
return undefined;
}


@@ -1,63 +1,63 @@
import * as repository from '../lib/repository.js';
const METAHUB_URL = 'https://images.metahub.space'
export const BadTokenError = { code: 'BAD_TOKEN' }
export const AccessDeniedError = { code: 'ACCESS_DENIED' }
export function chunkArray(arr, size) {
return arr.length > size
? [arr.slice(0, size), ...chunkArray(arr.slice(size), size)]
: [arr];
}
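chunkArray recurses, peeling off one size-limited batch per call; the DebridLink and Offcloud availability checks above use it to stay within their 50- and 100-hash request batches. Usage:

    // 120 hashes split into DebridLink-sized batches of 50.
    const hashes = Array.from({ length: 120 }, (_, i) => `hash${i}`);
    console.log(chunkArray(hashes, 50).map(batch => batch.length)); // [50, 50, 20]

The recursion depth grows as arr.length / size, which is harmless at these batch sizes.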
export function streamFilename(stream) {
const titleParts = stream.title.replace(/\n👤.*/s, '').split('\n');
const filename = titleParts.pop().split('/').pop();
return encodeURIComponent(filename)
}
export async function enrichMeta(itemMeta) {
const infoHashes = [...new Set([itemMeta.infoHash]
.concat(itemMeta.videos.map(video => video.infoHash))
.filter(infoHash => infoHash))];
const files = infoHashes.length ? await repository.getFiles(infoHashes).catch(() => []) : [];
const commonImdbId = itemMeta.infoHash && mostCommonValue(files.map(file => file.imdbId));
if (files.length) {
return {
...itemMeta,
logo: commonImdbId && `${METAHUB_URL}/logo/medium/${commonImdbId}/img`,
poster: commonImdbId && `${METAHUB_URL}/poster/medium/${commonImdbId}/img`,
background: commonImdbId && `${METAHUB_URL}/background/medium/${commonImdbId}/img`,
videos: itemMeta.videos.map(video => {
const file = files.find(file => sameFilename(video.title, file.title));
if (file?.imdbId) {
if (file.imdbSeason && file.imdbEpisode) {
video.id = `${file.imdbId}:${file.imdbSeason}:${file.imdbEpisode}`;
video.season = file.imdbSeason;
video.episode = file.imdbEpisode;
video.thumbnail = `https://episodes.metahub.space/${file.imdbId}/${video.season}/${video.episode}/w780.jpg`
} else {
video.id = file.imdbId;
video.thumbnail = `${METAHUB_URL}/background/small/${file.imdbId}/img`;
}
}
return video;
})
}
}
return itemMeta
}
export function sameFilename(filename, expectedFilename) {
const offset = filename.length - expectedFilename.length;
for (let i = 0; i < expectedFilename.length; i++) {
if (filename[offset + i] !== expectedFilename[i] && expectedFilename[i] !== '�') {
return false;
}
}
return true;
}
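sameFilename anchors the expected name against the tail of the candidate and treats the replacement character as a single-character wildcard (assuming the garbled byte in the source really is '�'). For example:

    // Suffix match with a one-character wildcard.
    console.log(sameFilename('folder/Movie.2024.mkv', 'Movie.2024.mkv')); // true (tail match)
    console.log(sameFilename('Movie.2024.mkv', 'Movie.202�.mkv'));        // true ('�' matches '4')
    console.log(sameFilename('Movie.2024.mkv', 'Other.2024.mkv'));        // false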
function mostCommonValue(array) {
return array.sort((a, b) => array.filter(v => v === a).length - array.filter(v => v === b).length).pop();
}
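mostCommonValue sorts the array by each value's frequency and pops the last, most frequent element; quadratic, but fine for the handful of imdbIds per torrent:

    console.log(mostCommonValue(['tt001', 'tt002', 'tt002'])); // 'tt002'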


@@ -9,178 +9,178 @@ import StaticResponse from './static.js';
const KEY = 'offcloud';
export async function getCachedStreams(streams, apiKey) {
const options = await getDefaultOptions();
const OC = new OffcloudClient(apiKey, options);
const hashBatches = chunkArray(streams.map(stream => stream.infoHash), 100);
const available = await Promise.all(hashBatches.map(hashes => OC.instant.cache(hashes)))
.then(results => results.map(result => result.cachedItems))
.then(results => results.reduce((all, result) => all.concat(result), []))
.catch(error => {
if (toCommonError(error)) {
return Promise.reject(error);
}
console.warn('Failed Offcloud cached torrent availability request:', error);
return undefined;
});
return available && streams
.reduce((mochStreams, stream) => {
const isCached = available.includes(stream.infoHash);
const streamTitleParts = stream.title.replace(/\n👤.*/s, '').split('\n');
const fileName = streamTitleParts[streamTitleParts.length - 1];
-const fileIndex = streamTitleParts.length === 2 ? stream.fileIdx : null;
const encodedFileName = encodeURIComponent(fileName);
-mochStreams[stream.infoHash] = {
-url: `${apiKey}/${stream.infoHash}/${encodedFileName}/${fileIndex}`,
+mochStreams[`${stream.infoHash}@${stream.fileIdx}`] = {
+url: `${apiKey}/${stream.infoHash}/${encodedFileName}/${stream.fileIdx}`,
cached: isCached
};
return mochStreams;
}, {})
}
export async function getCatalog(apiKey, offset = 0) {
if (offset > 0) {
return [];
}
const options = await getDefaultOptions();
const OC = new OffcloudClient(apiKey, options);
return OC.cloud.history()
.then(torrents => torrents)
.then(torrents => (torrents || [])
.map(torrent => ({
id: `${KEY}:${torrent.requestId}`,
type: Type.OTHER,
name: torrent.fileName
})));
}
export async function getItemMeta(itemId, apiKey, ip) {
const options = await getDefaultOptions(ip);
const OC = new OffcloudClient(apiKey, options);
const torrents = await OC.cloud.history();
const torrent = torrents.find(torrent => torrent.requestId === itemId)
const infoHash = torrent && magnet.decode(torrent.originalLink).infoHash
const createDate = torrent ? new Date(torrent.createdOn) : new Date();
return _getFileUrls(OC, torrent)
.then(files => ({
id: `${KEY}:${itemId}`,
type: Type.OTHER,
name: torrent.name,
infoHash: infoHash,
videos: files
.filter(file => isVideo(file))
.map((file, index) => ({
id: `${KEY}:${itemId}:${index}`,
title: file.split('/').pop(),
released: new Date(createDate.getTime() - index).toISOString(),
streams: [{ url: file }]
}))
}))
}
export async function resolve({ ip, apiKey, infoHash, cachedEntryInfo, fileIndex }) {
console.log(`Unrestricting Offcloud ${infoHash} [${fileIndex}]`);
const options = await getDefaultOptions(ip);
const OC = new OffcloudClient(apiKey, options);
return _resolve(OC, infoHash, cachedEntryInfo, fileIndex)
.catch(error => {
if (errorExpiredSubscriptionError(error)) {
console.log(`Access denied to Offcloud ${infoHash} [${fileIndex}]`);
return StaticResponse.FAILED_ACCESS;
}
return Promise.reject(`Failed Offcloud adding torrent ${JSON.stringify(error)}`);
});
}
async function _resolve(OC, infoHash, cachedEntryInfo, fileIndex) {
const torrent = await _createOrFindTorrent(OC, infoHash)
.then(info => info.requestId ? OC.cloud.status(info.requestId) : Promise.resolve(info))
.then(info => info.status || info);
if (torrent && statusReady(torrent)) {
return _unrestrictLink(OC, infoHash, torrent, cachedEntryInfo, fileIndex);
} else if (torrent && statusDownloading(torrent)) {
console.log(`Downloading to Offcloud ${infoHash} [${fileIndex}]...`);
return StaticResponse.DOWNLOADING;
} else if (torrent && statusError(torrent)) {
console.log(`Retry failed download in Offcloud ${infoHash} [${fileIndex}]...`);
return _retryCreateTorrent(OC, infoHash, cachedEntryInfo, fileIndex);
}
return Promise.reject(`Failed Offcloud adding torrent ${JSON.stringify(torrent)}`);
}
async function _createOrFindTorrent(OC, infoHash) {
return _findTorrent(OC, infoHash)
.catch(() => _createTorrent(OC, infoHash));
}
async function _findTorrent(OC, infoHash) {
const torrents = await OC.cloud.history();
const foundTorrents = torrents.filter(torrent => torrent.originalLink.toLowerCase().includes(infoHash));
const nonFailedTorrent = foundTorrents.find(torrent => !statusError(torrent));
const foundTorrent = nonFailedTorrent || foundTorrents[0];
return foundTorrent || Promise.reject('No recent torrent found');
}
async function _createTorrent(OC, infoHash) {
const magnetLink = await getMagnetLink(infoHash);
return OC.cloud.download(magnetLink)
}
async function _retryCreateTorrent(OC, infoHash, cachedEntryInfo, fileIndex) {
const newTorrent = await _createTorrent(OC, infoHash);
return newTorrent && statusReady(newTorrent.status)
? _unrestrictLink(OC, infoHash, newTorrent, cachedEntryInfo, fileIndex)
: StaticResponse.FAILED_DOWNLOAD;
}
async function _unrestrictLink(OC, infoHash, torrent, cachedEntryInfo, fileIndex) {
const targetFileName = decodeURIComponent(cachedEntryInfo);
const files = await _getFileUrls(OC, torrent)
-const targetFile = files.find(file => sameFilename(targetFileName, file.split('/').pop()))
+const targetFile = files.find(file => file.includes(`/${torrent.requestId}/${fileIndex}/`))
+|| files.find(file => sameFilename(targetFileName, file.split('/').pop()))
|| files.find(file => isVideo(file))
|| files.pop();
if (!targetFile) {
return Promise.reject(`No Offcloud links found for index ${fileIndex} in: ${JSON.stringify(torrent)}`);
}
console.log(`Unrestricted Offcloud ${infoHash} [${fileIndex}] to ${targetFile}`);
return targetFile;
}
async function _getFileUrls(OC, torrent) {
return OC.cloud.explore(torrent.requestId)
.catch(error => {
if (error === 'Bad archive') {
return [`https://${torrent.server}.offcloud.com/cloud/download/${torrent.requestId}/${torrent.fileName}`];
}
throw error;
})
}
async function getDefaultOptions(ip) {
return { ip, timeout: 10000 };
}
export function toCommonError(error) {
if (error?.error === 'NOAUTH' || error?.message?.startsWith('Cannot read property')) {
return BadTokenError;
}
return undefined;
}
function statusDownloading(torrent) {
return ['downloading', 'created'].includes(torrent.status);
}
function statusError(torrent) {
return ['error', 'canceled'].includes(torrent.status);
}
function statusReady(torrent) {
return torrent.status === 'downloaded';
}
function errorExpiredSubscriptionError(error) {
return error?.includes && (error.includes('not_available') || error.includes('NOAUTH') || error.includes('premium membership'));
}


@@ -9,187 +9,187 @@ import StaticResponse from './static.js';
const KEY = 'premiumize';
export async function getCachedStreams(streams, apiKey) {
const options = await getDefaultOptions();
const PM = new PremiumizeClient(apiKey, options);
return Promise.all(chunkArray(streams, 100)
.map(chunkedStreams => _getCachedStreams(PM, apiKey, chunkedStreams)))
.then(results => results.reduce((all, result) => Object.assign(all, result), {}));
}
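chunkArray is imported from a shared helper that is not part of this diff; a minimal implementation consistent with how it is used here (batching 100 hashes per availability request) could look like:
// illustrative sketch of the assumed chunkArray contract
function chunkArray(items, chunkSize) {
  const chunks = [];
  for (let i = 0; i < items.length; i += chunkSize) {
    chunks.push(items.slice(i, i + chunkSize));
  }
  return chunks;
}
// chunkArray([1, 2, 3, 4, 5], 2) => [[1, 2], [3, 4], [5]]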
async function _getCachedStreams(PM, apiKey, streams) {
const hashes = streams.map(stream => stream.infoHash);
return PM.cache.check(hashes)
.catch(error => {
if (toCommonError(error)) {
return Promise.reject(error);
}
console.warn('Failed Premiumize cached torrent availability request:', error);
return undefined;
})
.then(available => streams
.reduce((mochStreams, stream, index) => {
const streamTitleParts = stream.title.replace(/\n👤.*/s, '').split('\n');
const fileName = streamTitleParts[streamTitleParts.length - 1];
const fileIndex = streamTitleParts.length === 2 ? stream.fileIdx : null;
const encodedFileName = encodeURIComponent(fileName);
mochStreams[`${stream.infoHash}@${stream.fileIdx}`] = {
url: `${apiKey}/${stream.infoHash}/${encodedFileName}/${fileIndex}`,
cached: available?.response[index]
};
return mochStreams;
}, {}));
}
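The title handling above assumes the addon's stream-title layout: an optional torrent-name row, a filename row, and a 👤 seeders row that gets stripped. A worked example with a hypothetical title; keying the result by `${infoHash}@${stream.fileIdx}` is what lets two files from the same torrent coexist in the moch map:
// hypothetical stream title, modelled on the regex above
const title = 'Sample.Show.S01E02\nSample.Show.S01E02.1080p.mkv\n👤 42';
const parts = title.replace(/\n👤.*/s, '').split('\n');
const fileName = parts[parts.length - 1];        // 'Sample.Show.S01E02.1080p.mkv'
const fileIndex = parts.length === 2 ? 3 : null; // stream.fileIdx (3 here) only when a filename row exists
console.log(`${encodeURIComponent(fileName)} @ ${fileIndex}`);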
export async function getCatalog(apiKey, offset = 0) {
if (offset > 0) {
return [];
}
const options = await getDefaultOptions();
const PM = new PremiumizeClient(apiKey, options);
return PM.folder.list()
.then(response => response.content)
.then(torrents => (torrents || [])
.filter(torrent => torrent && torrent.type === 'folder')
.map(torrent => ({
id: `${KEY}:${torrent.id}`,
type: Type.OTHER,
name: torrent.name
})));
}
export async function getItemMeta(itemId, apiKey, ip) {
const options = await getDefaultOptions();
const PM = new PremiumizeClient(apiKey, options);
const rootFolder = await PM.folder.list(itemId, null);
const infoHash = await _findInfoHash(PM, itemId);
return getFolderContents(PM, itemId, ip)
.then(contents => ({
id: `${KEY}:${itemId}`,
type: Type.OTHER,
name: rootFolder.name,
infoHash: infoHash,
videos: contents
.map((file, index) => ({
id: `${KEY}:${file.id}:${index}`,
title: file.name,
released: new Date(file.created_at * 1000 - index).toISOString(),
streams: [{ url: file.link || file.stream_link }]
}))
}))
}
async function getFolderContents(PM, itemId, ip, folderPrefix = '') {
return PM.folder.list(itemId, null, ip)
.then(response => response.content)
.then(contents => Promise.all(contents
.filter(content => content.type === 'folder')
.map(content => getFolderContents(PM, content.id, ip, [folderPrefix, content.name].join('/'))))
.then(otherContents => otherContents.reduce((a, b) => a.concat(b), []))
.then(otherContents => contents
.filter(content => content.type === 'file' && isVideo(content.name))
.map(content => ({ ...content, name: [folderPrefix, content.name].join('/') }))
.concat(otherContents)));
}
export async function resolve({ ip, isBrowser, apiKey, infoHash, cachedEntryInfo, fileIndex }) {
console.log(`Unrestricting Premiumize ${infoHash} [${fileIndex}] for IP ${ip} from browser=${isBrowser}`);
const options = await getDefaultOptions();
const PM = new PremiumizeClient(apiKey, options);
return _getCachedLink(PM, infoHash, cachedEntryInfo, fileIndex, ip, isBrowser)
.catch(() => _resolve(PM, infoHash, cachedEntryInfo, fileIndex, ip, isBrowser))
.catch(error => {
if (error?.message?.includes('Account not premium.')) {
console.log(`Access denied to Premiumize ${infoHash} [${fileIndex}]`);
return StaticResponse.FAILED_ACCESS;
}
return Promise.reject(`Failed Premiumize adding torrent ${JSON.stringify(error)}`);
});
}
async function _resolve(PM, infoHash, cachedEntryInfo, fileIndex, ip, isBrowser) {
const torrent = await _createOrFindTorrent(PM, infoHash);
if (torrent && statusReady(torrent.status)) {
return _getCachedLink(PM, infoHash, cachedEntryInfo, fileIndex, ip, isBrowser);
} else if (torrent && statusDownloading(torrent.status)) {
console.log(`Downloading to Premiumize ${infoHash} [${fileIndex}]...`);
return StaticResponse.DOWNLOADING;
} else if (torrent && statusError(torrent.status)) {
console.log(`Retrying downloading to Premiumize ${infoHash} [${fileIndex}]...`);
return _retryCreateTorrent(PM, infoHash, cachedEntryInfo, fileIndex);
}
return Promise.reject(`Failed Premiumize adding torrent ${JSON.stringify(torrent)}`);
}
async function _getCachedLink(PM, infoHash, encodedFileName, fileIndex, ip, isBrowser) {
const cachedTorrent = await PM.transfer.directDownload(magnet.encode({ infoHash }), ip);
if (cachedTorrent?.content?.length) {
const targetFileName = decodeURIComponent(encodedFileName);
const videos = cachedTorrent.content.filter(file => isVideo(file.path));
const targetVideo = Number.isInteger(fileIndex)
? videos.find(video => sameFilename(video.path, targetFileName))
: videos.sort((a, b) => b.size - a.size)[0];
if (!targetVideo && videos.every(video => isArchive(video.path))) {
console.log(`Only Premiumize archive is available for [${infoHash}] ${fileIndex}`)
return StaticResponse.FAILED_RAR;
const cachedTorrent = await PM.transfer.directDownload(magnet.encode({ infoHash }), ip);
if (cachedTorrent?.content?.length) {
const targetFileName = decodeURIComponent(encodedFileName);
const videos = cachedTorrent.content.filter(file => isVideo(file.path)).sort((a, b) => b.size - a.size);
const targetVideo = Number.isInteger(fileIndex)
? videos.find(video => sameFilename(video.path, targetFileName))
: videos[0];
if (!targetVideo && videos.every(video => isArchive(video.path))) {
console.log(`Only Premiumize archive is available for [${infoHash}] ${fileIndex}`)
return StaticResponse.FAILED_RAR;
}
const streamLink = isBrowser && targetVideo.transcode_status === 'finished' && targetVideo.stream_link;
const unrestrictedLink = streamLink || targetVideo.link;
console.log(`Unrestricted Premiumize ${infoHash} [${fileIndex}] to ${unrestrictedLink}`);
return unrestrictedLink;
}
return Promise.reject('No cached entry found');
}
async function _createOrFindTorrent(PM, infoHash) {
return _findTorrent(PM, infoHash)
.catch(() => _createTorrent(PM, infoHash));
}
async function _findTorrent(PM, infoHash) {
const torrents = await PM.transfer.list().then(response => response.transfers);
const foundTorrents = torrents.filter(torrent => torrent.src.toLowerCase().includes(infoHash));
const nonFailedTorrent = foundTorrents.find(torrent => !statusError(torrent.statusCode));
const foundTorrent = nonFailedTorrent || foundTorrents[0];
return foundTorrent || Promise.reject('No recent torrent found');
}
async function _findInfoHash(PM, itemId) {
const torrents = await PM.transfer.list().then(response => response.transfers);
const foundTorrent = torrents.find(torrent => `${torrent.file_id}` === itemId || `${torrent.folder_id}` === itemId);
return foundTorrent?.src ? magnet.decode(foundTorrent.src).infoHash : undefined;
}
async function _createTorrent(PM, infoHash) {
const magnetLink = await getMagnetLink(infoHash);
return PM.transfer.create(magnetLink).then(() => _findTorrent(PM, infoHash));
}
async function _retryCreateTorrent(PM, infoHash, encodedFileName, fileIndex) {
const newTorrent = await _createTorrent(PM, infoHash).then(() => _findTorrent(PM, infoHash));
return newTorrent && statusReady(newTorrent.status)
? _getCachedLink(PM, infoHash, encodedFileName, fileIndex)
: StaticResponse.FAILED_DOWNLOAD;
}
export function toCommonError(error) {
if (error && error.message === 'Not logged in.') {
return BadTokenError;
}
return undefined;
}
function statusError(status) {
return ['deleted', 'error', 'timeout'].includes(status);
}
function statusDownloading(status) {
return ['waiting', 'queued', 'running'].includes(status);
}
function statusReady(status) {
return ['finished', 'seeding'].includes(status);
}
async function getDefaultOptions(ip) {
return { timeout: 5000 };
}


@@ -11,205 +11,205 @@ const PutioAPI = PutioClient.default;
const KEY = 'putio';
export async function getCachedStreams(streams, apiKey) {
return streams
.reduce((mochStreams, stream) => {
const streamTitleParts = stream.title.replace(/\n👤.*/s, '').split('\n');
const fileName = streamTitleParts[streamTitleParts.length - 1];
const fileIndex = streamTitleParts.length === 2 ? stream.fileIdx : null;
const encodedFileName = encodeURIComponent(fileName);
mochStreams[stream.infoHash] = {
url: `${apiKey}/${stream.infoHash}/${encodedFileName}/${fileIndex}`,
cached: false
};
return mochStreams;
}, {});
return streams
.reduce((mochStreams, stream) => {
const streamTitleParts = stream.title.replace(/\n👤.*/s, '').split('\n');
const fileName = streamTitleParts[streamTitleParts.length - 1];
const fileIndex = streamTitleParts.length === 2 ? stream.fileIdx : null;
const encodedFileName = encodeURIComponent(fileName);
mochStreams[`${stream.infoHash}@${stream.fileIdx}`] = {
url: `${apiKey}/${stream.infoHash}/${encodedFileName}/${fileIndex}`,
cached: false
};
return mochStreams;
}, {});
}
export async function getCatalog(apiKey, offset = 0) {
if (offset > 0) {
return [];
}
const Putio = createPutioAPI(apiKey)
return Putio.Files.Query(0)
.then(response => response?.body?.files)
.then(files => (files || [])
.map(file => ({
id: `${KEY}:${file.id}`,
type: Type.OTHER,
name: file.name
})));
}
export async function getItemMeta(itemId, apiKey) {
const Putio = createPutioAPI(apiKey)
const infoHash = await _findInfoHash(Putio, itemId)
return getFolderContents(Putio, itemId)
.then(contents => ({
id: `${KEY}:${itemId}`,
type: Type.OTHER,
name: contents.name,
infoHash: infoHash,
videos: contents
.map((file, index) => ({
id: `${KEY}:${file.id}:${index}`,
title: file.name,
released: new Date(file.created_at).toISOString(),
streams: [{ url: `${apiKey}/null/null/${file.id}` }]
}))
}))
}
async function getFolderContents(Putio, itemId, folderPrefix = '') {
return await Putio.Files.Query(itemId)
.then(response => response?.body)
.then(body => body?.files?.length ? body.files : [body?.parent].filter(x => x))
.then(contents => Promise.all(contents
.filter(content => content.file_type === 'FOLDER')
.map(content => getFolderContents(Putio, content.id, [folderPrefix, content.name].join('/'))))
.then(otherContents => otherContents.reduce((a, b) => a.concat(b), []))
.then(otherContents => contents
.filter(content => content.file_type === 'VIDEO')
.map(content => ({ ...content, name: [folderPrefix, content.name].join('/') }))
.concat(otherContents)));
}
export async function resolve({ ip, apiKey, infoHash, cachedEntryInfo, fileIndex }) {
console.log(`Unrestricting Putio ${infoHash} [${fileIndex}]`);
const Putio = createPutioAPI(apiKey)
return _resolve(Putio, infoHash, cachedEntryInfo, fileIndex)
.catch(error => {
if (error?.data?.status_code === 401) {
console.log(`Access denied to Putio ${infoHash} [${fileIndex}]`);
return StaticResponse.FAILED_ACCESS;
}
return Promise.reject(`Failed Putio adding torrent ${JSON.stringify(error.data || error)}`);
});
}
async function _resolve(Putio, infoHash, cachedEntryInfo, fileIndex) {
if (infoHash === 'null') {
return _unrestrictVideo(Putio, fileIndex);
}
const torrent = await _createOrFindTorrent(Putio, infoHash);
if (torrent && statusReady(torrent.status)) {
return _unrestrictLink(Putio, torrent, cachedEntryInfo, fileIndex);
} else if (torrent && statusDownloading(torrent.status)) {
console.log(`Downloading to Putio ${infoHash} [${fileIndex}]...`);
return StaticResponse.DOWNLOADING;
} else if (torrent && statusError(torrent.status)) {
console.log(`Retrying downloading to Putio ${infoHash} [${fileIndex}]...`);
return _retryCreateTorrent(Putio, infoHash, cachedEntryInfo, fileIndex);
}
return Promise.reject("Failed Putio adding torrent");
}
async function _createOrFindTorrent(Putio, infoHash) {
return _findTorrent(Putio, infoHash)
.catch(() => _createTorrent(Putio, infoHash));
}
async function _retryCreateTorrent(Putio, infoHash, encodedFileName, fileIndex) {
const newTorrent = await _createTorrent(Putio, infoHash);
return newTorrent && statusReady(newTorrent.status)
? _unrestrictLink(Putio, newTorrent, encodedFileName, fileIndex)
: StaticResponse.FAILED_DOWNLOAD;
}
async function _findTorrent(Putio, infoHash) {
const torrents = await Putio.Transfers.Query().then(response => response.data.transfers);
const foundTorrents = torrents.filter(torrent => torrent.source.toLowerCase().includes(infoHash));
const nonFailedTorrent = foundTorrents.find(torrent => !statusError(torrent.status));
const foundTorrent = nonFailedTorrent || foundTorrents[0];
// drop a stale transfer whose file no longer exists on Put.io, then fall through to re-creating it
if (foundTorrent && !foundTorrent.userfile_exists) {
return await Putio.Transfers.Cancel(foundTorrent.id).then(() => Promise.reject())
}
return foundTorrent || Promise.reject('No recent torrent found in Putio');
}
async function _findInfoHash(Putio, fileId) {
const torrents = await Putio.Transfers.Query().then(response => response?.data?.transfers);
const foundTorrent = torrents.find(torrent => `${torrent.file_id}` === fileId);
return foundTorrent?.source ? decode(foundTorrent.source).infoHash : undefined;
}
async function _createTorrent(Putio, infoHash) {
const magnetLink = await getMagnetLink(infoHash);
// Add the torrent, then poll while Put.io processes it before checking its status.
return Putio.Transfers.Add({ url: magnetLink })
.then(response => _getNewTorrent(Putio, response.data.transfer.id));
}
async function _getNewTorrent(Putio, torrentId, pollCounter = 0, pollRate = 2000, maxPollNumber = 15) {
return Putio.Transfers.Get(torrentId)
.then(response => response.data.transfer)
.then(torrent => statusProcessing(torrent.status) && pollCounter < maxPollNumber
? delay(pollRate).then(() => _getNewTorrent(Putio, torrentId, pollCounter + 1))
: torrent);
}
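delay is another shared helper not shown in this diff; below is a minimal version, plus the generic polling shape that _getNewTorrent here (and RealDebrid's _openTorrent later) instantiate. pollUntil is an illustrative name, not an addon function:
const delay = ms => new Promise(resolve => setTimeout(resolve, ms));
// poll a state fetcher until it is no longer pending or the poll budget runs out
async function pollUntil(fetchState, isPending, pollRate = 2000, maxPolls = 15) {
  let state = await fetchState();
  for (let polls = 0; isPending(state) && polls < maxPolls; polls++) {
    await delay(pollRate);
    state = await fetchState();
  }
  return state;
}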
async function _unrestrictLink(Putio, torrent, encodedFileName, fileIndex) {
const targetVideo = await _getTargetFile(Putio, torrent, encodedFileName, fileIndex);
return _unrestrictVideo(Putio, targetVideo.id);
}
async function _unrestrictVideo(Putio, videoId) {
const response = await Putio.File.GetStorageURL(videoId);
const downloadUrl = response.data.url
console.log(`Unrestricted Putio [${videoId}] to ${downloadUrl}`);
return downloadUrl;
}
async function _getTargetFile(Putio, torrent, encodedFileName, fileIndex) {
const targetFileName = decodeURIComponent(encodedFileName);
let targetFile;
let files = await _getFiles(Putio, torrent.file_id);
let videos = [];
while (!targetFile && files.length) {
const folders = files.filter(file => file.file_type === 'FOLDER');
videos = videos.concat(files.filter(file => isVideo(file.name)));
// when a specific file index is defined, search by filename;
// when it's not, find all videos and take the largest one
targetFile = Number.isInteger(fileIndex)
? videos.find(video => sameFilename(targetFileName, video.name))
: !folders.length && videos.sort((a, b) => b.size - a.size)[0];
files = !targetFile
? await Promise.all(folders.map(folder => _getFiles(Putio, folder.id)))
.then(results => results.reduce((a, b) => a.concat(b), []))
: [];
}
return targetFile || Promise.reject(`No target file found for Putio [${torrent.hash}] ${targetFileName}`);
while (!targetFile && files.length) {
const folders = files.filter(file => file.file_type === 'FOLDER');
videos = videos.concat(files.filter(file => isVideo(file.name))).sort((a, b) => b.size - a.size);
// when a specific file index is defined, search by filename;
// when it's not, find all videos and take the largest one
targetFile = Number.isInteger(fileIndex)
? videos.find(video => sameFilename(targetFileName, video.name))
: !folders.length && videos[0];
files = !targetFile
? await Promise.all(folders.map(folder => _getFiles(Putio, folder.id)))
.then(results => results.reduce((a, b) => a.concat(b), []))
: [];
}
return targetFile || Promise.reject(`No target file found for Putio [${torrent.hash}] ${targetFileName}`);
}
async function _getFiles(Putio, fileId) {
const response = await Putio.Files.Query(fileId)
.catch(error => Promise.reject({ ...error.data, path: error.request.path }));
return response.data.files.length
? response.data.files
: [response.data.parent];
}
function createPutioAPI(apiKey) {
const clientId = apiKey.replace(/@.*/, '');
const token = apiKey.replace(/.*@/, '');
const Putio = new PutioAPI({ clientID: clientId });
Putio.setToken(token);
return Putio;
}
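createPutioAPI implies the configured Put.io key is a composite clientId@token string; a hypothetical value shows how the two regexes split it:
const apiKey = '1234@0A1B2C3D4E5F';         // hypothetical clientId@token pair
const clientId = apiKey.replace(/@.*/, '');  // '1234'
const token = apiKey.replace(/.*@/, '');     // '0A1B2C3D4E5F'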
function statusError(status) {
return ['ERROR'].includes(status);
}
function statusDownloading(status) {
return ['WAITING', 'IN_QUEUE', 'DOWNLOADING'].includes(status);
}
function statusProcessing(status) {
return ['WAITING', 'IN_QUEUE', 'COMPLETING'].includes(status);
}
function statusReady(status) {
return ['COMPLETED', 'SEEDING'].includes(status);
}


@@ -15,385 +15,385 @@ const KEY = 'realdebrid';
const DEBRID_DOWNLOADS = 'Downloads';
export async function getCachedStreams(streams, apiKey) {
const hashes = streams.map(stream => stream.infoHash);
const available = await _getInstantAvailable(hashes, apiKey);
return available && streams
.reduce((mochStreams, stream) => {
const cachedEntry = available[stream.infoHash];
const cachedIds = _getCachedFileIds(stream.fileIdx, cachedEntry);
mochStreams[stream.infoHash] = {
url: `${apiKey}/${stream.infoHash}/null/${stream.fileIdx}`,
cached: !!cachedIds.length
};
return mochStreams;
}, {})
const hashes = streams.map(stream => stream.infoHash);
const available = await _getInstantAvailable(hashes, apiKey);
return available && streams
.reduce((mochStreams, stream) => {
const cachedEntry = available[stream.infoHash];
const cachedIds = _getCachedFileIds(stream.fileIdx, cachedEntry);
mochStreams[`${stream.infoHash}@${stream.fileIdx}`] = {
url: `${apiKey}/${stream.infoHash}/null/${stream.fileIdx}`,
cached: !!cachedIds.length
};
return mochStreams;
}, {})
}
async function _getInstantAvailable(hashes, apiKey, retries = 3, maxChunkSize = 150) {
const cachedResults = await getCachedAvailabilityResults(hashes);
const missingHashes = hashes.filter(infoHash => !cachedResults[infoHash]);
if (!missingHashes.length) {
return cachedResults
}
const options = await getDefaultOptions();
const RD = new RealDebridClient(apiKey, options);
const hashBatches = chunkArray(missingHashes, maxChunkSize)
return Promise.all(hashBatches.map(batch => RD.torrents.instantAvailability(batch)
.then(response => {
if (typeof response !== 'object') {
return Promise.reject(new Error('RD returned non JSON response: ' + response));
}
return processAvailabilityResults(response);
})))
.then(results => results.reduce((all, result) => Object.assign(all, result), {}))
.then(results => cacheAvailabilityResults(results))
.then(results => Object.assign(cachedResults, results))
.catch(error => {
if (toCommonError(error)) {
return Promise.reject(error);
}
if (!error && maxChunkSize !== 1) {
// sometimes, due to a large response size, RD responds with an empty body; reduce the chunk size to shrink the response
console.log(`Reducing chunk size for availability request: ${hashes[0]}`);
return _getInstantAvailable(hashes, apiKey, retries - 1, Math.ceil(maxChunkSize / 10));
}
if (retries > 0 && NON_BLACKLIST_ERRORS.some(v => error?.message?.includes(v))) {
return _getInstantAvailable(hashes, apiKey, retries - 1);
}
console.warn(`Failed RealDebrid cached [${hashes[0]}] torrent availability request:`, error.message);
return undefined;
});
}
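On an empty-body response the code above shrinks the batch roughly tenfold per retry; a quick trace of that arithmetic from the default maxChunkSize of 150:
let size = 150;
while (size !== 1) {
  size = Math.ceil(size / 10);
  console.log(size); // 15, 2, 1 — after which the empty-body branch stops shrinking
}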
function processAvailabilityResults(availabilityResults) {
const processedResults = {};
Object.entries(availabilityResults)
.forEach(([infoHash, hosterResults]) => processedResults[infoHash] = getCachedIds(hosterResults));
return processedResults;
}
function getCachedIds(hosterResults) {
if (!hosterResults || Array.isArray(hosterResults)) {
return [];
}
// if not all cached files are videos, then the torrent will be zipped to a rar
return Object.values(hosterResults)
.reduce((a, b) => a.concat(b), [])
.filter(cached => Object.keys(cached).length && Object.values(cached).every(file => isVideo(file.filename)))
.map(cached => Object.keys(cached))
.sort((a, b) => b.length - a.length)
.filter((cached, index, array) => index === 0 || cached.some(id => !array[0].includes(id)));
}
function _getCachedFileIds(fileIndex, cachedResults) {
if (!cachedResults || !Array.isArray(cachedResults)) {
return [];
}
const cachedIds = Number.isInteger(fileIndex)
? cachedResults.find(ids => ids.includes(`${fileIndex + 1}`))
: cachedResults[0];
return cachedIds || [];
}
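A worked example of the instant-availability shape that processAvailabilityResults, getCachedIds and _getCachedFileIds filter; the data below is hypothetical, modelled on the logic above rather than captured from the RealDebrid API, with a simplified isVideo stand-in:
const isVideo = name => /\.(mkv|mp4)$/i.test(name);
const hosterResults = { rd: [
  { 1: { filename: 'movie.mkv' } },                                 // all files are videos => kept
  { 1: { filename: 'movie.mkv' }, 2: { filename: 'extras.rar' } }   // contains a rar => dropped
] };
const cachedIds = Object.values(hosterResults)
  .reduce((a, b) => a.concat(b), [])
  .filter(cached => Object.keys(cached).length && Object.values(cached).every(file => isVideo(file.filename)))
  .map(cached => Object.keys(cached));
console.log(cachedIds); // [['1']] => _getCachedFileIds(0, [['1']]) matches fileIndex + 1 = '1'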
export async function getCatalog(apiKey, offset, ip) {
if (offset > 0) {
return [];
}
const options = await getDefaultOptions(ip);
const RD = new RealDebridClient(apiKey, options);
const downloadsMeta = {
id: `${KEY}:${DEBRID_DOWNLOADS}`,
type: Type.OTHER,
name: DEBRID_DOWNLOADS
};
const torrentMetas = await _getAllTorrents(RD)
.then(torrents => Array.isArray(torrents) ? torrents : [])
.then(torrents => torrents
.filter(torrent => torrent && statusReady(torrent.status))
.map(torrent => ({
id: `${KEY}:${torrent.id}`,
type: Type.OTHER,
name: torrent.filename
})));
return [downloadsMeta].concat(torrentMetas)
}
export async function getItemMeta(itemId, apiKey, ip) {
const options = await getDefaultOptions(ip);
const RD = new RealDebridClient(apiKey, options);
if (itemId === DEBRID_DOWNLOADS) {
const videos = await _getAllDownloads(RD)
.then(downloads => downloads
.map(download => ({
id: `${KEY}:${DEBRID_DOWNLOADS}:${download.id}`,
// infoHash: allTorrents
// .filter(torrent => (torrent.links || []).find(link => link === download.link))
// .map(torrent => torrent.hash.toLowerCase())[0],
title: download.filename,
released: new Date(download.generated).toISOString(),
streams: [{ url: download.download }]
})));
return {
id: `${KEY}:${DEBRID_DOWNLOADS}`,
type: Type.OTHER,
name: DEBRID_DOWNLOADS,
videos: videos
};
}
return _getTorrentInfo(RD, itemId)
.then(torrent => ({
id: `${KEY}:${torrent.id}`,
type: Type.OTHER,
name: torrent.filename,
infoHash: torrent.hash.toLowerCase(),
videos: torrent.files
.filter(file => file.selected)
.filter(file => isVideo(file.path))
.map((file, index) => ({
id: `${KEY}:${torrent.id}:${file.id}`,
title: file.path,
released: new Date(new Date(torrent.added).getTime() - index).toISOString(),
streams: [{ url: `${apiKey}/${torrent.hash.toLowerCase()}/null/${file.id - 1}` }]
}))
}))
}
async function _getAllTorrents(RD, page = 1) {
return RD.torrents.get(page - 1, page, CATALOG_PAGE_SIZE)
.then(torrents => torrents && torrents.length === CATALOG_PAGE_SIZE && page < CATALOG_MAX_PAGE
? _getAllTorrents(RD, page + 1)
.then(nextTorrents => torrents.concat(nextTorrents))
.catch(() => torrents)
: torrents)
}
async function _getAllDownloads(RD, page = 1) {
return RD.downloads.get(page - 1, page, CATALOG_PAGE_SIZE);
}
export async function resolve({ ip, isBrowser, apiKey, infoHash, fileIndex }) {
console.log(`Unrestricting RealDebrid ${infoHash} [${fileIndex}]`);
const options = await getDefaultOptions(ip);
const RD = new RealDebridClient(apiKey, options);
const cachedFileIds = await _resolveCachedFileIds(infoHash, fileIndex, apiKey);
return _resolve(RD, infoHash, cachedFileIds, fileIndex, isBrowser)
.catch(error => {
if (accessDeniedError(error)) {
console.log(`Access denied to RealDebrid ${infoHash} [${fileIndex}]`);
return StaticResponse.FAILED_ACCESS;
}
if (infringingFile(error)) {
console.log(`Infringing file removed from RealDebrid ${infoHash} [${fileIndex}]`);
return StaticResponse.FAILED_INFRINGEMENT;
}
return Promise.reject(`Failed RealDebrid adding torrent ${JSON.stringify(error)}`);
});
}
async function _resolveCachedFileIds(infoHash, fileIndex, apiKey) {
const available = await _getInstantAvailable([infoHash], apiKey);
const cachedEntry = available?.[infoHash];
const cachedIds = _getCachedFileIds(fileIndex, cachedEntry);
return cachedIds?.join(',');
}
async function _resolve(RD, infoHash, cachedFileIds, fileIndex, isBrowser) {
const torrentId = await _createOrFindTorrentId(RD, infoHash, cachedFileIds, fileIndex);
const torrent = await _getTorrentInfo(RD, torrentId);
if (torrent && statusReady(torrent.status)) {
return _unrestrictLink(RD, torrent, fileIndex, isBrowser);
} else if (torrent && statusDownloading(torrent.status)) {
console.log(`Downloading to RealDebrid ${infoHash} [${fileIndex}]...`);
return StaticResponse.DOWNLOADING;
} else if (torrent && statusMagnetError(torrent.status)) {
console.log(`Failed RealDebrid opening torrent ${infoHash} [${fileIndex}] due to magnet error`);
return StaticResponse.FAILED_OPENING;
} else if (torrent && statusError(torrent.status)) {
return _retryCreateTorrent(RD, infoHash, fileIndex);
} else if (torrent && (statusWaitingSelection(torrent.status) || statusOpening(torrent.status))) {
console.log(`Trying to select files on RealDebrid ${infoHash} [${fileIndex}]...`);
return _selectTorrentFiles(RD, torrent)
.then(() => {
console.log(`Downloading to RealDebrid ${infoHash} [${fileIndex}]...`);
return StaticResponse.DOWNLOADING
})
.catch(error => {
console.log(`Failed RealDebrid opening torrent ${infoHash} [${fileIndex}]:`, error);
return StaticResponse.FAILED_OPENING;
});
}
return Promise.reject(`Failed RealDebrid adding torrent ${JSON.stringify(torrent)}`);
}
async function _createOrFindTorrentId(RD, infoHash, cachedFileIds, fileIndex) {
return _findTorrent(RD, infoHash, fileIndex)
.catch(() => _createTorrentId(RD, infoHash, cachedFileIds));
}
async function _findTorrent(RD, infoHash, fileIndex) {
const torrents = await RD.torrents.get(0, 1) || [];
const foundTorrents = torrents
.filter(torrent => torrent.hash.toLowerCase() === infoHash)
.filter(torrent => !statusError(torrent.status));
const foundTorrent = await _findBestFitTorrent(RD, foundTorrents, fileIndex);
return foundTorrent?.id || Promise.reject('No recent torrent found');
}
async function _findBestFitTorrent(RD, torrents, fileIndex) {
if (torrents.length === 1) {
return torrents[0];
}
const torrentInfos = await Promise.all(torrents.map(torrent => _getTorrentInfo(RD, torrent.id)));
const bestFitTorrents = torrentInfos
.filter(torrent => torrent.files.find(f => f.id === fileIndex + 1 && f.selected))
.sort((a, b) => b.links.length - a.links.length);
return bestFitTorrents[0] || torrents[0];
}
async function _getTorrentInfo(RD, torrentId) {
if (!torrentId || typeof torrentId === 'object') {
return torrentId || Promise.reject('No RealDebrid torrentId provided')
}
return RD.torrents.info(torrentId);
}
async function _createTorrentId(RD, infoHash, cachedFileIds) {
const magnetLink = await getMagnetLink(infoHash);
const addedMagnet = await RD.torrents.addMagnet(magnetLink);
if (cachedFileIds && !['null', 'undefined'].includes(cachedFileIds)) {
await RD.torrents.selectFiles(addedMagnet.id, cachedFileIds);
}
return addedMagnet.id;
}
async function _recreateTorrentId(RD, infoHash, fileIndex) {
const newTorrentId = await _createTorrentId(RD, infoHash);
await _selectTorrentFiles(RD, { id: newTorrentId }, fileIndex);
return newTorrentId;
}
async function _retryCreateTorrent(RD, infoHash, fileIndex) {
console.log(`Retry failed download in RealDebrid ${infoHash} [${fileIndex}]...`);
const newTorrentId = await _recreateTorrentId(RD, infoHash, fileIndex);
const newTorrent = await _getTorrentInfo(RD, newTorrentId);
return newTorrent && statusReady(newTorrent.status)
? _unrestrictLink(RD, newTorrent, fileIndex)
: StaticResponse.FAILED_DOWNLOAD;
}
async function _selectTorrentFiles(RD, torrent, fileIndex) {
torrent = statusWaitingSelection(torrent.status) ? torrent : await _openTorrent(RD, torrent.id);
if (torrent?.files && statusWaitingSelection(torrent.status)) {
const videoFileIds = Number.isInteger(fileIndex) ? `${fileIndex + 1}` : torrent.files
.filter(file => isVideo(file.path))
.filter(file => file.bytes > MIN_SIZE)
.map(file => file.id)
.join(',');
return RD.torrents.selectFiles(torrent.id, videoFileIds);
}
return Promise.reject('Failed RealDebrid torrent file selection')
}
async function _openTorrent(RD, torrentId, pollCounter = 0, pollRate = 2000, maxPollNumber = 15) {
return _getTorrentInfo(RD, torrentId)
.then(torrent => torrent && statusOpening(torrent.status) && pollCounter < maxPollNumber
? delay(pollRate).then(() => _openTorrent(RD, torrentId, pollCounter + 1))
: torrent);
}
async function _unrestrictLink(RD, torrent, fileIndex, isBrowser) {
const targetFile = torrent.files.find(file => file.id === fileIndex + 1)
|| torrent.files.filter(file => file.selected).sort((a, b) => b.bytes - a.bytes)[0];
if (!targetFile.selected) {
console.log(`Target RealDebrid file is not downloaded: ${JSON.stringify(targetFile)}`);
await _recreateTorrentId(RD, torrent.hash.toLowerCase(), fileIndex);
return StaticResponse.DOWNLOADING;
}
const selectedFiles = torrent.files.filter(file => file.selected);
const fileLink = torrent.links.length === 1
? torrent.links[0]
: torrent.links[selectedFiles.indexOf(targetFile)];
if (!fileLink?.length) {
console.log(`No RealDebrid links found for ${torrent.hash} [${fileIndex}]`);
return _retryCreateTorrent(RD, torrent.hash, fileIndex)
}
return _unrestrictFileLink(RD, fileLink, torrent, fileIndex, isBrowser);
}
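The link lookup above relies on RealDebrid returning one link per selected file, in selection order; a hypothetical torrent makes the indexing concrete (the example URLs are not real RD links):
const torrent = {
  files: [
    { id: 1, selected: 0, bytes: 10, path: 'sample.txt' },
    { id: 2, selected: 1, bytes: 700, path: 'ep1.mkv' },
    { id: 3, selected: 1, bytes: 800, path: 'ep2.mkv' }
  ],
  links: ['https://real-debrid.example/ep1', 'https://real-debrid.example/ep2']
};
const fileIndex = 2; // zero-based, so file id 3
const targetFile = torrent.files.find(file => file.id === fileIndex + 1);
const selectedFiles = torrent.files.filter(file => file.selected);
console.log(torrent.links[selectedFiles.indexOf(targetFile)]); // second link => ep2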
async function _unrestrictFileLink(RD, fileLink, torrent, fileIndex, isBrowser) {
return RD.unrestrict.link(fileLink)
.then(response => {
if (isArchive(response.download)) {
if (torrent.files.filter(file => file.selected).length > 1) {
return _retryCreateTorrent(RD, torrent.hash, fileIndex)
}
return StaticResponse.FAILED_RAR;
}
// if (isBrowser && response.streamable) {
// return RD.streaming.transcode(response.id)
// .then(streamResponse => streamResponse.apple.full)
// }
return response.download;
})
.then(unrestrictedLink => {
console.log(`Unrestricted RealDebrid ${torrent.hash} [${fileIndex}] to ${unrestrictedLink}`);
return unrestrictedLink;
})
.catch(error => {
if (error.code === 19) {
return _retryCreateTorrent(RD, torrent.hash.toLowerCase(), fileIndex);
}
return Promise.reject(error);
});
}
export function toCommonError(error) {
if (error && error.code === 8) {
return BadTokenError;
}
if (error && accessDeniedError(error)) {
return AccessDeniedError;
}
return undefined;
}
function statusError(status) {
return ['error', 'magnet_error'].includes(status);
}
function statusMagnetError(status) {
return status === 'magnet_error';
}
function statusOpening(status) {
return status === 'magnet_conversion';
}
function statusWaitingSelection(status) {
return status === 'waiting_files_selection';
}
function statusDownloading(status) {
return ['downloading', 'uploading', 'queued'].includes(status);
}
function statusReady(status) {
return ['downloaded', 'dead'].includes(status);
}
function accessDeniedError(error) {
return [9, 20].includes(error?.code);
}
function infringingFile(error) {
return error && error.code === 35;
}
async function getDefaultOptions(ip) {
return { ip, timeout: 10000 };
return { ip, timeout: 15000 };
}


@@ -17,7 +17,6 @@
<PackageReference Include="Microsoft.Extensions.Http" Version="8.0.0" />
<PackageReference Include="Microsoft.Extensions.Http.Polly" Version="8.0.3" />
<PackageReference Include="Polly" Version="8.3.1" />
<PackageReference Include="PromKnight.ParseTorrentTitle" Version="1.0.4" />
<PackageReference Include="Serilog" Version="3.1.1" />
<PackageReference Include="Serilog.AspNetCore" Version="8.0.1" />
<PackageReference Include="Serilog.Sinks.Console" Version="5.0.1" />
@@ -29,10 +28,30 @@
<None Include="Configuration\logging.json">
<CopyToOutputDirectory>Always</CopyToOutputDirectory>
</None>
<None Update="requirements.txt">
<CopyToOutputDirectory>Always</CopyToOutputDirectory>
</None>
<Content Remove="eng\**" />
<None Remove="eng\**" />
</ItemGroup>
<ItemGroup Condition="'$(Configuration)' == 'Debug'">
<Content Remove="python\**" />
<None Include="python\**">
<CopyToOutputDirectory>Always</CopyToOutputDirectory>
</None>
</ItemGroup>
<ItemGroup>
<ProjectReference Include="..\shared\SharedContracts.csproj" />
</ItemGroup>
<ItemGroup>
<Compile Remove="eng\**" />
</ItemGroup>
<ItemGroup>
<EmbeddedResource Remove="eng\**" />
</ItemGroup>
</Project>


@@ -6,6 +6,12 @@ Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "SharedContracts", "..\share
EndProject
Project("{2150E333-8FDC-42A3-9474-1A3956D46DE8}") = "shared", "shared", "{2C0A0F53-28E6-404F-9EFE-DADFBEF8338B}"
EndProject
Project("{2150E333-8FDC-42A3-9474-1A3956D46DE8}") = "eng", "eng", "{72A042C3-B4F3-45C5-AC20-041FE8F41EFC}"
ProjectSection(SolutionItems) = preProject
eng\install-python-reqs.ps1 = eng\install-python-reqs.ps1
eng\install-python-reqs.sh = eng\install-python-reqs.sh
EndProjectSection
EndProject
Global
GlobalSection(SolutionConfigurationPlatforms) = preSolution
Debug|Any CPU = Debug|Any CPU


@@ -9,12 +9,23 @@ RUN dotnet restore -a $TARGETARCH
RUN dotnet publish -c Release --no-restore -o /src/out -a $TARGETARCH
FROM mcr.microsoft.com/dotnet/aspnet:8.0-alpine
FROM mcr.microsoft.com/dotnet/aspnet:8.0-alpine3.19
WORKDIR /app
ENV PYTHONUNBUFFERED=1
RUN apk add --update --no-cache python3=~3.11.8-r0 py3-pip && ln -sf python3 /usr/bin/python
COPY --from=build /src/out .
RUN rm -rf /app/python && mkdir -p /app/python
RUN pip3 install -r /app/requirements.txt -t /app/python
RUN addgroup -S debrid && adduser -S -G debrid debrid
USER debrid
HEALTHCHECK --interval=30s --timeout=30s --start-period=5s --retries=3 \
CMD pgrep -f dotnet || exit 1
ENV PYTHONNET_PYDLL=/usr/lib/libpython3.11.so.1.0
ENTRYPOINT ["dotnet", "DebridCollector.dll"]


@@ -1,5 +1,3 @@
using DebridCollector.Features.Configuration;
namespace DebridCollector.Extensions;
public static class ServiceCollectionExtensions
@@ -17,7 +15,8 @@ public static class ServiceCollectionExtensions
var serviceConfiguration = services.LoadConfigurationFromEnv<DebridCollectorConfiguration>();
services.AddRealDebridClient(serviceConfiguration);
services.AddSingleton<IParseTorrentTitle, ParseTorrentTitle>();
services.RegisterPythonEngine();
services.AddSingleton<IRankTorrentName, RankTorrentName>();
services.AddHostedService<DebridRequestProcessor>();
return services;


@@ -1,6 +1,4 @@
using DebridCollector.Features.Configuration;
namespace DebridCollector.Features.Debrid;
public static class ServiceCollectionExtensions
{


@@ -3,10 +3,11 @@ namespace DebridCollector.Features.Worker;
public static class DebridMetaToTorrentMeta
{
public static IReadOnlyList<TorrentFile> MapMetadataToFilesCollection(
IParseTorrentTitle torrentTitle,
IRankTorrentName rankTorrentName,
Torrent torrent,
string ImdbId,
FileDataDictionary Metadata)
FileDataDictionary Metadata,
ILogger<WriteMetadataConsumer> logger)
{
try
{
@@ -26,23 +27,30 @@ public static class DebridMetaToTorrentMeta
Size = metadataEntry.Value.Filesize.GetValueOrDefault(),
};
var parsedTitle = torrentTitle.Parse(file.Title);
var parsedTitle = rankTorrentName.Parse(file.Title, false);
file.ImdbSeason = parsedTitle.Seasons.FirstOrDefault();
file.ImdbEpisode = parsedTitle.Episodes.FirstOrDefault();
if (!parsedTitle.Success)
{
logger.LogWarning("Failed to parse title {Title} for metadata mapping", file.Title);
continue;
}
file.ImdbSeason = parsedTitle.Response?.Season?.FirstOrDefault() ?? 0;
file.ImdbEpisode = parsedTitle.Response?.Episode?.FirstOrDefault() ?? 0;
files.Add(file);
}
return files;
}
catch (Exception)
catch (Exception ex)
{
logger.LogWarning("Failed to map metadata to files collection: {Exception}", ex.Message);
return [];
}
}
public static async Task<IReadOnlyList<SubtitleFile>> MapMetadataToSubtitlesCollection(IDataStorage storage, string InfoHash, FileDataDictionary Metadata)
public static async Task<IReadOnlyList<SubtitleFile>> MapMetadataToSubtitlesCollection(IDataStorage storage, string InfoHash, FileDataDictionary Metadata, ILogger<WriteMetadataConsumer> logger)
{
try
{
@@ -74,8 +82,9 @@ public static class DebridMetaToTorrentMeta
return files;
}
catch (Exception)
catch (Exception ex)
{
logger.LogWarning("Failed to map metadata to subtitles collection: {Exception}", ex.Message);
return [];
}
}


@@ -53,6 +53,12 @@ public class InfohashMetadataSagaStateMachine : MassTransitStateMachine<Infohash
.Then(
context =>
{
if (!context.Message.WithFiles)
{
logger.LogInformation("No files written for torrent {InfoHash} in Saga {SagaId}", context.Saga.Torrent.InfoHash, context.Saga.CorrelationId);
return;
}
logger.LogInformation("Metadata Written for torrent {InfoHash} in Saga {SagaId}", context.Saga.Torrent.InfoHash, context.Saga.CorrelationId);
})
.TransitionTo(Completed)

View File

@@ -1,22 +1,22 @@
namespace DebridCollector.Features.Worker;
[EntityName("perform-metadata-request")]
[EntityName("perform-metadata-request-debrid-collector")]
public record PerformMetadataRequest(Guid CorrelationId, string InfoHash) : CorrelatedBy<Guid>;
[EntityName("torrent-metadata-response")]
[EntityName("torrent-metadata-response-debrid-collector")]
public record GotMetadata(TorrentMetadataResponse Metadata) : CorrelatedBy<Guid>
{
public Guid CorrelationId { get; init; } = Metadata.CorrelationId;
}
[EntityName("write-metadata")]
[EntityName("write-metadata-debrid-collector")]
public record WriteMetadata(Torrent Torrent, TorrentMetadataResponse Metadata, string ImdbId) : CorrelatedBy<Guid>
{
public Guid CorrelationId { get; init; } = Metadata.CorrelationId;
}
[EntityName("metadata-written")]
public record MetadataWritten(TorrentMetadataResponse Metadata) : CorrelatedBy<Guid>
[EntityName("metadata-written-debrid-colloctor")]
public record MetadataWritten(TorrentMetadataResponse Metadata, bool WithFiles) : CorrelatedBy<Guid>
{
public Guid CorrelationId { get; init; } = Metadata.CorrelationId;
}
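
For context on the renames above: MassTransit derives the broker exchange for a message type from its EntityName, so two contracts that share an entity name bind to the same exchange even when the .NET types differ. A hedged sketch of the collision the per-collector suffixes avoid (these two records are hypothetical):

// Both records bind to one "perform-metadata-request" exchange, so each
// collector's consumers can receive the other's publishes.
[EntityName("perform-metadata-request")]
public record DebridRequest(Guid CorrelationId) : CorrelatedBy<Guid>;

[EntityName("perform-metadata-request")]
public record QbitRequest(Guid CorrelationId) : CorrelatedBy<Guid>;

// Suffixing per collector keeps the broker topology disjoint:
// [EntityName("perform-metadata-request-debrid-collector")]
// [EntityName("perform-metadata-request-qbit-collector")]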

View File

@@ -1,25 +1,28 @@
namespace DebridCollector.Features.Worker;
public class WriteMetadataConsumer(IParseTorrentTitle parseTorrentTitle, IDataStorage dataStorage) : IConsumer<WriteMetadata>
public class WriteMetadataConsumer(IRankTorrentName rankTorrentName, IDataStorage dataStorage, ILogger<WriteMetadataConsumer> logger) : IConsumer<WriteMetadata>
{
public async Task Consume(ConsumeContext<WriteMetadata> context)
{
var request = context.Message;
var torrentFiles = DebridMetaToTorrentMeta.MapMetadataToFilesCollection(parseTorrentTitle, request.Torrent, request.ImdbId, request.Metadata.Metadata);
var torrentFiles = DebridMetaToTorrentMeta.MapMetadataToFilesCollection(rankTorrentName, request.Torrent, request.ImdbId, request.Metadata.Metadata, logger);
if (torrentFiles.Any())
if (!torrentFiles.Any())
{
await dataStorage.InsertFiles(torrentFiles);
var subtitles = await DebridMetaToTorrentMeta.MapMetadataToSubtitlesCollection(dataStorage, request.Torrent.InfoHash, request.Metadata.Metadata);
if (subtitles.Any())
{
await dataStorage.InsertSubtitles(subtitles);
}
await context.Publish(new MetadataWritten(request.Metadata, false));
return;
}
await context.Publish(new MetadataWritten(request.Metadata));
await dataStorage.InsertFiles(torrentFiles);
var subtitles = await DebridMetaToTorrentMeta.MapMetadataToSubtitlesCollection(dataStorage, request.Torrent.InfoHash, request.Metadata.Metadata, logger);
if (subtitles.Any())
{
await dataStorage.InsertSubtitles(subtitles);
}
await context.Publish(new MetadataWritten(request.Metadata, true));
}
}

View File

@@ -4,17 +4,18 @@ global using System.Text.Json;
global using System.Text.Json.Serialization;
global using System.Threading.Channels;
global using DebridCollector.Extensions;
global using DebridCollector.Features.Configuration;
global using DebridCollector.Features.Debrid;
global using DebridCollector.Features.Worker;
global using MassTransit;
global using MassTransit.Mediator;
global using Microsoft.AspNetCore.Builder;
global using Microsoft.Extensions.DependencyInjection;
global using Polly;
global using Polly.Extensions.Http;
global using PromKnight.ParseTorrentTitle;
global using SharedContracts.Configuration;
global using SharedContracts.Dapper;
global using SharedContracts.Extensions;
global using SharedContracts.Models;
global using SharedContracts.Python;
global using SharedContracts.Python.RTN;
global using SharedContracts.Requests;

View File

@@ -0,0 +1,2 @@
mkdir -p ../python
python -m pip install -r ../requirements.txt -t ../python/

View File

@@ -0,0 +1,5 @@
#!/bin/bash
rm -rf ../python
mkdir -p ../python
python3 -m pip install -r ../requirements.txt -t ../python/

View File

@@ -0,0 +1 @@
rank-torrent-name==0.2.5

View File

@@ -0,0 +1,19 @@
-- Remove the old search function
DROP FUNCTION IF EXISTS search_imdb_meta(TEXT, TEXT, INT, INT);
-- Add the new search function that allows for searching by year with a plus/minus one year range
CREATE OR REPLACE FUNCTION search_imdb_meta(search_term TEXT, category_param TEXT DEFAULT NULL, year_param INT DEFAULT NULL, limit_param INT DEFAULT 10, similarity_threshold REAL DEFAULT 0.95)
RETURNS TABLE(imdb_id character varying(16), title character varying(1000), category character varying(50), year INT, score REAL) AS $$
BEGIN
SET pg_trgm.similarity_threshold = similarity_threshold;
RETURN QUERY
SELECT imdb_metadata.imdb_id, imdb_metadata.title, imdb_metadata.category, imdb_metadata.year, similarity(imdb_metadata.title, search_term) as score
FROM imdb_metadata
WHERE (imdb_metadata.title % search_term)
AND (imdb_metadata.adult = FALSE)
AND (category_param IS NULL OR imdb_metadata.category = category_param)
AND (year_param IS NULL OR imdb_metadata.year BETWEEN year_param - 1 AND year_param + 1)
ORDER BY score DESC
LIMIT limit_param;
END; $$
LANGUAGE plpgsql;

View File

@@ -0,0 +1,19 @@
-- Remove the old search function
DROP FUNCTION IF EXISTS search_imdb_meta(TEXT, TEXT, INT, INT);
-- Add the new search function that allows for searching by year with a plus/minus one year range
CREATE OR REPLACE FUNCTION search_imdb_meta(search_term TEXT, category_param TEXT DEFAULT NULL, year_param INT DEFAULT NULL, limit_param INT DEFAULT 10, similarity_threshold REAL DEFAULT 0.95)
RETURNS TABLE(imdb_id character varying(16), title character varying(1000), category character varying(50), year INT, score REAL) AS $$
BEGIN
EXECUTE format('SET pg_trgm.similarity_threshold = %L', similarity_threshold);
RETURN QUERY
SELECT imdb_metadata.imdb_id, imdb_metadata.title, imdb_metadata.category, imdb_metadata.year, similarity(imdb_metadata.title, search_term) as score
FROM imdb_metadata
WHERE (imdb_metadata.title % search_term)
AND (imdb_metadata.adult = FALSE)
AND (category_param IS NULL OR imdb_metadata.category = category_param)
AND (year_param IS NULL OR imdb_metadata.year BETWEEN year_param - 1 AND year_param + 1)
ORDER BY score DESC
LIMIT limit_param;
END; $$
LANGUAGE plpgsql;

View File

@@ -108,14 +108,18 @@ public partial class DebridMediaManagerCrawler(
return null;
}
var parsedTorrent = rankTorrentName.Parse(torrentTitle.CleanTorrentTitleForImdb());
var parsedTorrent = rankTorrentName.Parse(torrentTitle);
if (!parsedTorrent.Success)
{
return null;
}
var (cached, cachedResult) = await CheckIfInCacheAndReturn(parsedTorrent.Response.ParsedTitle);
var torrentType = parsedTorrent.Response.IsMovie ? "movie" : "tvSeries";
var cacheKey = GetCacheKey(torrentType, parsedTorrent.Response.ParsedTitle, parsedTorrent.Response.Year);
var (cached, cachedResult) = await CheckIfInCacheAndReturn(cacheKey);
if (cached)
{
@@ -124,14 +128,14 @@ public partial class DebridMediaManagerCrawler(
}
int? year = parsedTorrent.Response.Year != 0 ? parsedTorrent.Response.Year : null;
var imdbEntry = await Storage.FindImdbMetadata(parsedTorrent.Response.ParsedTitle, parsedTorrent.Response.IsMovie ? "movies" : "tv", year);
var imdbEntry = await Storage.FindImdbMetadata(parsedTorrent.Response.ParsedTitle, torrentType, year);
if (imdbEntry is null)
{
return null;
}
await AddToCache(parsedTorrent.Response.ParsedTitle.ToLowerInvariant(), imdbEntry);
await AddToCache(cacheKey, imdbEntry);
logger.LogInformation("[{ImdbId}] Found best match for {Title}: {BestMatch} with score {Score}", imdbEntry.ImdbId, parsedTorrent.Response.ParsedTitle, imdbEntry.Title, imdbEntry.Score);
@@ -148,27 +152,24 @@ public partial class DebridMediaManagerCrawler(
InfoHash = hashElement.ToString(),
Seeders = 0,
Leechers = 0,
Category = parsedTorrent.Response.IsMovie switch
{
true => "movies",
false => "tv",
},
Category = AssignCategory(result),
RtnResponse = parsedTorrent.Response.ToJson(),
};
private Task AddToCache(string lowerCaseTitle, ImdbEntry best)
private Task AddToCache(string cacheKey, ImdbEntry best)
{
var cacheOptions = new DistributedCacheEntryOptions
{
AbsoluteExpirationRelativeToNow = TimeSpan.FromDays(1),
};
return cache.SetStringAsync(lowerCaseTitle, JsonSerializer.Serialize(best), cacheOptions);
return cache.SetStringAsync(cacheKey, JsonSerializer.Serialize(best), cacheOptions);
}
private async Task<(bool Success, ImdbEntry? Entry)> CheckIfInCacheAndReturn(string title)
private async Task<(bool Success, ImdbEntry? Entry)> CheckIfInCacheAndReturn(string cacheKey)
{
var cachedImdbId = await cache.GetStringAsync(title.ToLowerInvariant());
var cachedImdbId = await cache.GetStringAsync(cacheKey);
if (!string.IsNullOrEmpty(cachedImdbId))
{
@@ -208,4 +209,14 @@ public partial class DebridMediaManagerCrawler(
return (pageIngested, name);
}
private static string AssignCategory(ImdbEntry entry) =>
entry.Category.ToLower() switch
{
var category when string.Equals(category, "movie", StringComparison.OrdinalIgnoreCase) => "movies",
var category when string.Equals(category, "tvSeries", StringComparison.OrdinalIgnoreCase) => "tv",
_ => "unknown",
};
private static string GetCacheKey(string category, string title, int year) => $"{category.ToLowerInvariant()}:{year}:{title.ToLowerInvariant()}";
}
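
The composite cache key above is what lets identically titled releases coexist in the cache; the old title-only key could hand back a cached movie entry for a series lookup (or the wrong year's film). Illustrative values:

// Old key: "dune" -- one cache slot for every Dune.
// New key: "{category}:{year}:{title}", all lower-cased by GetCacheKey:
var film2021 = GetCacheKey("movie", "Dune", 2021);    // "movie:2021:dune"
var film1984 = GetCacheKey("movie", "Dune", 1984);    // "movie:1984:dune"
var series = GetCacheKey("tvSeries", "Dune", 2000);   // "tvseries:2000:dune"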

View File

@@ -1 +1 @@
rank-torrent-name==0.1.9
rank-torrent-name==0.2.5

View File

@@ -9,12 +9,23 @@ RUN dotnet restore -a $TARGETARCH
RUN dotnet publish -c Release --no-restore -o /src/out -a $TARGETARCH
FROM mcr.microsoft.com/dotnet/aspnet:8.0-alpine
FROM mcr.microsoft.com/dotnet/aspnet:8.0-alpine3.19
WORKDIR /app
ENV PYTHONUNBUFFERED=1
RUN apk add --update --no-cache python3=~3.11.8-r0 py3-pip && ln -sf python3 /usr/bin/python
COPY --from=build /src/out .
RUN rm -rf /app/python && mkdir -p /app/python
RUN pip3 install -r /app/requirements.txt -t /app/python
RUN addgroup -S qbit && adduser -S -G qbit qbit
USER qbit
HEALTHCHECK --interval=30s --timeout=30s --start-period=5s --retries=3 \
CMD pgrep -f dotnet || exit 1
ENV PYTHONNET_PYDLL=/usr/lib/libpython3.11.so.1.0
ENTRYPOINT ["dotnet", "QBitCollector.dll"]

View File

@@ -13,11 +13,13 @@ public static class ServiceCollectionExtensions
internal static IServiceCollection AddServiceConfiguration(this IServiceCollection services)
{
services.AddQBitTorrentClient();
services.AddSingleton<IParseTorrentTitle, ParseTorrentTitle>();
services.RegisterPythonEngine();
services.AddSingleton<IRankTorrentName, RankTorrentName>();
services.AddSingleton<QbitRequestProcessor>();
services.AddHttpClient();
services.AddSingleton<ITrackersService, TrackersService>();
services.AddHostedService<TrackersBackgroundService>();
services.AddHostedService<HousekeepingBackgroundService>();
return services;
}

View File

@@ -0,0 +1,52 @@
namespace QBitCollector.Features.Qbit;
public class HousekeepingBackgroundService(IQBittorrentClient client, ILogger<HousekeepingBackgroundService> logger) : BackgroundService
{
protected override async Task ExecuteAsync(CancellationToken stoppingToken)
{
logger.LogInformation("Service is Running.");
await DoWork();
using PeriodicTimer timer = new(TimeSpan.FromMinutes(2));
try
{
while (await timer.WaitForNextTickAsync(stoppingToken))
{
await DoWork();
}
}
catch (OperationCanceledException)
{
logger.LogInformation("Service stopping.");
}
}
private async Task DoWork()
{
try
{
logger.LogInformation("Cleaning Stale Entries in Qbit...");
var torrents = await client.GetTorrentListAsync();
foreach (var torrentInfo in torrents)
{
if (torrentInfo.AddedOn >= DateTimeOffset.UtcNow.AddMinutes(-1))
{
continue;
}
logger.LogInformation("Torrent [{InfoHash}] identified as stale because it was added at {AddedOn}", torrentInfo.Hash, torrentInfo.AddedOn);
await client.DeleteAsync(new[] {torrentInfo.Hash}, deleteDownloadedData: true);
logger.LogInformation("Cleaned up stale torrent: [{InfoHash}]", torrentInfo.Hash);
}
}
catch (Exception e)
{
logger.LogError(e, "Error cleaning up stale torrents this interval.");
}
}
}

View File

@@ -3,10 +3,11 @@ namespace QBitCollector.Features.Worker;
public static class QbitMetaToTorrentMeta
{
public static IReadOnlyList<TorrentFile> MapMetadataToFilesCollection(
IParseTorrentTitle torrentTitle,
IRankTorrentName rankTorrentName,
Torrent torrent,
string ImdbId,
IReadOnlyList<TorrentContent> Metadata)
IReadOnlyList<TorrentContent> Metadata,
ILogger<WriteQbitMetadataConsumer> logger)
{
try
{
@@ -24,23 +25,31 @@ public static class QbitMetaToTorrentMeta
Size = metadataEntry.Size,
};
var parsedTitle = torrentTitle.Parse(file.Title);
var parsedTitle = rankTorrentName.Parse(file.Title, false);
if (!parsedTitle.Success)
{
logger.LogWarning("Failed to parse title {Title} for metadata mapping", file.Title);
continue;
}
file.ImdbSeason = parsedTitle.Seasons.FirstOrDefault();
file.ImdbEpisode = parsedTitle.Episodes.FirstOrDefault();
file.ImdbSeason = parsedTitle.Response?.Season?.FirstOrDefault() ?? 0;
file.ImdbEpisode = parsedTitle.Response?.Episode?.FirstOrDefault() ?? 0;
files.Add(file);
}
return files;
}
catch (Exception)
catch (Exception ex)
{
logger.LogWarning("Failed to map metadata to files collection: {Exception}", ex.Message);
return [];
}
}
public static async Task<IReadOnlyList<SubtitleFile>> MapMetadataToSubtitlesCollection(IDataStorage storage, string InfoHash, IReadOnlyList<TorrentContent> Metadata)
public static async Task<IReadOnlyList<SubtitleFile>> MapMetadataToSubtitlesCollection(IDataStorage storage, string InfoHash, IReadOnlyList<TorrentContent> Metadata,
ILogger<WriteQbitMetadataConsumer> logger)
{
try
{
@@ -70,8 +79,9 @@ public static class QbitMetaToTorrentMeta
return files;
}
catch (Exception)
catch (Exception ex)
{
logger.LogWarning("Failed to map metadata to subtitles collection: {Exception}", ex.Message);
return [];
}
}

View File

@@ -53,6 +53,12 @@ public class QbitMetadataSagaStateMachine : MassTransitStateMachine<QbitMetadata
.Then(
context =>
{
if (!context.Message.WithFiles)
{
logger.LogInformation("No files written for torrent {InfoHash} in Saga {SagaId}", context.Saga.Torrent.InfoHash, context.Saga.CorrelationId);
return;
}
logger.LogInformation("Metadata Written for torrent {InfoHash} in Saga {SagaId}", context.Saga.Torrent.InfoHash, context.Saga.CorrelationId);
})
.TransitionTo(Completed)

View File

@@ -1,22 +1,24 @@
namespace QBitCollector.Features.Worker;
[EntityName("perform-metadata-request")]
[EntityName("perform-metadata-request-qbit-collector")]
public record PerformQbitMetadataRequest(Guid CorrelationId, string InfoHash) : CorrelatedBy<Guid>;
[EntityName("torrent-metadata-response")]
[EntityName("torrent-metadata-response-qbit-collector")]
public record GotQbitMetadata(QBitMetadataResponse Metadata) : CorrelatedBy<Guid>
{
public Guid CorrelationId { get; init; } = Metadata.CorrelationId;
}
[EntityName("write-metadata")]
[EntityName("write-metadata-qbit-collector")]
public record WriteQbitMetadata(Torrent Torrent, QBitMetadataResponse Metadata, string ImdbId) : CorrelatedBy<Guid>
{
public Guid CorrelationId { get; init; } = Metadata.CorrelationId;
}
[EntityName("metadata-written")]
public record QbitMetadataWritten(QBitMetadataResponse Metadata) : CorrelatedBy<Guid>
[EntityName("metadata-written-qbit-collector")]
public record QbitMetadataWritten(QBitMetadataResponse Metadata, bool WithFiles) : CorrelatedBy<Guid>
{
public Guid CorrelationId { get; init; } = Metadata.CorrelationId;
public QBitMetadataResponse Metadata { get; init; } = Metadata;
}

View File

@@ -1,25 +1,30 @@
namespace QBitCollector.Features.Worker;
public class WriteQbitMetadataConsumer(IParseTorrentTitle parseTorrentTitle, IDataStorage dataStorage) : IConsumer<WriteQbitMetadata>
public class WriteQbitMetadataConsumer(IRankTorrentName rankTorrentName, IDataStorage dataStorage, ILogger<WriteQbitMetadataConsumer> logger) : IConsumer<WriteQbitMetadata>
{
public async Task Consume(ConsumeContext<WriteQbitMetadata> context)
{
var request = context.Message;
var torrentFiles = QbitMetaToTorrentMeta.MapMetadataToFilesCollection(parseTorrentTitle, request.Torrent, request.ImdbId, request.Metadata.Metadata);
if (torrentFiles.Any())
var torrentFiles = QbitMetaToTorrentMeta.MapMetadataToFilesCollection(
rankTorrentName, request.Torrent, request.ImdbId, request.Metadata.Metadata, logger);
if (!torrentFiles.Any())
{
await dataStorage.InsertFiles(torrentFiles);
var subtitles = await QbitMetaToTorrentMeta.MapMetadataToSubtitlesCollection(dataStorage, request.Torrent.InfoHash, request.Metadata.Metadata);
if (subtitles.Any())
{
await dataStorage.InsertSubtitles(subtitles);
}
await context.Publish(new QbitMetadataWritten(request.Metadata, false));
return;
}
await context.Publish(new QbitMetadataWritten(request.Metadata));
await dataStorage.InsertFiles(torrentFiles);
var subtitles = await QbitMetaToTorrentMeta.MapMetadataToSubtitlesCollection(
dataStorage, request.Torrent.InfoHash, request.Metadata.Metadata, logger);
if (subtitles.Any())
{
await dataStorage.InsertSubtitles(subtitles);
}
await context.Publish(new QbitMetadataWritten(request.Metadata, true));
}
}

View File

@@ -1,17 +1,11 @@
// Global using directives
global using System.Text.Json;
global using System.Text.Json.Serialization;
global using System.Threading.Channels;
global using MassTransit;
global using MassTransit.Mediator;
global using Microsoft.AspNetCore.Builder;
global using Microsoft.Extensions.Caching.Distributed;
global using Microsoft.Extensions.Caching.Memory;
global using Microsoft.Extensions.DependencyInjection;
global using Polly;
global using Polly.Extensions.Http;
global using PromKnight.ParseTorrentTitle;
global using QBitCollector.Extensions;
global using QBitCollector.Features.Qbit;
global using QBitCollector.Features.Trackers;
@@ -21,4 +15,6 @@ global using SharedContracts.Configuration;
global using SharedContracts.Dapper;
global using SharedContracts.Extensions;
global using SharedContracts.Models;
global using SharedContracts.Python;
global using SharedContracts.Python.RTN;
global using SharedContracts.Requests;

View File

@@ -18,7 +18,6 @@
<PackageReference Include="Microsoft.Extensions.Http" Version="8.0.0" />
<PackageReference Include="Microsoft.Extensions.Http.Polly" Version="8.0.3" />
<PackageReference Include="Polly" Version="8.3.1" />
<PackageReference Include="PromKnight.ParseTorrentTitle" Version="1.0.4" />
<PackageReference Include="QBittorrent.Client" Version="1.9.23349.1" />
<PackageReference Include="Serilog" Version="3.1.1" />
<PackageReference Include="Serilog.AspNetCore" Version="8.0.1" />
@@ -31,10 +30,30 @@
<None Include="Configuration\logging.json">
<CopyToOutputDirectory>Always</CopyToOutputDirectory>
</None>
<Content Remove="eng\**" />
<None Remove="eng\**" />
<None Update="requirements.txt">
<CopyToOutputDirectory>Always</CopyToOutputDirectory>
</None>
</ItemGroup>
<ItemGroup>
<ProjectReference Include="..\shared\SharedContracts.csproj" />
</ItemGroup>
<ItemGroup Condition="'$(Configuration)' == 'Debug'">
<Content Remove="python\**" />
<None Include="python\**">
<CopyToOutputDirectory>Always</CopyToOutputDirectory>
</None>
</ItemGroup>
<ItemGroup>
<Compile Remove="eng\**" />
</ItemGroup>
<ItemGroup>
<EmbeddedResource Remove="eng\**" />
</ItemGroup>
</Project>

View File

@@ -6,6 +6,12 @@ Project("{2150E333-8FDC-42A3-9474-1A3956D46DE8}") = "shared", "shared", "{2C0A0F
EndProject
Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "QBitCollector", "QBitCollector.csproj", "{1EF124BE-6EBE-4D9E-846C-FFF814999F3B}"
EndProject
Project("{2150E333-8FDC-42A3-9474-1A3956D46DE8}") = "eng", "eng", "{2F2EA33A-1303-405D-939B-E9394D262BC9}"
ProjectSection(SolutionItems) = preProject
eng\install-python-reqs.ps1 = eng\install-python-reqs.ps1
eng\install-python-reqs.sh = eng\install-python-reqs.sh
EndProjectSection
EndProject
Global
GlobalSection(SolutionConfigurationPlatforms) = preSolution
Debug|Any CPU = Debug|Any CPU

View File

@@ -0,0 +1,3 @@
Remove-Item -Recurse -Force ../python
mkdir -p ../python
python -m pip install -r ../requirements.txt -t ../python/

View File

@@ -0,0 +1,5 @@
#!/bin/bash
rm -rf ../python
mkdir -p ../python
python3 -m pip install -r ../requirements.txt -t ../python/

View File

@@ -0,0 +1 @@
rank-torrent-name==0.2.5

View File

@@ -118,7 +118,7 @@ public class DapperDataStorage(PostgresConfiguration configuration, RabbitMqConf
public async Task<ImdbEntry?> FindImdbMetadata(string? parsedTorrentTitle, string torrentType, int? year, CancellationToken cancellationToken = default) =>
await ExecuteCommandAsync(async connection =>
{
var query = $"select \"imdb_id\" as \"ImdbId\", \"title\" as \"Title\", \"year\" as \"Year\", \"score\" as Score from search_imdb_meta('{parsedTorrentTitle.Replace("'", "").Replace("\"", "")}', '{(torrentType.Equals("movie", StringComparison.OrdinalIgnoreCase) ? "movie" : "tvSeries")}'";
var query = $"select \"imdb_id\" as \"ImdbId\", \"title\" as \"Title\", \"year\" as \"Year\", \"score\" as Score, \"category\" as Category from search_imdb_meta('{parsedTorrentTitle.Replace("'", "").Replace("\"", "")}', '{torrentType}'";
query += year is not null ? $", {year}" : ", NULL";
query += ", 1)";
@@ -167,12 +167,7 @@ public class DapperDataStorage(PostgresConfiguration configuration, RabbitMqConf
INSERT INTO subtitles
("infoHash", "fileIndex", "fileId", "title")
VALUES
(@InfoHash, @FileIndex, @FileId, @Title)
ON CONFLICT
("infoHash", "fileIndex")
DO UPDATE SET
"fileId" = COALESCE(subtitles."fileId", EXCLUDED."fileId"),
"title" = COALESCE(subtitles."title", EXCLUDED."title");
(@InfoHash, @FileIndex, @FileId, @Title);
""";
await connection.ExecuteAsync(query, subtitles);
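
The FindImdbMetadata query above still interpolates the parsed title into the SQL and strips quotes by hand. A hedged sketch of the same lookup with Dapper parameters instead (assuming the column aliases line up with the repo's ImdbEntry model):

const string sql = """
    SELECT "imdb_id" AS "ImdbId", "title" AS "Title", "year" AS "Year",
           "score" AS "Score", "category" AS "Category"
    FROM search_imdb_meta(@Title, @Category, @Year, 1);
    """;

// Dapper binds @Title/@Category/@Year server-side, so no manual escaping is needed;
// the similarity threshold keeps its SQL-side default.
var entry = await connection.QueryFirstOrDefaultAsync<ImdbEntry>(
    sql, new { Title = parsedTorrentTitle, Category = torrentType, Year = year });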

View File

@@ -1,5 +1,3 @@
using System.Text.RegularExpressions;
namespace SharedContracts.Extensions;
public static partial class StringExtensions

View File

@@ -2,6 +2,7 @@
global using System.Text.Json;
global using System.Text.Json.Serialization;
global using System.Text.RegularExpressions;
global using Dapper;
global using MassTransit;
global using Microsoft.AspNetCore.Builder;

View File

@@ -2,5 +2,5 @@ namespace SharedContracts.Python.RTN;
public interface IRankTorrentName
{
ParseTorrentTitleResponse Parse(string title);
ParseTorrentTitleResponse Parse(string title, bool trashGarbage = true);
}

View File

@@ -13,15 +13,14 @@ public class RankTorrentName : IRankTorrentName
InitModules();
}
public ParseTorrentTitleResponse Parse(string title) =>
public ParseTorrentTitleResponse Parse(string title, bool trashGarbage = true) =>
_pythonEngineService.ExecutePythonOperationWithDefault(
() =>
{
var result = _rtn?.parse(title);
var result = _rtn?.parse(title, trashGarbage);
return ParseResult(result);
}, new ParseTorrentTitleResponse(false, null), nameof(Parse), throwOnErrors: false, logErrors: false);
private static ParseTorrentTitleResponse ParseResult(dynamic result)
{
if (result == null)
@@ -35,9 +34,18 @@ public class RankTorrentName : IRankTorrentName
{
return new(false, null);
}
var mediaType = result.GetAttr("type")?.As<string>();
if (string.IsNullOrEmpty(mediaType))
{
return new(false, null);
}
var response = JsonSerializer.Deserialize<RtnResponse>(json);
response.IsMovie = mediaType.Equals("movie", StringComparison.OrdinalIgnoreCase);
return new(true, response);
}
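
For readers unfamiliar with the interop above: _rtn is a Python object reached through Python.NET, and ParseResult reads attributes off the returned PyObject. A minimal sketch of that call path, assuming the RTN package's module-level parse and a pydantic model_dump_json() on its result (the repo's PythonEngineService wraps this with retries and defaults):

using Python.Runtime;

ParseTorrentTitleResponse ParseViaRtn(string title, bool trashGarbage)
{
    using (Py.GIL()) // every CPython call must hold the interpreter lock
    {
        dynamic rtn = Py.Import("RTN");
        PyObject result = rtn.parse(title, trashGarbage);

        // Serialize the pydantic model and map it onto RtnResponse.
        var json = result.InvokeMethod("model_dump_json").As<string>();
        var response = JsonSerializer.Deserialize<RtnResponse>(json);
        return new ParseTorrentTitleResponse(response is not null, response);
    }
}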

View File

@@ -76,8 +76,8 @@ public class RtnResponse
[JsonPropertyName("extended")]
public bool Extended { get; set; }
public bool IsMovie => Season == null && Episode == null;
public bool IsMovie { get; set; }
public string ToJson() => this.AsJson();
}

View File

@@ -82,11 +82,4 @@ public static class ServiceCollectionExtensions
x.AddConsumer<PerformIngestionConsumer>();
}
internal static IServiceCollection AddServiceConfiguration(this IServiceCollection services)
{
services.AddSingleton<IParseTorrentTitle, ParseTorrentTitle>();
return services;
}
}

View File

@@ -5,7 +5,6 @@ global using MassTransit;
global using MassTransit.Mediator;
global using Microsoft.AspNetCore.Builder;
global using Microsoft.Extensions.DependencyInjection;
global using PromKnight.ParseTorrentTitle;
global using SharedContracts.Configuration;
global using SharedContracts.Dapper;
global using SharedContracts.Extensions;

View File

@@ -10,7 +10,6 @@ builder.Host
builder.Services
.RegisterMassTransit()
.AddServiceConfiguration()
.AddDatabase();
var app = builder.Build();

View File

@@ -16,7 +16,6 @@
<PackageReference Include="Microsoft.Extensions.Http" Version="8.0.0" />
<PackageReference Include="Microsoft.Extensions.Http.Polly" Version="8.0.3" />
<PackageReference Include="Polly" Version="8.3.1" />
<PackageReference Include="PromKnight.ParseTorrentTitle" Version="1.0.4" />
<PackageReference Include="Serilog" Version="3.1.1" />
<PackageReference Include="Serilog.AspNetCore" Version="8.0.1" />
<PackageReference Include="Serilog.Sinks.Console" Version="5.0.1" />