Pull changes from Torrentio

purple_emily
2024-04-01 17:20:33 +01:00
parent 684dbba2f0
commit 2e65ff9276
8 changed files with 1119 additions and 1119 deletions

View File

@@ -27,7 +27,7 @@ export async function getCachedStreams(streams, apiKey) {
const fileName = streamTitleParts[streamTitleParts.length - 1];
const fileIndex = streamTitleParts.length === 2 ? stream.fileIdx : null;
const encodedFileName = encodeURIComponent(fileName);
-mochStreams[stream.infoHash] = {
+mochStreams[`${stream.infoHash}@${stream.fileIdx}`] = {
url: `${apiKey}/${stream.infoHash}/${encodedFileName}/${fileIndex}`,
cached: cachedEntry?.instant
}
@@ -142,10 +142,10 @@ async function _createTorrent(AD, infoHash) {
async function _unrestrictLink(AD, torrent, encodedFileName, fileIndex) {
const targetFileName = decodeURIComponent(encodedFileName);
-const videos = torrent.links.filter(link => isVideo(link.filename));
+const videos = torrent.links.filter(link => isVideo(link.filename)).sort((a, b) => b.size - a.size);
const targetVideo = Number.isInteger(fileIndex)
? videos.find(video => sameFilename(targetFileName, video.filename))
-: videos.sort((a, b) => b.size - a.size)[0];
+: videos[0];
if (!targetVideo && torrent.links.every(link => isArchive(link.filename))) {
console.log(`Only AllDebrid archive is available for [${torrent.hash}] ${encodedFileName}`)

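Note, not part of the diff: across the moch providers, the cached-streams map is now keyed by infoHash plus file index, so two streams from the same torrent pack (e.g. different episodes) no longer collapse onto one entry, and the video list is sorted by size once, up front. A minimal sketch of the new key, assuming stream objects shaped like the { infoHash, fileIdx } used above:

// Sketch only: the composite cache key introduced above.
// Assumes stream objects shaped like { infoHash, fileIdx }.
function mochKey(stream) {
  // fileIdx can be undefined for single-file torrents; the template literal then
  // yields "<hash>@undefined", which is still a stable, distinct key.
  return `${stream.infoHash}@${stream.fileIdx}`;
}

const episodeA = { infoHash: 'abc123', fileIdx: 0 };
const episodeB = { infoHash: 'abc123', fileIdx: 5 }; // same torrent pack, different file

console.log(mochKey(episodeA)); // 'abc123@0'
console.log(mochKey(episodeB)); // 'abc123@5' — previously both resolved to 'abc123'
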
View File

@@ -25,7 +25,7 @@ export async function getCachedStreams(streams, apiKey) {
return available && streams
.reduce((mochStreams, stream) => {
const cachedEntry = available[stream.infoHash];
-mochStreams[stream.infoHash] = {
+mochStreams[`${stream.infoHash}@${stream.fileIdx}`] = {
url: `${apiKey}/${stream.infoHash}/null/${stream.fileIdx}`,
cached: !!cachedEntry
};

View File

@@ -173,7 +173,7 @@ function processMochResults(streams, config, results) {
function populateCachedLinks(streams, mochResult, config) {
return streams.map(stream => {
-const cachedEntry = stream.infoHash && mochResult.mochStreams[stream.infoHash];
+const cachedEntry = stream.infoHash && mochResult.mochStreams[`${stream.infoHash}@${stream.fileIdx}`];
if (cachedEntry?.cached) {
return {
name: `[${mochResult.moch.shortName}+] ${stream.name}`,
@@ -190,7 +190,7 @@ function populateDownloadLinks(streams, mochResults, config) {
const torrentStreams = streams.filter(stream => stream.infoHash);
const seededStreams = streams.filter(stream => !stream.title.includes('👤 0'));
torrentStreams.forEach(stream => mochResults.forEach(mochResult => {
-const cachedEntry = mochResult.mochStreams[stream.infoHash];
+const cachedEntry = mochResult.mochStreams[`${stream.infoHash}@${stream.fileIdx}`];
const isCached = cachedEntry?.cached;
if (!isCached && isHealthyStreamForDebrid(seededStreams, stream)) {
streams.push({

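Note, not part of the diff: the two lookups above have to build exactly the same key that each provider's getCachedStreams writes; with the old infoHash-only lookup against the new map, every stream would appear uncached. A small sketch of the mismatch, with invented values:

// Sketch only: why the lookup key must mirror the one written by getCachedStreams.
const mochStreams = { 'abc123@5': { cached: true } }; // written with the new key format
const stream = { infoHash: 'abc123', fileIdx: 5 };

const oldLookup = mochStreams[stream.infoHash];                        // undefined
const newLookup = mochStreams[`${stream.infoHash}@${stream.fileIdx}`]; // { cached: true }
console.log(!!oldLookup?.cached, !!newLookup?.cached);                 // false true
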
View File

@@ -27,10 +27,9 @@ export async function getCachedStreams(streams, apiKey) {
const isCached = available.includes(stream.infoHash);
const streamTitleParts = stream.title.replace(/\n👤.*/s, '').split('\n');
const fileName = streamTitleParts[streamTitleParts.length - 1];
-const fileIndex = streamTitleParts.length === 2 ? stream.fileIdx : null;
const encodedFileName = encodeURIComponent(fileName);
-mochStreams[stream.infoHash] = {
-url: `${apiKey}/${stream.infoHash}/${encodedFileName}/${fileIndex}`,
+mochStreams[`${stream.infoHash}@${stream.fileIdx}`] = {
+url: `${apiKey}/${stream.infoHash}/${encodedFileName}/${stream.fileIdx}`,
cached: isCached
};
return mochStreams;
@@ -137,7 +136,8 @@ async function _retryCreateTorrent(OC, infoHash, cachedEntryInfo, fileIndex) {
async function _unrestrictLink(OC, infoHash, torrent, cachedEntryInfo, fileIndex) {
const targetFileName = decodeURIComponent(cachedEntryInfo);
const files = await _getFileUrls(OC, torrent)
-const targetFile = files.find(file => sameFilename(targetFileName, file.split('/').pop()))
+const targetFile = files.find(file => file.includes(`/${torrent.requestId}/${fileIndex}/`))
+|| files.find(file => sameFilename(targetFileName, file.split('/').pop()))
|| files.find(file => isVideo(file))
|| files.pop();

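Note, not part of the diff: the Offcloud change above drops the locally computed fileIndex and resolves the target file through a chain of fallbacks: first a path-segment match on the requested file index, then a filename match, then any video, then the last file. A sketch of that chain with made-up URLs and simplified stand-ins for the isVideo/sameFilename helpers:

// Sketch only: the fallback order used when picking an Offcloud file URL.
// URLs are invented; isVideo/sameFilename are simplified stand-ins.
const torrent = { requestId: 'req42' };
const fileIndex = 3;
const targetFileName = 'Show.S01E03.mkv';
const files = [
  'https://cdn.example/req42/0/Show.S01E01.mkv',
  'https://cdn.example/req42/3/Show.S01E03.mkv'
];
const isVideo = file => /\.(mkv|mp4|avi)$/i.test(file);
const sameFilename = (a, b) => a.toLowerCase() === b.toLowerCase();

const targetFile = files.find(file => file.includes(`/${torrent.requestId}/${fileIndex}/`))
    || files.find(file => sameFilename(targetFileName, file.split('/').pop()))
    || files.find(file => isVideo(file))
    || files.pop();
console.log(targetFile); // the /req42/3/ entry wins via the new index-based match
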
View File

@@ -124,10 +124,10 @@ async function _getCachedLink(PM, infoHash, encodedFileName, fileIndex, ip, isBr
const cachedTorrent = await PM.transfer.directDownload(magnet.encode({ infoHash }), ip);
if (cachedTorrent?.content?.length) {
const targetFileName = decodeURIComponent(encodedFileName);
-const videos = cachedTorrent.content.filter(file => isVideo(file.path));
+const videos = cachedTorrent.content.filter(file => isVideo(file.path)).sort((a, b) => b.size - a.size);
const targetVideo = Number.isInteger(fileIndex)
? videos.find(video => sameFilename(video.path, targetFileName))
-: videos.sort((a, b) => b.size - a.size)[0];
+: videos[0];
if (!targetVideo && videos.every(video => isArchive(video.path))) {
console.log(`Only Premiumize archive is available for [${infoHash}] ${fileIndex}`)
return StaticResponse.FAILED_RAR;

View File

@@ -17,7 +17,7 @@ export async function getCachedStreams(streams, apiKey) {
const fileName = streamTitleParts[streamTitleParts.length - 1];
const fileIndex = streamTitleParts.length === 2 ? stream.fileIdx : null;
const encodedFileName = encodeURIComponent(fileName);
-mochStreams[stream.infoHash] = {
+mochStreams[`${stream.infoHash}@${stream.fileIdx}`] = {
url: `${apiKey}/${stream.infoHash}/${encodedFileName}/${fileIndex}`,
cached: false
};
@@ -168,12 +168,12 @@ async function _getTargetFile(Putio, torrent, encodedFileName, fileIndex) {
while (!targetFile && files.length) {
const folders = files.filter(file => file.file_type === 'FOLDER');
-videos = videos.concat(files.filter(file => isVideo(file.name)));
+videos = videos.concat(files.filter(file => isVideo(file.name))).sort((a, b) => b.size - a.size);
// when specific file index is defined search by filename
// when it's not defined find all videos and take the largest one
targetFile = Number.isInteger(fileIndex)
? videos.find(video => sameFilename(targetFileName, video.name))
-: !folders.length && videos.sort((a, b) => b.size - a.size)[0];
+: !folders.length && videos[0];
files = !targetFile
? await Promise.all(folders.map(folder => _getFiles(Putio, folder.id)))
.then(results => results.reduce((a, b) => a.concat(b), []))

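Note, not part of the diff: in the Put.io loop above, the collected videos are now kept sorted by size as each folder level is gathered, so the "take the largest video" fallback still holds while the folder tree is traversed. A sketch of that concat-then-sort pattern with invented file listings:

// Sketch only: keeping the collected videos sorted by size while walking nested folders.
let videos = [];
const folderListings = [
  [{ name: 'sample.mkv', size: 50 }],
  [{ name: 'episode.mkv', size: 4000 }, { name: 'extras.mkv', size: 700 }]
];
for (const files of folderListings) {
  videos = videos.concat(files.filter(file => file.name.endsWith('.mkv')))
      .sort((a, b) => b.size - a.size);
}
console.log(videos[0].name); // 'episode.mkv' — the largest video across all folders
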
View File

@@ -21,7 +21,7 @@ export async function getCachedStreams(streams, apiKey) {
.reduce((mochStreams, stream) => {
const cachedEntry = available[stream.infoHash];
const cachedIds = _getCachedFileIds(stream.fileIdx, cachedEntry);
-mochStreams[stream.infoHash] = {
+mochStreams[`${stream.infoHash}@${stream.fileIdx}`] = {
url: `${apiKey}/${stream.infoHash}/null/${stream.fileIdx}`,
cached: !!cachedIds.length
};
@@ -395,5 +395,5 @@ function infringingFile(error) {
}
async function getDefaultOptions(ip) {
-return { ip, timeout: 10000 };
+return { ip, timeout: 15000 };
}